code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
from __future__ import annotations
import pytest
from pytest import param
import ibis
import ibis.expr.datatypes as dt
from ibis.backends.base.sql.alchemy.geospatial import geospatial_supported
# Mapping of SQL Server column-type DDL strings to the ibis dtype the
# backend is expected to infer for a column of that type.
DB_TYPES = [
    # Exact numbers
    ("BIGINT", dt.int64),
    ("BIT", dt.boolean),
    ("DECIMAL", dt.Decimal(precision=18, scale=0)),
    ("DECIMAL(5, 2)", dt.Decimal(precision=5, scale=2)),
    ("INT", dt.int32),
    ("MONEY", dt.int64),
    ("NUMERIC", dt.Decimal(18, 0)),
    ("NUMERIC(10,5)", dt.Decimal(10, 5)),
    ("NUMERIC(14,3)", dt.Decimal(14, 3)),
    ("SMALLINT", dt.int16),
    ("SMALLMONEY", dt.int32),
    ("TINYINT", dt.int8),
    # Approximate numerics
    ("REAL", dt.float32),
    ("FLOAT", dt.float64),
    ("FLOAT(3)", dt.float32),
    ("FLOAT(25)", dt.float64),
    # Date and time
    ("DATE", dt.date),
    ("TIME", dt.time),
    ("DATETIME2", dt.timestamp(scale=7)),
    ("DATETIMEOFFSET", dt.timestamp(scale=7, timezone="UTC")),
    ("SMALLDATETIME", dt.timestamp),
    ("DATETIME", dt.timestamp),
    # Characters strings
    ("CHAR", dt.string),
    ("TEXT", dt.string),
    ("VARCHAR", dt.string),
    # Unicode character strings
    ("NCHAR", dt.string),
    ("NTEXT", dt.string),
    ("NVARCHAR", dt.string),
    # Binary strings
    ("BINARY", dt.binary),
    ("VARBINARY", dt.binary),
    ("IMAGE", dt.binary),
    # Other data types
    ("UNIQUEIDENTIFIER", dt.uuid),
    # SQL Server TIMESTAMP is a non-null rowversion (binary), not a datetime.
    ("TIMESTAMP", dt.binary(nullable=False)),
]
# Skip geospatial parametrizations when the optional geo dependencies
# are not installed.
skipif_no_geospatial_deps = pytest.mark.skipif(
    not geospatial_supported, reason="geospatial dependencies not installed"
)
# xfail marker: sqlalchemy autoload does not reflect the scale for these types.
broken_sqlalchemy_autoload = pytest.mark.xfail(
    reason="scale not inferred by sqlalchemy autoload"
)
@pytest.mark.parametrize(
    ("server_type", "expected_type"),
    DB_TYPES
    + [
        param("GEOMETRY", dt.geometry, marks=[skipif_no_geospatial_deps]),
        param("GEOGRAPHY", dt.geography, marks=[skipif_no_geospatial_deps]),
    ]
    + [
        param(
            "DATETIME2(4)", dt.timestamp(scale=4), marks=[broken_sqlalchemy_autoload]
        ),
        param(
            "DATETIMEOFFSET(5)",
            dt.timestamp(scale=5, timezone="UTC"),
            marks=[broken_sqlalchemy_autoload],
        ),
    ],
    ids=str,
)
def test_get_schema_from_query(con, server_type, expected_type, temp_table):
    """Round-trip a SQL Server column type through schema inference.

    Creates a one-column table whose column has ``server_type`` and checks
    that both query-based schema inference and direct table reflection
    report ``expected_type`` for it.
    """
    # Fix: the original assigned expected_schema twice (before and after the
    # DDL); the duplicate assignment was redundant and has been removed.
    expected_schema = ibis.schema(dict(x=expected_type))
    with con.begin() as c:
        c.exec_driver_sql(f"CREATE TABLE [{temp_table}] (x {server_type})")
    # Schema inferred from an ad hoc query over the new table.
    result_schema = con._get_schema_using_query(f"SELECT * FROM [{temp_table}]")
    assert result_schema == expected_schema
    # Schema obtained by reflecting the table directly must agree.
    t = con.table(temp_table)
    assert t.schema() == expected_schema
|
normal
|
{
"blob_id": "00e9872136e5753364117adbf60793e660c8bef0",
"index": 485,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@pytest.mark.parametrize(('server_type', 'expected_type'), DB_TYPES + [\n param('GEOMETRY', dt.geometry, marks=[skipif_no_geospatial_deps]),\n param('GEOGRAPHY', dt.geography, marks=[skipif_no_geospatial_deps])] +\n [param('DATETIME2(4)', dt.timestamp(scale=4), marks=[\n broken_sqlalchemy_autoload]), param('DATETIMEOFFSET(5)', dt.timestamp(\n scale=5, timezone='UTC'), marks=[broken_sqlalchemy_autoload])], ids=str)\ndef test_get_schema_from_query(con, server_type, expected_type, temp_table):\n expected_schema = ibis.schema(dict(x=expected_type))\n with con.begin() as c:\n c.exec_driver_sql(f'CREATE TABLE [{temp_table}] (x {server_type})')\n expected_schema = ibis.schema(dict(x=expected_type))\n result_schema = con._get_schema_using_query(f'SELECT * FROM [{temp_table}]'\n )\n assert result_schema == expected_schema\n t = con.table(temp_table)\n assert t.schema() == expected_schema\n",
"step-3": "<mask token>\nDB_TYPES = [('BIGINT', dt.int64), ('BIT', dt.boolean), ('DECIMAL', dt.\n Decimal(precision=18, scale=0)), ('DECIMAL(5, 2)', dt.Decimal(precision\n =5, scale=2)), ('INT', dt.int32), ('MONEY', dt.int64), ('NUMERIC', dt.\n Decimal(18, 0)), ('NUMERIC(10,5)', dt.Decimal(10, 5)), ('NUMERIC(14,3)',\n dt.Decimal(14, 3)), ('SMALLINT', dt.int16), ('SMALLMONEY', dt.int32), (\n 'TINYINT', dt.int8), ('REAL', dt.float32), ('FLOAT', dt.float64), (\n 'FLOAT(3)', dt.float32), ('FLOAT(25)', dt.float64), ('DATE', dt.date),\n ('TIME', dt.time), ('DATETIME2', dt.timestamp(scale=7)), (\n 'DATETIMEOFFSET', dt.timestamp(scale=7, timezone='UTC')), (\n 'SMALLDATETIME', dt.timestamp), ('DATETIME', dt.timestamp), ('CHAR', dt\n .string), ('TEXT', dt.string), ('VARCHAR', dt.string), ('NCHAR', dt.\n string), ('NTEXT', dt.string), ('NVARCHAR', dt.string), ('BINARY', dt.\n binary), ('VARBINARY', dt.binary), ('IMAGE', dt.binary), (\n 'UNIQUEIDENTIFIER', dt.uuid), ('TIMESTAMP', dt.binary(nullable=False))]\nskipif_no_geospatial_deps = pytest.mark.skipif(not geospatial_supported,\n reason='geospatial dependencies not installed')\nbroken_sqlalchemy_autoload = pytest.mark.xfail(reason=\n 'scale not inferred by sqlalchemy autoload')\n\n\n@pytest.mark.parametrize(('server_type', 'expected_type'), DB_TYPES + [\n param('GEOMETRY', dt.geometry, marks=[skipif_no_geospatial_deps]),\n param('GEOGRAPHY', dt.geography, marks=[skipif_no_geospatial_deps])] +\n [param('DATETIME2(4)', dt.timestamp(scale=4), marks=[\n broken_sqlalchemy_autoload]), param('DATETIMEOFFSET(5)', dt.timestamp(\n scale=5, timezone='UTC'), marks=[broken_sqlalchemy_autoload])], ids=str)\ndef test_get_schema_from_query(con, server_type, expected_type, temp_table):\n expected_schema = ibis.schema(dict(x=expected_type))\n with con.begin() as c:\n c.exec_driver_sql(f'CREATE TABLE [{temp_table}] (x {server_type})')\n expected_schema = ibis.schema(dict(x=expected_type))\n result_schema = con._get_schema_using_query(f'SELECT 
* FROM [{temp_table}]'\n )\n assert result_schema == expected_schema\n t = con.table(temp_table)\n assert t.schema() == expected_schema\n",
"step-4": "from __future__ import annotations\nimport pytest\nfrom pytest import param\nimport ibis\nimport ibis.expr.datatypes as dt\nfrom ibis.backends.base.sql.alchemy.geospatial import geospatial_supported\nDB_TYPES = [('BIGINT', dt.int64), ('BIT', dt.boolean), ('DECIMAL', dt.\n Decimal(precision=18, scale=0)), ('DECIMAL(5, 2)', dt.Decimal(precision\n =5, scale=2)), ('INT', dt.int32), ('MONEY', dt.int64), ('NUMERIC', dt.\n Decimal(18, 0)), ('NUMERIC(10,5)', dt.Decimal(10, 5)), ('NUMERIC(14,3)',\n dt.Decimal(14, 3)), ('SMALLINT', dt.int16), ('SMALLMONEY', dt.int32), (\n 'TINYINT', dt.int8), ('REAL', dt.float32), ('FLOAT', dt.float64), (\n 'FLOAT(3)', dt.float32), ('FLOAT(25)', dt.float64), ('DATE', dt.date),\n ('TIME', dt.time), ('DATETIME2', dt.timestamp(scale=7)), (\n 'DATETIMEOFFSET', dt.timestamp(scale=7, timezone='UTC')), (\n 'SMALLDATETIME', dt.timestamp), ('DATETIME', dt.timestamp), ('CHAR', dt\n .string), ('TEXT', dt.string), ('VARCHAR', dt.string), ('NCHAR', dt.\n string), ('NTEXT', dt.string), ('NVARCHAR', dt.string), ('BINARY', dt.\n binary), ('VARBINARY', dt.binary), ('IMAGE', dt.binary), (\n 'UNIQUEIDENTIFIER', dt.uuid), ('TIMESTAMP', dt.binary(nullable=False))]\nskipif_no_geospatial_deps = pytest.mark.skipif(not geospatial_supported,\n reason='geospatial dependencies not installed')\nbroken_sqlalchemy_autoload = pytest.mark.xfail(reason=\n 'scale not inferred by sqlalchemy autoload')\n\n\n@pytest.mark.parametrize(('server_type', 'expected_type'), DB_TYPES + [\n param('GEOMETRY', dt.geometry, marks=[skipif_no_geospatial_deps]),\n param('GEOGRAPHY', dt.geography, marks=[skipif_no_geospatial_deps])] +\n [param('DATETIME2(4)', dt.timestamp(scale=4), marks=[\n broken_sqlalchemy_autoload]), param('DATETIMEOFFSET(5)', dt.timestamp(\n scale=5, timezone='UTC'), marks=[broken_sqlalchemy_autoload])], ids=str)\ndef test_get_schema_from_query(con, server_type, expected_type, temp_table):\n expected_schema = ibis.schema(dict(x=expected_type))\n with con.begin() 
as c:\n c.exec_driver_sql(f'CREATE TABLE [{temp_table}] (x {server_type})')\n expected_schema = ibis.schema(dict(x=expected_type))\n result_schema = con._get_schema_using_query(f'SELECT * FROM [{temp_table}]'\n )\n assert result_schema == expected_schema\n t = con.table(temp_table)\n assert t.schema() == expected_schema\n",
"step-5": "from __future__ import annotations\n\nimport pytest\nfrom pytest import param\n\nimport ibis\nimport ibis.expr.datatypes as dt\nfrom ibis.backends.base.sql.alchemy.geospatial import geospatial_supported\n\nDB_TYPES = [\n # Exact numbers\n (\"BIGINT\", dt.int64),\n (\"BIT\", dt.boolean),\n (\"DECIMAL\", dt.Decimal(precision=18, scale=0)),\n (\"DECIMAL(5, 2)\", dt.Decimal(precision=5, scale=2)),\n (\"INT\", dt.int32),\n (\"MONEY\", dt.int64),\n (\"NUMERIC\", dt.Decimal(18, 0)),\n (\"NUMERIC(10,5)\", dt.Decimal(10, 5)),\n (\"NUMERIC(14,3)\", dt.Decimal(14, 3)),\n (\"SMALLINT\", dt.int16),\n (\"SMALLMONEY\", dt.int32),\n (\"TINYINT\", dt.int8),\n # Approximate numerics\n (\"REAL\", dt.float32),\n (\"FLOAT\", dt.float64),\n (\"FLOAT(3)\", dt.float32),\n (\"FLOAT(25)\", dt.float64),\n # Date and time\n (\"DATE\", dt.date),\n (\"TIME\", dt.time),\n (\"DATETIME2\", dt.timestamp(scale=7)),\n (\"DATETIMEOFFSET\", dt.timestamp(scale=7, timezone=\"UTC\")),\n (\"SMALLDATETIME\", dt.timestamp),\n (\"DATETIME\", dt.timestamp),\n # Characters strings\n (\"CHAR\", dt.string),\n (\"TEXT\", dt.string),\n (\"VARCHAR\", dt.string),\n # Unicode character strings\n (\"NCHAR\", dt.string),\n (\"NTEXT\", dt.string),\n (\"NVARCHAR\", dt.string),\n # Binary strings\n (\"BINARY\", dt.binary),\n (\"VARBINARY\", dt.binary),\n (\"IMAGE\", dt.binary),\n # Other data types\n (\"UNIQUEIDENTIFIER\", dt.uuid),\n (\"TIMESTAMP\", dt.binary(nullable=False)),\n]\n\n\nskipif_no_geospatial_deps = pytest.mark.skipif(\n not geospatial_supported, reason=\"geospatial dependencies not installed\"\n)\n\nbroken_sqlalchemy_autoload = pytest.mark.xfail(\n reason=\"scale not inferred by sqlalchemy autoload\"\n)\n\n\n@pytest.mark.parametrize(\n (\"server_type\", \"expected_type\"),\n DB_TYPES\n + [\n param(\"GEOMETRY\", dt.geometry, marks=[skipif_no_geospatial_deps]),\n param(\"GEOGRAPHY\", dt.geography, marks=[skipif_no_geospatial_deps]),\n ]\n + [\n param(\n \"DATETIME2(4)\", dt.timestamp(scale=4), 
marks=[broken_sqlalchemy_autoload]\n ),\n param(\n \"DATETIMEOFFSET(5)\",\n dt.timestamp(scale=5, timezone=\"UTC\"),\n marks=[broken_sqlalchemy_autoload],\n ),\n ],\n ids=str,\n)\ndef test_get_schema_from_query(con, server_type, expected_type, temp_table):\n expected_schema = ibis.schema(dict(x=expected_type))\n with con.begin() as c:\n c.exec_driver_sql(f\"CREATE TABLE [{temp_table}] (x {server_type})\")\n expected_schema = ibis.schema(dict(x=expected_type))\n result_schema = con._get_schema_using_query(f\"SELECT * FROM [{temp_table}]\")\n assert result_schema == expected_schema\n t = con.table(temp_table)\n assert t.schema() == expected_schema\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run():
    """Discover and run every ``test_*`` module in the ``test`` package.

    One sub-suite is built per discovered module; all sub-suites are
    combined into a single :class:`unittest.TestSuite` and executed with
    a verbose text runner.
    """
    subsuite_list = []
    # Walk the (project-local) ``test`` package for candidate modules.
    for _, modname, _ in pkgutil.iter_modules(test.__path__):
        if modname.startswith('test_'):
            module = importlib.import_module('test.' + modname)
            subsuite = unittest.TestLoader().loadTestsFromModule(module)
            subsuite_list.append(subsuite)
    suite = unittest.TestSuite(subsuite_list)
    print('Testing:\n')
    unittest.TextTestRunner(verbosity=2).run(suite)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run():
    """Discover and execute all ``test_*`` modules in the ``test`` package."""
    # Collect one sub-suite per test module, preserving discovery order.
    subsuites = [
        unittest.TestLoader().loadTestsFromModule(
            importlib.import_module('test.' + modname)
        )
        for _, modname, _ in pkgutil.iter_modules(test.__path__)
        if modname.startswith('test_')
    ]
    print('Testing:\n')
    unittest.TextTestRunner(verbosity=2).run(unittest.TestSuite(subsuites))


if __name__ == '__main__':
    run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from __future__ import print_function
import importlib
import pkgutil
import unittest
import test
def run():
    """Discover and run every ``test_*`` module in the ``test`` package.

    Builds one sub-suite per discovered module, merges them into a single
    suite, and runs it with a verbose text runner.
    """
    subsuite_list = []
    # Iterate over the modules contained in the ``test`` package.
    for _, modname, _ in pkgutil.iter_modules(test.__path__):
        if modname.startswith('test_'):
            module = importlib.import_module('test.' + modname)
            subsuite = unittest.TestLoader().loadTestsFromModule(module)
            subsuite_list.append(subsuite)
    suite = unittest.TestSuite(subsuite_list)
    print('Testing:\n')
    unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
    run()
<|reserved_special_token_1|>
# Copyright 2014 Charles Noneman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test suite for running the test modules"""
from __future__ import print_function
import importlib
import pkgutil
import unittest
import test
def run():
    """Collect every ``test_*`` module from the ``test`` package and run it."""
    collected = []
    for _, modname, _ in pkgutil.iter_modules(test.__path__):
        # Only modules following the test_* naming convention participate.
        if not modname.startswith("test_"):
            continue
        mod = importlib.import_module('test.' + modname)
        collected.append(unittest.TestLoader().loadTestsFromModule(mod))
    print("Testing:\n")
    unittest.TextTestRunner(verbosity=2).run(unittest.TestSuite(collected))

if __name__ == '__main__':
    run()
|
flexible
|
{
"blob_id": "9a7908212bf13565109cd4d9ab6de65909bc6910",
"index": 3606,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef run():\n \"\"\"Runs all of the tests\"\"\"\n subsuite_list = []\n for _, modname, _ in pkgutil.iter_modules(test.__path__):\n if modname.startswith('test_'):\n module = importlib.import_module('test.' + modname)\n subsuite = unittest.TestLoader().loadTestsFromModule(module)\n subsuite_list.append(subsuite)\n suite = unittest.TestSuite(subsuite_list)\n print('Testing:\\n')\n unittest.TextTestRunner(verbosity=2).run(suite)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef run():\n \"\"\"Runs all of the tests\"\"\"\n subsuite_list = []\n for _, modname, _ in pkgutil.iter_modules(test.__path__):\n if modname.startswith('test_'):\n module = importlib.import_module('test.' + modname)\n subsuite = unittest.TestLoader().loadTestsFromModule(module)\n subsuite_list.append(subsuite)\n suite = unittest.TestSuite(subsuite_list)\n print('Testing:\\n')\n unittest.TextTestRunner(verbosity=2).run(suite)\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "<mask token>\nfrom __future__ import print_function\nimport importlib\nimport pkgutil\nimport unittest\nimport test\n\n\ndef run():\n \"\"\"Runs all of the tests\"\"\"\n subsuite_list = []\n for _, modname, _ in pkgutil.iter_modules(test.__path__):\n if modname.startswith('test_'):\n module = importlib.import_module('test.' + modname)\n subsuite = unittest.TestLoader().loadTestsFromModule(module)\n subsuite_list.append(subsuite)\n suite = unittest.TestSuite(subsuite_list)\n print('Testing:\\n')\n unittest.TextTestRunner(verbosity=2).run(suite)\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "# Copyright 2014 Charles Noneman\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Test suite for running the test modules\"\"\"\n\nfrom __future__ import print_function\n\nimport importlib\nimport pkgutil\nimport unittest\nimport test\n\ndef run():\n\t\"\"\"Runs all of the tests\"\"\"\n\tsubsuite_list = []\n\tfor _, modname, _ in pkgutil.iter_modules(test.__path__):\n\t\tif modname.startswith(\"test_\"):\n\t\t\tmodule = importlib.import_module('test.' + modname)\n\t\t\tsubsuite = unittest.TestLoader().loadTestsFromModule(module)\n\t\t\tsubsuite_list.append(subsuite)\n\tsuite = unittest.TestSuite(subsuite_list)\n\n\tprint(\"Testing:\\n\")\n\tunittest.TextTestRunner(verbosity=2).run(suite)\n\nif __name__ == '__main__':\n\trun()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(tf.__version__)
<|reserved_special_token_0|>
ninapro.splitImagesLabels()
print('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)
print('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)
print('ninapro.TestImages shape: ', ninapro.TestImages.shape)
print('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)
print('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)
print('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)
print('Read successfully done...')
<|reserved_special_token_0|>
with tf.name_scope('Input'):
x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')
y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')
if Debug:
print('input x shape: ', x.shape)
print('input y shape: ', y.shape)
<|reserved_special_token_0|>
if Debug:
print('x_image shape: ', x_image.shape)
<|reserved_special_token_0|>
with tf.name_scope('First'):
w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev
=0.1), name='W')
b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')
s1 = 1
conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')
act1 = tf.nn.relu(conv1 + b1)
tf.summary.histogram('weights', w1)
tf.summary.histogram('biases', b1)
tf.summary.histogram('activation', act1)
if Debug:
print('w1 shape: ', w1.shape)
print('b1 shape: ', b1.shape)
print('conv1 shape: ', conv1.shape)
print('act1 shape: ', act1.shape)
<|reserved_special_token_0|>
with tf.name_scope('Second'):
w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],
stddev=0.1), name='W')
b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')
s2 = 1
conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')
act2 = tf.nn.relu(conv2 + b2)
k2 = 3
ms2 = 1
mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], strides=[1, ms2, ms2,
1], padding='SAME')
tf.summary.histogram('weights', w2)
tf.summary.histogram('biases', b2)
tf.summary.histogram('activation', act2)
tf.summary.histogram('maxpooling', mp2)
if Debug:
print('w2 shape: ', w2.shape)
print('b2 shape: ', b2.shape)
print('conv2 shape: ', conv2.shape)
print('act2 shape: ', act2.shape)
print('mp2 shape: ', mp2.shape)
<|reserved_special_token_0|>
with tf.name_scope('Third'):
w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=
0.1), name='W')
b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')
s3 = 1
conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')
act3 = tf.nn.relu(conv3 + b3)
k3 = 3
ms3 = 1
mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3,
1], padding='SAME')
tf.summary.histogram('weights', w3)
tf.summary.histogram('biases', b3)
tf.summary.histogram('activation', act3)
tf.summary.histogram('maxpooling', mp3)
if Debug:
print('w3 shape: ', w3.shape)
print('b3 shape: ', b3.shape)
print('conv3 shape: ', conv3.shape)
print('act3 shape: ', act3.shape)
print('mp3 shape: ', mp3.shape)
<|reserved_special_token_0|>
with tf.name_scope('Fourth'):
w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],
stddev=0.1), name='W')
b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')
s4 = 1
conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')
act4 = tf.nn.relu(conv4 + b4)
tf.summary.histogram('weights', w4)
tf.summary.histogram('biases', b4)
tf.summary.histogram('activation', act4)
if Debug:
print('w4 shape: ', w4.shape)
print('b4 shape: ', b4.shape)
print('conv4 shape: ', conv4.shape)
print('act4 shape: ', act4.shape)
<|reserved_special_token_0|>
with tf.name_scope('Fifth'):
w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=
0.1), name='W')
b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')
s5 = 1
conv5 = tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')
act5 = tf.nn.relu(conv5 + b5)
with tf.name_scope('Flatten'):
flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])
with tf.name_scope('FullyCon'):
wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],
stddev=0.1), name='W')
bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')
y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)
tf.summary.histogram('weights', w5)
tf.summary.histogram('biases', b5)
tf.summary.histogram('activation', act5)
tf.summary.histogram('flatten', flatten5)
tf.summary.histogram('weights_fc5', wfc5)
tf.summary.histogram('biases_fc5', bfc5)
if Debug:
print('w5 shape: ', w5.shape)
print('b5 shape: ', b5.shape)
print('conv5 shape: ', conv5.shape)
print('act5 shape: ', act5.shape)
print('flatten5 shape: ', flatten5.shape)
print('weights_fc5 shape: ', wfc5.shape)
print('biases_fc5 shape: ', bfc5.shape)
print('y_predict shape: ', y_.shape)
with tf.name_scope('Softmaxloss'):
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits=y_, labels=y), name='Loss')
tf.summary.scalar('cross_entropy', cross_entropy)
with tf.name_scope('Accuracy'):
correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('accuracy', accuracy)
<|reserved_special_token_0|>
usefulFcns.BuildNewlyDir(graph_dir)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
merged_summary = tf.summary.merge_all()
writer = tf.summary.FileWriter(graph_dir)
writer.add_graph(sess.graph)
for i in range(2000):
x_batch, y_batch = ninapro.next_batch(30)
if i % 100 == 0:
[train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,
y: y_batch})
[test_accuracy] = sess.run([accuracy], feed_dict={x: ninapro.
TestImages, y: ninapro.TestLabels})
[validate_accuracy] = sess.run([accuracy], feed_dict={x:
ninapro.ValidateImages, y: ninapro.ValidateLabels})
print('Step %d, training %g, testing %g, validate %g.' % (i,
train_accuracy, test_accuracy, validate_accuracy))
if i % 5 == 0:
s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})
writer.add_summary(s, i)
sess.run(train, feed_dict={x: x_batch, y: y_batch})
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(tf.__version__)
Debug = True
ninapro = Ninapro()
ninapro.splitImagesLabels()
print('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)
print('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)
print('ninapro.TestImages shape: ', ninapro.TestImages.shape)
print('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)
print('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)
print('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)
print('Read successfully done...')
nMV = ninapro.TrainLabels.shape[1]
with tf.name_scope('Input'):
x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')
y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')
if Debug:
print('input x shape: ', x.shape)
print('input y shape: ', y.shape)
x_image = tf.reshape(x, [-1, 16, 30, 1])
if Debug:
print('x_image shape: ', x_image.shape)
firstIn = 1
firstOut = 32
with tf.name_scope('First'):
w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev
=0.1), name='W')
b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')
s1 = 1
conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')
act1 = tf.nn.relu(conv1 + b1)
tf.summary.histogram('weights', w1)
tf.summary.histogram('biases', b1)
tf.summary.histogram('activation', act1)
if Debug:
print('w1 shape: ', w1.shape)
print('b1 shape: ', b1.shape)
print('conv1 shape: ', conv1.shape)
print('act1 shape: ', act1.shape)
secondIn = firstOut
secondOut = 32
with tf.name_scope('Second'):
w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],
stddev=0.1), name='W')
b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')
s2 = 1
conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')
act2 = tf.nn.relu(conv2 + b2)
k2 = 3
ms2 = 1
mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], strides=[1, ms2, ms2,
1], padding='SAME')
tf.summary.histogram('weights', w2)
tf.summary.histogram('biases', b2)
tf.summary.histogram('activation', act2)
tf.summary.histogram('maxpooling', mp2)
if Debug:
print('w2 shape: ', w2.shape)
print('b2 shape: ', b2.shape)
print('conv2 shape: ', conv2.shape)
print('act2 shape: ', act2.shape)
print('mp2 shape: ', mp2.shape)
thirdIn = secondOut
thirdOut = 64
with tf.name_scope('Third'):
w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=
0.1), name='W')
b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')
s3 = 1
conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')
act3 = tf.nn.relu(conv3 + b3)
k3 = 3
ms3 = 1
mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3,
1], padding='SAME')
tf.summary.histogram('weights', w3)
tf.summary.histogram('biases', b3)
tf.summary.histogram('activation', act3)
tf.summary.histogram('maxpooling', mp3)
if Debug:
print('w3 shape: ', w3.shape)
print('b3 shape: ', b3.shape)
print('conv3 shape: ', conv3.shape)
print('act3 shape: ', act3.shape)
print('mp3 shape: ', mp3.shape)
fourthIn = thirdOut
fourthOut = 64
with tf.name_scope('Fourth'):
w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],
stddev=0.1), name='W')
b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')
s4 = 1
conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')
act4 = tf.nn.relu(conv4 + b4)
tf.summary.histogram('weights', w4)
tf.summary.histogram('biases', b4)
tf.summary.histogram('activation', act4)
if Debug:
print('w4 shape: ', w4.shape)
print('b4 shape: ', b4.shape)
print('conv4 shape: ', conv4.shape)
print('act4 shape: ', act4.shape)
fifthIn = fourthOut
fifthOut = 8
with tf.name_scope('Fifth'):
w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=
0.1), name='W')
b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')
s5 = 1
conv5 = tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')
act5 = tf.nn.relu(conv5 + b5)
with tf.name_scope('Flatten'):
flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])
with tf.name_scope('FullyCon'):
wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],
stddev=0.1), name='W')
bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')
y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)
tf.summary.histogram('weights', w5)
tf.summary.histogram('biases', b5)
tf.summary.histogram('activation', act5)
tf.summary.histogram('flatten', flatten5)
tf.summary.histogram('weights_fc5', wfc5)
tf.summary.histogram('biases_fc5', bfc5)
if Debug:
print('w5 shape: ', w5.shape)
print('b5 shape: ', b5.shape)
print('conv5 shape: ', conv5.shape)
print('act5 shape: ', act5.shape)
print('flatten5 shape: ', flatten5.shape)
print('weights_fc5 shape: ', wfc5.shape)
print('biases_fc5 shape: ', bfc5.shape)
print('y_predict shape: ', y_.shape)
with tf.name_scope('Softmaxloss'):
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits=y_, labels=y), name='Loss')
tf.summary.scalar('cross_entropy', cross_entropy)
with tf.name_scope('Accuracy'):
correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('accuracy', accuracy)
train = tf.train.AdamOptimizer(0.1).minimize(cross_entropy)
graph_dir = 'sEMGCNN'
<|reserved_special_token_0|>
usefulFcns.BuildNewlyDir(graph_dir)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
merged_summary = tf.summary.merge_all()
writer = tf.summary.FileWriter(graph_dir)
writer.add_graph(sess.graph)
for i in range(2000):
x_batch, y_batch = ninapro.next_batch(30)
if i % 100 == 0:
[train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,
y: y_batch})
[test_accuracy] = sess.run([accuracy], feed_dict={x: ninapro.
TestImages, y: ninapro.TestLabels})
[validate_accuracy] = sess.run([accuracy], feed_dict={x:
ninapro.ValidateImages, y: ninapro.ValidateLabels})
print('Step %d, training %g, testing %g, validate %g.' % (i,
train_accuracy, test_accuracy, validate_accuracy))
if i % 5 == 0:
s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})
writer.add_summary(s, i)
sess.run(train, feed_dict={x: x_batch, y: y_batch})
<|reserved_special_token_1|>
# Load the Ninapro sEMG dataset and split it into train/test/validate sets.
from classNinapro import Ninapro
import numpy as np
import tensorflow as tf
print(tf.__version__)
Debug = True  # when True, print tensor shapes while building the graph
ninapro = Ninapro()
ninapro.splitImagesLabels()
# Report split sizes. NOTE(review): exact shapes come from classNinapro,
# which is not visible here; the placeholders below assume 16 x 30 frames.
print('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)
print('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)
print('ninapro.TestImages shape: ', ninapro.TestImages.shape)
print('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)
print('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)
print('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)
print('Read successfully done...')
# Number of movement classes, taken from the one-hot label width.
nMV = ninapro.TrainLabels.shape[1]
with tf.name_scope('Input'):
    # Each sample is a 16 x 30 sEMG frame; labels are one-hot over nMV classes.
    x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')
    y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')
    if Debug:
        print('input x shape: ', x.shape)
        print('input y shape: ', y.shape)
# Add a single channel dimension so the input fits conv2d's NHWC layout.
x_image = tf.reshape(x, [-1, 16, 30, 1])
if Debug:
    print('x_image shape: ', x_image.shape)
# First convolutional layer: 1x16 kernels, 1 input channel -> 32 feature maps.
firstIn = 1
firstOut = 32
with tf.name_scope('First'):
    w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev
        =0.1), name='W')
    b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')
    s1 = 1  # convolution stride
    conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')
    act1 = tf.nn.relu(conv1 + b1)
    # TensorBoard summaries for this layer's parameters and activations.
    tf.summary.histogram('weights', w1)
    tf.summary.histogram('biases', b1)
    tf.summary.histogram('activation', act1)
    if Debug:
        print('w1 shape: ', w1.shape)
        print('b1 shape: ', b1.shape)
        print('conv1 shape: ', conv1.shape)
        print('act1 shape: ', act1.shape)
# Second convolutional layer: 3x3 kernels, 32 -> 32 maps, then 3x3 max-pool
# with stride 1 (SAME padding, so the spatial size is preserved).
secondIn = firstOut
secondOut = 32
with tf.name_scope('Second'):
    w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],
        stddev=0.1), name='W')
    b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')
    s2 = 1  # convolution stride
    conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')
    act2 = tf.nn.relu(conv2 + b2)
    k2 = 3  # pooling window size
    ms2 = 1  # pooling stride
    mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], strides=[1, ms2, ms2,
        1], padding='SAME')
    tf.summary.histogram('weights', w2)
    tf.summary.histogram('biases', b2)
    tf.summary.histogram('activation', act2)
    tf.summary.histogram('maxpooling', mp2)
    if Debug:
        print('w2 shape: ', w2.shape)
        print('b2 shape: ', b2.shape)
        print('conv2 shape: ', conv2.shape)
        print('act2 shape: ', act2.shape)
        print('mp2 shape: ', mp2.shape)
# Third convolutional layer: 5x5 kernels, 32 -> 64 maps, then 3x3 max-pool
# with stride 1.
thirdIn = secondOut
thirdOut = 64
with tf.name_scope('Third'):
    w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=
        0.1), name='W')
    b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')
    s3 = 1  # convolution stride
    conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')
    act3 = tf.nn.relu(conv3 + b3)
    k3 = 3  # pooling window size
    ms3 = 1  # pooling stride
    mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3,
        1], padding='SAME')
    tf.summary.histogram('weights', w3)
    tf.summary.histogram('biases', b3)
    tf.summary.histogram('activation', act3)
    tf.summary.histogram('maxpooling', mp3)
    if Debug:
        print('w3 shape: ', w3.shape)
        print('b3 shape: ', b3.shape)
        print('conv3 shape: ', conv3.shape)
        print('act3 shape: ', act3.shape)
        print('mp3 shape: ', mp3.shape)
# Fourth convolutional layer: 6x1 kernels, 64 -> 64 maps (no pooling).
fourthIn = thirdOut
fourthOut = 64
with tf.name_scope('Fourth'):
    w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],
        stddev=0.1), name='W')
    b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')
    s4 = 1  # convolution stride
    conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')
    act4 = tf.nn.relu(conv4 + b4)
    tf.summary.histogram('weights', w4)
    tf.summary.histogram('biases', b4)
    tf.summary.histogram('activation', act4)
    if Debug:
        print('w4 shape: ', w4.shape)
        print('b4 shape: ', b4.shape)
        print('conv4 shape: ', conv4.shape)
        print('act4 shape: ', act4.shape)
# Fifth layer: 1x1 convolution reducing 64 -> 8 maps, then flatten and a
# fully connected layer producing one logit per movement class.
fifthIn = fourthOut
fifthOut = 8
with tf.name_scope('Fifth'):
    w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=
        0.1), name='W')
    b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')
    s5 = 1  # convolution stride
    conv5 = tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')
    act5 = tf.nn.relu(conv5 + b5)
    with tf.name_scope('Flatten'):
        # Spatial size is still 16 x 30 (all SAME padding, stride 1 above).
        flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])
    with tf.name_scope('FullyCon'):
        wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],
            stddev=0.1), name='W')
        bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')
        # NOTE(review): relu is applied to the final output, which zeroes
        # negative logits before the softmax cross-entropy — confirm intended.
        y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)
    tf.summary.histogram('weights', w5)
    tf.summary.histogram('biases', b5)
    tf.summary.histogram('activation', act5)
    tf.summary.histogram('flatten', flatten5)
    tf.summary.histogram('weights_fc5', wfc5)
    tf.summary.histogram('biases_fc5', bfc5)
    if Debug:
        print('w5 shape: ', w5.shape)
        print('b5 shape: ', b5.shape)
        print('conv5 shape: ', conv5.shape)
        print('act5 shape: ', act5.shape)
        print('flatten5 shape: ', flatten5.shape)
        print('weights_fc5 shape: ', wfc5.shape)
        print('biases_fc5 shape: ', bfc5.shape)
        print('y_predict shape: ', y_.shape)
# Softmax cross-entropy loss over the class logits, averaged over the batch.
with tf.name_scope('Softmaxloss'):
    cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
        logits=y_, labels=y), name='Loss')
    tf.summary.scalar('cross_entropy', cross_entropy)
# Accuracy: fraction of samples whose argmax prediction matches the label.
with tf.name_scope('Accuracy'):
    correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    tf.summary.scalar('accuracy', accuracy)
# NOTE(review): 0.1 is an unusually high learning rate for Adam — confirm.
train = tf.train.AdamOptimizer(0.1).minimize(cross_entropy)
graph_dir = 'sEMGCNN'  # directory receiving the TensorBoard event files
import usefulFcns
usefulFcns.BuildNewlyDir(graph_dir)
# Training loop: 2000 mini-batch steps of 30 samples; accuracy is reported
# every 100 steps and TensorBoard summaries are written every 5 steps.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    merged_summary = tf.summary.merge_all()
    writer = tf.summary.FileWriter(graph_dir)
    writer.add_graph(sess.graph)
    for i in range(2000):
        x_batch, y_batch = ninapro.next_batch(30)
        if i % 100 == 0:
            # Accuracy on the current train batch plus the full test and
            # validation splits.
            [train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,
                y: y_batch})
            [test_accuracy] = sess.run([accuracy], feed_dict={x: ninapro.
                TestImages, y: ninapro.TestLabels})
            [validate_accuracy] = sess.run([accuracy], feed_dict={x:
                ninapro.ValidateImages, y: ninapro.ValidateLabels})
            print('Step %d, training %g, testing %g, validate %g.' % (i,
                train_accuracy, test_accuracy, validate_accuracy))
        if i % 5 == 0:
            s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})
            writer.add_summary(s, i)
        # One optimization step on the mini-batch.
        sess.run(train, feed_dict={x: x_batch, y: y_batch})
<|reserved_special_token_1|>
from classNinapro import Ninapro
import numpy as np
import tensorflow as tf
print(tf.__version__)
Debug = True # for tensor dimensionality checking
# Load the NinaPro sEMG dataset and split it into train/test/validate sets.
ninapro = Ninapro()
ninapro.splitImagesLabels()
# Train
print('ninapro.TrainImages shape: ', ninapro.TrainImages.shape) # m x 16 x 30
print('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape) # m x 8
# Test
print('ninapro.TestImages shape: ', ninapro.TestImages.shape) # m x 16 x 30
print('ninapro.TestLabels shape: ', ninapro.TestLabels.shape) # m x 8
# Validate
print('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape) # m x 16 x 30
print('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape) # m x 8
print('Read successfully done...')
# number of total classes of movements, 8 for example (label width).
nMV = ninapro.TrainLabels.shape[1]
# - build the Convolutional Neural Network
#-------------------------------------------------add Full+Dropout+Fully
# Setup placeholders for input data
# Input placeholders: x holds a batch of 16x30 sEMG "images", y the one-hot
# movement labels (nMV classes).
with tf.name_scope('Input'):
    x = tf.placeholder(tf.float32, shape=[None, 16,30], name='X')
    y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')

    if Debug:
        print('input x shape: ', x.shape)
        print('input y shape: ', y.shape)

# every sample with the dimensionality, 16x30; add a channel axis for conv2d
x_image = tf.reshape(x, [-1, 16, 30, 1])
if Debug:
    print('x_image shape: ', x_image.shape)

# summary 
#tf.summary.image('input', x, 4)
# First layer: 1x16 convolution across the time axis, 1 -> 32 channels.
firstIn = 1
firstOut = 32
with tf.name_scope('First'):
    # convolution (stride 1, SAME padding keeps the 16x30 spatial size)
    w1 = tf.Variable(tf.truncated_normal([1,16, firstIn, firstOut], stddev=0.1), name = 'W')
    b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name = 'B' )
    s1 = 1
    conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME' )
    act1 = tf.nn.relu(conv1 + b1)
    # summary
    tf.summary.histogram('weights', w1)
    tf.summary.histogram('biases', b1)
    tf.summary.histogram('activation', act1) 

    # dimensionality checking
    if Debug:
        print('w1 shape: ', w1.shape)
        print('b1 shape: ', b1.shape)
        print('conv1 shape: ', conv1.shape)
        print('act1 shape: ', act1.shape)
# Second layer: 3x3 convolution (32 -> 32 channels) + ReLU + 3x3 max-pool.
secondIn = firstOut
secondOut = 32
with tf.name_scope('Second'):
    # convolution
    w2 = tf.Variable(tf.truncated_normal([3,3, secondIn, secondOut], stddev=0.1), name='W')
    b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')
    s2 = 1
    conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')
    # detector
    act2 = tf.nn.relu(conv2 + b2)
    # maxpooling (stride 1 + SAME padding: spatial size is unchanged)
    k2 = 3
    ms2 = 1
    mp2 = tf.nn.max_pool(act2, ksize=[1, k2,k2, 1], strides=[1,ms2,ms2,1], padding='SAME')
    # summary
    tf.summary.histogram('weights', w2)
    tf.summary.histogram('biases', b2)
    tf.summary.histogram('activation', act2)
    tf.summary.histogram('maxpooling', mp2)

    # dimensionality checking
    if Debug:
        print('w2 shape: ', w2.shape)
        print('b2 shape: ', b2.shape)
        print('conv2 shape: ', conv2.shape)
        print('act2 shape: ', act2.shape)
        print('mp2 shape: ', mp2.shape)
# Third layer: 5x5 convolution (32 -> 64 channels) + ReLU + 3x3 max-pool.
thirdIn = secondOut
thirdOut = 64
with tf.name_scope('Third'):
    # convolution
    w3 = tf.Variable(tf.truncated_normal([5,5, thirdIn, thirdOut], stddev=0.1), name='W')
    b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')
    s3 = 1
    conv3 = tf.nn.conv2d(mp2, w3, strides=[1,s3,s3,1], padding='SAME')
    # detector
    act3 = tf.nn.relu(conv3 + b3)
    # maxpooling
    k3 = 3 # ksize of maxpooling
    ms3 = 1 # maxpooling stride (1, so spatial size is preserved)
    mp3 = tf.nn.max_pool(act3, ksize=[1,k3,k3,1], strides=[1, ms3, ms3, 1], padding='SAME')

    # summary
    tf.summary.histogram('weights', w3)
    tf.summary.histogram('biases', b3)
    tf.summary.histogram('activation', act3)
    tf.summary.histogram('maxpooling', mp3)

    # dimensionality checking
    if Debug:
        print('w3 shape: ', w3.shape)
        print('b3 shape: ', b3.shape)
        print('conv3 shape: ', conv3.shape)
        print('act3 shape: ', act3.shape)
        print('mp3 shape: ', mp3.shape)
# Fourth layer: 6x1 convolution across channels of the electrode axis
# (64 -> 64 channels), ReLU, no pooling.
fourthIn = thirdOut
fourthOut = 64
with tf.name_scope('Fourth'):
    # convolution
    w4 = tf.Variable(tf.truncated_normal([6,1, fourthIn, fourthOut], stddev=0.1), name='W')
    b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')
    s4 = 1
    conv4 = tf.nn.conv2d(mp3, w4, strides=[1,s4,s4,1], padding='SAME')
    # detector
    act4 = tf.nn.relu(conv4 + b4)
    
    # summary
    tf.summary.histogram('weights', w4)
    tf.summary.histogram('biases', b4)
    tf.summary.histogram('activation', act4)

    # dimensionality checking
    if Debug:
        print('w4 shape: ', w4.shape)
        print('b4 shape: ', b4.shape)
        print('conv4 shape: ', conv4.shape)
        print('act4 shape: ', act4.shape)
# Fifth layer: 1x1 convolution down to 8 channels, then flatten and a
# fully-connected layer producing the class scores y_.
fifthIn = fourthOut
fifthOut = 8
with tf.name_scope('Fifth'):
    # convolution (1x1: per-pixel channel mixing, 64 -> 8)
    w5 = tf.Variable(tf.truncated_normal([1,1, fifthIn, fifthOut], stddev=0.1), name='W')
    b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')
    s5 = 1
    conv5 = tf.nn.conv2d(act4, w5, strides=[1,s5,s5,1], padding='SAME')
    # detector
    act5 = tf.nn.relu(conv5 + b5)

    # flatten (spatial size is still 16x30: all strides above are 1 + SAME)
    with tf.name_scope('Flatten'):
        flatten5 = tf.reshape(act5, [-1, 16*30*fifthOut])
    # fully-connect layer
    with tf.name_scope('FullyCon'):
        wfc5 = tf.Variable(tf.truncated_normal( [16*30*fifthOut, nMV], stddev=0.1), name='W')
        bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')
        # NOTE(review): relu on the logits zeroes negative class scores
        # before the softmax loss — confirm this is intentional.
        y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)

    # summary
    tf.summary.histogram('weights', w5)
    tf.summary.histogram('biases', b5)
    tf.summary.histogram('activation', act5)
    tf.summary.histogram('flatten', flatten5)
    tf.summary.histogram('weights_fc5', wfc5)
    tf.summary.histogram('biases_fc5', bfc5)


    # dimensionality checking
    if Debug:
        print('w5 shape: ', w5.shape)
        print('b5 shape: ', b5.shape)
        print('conv5 shape: ', conv5.shape)
        print('act5 shape: ', act5.shape)
        print('flatten5 shape: ', flatten5.shape)
        print('weights_fc5 shape: ', wfc5.shape)
        print('biases_fc5 shape: ', bfc5.shape)
        print('y_predict shape: ', y_.shape)
# Loss: softmax cross-entropy between predicted scores y_ and one-hot labels.
with tf.name_scope('Softmaxloss'):
    cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_, labels=y), name='Loss')
    # summary
    tf.summary.scalar('cross_entropy', cross_entropy)
    

# Accuracy: fraction of samples whose argmax prediction matches the label.
with tf.name_scope('Accuracy'):
    correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    # summary
    tf.summary.scalar('accuracy', accuracy)

# Use an AdamOptimizer to train the network
# NOTE(review): 1e-1 is an unusually large Adam learning rate — confirm.
train = tf.train.AdamOptimizer(1e-1).minimize(cross_entropy)
# Visualization directory (recreated fresh for TensorBoard event files)
graph_dir = 'sEMGCNN'
import usefulFcns
usefulFcns.BuildNewlyDir(graph_dir)

# Train the model: 2000 steps of 30-sample mini-batches; accuracy reported
# every 100 steps, summaries written every 5 steps.

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    merged_summary = tf.summary.merge_all()
    writer = tf.summary.FileWriter(graph_dir)
    writer.add_graph(sess.graph)

    for i in range(2000):
        x_batch, y_batch = ninapro.next_batch(30)

        # Occasionaly report accuracy of [train] and [test]
        if i%100==0:
            [train_accuracy] = sess.run([accuracy], feed_dict={x:x_batch, y:y_batch})
            [test_accuracy] = sess.run([accuracy], feed_dict={x:ninapro.TestImages, y:ninapro.TestLabels})
            [validate_accuracy] = sess.run([accuracy], feed_dict={x:ninapro.ValidateImages, y:ninapro.ValidateLabels} )
            print('Step %d, training %g, testing %g, validate %g.' % (i, train_accuracy, test_accuracy, validate_accuracy) )
        
        # Occasionaly write visualization summary to disk file.
        if i%5==0:
            s = sess.run(merged_summary, feed_dict={x:x_batch, y:y_batch})
            writer.add_summary(s,i)
        # Training the model (one optimization step)
        sess.run(train, feed_dict={x:x_batch, y:y_batch})
|
flexible
|
{
"blob_id": "30aa8405ccf64ce8a05204f3f9fa2ffab436ad3b",
"index": 1578,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(tf.__version__)\n<mask token>\nninapro.splitImagesLabels()\nprint('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)\nprint('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)\nprint('ninapro.TestImages shape: ', ninapro.TestImages.shape)\nprint('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)\nprint('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)\nprint('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)\nprint('Read successfully done...')\n<mask token>\nwith tf.name_scope('Input'):\n x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')\n y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')\n if Debug:\n print('input x shape: ', x.shape)\n print('input y shape: ', y.shape)\n<mask token>\nif Debug:\n print('x_image shape: ', x_image.shape)\n<mask token>\nwith tf.name_scope('First'):\n w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev\n =0.1), name='W')\n b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')\n s1 = 1\n conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')\n act1 = tf.nn.relu(conv1 + b1)\n tf.summary.histogram('weights', w1)\n tf.summary.histogram('biases', b1)\n tf.summary.histogram('activation', act1)\n if Debug:\n print('w1 shape: ', w1.shape)\n print('b1 shape: ', b1.shape)\n print('conv1 shape: ', conv1.shape)\n print('act1 shape: ', act1.shape)\n<mask token>\nwith tf.name_scope('Second'):\n w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],\n stddev=0.1), name='W')\n b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')\n s2 = 1\n conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')\n act2 = tf.nn.relu(conv2 + b2)\n k2 = 3\n ms2 = 1\n mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], strides=[1, ms2, ms2, \n 1], padding='SAME')\n tf.summary.histogram('weights', w2)\n tf.summary.histogram('biases', b2)\n tf.summary.histogram('activation', act2)\n 
tf.summary.histogram('maxpooling', mp2)\n if Debug:\n print('w2 shape: ', w2.shape)\n print('b2 shape: ', b2.shape)\n print('conv2 shape: ', conv2.shape)\n print('act2 shape: ', act2.shape)\n print('mp2 shape: ', mp2.shape)\n<mask token>\nwith tf.name_scope('Third'):\n w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=\n 0.1), name='W')\n b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')\n s3 = 1\n conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')\n act3 = tf.nn.relu(conv3 + b3)\n k3 = 3\n ms3 = 1\n mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3, \n 1], padding='SAME')\n tf.summary.histogram('weights', w3)\n tf.summary.histogram('biases', b3)\n tf.summary.histogram('activation', act3)\n tf.summary.histogram('maxpooling', mp3)\n if Debug:\n print('w3 shape: ', w3.shape)\n print('b3 shape: ', b3.shape)\n print('conv3 shape: ', conv3.shape)\n print('act3 shape: ', act3.shape)\n print('mp3 shape: ', mp3.shape)\n<mask token>\nwith tf.name_scope('Fourth'):\n w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],\n stddev=0.1), name='W')\n b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')\n s4 = 1\n conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')\n act4 = tf.nn.relu(conv4 + b4)\n tf.summary.histogram('weights', w4)\n tf.summary.histogram('biases', b4)\n tf.summary.histogram('activation', act4)\n if Debug:\n print('w4 shape: ', w4.shape)\n print('b4 shape: ', b4.shape)\n print('conv4 shape: ', conv4.shape)\n print('act4 shape: ', act4.shape)\n<mask token>\nwith tf.name_scope('Fifth'):\n w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=\n 0.1), name='W')\n b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')\n s5 = 1\n conv5 = tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')\n act5 = tf.nn.relu(conv5 + b5)\n with tf.name_scope('Flatten'):\n flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])\n with 
tf.name_scope('FullyCon'):\n wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],\n stddev=0.1), name='W')\n bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')\n y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)\n tf.summary.histogram('weights', w5)\n tf.summary.histogram('biases', b5)\n tf.summary.histogram('activation', act5)\n tf.summary.histogram('flatten', flatten5)\n tf.summary.histogram('weights_fc5', wfc5)\n tf.summary.histogram('biases_fc5', bfc5)\n if Debug:\n print('w5 shape: ', w5.shape)\n print('b5 shape: ', b5.shape)\n print('conv5 shape: ', conv5.shape)\n print('act5 shape: ', act5.shape)\n print('flatten5 shape: ', flatten5.shape)\n print('weights_fc5 shape: ', wfc5.shape)\n print('biases_fc5 shape: ', bfc5.shape)\n print('y_predict shape: ', y_.shape)\nwith tf.name_scope('Softmaxloss'):\n cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(\n logits=y_, labels=y), name='Loss')\n tf.summary.scalar('cross_entropy', cross_entropy)\nwith tf.name_scope('Accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n tf.summary.scalar('accuracy', accuracy)\n<mask token>\nusefulFcns.BuildNewlyDir(graph_dir)\nwith tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n merged_summary = tf.summary.merge_all()\n writer = tf.summary.FileWriter(graph_dir)\n writer.add_graph(sess.graph)\n for i in range(2000):\n x_batch, y_batch = ninapro.next_batch(30)\n if i % 100 == 0:\n [train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,\n y: y_batch})\n [test_accuracy] = sess.run([accuracy], feed_dict={x: ninapro.\n TestImages, y: ninapro.TestLabels})\n [validate_accuracy] = sess.run([accuracy], feed_dict={x:\n ninapro.ValidateImages, y: ninapro.ValidateLabels})\n print('Step %d, training %g, testing %g, validate %g.' 
% (i,\n train_accuracy, test_accuracy, validate_accuracy))\n if i % 5 == 0:\n s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})\n writer.add_summary(s, i)\n sess.run(train, feed_dict={x: x_batch, y: y_batch})\n",
"step-3": "<mask token>\nprint(tf.__version__)\nDebug = True\nninapro = Ninapro()\nninapro.splitImagesLabels()\nprint('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)\nprint('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)\nprint('ninapro.TestImages shape: ', ninapro.TestImages.shape)\nprint('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)\nprint('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)\nprint('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)\nprint('Read successfully done...')\nnMV = ninapro.TrainLabels.shape[1]\nwith tf.name_scope('Input'):\n x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')\n y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')\n if Debug:\n print('input x shape: ', x.shape)\n print('input y shape: ', y.shape)\nx_image = tf.reshape(x, [-1, 16, 30, 1])\nif Debug:\n print('x_image shape: ', x_image.shape)\nfirstIn = 1\nfirstOut = 32\nwith tf.name_scope('First'):\n w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev\n =0.1), name='W')\n b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')\n s1 = 1\n conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')\n act1 = tf.nn.relu(conv1 + b1)\n tf.summary.histogram('weights', w1)\n tf.summary.histogram('biases', b1)\n tf.summary.histogram('activation', act1)\n if Debug:\n print('w1 shape: ', w1.shape)\n print('b1 shape: ', b1.shape)\n print('conv1 shape: ', conv1.shape)\n print('act1 shape: ', act1.shape)\nsecondIn = firstOut\nsecondOut = 32\nwith tf.name_scope('Second'):\n w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],\n stddev=0.1), name='W')\n b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')\n s2 = 1\n conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')\n act2 = tf.nn.relu(conv2 + b2)\n k2 = 3\n ms2 = 1\n mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], strides=[1, ms2, ms2, \n 1], padding='SAME')\n 
tf.summary.histogram('weights', w2)\n tf.summary.histogram('biases', b2)\n tf.summary.histogram('activation', act2)\n tf.summary.histogram('maxpooling', mp2)\n if Debug:\n print('w2 shape: ', w2.shape)\n print('b2 shape: ', b2.shape)\n print('conv2 shape: ', conv2.shape)\n print('act2 shape: ', act2.shape)\n print('mp2 shape: ', mp2.shape)\nthirdIn = secondOut\nthirdOut = 64\nwith tf.name_scope('Third'):\n w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=\n 0.1), name='W')\n b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')\n s3 = 1\n conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')\n act3 = tf.nn.relu(conv3 + b3)\n k3 = 3\n ms3 = 1\n mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3, \n 1], padding='SAME')\n tf.summary.histogram('weights', w3)\n tf.summary.histogram('biases', b3)\n tf.summary.histogram('activation', act3)\n tf.summary.histogram('maxpooling', mp3)\n if Debug:\n print('w3 shape: ', w3.shape)\n print('b3 shape: ', b3.shape)\n print('conv3 shape: ', conv3.shape)\n print('act3 shape: ', act3.shape)\n print('mp3 shape: ', mp3.shape)\nfourthIn = thirdOut\nfourthOut = 64\nwith tf.name_scope('Fourth'):\n w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],\n stddev=0.1), name='W')\n b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')\n s4 = 1\n conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')\n act4 = tf.nn.relu(conv4 + b4)\n tf.summary.histogram('weights', w4)\n tf.summary.histogram('biases', b4)\n tf.summary.histogram('activation', act4)\n if Debug:\n print('w4 shape: ', w4.shape)\n print('b4 shape: ', b4.shape)\n print('conv4 shape: ', conv4.shape)\n print('act4 shape: ', act4.shape)\nfifthIn = fourthOut\nfifthOut = 8\nwith tf.name_scope('Fifth'):\n w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=\n 0.1), name='W')\n b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')\n s5 = 1\n conv5 = 
tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')\n act5 = tf.nn.relu(conv5 + b5)\n with tf.name_scope('Flatten'):\n flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])\n with tf.name_scope('FullyCon'):\n wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],\n stddev=0.1), name='W')\n bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')\n y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)\n tf.summary.histogram('weights', w5)\n tf.summary.histogram('biases', b5)\n tf.summary.histogram('activation', act5)\n tf.summary.histogram('flatten', flatten5)\n tf.summary.histogram('weights_fc5', wfc5)\n tf.summary.histogram('biases_fc5', bfc5)\n if Debug:\n print('w5 shape: ', w5.shape)\n print('b5 shape: ', b5.shape)\n print('conv5 shape: ', conv5.shape)\n print('act5 shape: ', act5.shape)\n print('flatten5 shape: ', flatten5.shape)\n print('weights_fc5 shape: ', wfc5.shape)\n print('biases_fc5 shape: ', bfc5.shape)\n print('y_predict shape: ', y_.shape)\nwith tf.name_scope('Softmaxloss'):\n cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(\n logits=y_, labels=y), name='Loss')\n tf.summary.scalar('cross_entropy', cross_entropy)\nwith tf.name_scope('Accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n tf.summary.scalar('accuracy', accuracy)\ntrain = tf.train.AdamOptimizer(0.1).minimize(cross_entropy)\ngraph_dir = 'sEMGCNN'\n<mask token>\nusefulFcns.BuildNewlyDir(graph_dir)\nwith tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n merged_summary = tf.summary.merge_all()\n writer = tf.summary.FileWriter(graph_dir)\n writer.add_graph(sess.graph)\n for i in range(2000):\n x_batch, y_batch = ninapro.next_batch(30)\n if i % 100 == 0:\n [train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,\n y: y_batch})\n [test_accuracy] = sess.run([accuracy], feed_dict={x: ninapro.\n TestImages, y: ninapro.TestLabels})\n 
[validate_accuracy] = sess.run([accuracy], feed_dict={x:\n ninapro.ValidateImages, y: ninapro.ValidateLabels})\n print('Step %d, training %g, testing %g, validate %g.' % (i,\n train_accuracy, test_accuracy, validate_accuracy))\n if i % 5 == 0:\n s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})\n writer.add_summary(s, i)\n sess.run(train, feed_dict={x: x_batch, y: y_batch})\n",
"step-4": "from classNinapro import Ninapro\nimport numpy as np\nimport tensorflow as tf\nprint(tf.__version__)\nDebug = True\nninapro = Ninapro()\nninapro.splitImagesLabels()\nprint('ninapro.TrainImages shape: ', ninapro.TrainImages.shape)\nprint('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape)\nprint('ninapro.TestImages shape: ', ninapro.TestImages.shape)\nprint('ninapro.TestLabels shape: ', ninapro.TestLabels.shape)\nprint('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape)\nprint('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape)\nprint('Read successfully done...')\nnMV = ninapro.TrainLabels.shape[1]\nwith tf.name_scope('Input'):\n x = tf.placeholder(tf.float32, shape=[None, 16, 30], name='X')\n y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')\n if Debug:\n print('input x shape: ', x.shape)\n print('input y shape: ', y.shape)\nx_image = tf.reshape(x, [-1, 16, 30, 1])\nif Debug:\n print('x_image shape: ', x_image.shape)\nfirstIn = 1\nfirstOut = 32\nwith tf.name_scope('First'):\n w1 = tf.Variable(tf.truncated_normal([1, 16, firstIn, firstOut], stddev\n =0.1), name='W')\n b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name='B')\n s1 = 1\n conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME')\n act1 = tf.nn.relu(conv1 + b1)\n tf.summary.histogram('weights', w1)\n tf.summary.histogram('biases', b1)\n tf.summary.histogram('activation', act1)\n if Debug:\n print('w1 shape: ', w1.shape)\n print('b1 shape: ', b1.shape)\n print('conv1 shape: ', conv1.shape)\n print('act1 shape: ', act1.shape)\nsecondIn = firstOut\nsecondOut = 32\nwith tf.name_scope('Second'):\n w2 = tf.Variable(tf.truncated_normal([3, 3, secondIn, secondOut],\n stddev=0.1), name='W')\n b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')\n s2 = 1\n conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')\n act2 = tf.nn.relu(conv2 + b2)\n k2 = 3\n ms2 = 1\n mp2 = tf.nn.max_pool(act2, ksize=[1, k2, k2, 1], 
strides=[1, ms2, ms2, \n 1], padding='SAME')\n tf.summary.histogram('weights', w2)\n tf.summary.histogram('biases', b2)\n tf.summary.histogram('activation', act2)\n tf.summary.histogram('maxpooling', mp2)\n if Debug:\n print('w2 shape: ', w2.shape)\n print('b2 shape: ', b2.shape)\n print('conv2 shape: ', conv2.shape)\n print('act2 shape: ', act2.shape)\n print('mp2 shape: ', mp2.shape)\nthirdIn = secondOut\nthirdOut = 64\nwith tf.name_scope('Third'):\n w3 = tf.Variable(tf.truncated_normal([5, 5, thirdIn, thirdOut], stddev=\n 0.1), name='W')\n b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')\n s3 = 1\n conv3 = tf.nn.conv2d(mp2, w3, strides=[1, s3, s3, 1], padding='SAME')\n act3 = tf.nn.relu(conv3 + b3)\n k3 = 3\n ms3 = 1\n mp3 = tf.nn.max_pool(act3, ksize=[1, k3, k3, 1], strides=[1, ms3, ms3, \n 1], padding='SAME')\n tf.summary.histogram('weights', w3)\n tf.summary.histogram('biases', b3)\n tf.summary.histogram('activation', act3)\n tf.summary.histogram('maxpooling', mp3)\n if Debug:\n print('w3 shape: ', w3.shape)\n print('b3 shape: ', b3.shape)\n print('conv3 shape: ', conv3.shape)\n print('act3 shape: ', act3.shape)\n print('mp3 shape: ', mp3.shape)\nfourthIn = thirdOut\nfourthOut = 64\nwith tf.name_scope('Fourth'):\n w4 = tf.Variable(tf.truncated_normal([6, 1, fourthIn, fourthOut],\n stddev=0.1), name='W')\n b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')\n s4 = 1\n conv4 = tf.nn.conv2d(mp3, w4, strides=[1, s4, s4, 1], padding='SAME')\n act4 = tf.nn.relu(conv4 + b4)\n tf.summary.histogram('weights', w4)\n tf.summary.histogram('biases', b4)\n tf.summary.histogram('activation', act4)\n if Debug:\n print('w4 shape: ', w4.shape)\n print('b4 shape: ', b4.shape)\n print('conv4 shape: ', conv4.shape)\n print('act4 shape: ', act4.shape)\nfifthIn = fourthOut\nfifthOut = 8\nwith tf.name_scope('Fifth'):\n w5 = tf.Variable(tf.truncated_normal([1, 1, fifthIn, fifthOut], stddev=\n 0.1), name='W')\n b5 = tf.Variable(tf.constant(0.1, 
shape=[fifthOut]), name='B')\n s5 = 1\n conv5 = tf.nn.conv2d(act4, w5, strides=[1, s5, s5, 1], padding='SAME')\n act5 = tf.nn.relu(conv5 + b5)\n with tf.name_scope('Flatten'):\n flatten5 = tf.reshape(act5, [-1, 16 * 30 * fifthOut])\n with tf.name_scope('FullyCon'):\n wfc5 = tf.Variable(tf.truncated_normal([16 * 30 * fifthOut, nMV],\n stddev=0.1), name='W')\n bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')\n y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)\n tf.summary.histogram('weights', w5)\n tf.summary.histogram('biases', b5)\n tf.summary.histogram('activation', act5)\n tf.summary.histogram('flatten', flatten5)\n tf.summary.histogram('weights_fc5', wfc5)\n tf.summary.histogram('biases_fc5', bfc5)\n if Debug:\n print('w5 shape: ', w5.shape)\n print('b5 shape: ', b5.shape)\n print('conv5 shape: ', conv5.shape)\n print('act5 shape: ', act5.shape)\n print('flatten5 shape: ', flatten5.shape)\n print('weights_fc5 shape: ', wfc5.shape)\n print('biases_fc5 shape: ', bfc5.shape)\n print('y_predict shape: ', y_.shape)\nwith tf.name_scope('Softmaxloss'):\n cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(\n logits=y_, labels=y), name='Loss')\n tf.summary.scalar('cross_entropy', cross_entropy)\nwith tf.name_scope('Accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n tf.summary.scalar('accuracy', accuracy)\ntrain = tf.train.AdamOptimizer(0.1).minimize(cross_entropy)\ngraph_dir = 'sEMGCNN'\nimport usefulFcns\nusefulFcns.BuildNewlyDir(graph_dir)\nwith tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n merged_summary = tf.summary.merge_all()\n writer = tf.summary.FileWriter(graph_dir)\n writer.add_graph(sess.graph)\n for i in range(2000):\n x_batch, y_batch = ninapro.next_batch(30)\n if i % 100 == 0:\n [train_accuracy] = sess.run([accuracy], feed_dict={x: x_batch,\n y: y_batch})\n [test_accuracy] = sess.run([accuracy], 
feed_dict={x: ninapro.\n TestImages, y: ninapro.TestLabels})\n [validate_accuracy] = sess.run([accuracy], feed_dict={x:\n ninapro.ValidateImages, y: ninapro.ValidateLabels})\n print('Step %d, training %g, testing %g, validate %g.' % (i,\n train_accuracy, test_accuracy, validate_accuracy))\n if i % 5 == 0:\n s = sess.run(merged_summary, feed_dict={x: x_batch, y: y_batch})\n writer.add_summary(s, i)\n sess.run(train, feed_dict={x: x_batch, y: y_batch})\n",
"step-5": "from classNinapro import Ninapro\nimport numpy as np\n\nimport tensorflow as tf\nprint(tf.__version__)\n\nDebug = True # for tensor dimensionality checking\nninapro = Ninapro()\nninapro.splitImagesLabels()\n\n# Train\nprint('ninapro.TrainImages shape: ', ninapro.TrainImages.shape) # m x 16 x 30\nprint('ninapro.TrainLabels shape: ', ninapro.TrainLabels.shape) # m x 8\n# Test\nprint('ninapro.TestImages shape: ', ninapro.TestImages.shape) # m x 16 x 30\nprint('ninapro.TestLabels shape: ', ninapro.TestLabels.shape) # m x 8\n# Validate\nprint('ninapro.ValidateImages shape: ', ninapro.ValidateImages.shape) # m x 16 x 30\nprint('ninapro.ValidateLabels shape: ', ninapro.ValidateLabels.shape) # m x 8\n\nprint('Read successfully done...')\n\n# number of total classes of movements, 8 for exampel.\nnMV = ninapro.TrainLabels.shape[1]\n\n# - build the Convolutional Neural Network\n#-------------------------------------------------add Full+Dropout+Fully\n\n# Setup placeholders for input data\n\nwith tf.name_scope('Input'):\n x = tf.placeholder(tf.float32, shape=[None, 16,30], name='X')\n y = tf.placeholder(tf.float32, shape=[None, nMV], name='Labels')\n\n if Debug:\n print('input x shape: ', x.shape)\n print('input y shape: ', y.shape)\n\n# every sample with the dimensionality, 16x30\nx_image = tf.reshape(x, [-1, 16, 30, 1])\nif Debug:\n print('x_image shape: ', x_image.shape)\n\n# summary \n#tf.summary.image('input', x, 4)\n\n\nfirstIn = 1\nfirstOut = 32\nwith tf.name_scope('First'):\n # convolution\n w1 = tf.Variable(tf.truncated_normal([1,16, firstIn, firstOut], stddev=0.1), name = 'W')\n b1 = tf.Variable(tf.constant(0.1, shape=[firstOut]), name = 'B' )\n s1 = 1\n conv1 = tf.nn.conv2d(x_image, w1, strides=[1, s1, s1, 1], padding='SAME' )\n act1 = tf.nn.relu(conv1 + b1)\n # summary\n tf.summary.histogram('weights', w1)\n tf.summary.histogram('biases', b1)\n tf.summary.histogram('activation', act1) \n\n # dimensionality checking\n if Debug:\n print('w1 shape: ', 
w1.shape)\n print('b1 shape: ', b1.shape)\n print('conv1 shape: ', conv1.shape)\n print('act1 shape: ', act1.shape)\n\n\nsecondIn = firstOut\nsecondOut = 32\nwith tf.name_scope('Second'):\n # convolution\n w2 = tf.Variable(tf.truncated_normal([3,3, secondIn, secondOut], stddev=0.1), name='W')\n b2 = tf.Variable(tf.constant(0.1, shape=[secondOut]), name='B')\n s2 = 1\n conv2 = tf.nn.conv2d(act1, w2, strides=[1, s2, s2, 1], padding='SAME')\n # detector\n act2 = tf.nn.relu(conv2 + b2)\n # maxpooling\n k2 = 3\n ms2 = 1\n mp2 = tf.nn.max_pool(act2, ksize=[1, k2,k2, 1], strides=[1,ms2,ms2,1], padding='SAME')\n # summary\n tf.summary.histogram('weights', w2)\n tf.summary.histogram('biases', b2)\n tf.summary.histogram('activation', act2)\n tf.summary.histogram('maxpooling', mp2)\n\n # dimensionality checking\n if Debug:\n print('w2 shape: ', w2.shape)\n print('b2 shape: ', b2.shape)\n print('conv2 shape: ', conv2.shape)\n print('act2 shape: ', act2.shape)\n print('mp2 shape: ', mp2.shape)\n\nthirdIn = secondOut\nthirdOut = 64\nwith tf.name_scope('Third'):\n # convolution\n w3 = tf.Variable(tf.truncated_normal([5,5, thirdIn, thirdOut], stddev=0.1), name='W')\n b3 = tf.Variable(tf.constant(0.1, shape=[thirdOut]), name='B')\n s3 = 1\n conv3 = tf.nn.conv2d(mp2, w3, strides=[1,s3,s3,1], padding='SAME')\n # detector\n act3 = tf.nn.relu(conv3 + b3)\n # maxpooling\n k3 = 3 # ksize of maxpooling\n ms3 = 1 # maxpooling stride = 3\n mp3 = tf.nn.max_pool(act3, ksize=[1,k3,k3,1], strides=[1, ms3, ms3, 1], padding='SAME')\n\n # summary\n tf.summary.histogram('weights', w3)\n tf.summary.histogram('biases', b3)\n tf.summary.histogram('activation', act3)\n tf.summary.histogram('maxpooling', mp3)\n\n # dimensionality checking\n if Debug:\n print('w3 shape: ', w3.shape)\n print('b3 shape: ', b3.shape)\n print('conv3 shape: ', conv3.shape)\n print('act3 shape: ', act3.shape)\n print('mp3 shape: ', mp3.shape)\n\n\nfourthIn = thirdOut\nfourthOut = 64\nwith tf.name_scope('Fourth'):\n # 
convolution\n w4 = tf.Variable(tf.truncated_normal([6,1, fourthIn, fourthOut], stddev=0.1), name='W')\n b4 = tf.Variable(tf.constant(0.1, shape=[fourthOut]), name='B')\n s4 = 1\n conv4 = tf.nn.conv2d(mp3, w4, strides=[1,s4,s4,1], padding='SAME')\n # detector\n act4 = tf.nn.relu(conv4 + b4)\n \n # summary\n tf.summary.histogram('weights', w4)\n tf.summary.histogram('biases', b4)\n tf.summary.histogram('activation', act4)\n\n # dimensionality checking\n if Debug:\n print('w4 shape: ', w4.shape)\n print('b4 shape: ', b4.shape)\n print('conv4 shape: ', conv4.shape)\n print('act4 shape: ', act4.shape)\n\nfifthIn = fourthOut\nfifthOut = 8\nwith tf.name_scope('Fifth'):\n # convolution\n w5 = tf.Variable(tf.truncated_normal([1,1, fifthIn, fifthOut], stddev=0.1), name='W')\n b5 = tf.Variable(tf.constant(0.1, shape=[fifthOut]), name='B')\n s5 = 1\n conv5 = tf.nn.conv2d(act4, w5, strides=[1,s5,s5,1], padding='SAME')\n # detector\n act5 = tf.nn.relu(conv5 + b5)\n\n # flatten\n with tf.name_scope('Flatten'):\n flatten5 = tf.reshape(act5, [-1, 16*30*fifthOut])\n # fully-connect layer\n with tf.name_scope('FullyCon'):\n wfc5 = tf.Variable(tf.truncated_normal( [16*30*fifthOut, nMV], stddev=0.1), name='W')\n bfc5 = tf.Variable(tf.constant(0.1, shape=[nMV]), name='B')\n y_ = tf.nn.relu(tf.matmul(flatten5, wfc5) + bfc5)\n\n # summary\n tf.summary.histogram('weights', w5)\n tf.summary.histogram('biases', b5)\n tf.summary.histogram('activation', act5)\n tf.summary.histogram('flatten', flatten5)\n tf.summary.histogram('weights_fc5', wfc5)\n tf.summary.histogram('biases_fc5', bfc5)\n\n\n # dimensionality checking\n if Debug:\n print('w5 shape: ', w5.shape)\n print('b5 shape: ', b5.shape)\n print('conv5 shape: ', conv5.shape)\n print('act5 shape: ', act5.shape)\n print('flatten5 shape: ', flatten5.shape)\n print('weights_fc5 shape: ', wfc5.shape)\n print('biases_fc5 shape: ', bfc5.shape)\n print('y_predict shape: ', y_.shape)\n\n\nwith tf.name_scope('Softmaxloss'):\n cross_entropy = 
tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_, labels=y), name='Loss')\n # summary\n tf.summary.scalar('cross_entropy', cross_entropy)\n \n\nwith tf.name_scope('Accuracy'):\n correct_prediction = tf.equal(tf.argmax(y_, 1), tf.argmax(y, 1))\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n # summary\n tf.summary.scalar('accuracy', accuracy)\n\n# Use an AdamOptimizer to train the network\ntrain = tf.train.AdamOptimizer(1e-1).minimize(cross_entropy)\n\n# Visualization directory\ngraph_dir = 'sEMGCNN'\nimport usefulFcns\nusefulFcns.BuildNewlyDir(graph_dir)\n\n# Train the model\n\nwith tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n merged_summary = tf.summary.merge_all()\n writer = tf.summary.FileWriter(graph_dir)\n writer.add_graph(sess.graph)\n\n for i in range(2000):\n x_batch, y_batch = ninapro.next_batch(30)\n\n # Occasionaly report accuracy of [train] and [test]\n if i%100==0:\n [train_accuracy] = sess.run([accuracy], feed_dict={x:x_batch, y:y_batch})\n [test_accuracy] = sess.run([accuracy], feed_dict={x:ninapro.TestImages, y:ninapro.TestLabels})\n [validate_accuracy] = sess.run([accuracy], feed_dict={x:ninapro.ValidateImages, y:ninapro.ValidateLabels} )\n print('Step %d, training %g, testing %g, validate %g.' % (i, train_accuracy, test_accuracy, validate_accuracy) )\n \n # Occasionaly write visualization summary to disk file.\n if i%5==0:\n s = sess.run(merged_summary, feed_dict={x:x_batch, y:y_batch})\n writer.add_summary(s,i)\n # Training the model\n sess.run(train, feed_dict={x:x_batch, y:y_batch})\n \n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def count(a, b):
a = int(a)
b = int(b)
if a == 0 and b == 0:
return 0
elif a == 0 and b == 1:
return 1
elif a == 1 and b == 0:
return 2
elif a == 1 and b == 1:
return 3
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(state_space)
print('\n', state_space.sum())
<|reserved_special_token_0|>
def count(a, b):
a = int(a)
b = int(b)
if a == 0 and b == 0:
return 0
elif a == 0 and b == 1:
return 1
elif a == 1 and b == 0:
return 2
elif a == 1 and b == 1:
return 3
while True:
line = file_open.readline()
if not line:
break
result_x = []
result_y = []
add = []
if stack == 0:
a = line.split(',')[0]
a = a.strip()
add.append(a)
a = line.split(',')[1]
a = a.strip()
add.append(a)
a = line.split(',')[2]
a = a.strip()
add.append(a)
write.writerow(add)
stack = 1
elif stack == 1:
before_application = line.split(',')[0]
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
stack = 2
elif stack == 2:
if before_application == line.split(',')[0]:
x_b = line.split(',')[1]
x_b = x_b.strip()
y_b = line.split(',')[2]
y_b = y_b.strip()
result_x.append(x_a)
result_x.append(x_b)
result_y.append(y_a)
result_y.append(y_b)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 3
else:
pass
before_application = line.split(',')[0]
elif stack == 3:
if before_application == line.split(',')[0]:
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
result_x.append(x_b)
result_x.append(x_a)
result_y.append(y_b)
result_y.append(y_a)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 2
else:
pass
before_application = line.split(',')[0]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
file_open = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv',
'r', encoding='UTF8')
save_file = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv',
'w', encoding='UTF8', newline='')
write = csv.writer(save_file)
hidden_states = ['up', 'down']
pi = [0.5044, 0.4956]
state_space = pd.Series(pi, index=hidden_states, name='states')
print(state_space)
print('\n', state_space.sum())
stack = 0
x_a = ''
x_b = ''
y_a = ''
y_b = ''
before_application = ''
add = []
def count(a, b):
a = int(a)
b = int(b)
if a == 0 and b == 0:
return 0
elif a == 0 and b == 1:
return 1
elif a == 1 and b == 0:
return 2
elif a == 1 and b == 1:
return 3
while True:
line = file_open.readline()
if not line:
break
result_x = []
result_y = []
add = []
if stack == 0:
a = line.split(',')[0]
a = a.strip()
add.append(a)
a = line.split(',')[1]
a = a.strip()
add.append(a)
a = line.split(',')[2]
a = a.strip()
add.append(a)
write.writerow(add)
stack = 1
elif stack == 1:
before_application = line.split(',')[0]
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
stack = 2
elif stack == 2:
if before_application == line.split(',')[0]:
x_b = line.split(',')[1]
x_b = x_b.strip()
y_b = line.split(',')[2]
y_b = y_b.strip()
result_x.append(x_a)
result_x.append(x_b)
result_y.append(y_a)
result_y.append(y_b)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 3
else:
pass
before_application = line.split(',')[0]
elif stack == 3:
if before_application == line.split(',')[0]:
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
result_x.append(x_b)
result_x.append(x_a)
result_y.append(y_b)
result_y.append(y_a)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 2
else:
pass
before_application = line.split(',')[0]
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import csv
file_open = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv',
'r', encoding='UTF8')
save_file = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv',
'w', encoding='UTF8', newline='')
write = csv.writer(save_file)
hidden_states = ['up', 'down']
pi = [0.5044, 0.4956]
state_space = pd.Series(pi, index=hidden_states, name='states')
print(state_space)
print('\n', state_space.sum())
stack = 0
x_a = ''
x_b = ''
y_a = ''
y_b = ''
before_application = ''
add = []
def count(a, b):
    # Treat (a, b) as two bits and return their combined code 0-3
    # (a is the high bit, b the low bit).  Pairs outside
    # {0,1} x {0,1} fall through and implicitly return None,
    # matching the original behaviour.
    first, second = int(a), int(b)
    for code, pair in enumerate([(0, 0), (0, 1), (1, 0), (1, 1)]):
        if (first, second) == pair:
            return code
while True:
line = file_open.readline()
if not line:
break
result_x = []
result_y = []
add = []
if stack == 0:
a = line.split(',')[0]
a = a.strip()
add.append(a)
a = line.split(',')[1]
a = a.strip()
add.append(a)
a = line.split(',')[2]
a = a.strip()
add.append(a)
write.writerow(add)
stack = 1
elif stack == 1:
before_application = line.split(',')[0]
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
stack = 2
elif stack == 2:
if before_application == line.split(',')[0]:
x_b = line.split(',')[1]
x_b = x_b.strip()
y_b = line.split(',')[2]
y_b = y_b.strip()
result_x.append(x_a)
result_x.append(x_b)
result_y.append(y_a)
result_y.append(y_b)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 3
else:
pass
before_application = line.split(',')[0]
elif stack == 3:
if before_application == line.split(',')[0]:
x_a = line.split(',')[1]
x_a = x_a.strip()
y_a = line.split(',')[2]
y_a = y_a.strip()
result_x.append(x_b)
result_x.append(x_a)
result_y.append(y_b)
result_y.append(y_a)
tol = count(result_x[0], result_x[1])
add.append(tol)
tol = count(result_y[0], result_y[1])
add.append(tol)
write.writerow(add)
stack = 2
else:
pass
before_application = line.split(',')[0]
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import csv
# --- I/O setup (hard-coded Windows paths; neither file is ever closed) ---
file_open = open("C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv", 'r', encoding='UTF8')
save_file = open("C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv", 'w', encoding='UTF8',newline='')
write = csv.writer(save_file)  # csv writer over the output file
# Prior distribution over the two hidden HMM states.
hidden_states = ['up', 'down']
pi = [0.5044, 0.4956]
state_space = pd.Series(pi, index=hidden_states, name='states')
print(state_space)
print('\n', state_space.sum())  # sanity check: priors should sum to ~1.0
# Mutable parser state for the line-pairing loop below.
stack = 0  # state-machine phase: 0=header, 1=first row, 2/3=alternating pairing
x_a = ""
x_b = ""
y_a = ""
y_b = ""
before_application = ""  # first column (record id) of the previous row
add = []  # output row buffer
def count(a, b):
    # Combine two 0/1 flags into a single code in {0, 1, 2, 3};
    # a is the high bit and b the low bit.  Any pair outside
    # {0,1} x {0,1} yields None (dict miss), as in the original.
    pair = (int(a), int(b))
    codes = {(0, 0): 0, (0, 1): 1, (1, 0): 2, (1, 1): 3}
    return codes.get(pair)
# Line-pairing state machine over the input CSV.
#   stack 0: header row  -> copy the three column labels to the output
#   stack 1: first data row -> cache its x/y values as the "a" sample
#   stack 2: expect a "b" row; pair (a, b) and emit encoded codes via count()
#   stack 3: expect a new "a" row; pair (b, a) the same way
# Rows are only paired while they share the same first column
# (before_application); on an id change only the id is refreshed.
while True:
    line = file_open.readline()
    if not line: break
    result_x = []
    result_y = []
    add = []
    if stack == 0:
        # Header: strip and forward the three column names unchanged.
        a = line.split(',')[0]
        a = a.strip()
        add.append(a)
        a = line.split(',')[1]
        a = a.strip()
        add.append(a)
        a = line.split(',')[2]
        a = a.strip()
        add.append(a)
        write.writerow(add)

        stack = 1
    elif stack == 1:
        # First observation row: remember id and the (x_a, y_a) pair.
        before_application = line.split(',')[0]
        x_a = line.split(',')[1]
        x_a = x_a.strip()
        y_a = line.split(',')[2]
        y_a = y_a.strip()
        stack = 2

    elif stack == 2:
        if before_application == line.split(',')[0]:
            # Same record id: pair cached "a" values with this row's "b"
            # values and write the encoded (x, y) transition codes.
            x_b = line.split(',')[1]
            x_b = x_b.strip()
            y_b = line.split(',')[2]
            y_b = y_b.strip()
            result_x.append(x_a)
            result_x.append(x_b)
            result_y.append(y_a)
            result_y.append(y_b)
            tol = count(result_x[0],result_x[1])
            add.append(tol)
            tol = count(result_y[0], result_y[1])
            add.append(tol)
            write.writerow(add)
            stack = 3
        else:
            pass  # id changed: skip pairing this row
        before_application = line.split(',')[0]

    elif stack == 3:
        if before_application == line.split(',')[0]:
            # Mirror of stack == 2 with the roles of "a" and "b" swapped.
            x_a = line.split(',')[1]
            x_a = x_a.strip()
            y_a = line.split(',')[2]
            y_a = y_a.strip()
            result_x.append(x_b)
            result_x.append(x_a)
            result_y.append(y_b)
            result_y.append(y_a)

            tol = count(result_x[0],result_x[1])
            add.append(tol)
            tol = count(result_y[0], result_y[1])
            add.append(tol)
            write.writerow(add)
            stack = 2
        else:
            pass  # id changed: skip pairing this row
        before_application = line.split(',')[0]
|
flexible
|
{
"blob_id": "55977a673bb36900e1d797cb9ec330ce6d9aa717",
"index": 8232,
"step-1": "<mask token>\n\n\ndef count(a, b):\n a = int(a)\n b = int(b)\n if a == 0 and b == 0:\n return 0\n elif a == 0 and b == 1:\n return 1\n elif a == 1 and b == 0:\n return 2\n elif a == 1 and b == 1:\n return 3\n\n\n<mask token>\n",
"step-2": "<mask token>\nprint(state_space)\nprint('\\n', state_space.sum())\n<mask token>\n\n\ndef count(a, b):\n a = int(a)\n b = int(b)\n if a == 0 and b == 0:\n return 0\n elif a == 0 and b == 1:\n return 1\n elif a == 1 and b == 0:\n return 2\n elif a == 1 and b == 1:\n return 3\n\n\nwhile True:\n line = file_open.readline()\n if not line:\n break\n result_x = []\n result_y = []\n add = []\n if stack == 0:\n a = line.split(',')[0]\n a = a.strip()\n add.append(a)\n a = line.split(',')[1]\n a = a.strip()\n add.append(a)\n a = line.split(',')[2]\n a = a.strip()\n add.append(a)\n write.writerow(add)\n stack = 1\n elif stack == 1:\n before_application = line.split(',')[0]\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n stack = 2\n elif stack == 2:\n if before_application == line.split(',')[0]:\n x_b = line.split(',')[1]\n x_b = x_b.strip()\n y_b = line.split(',')[2]\n y_b = y_b.strip()\n result_x.append(x_a)\n result_x.append(x_b)\n result_y.append(y_a)\n result_y.append(y_b)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 3\n else:\n pass\n before_application = line.split(',')[0]\n elif stack == 3:\n if before_application == line.split(',')[0]:\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n result_x.append(x_b)\n result_x.append(x_a)\n result_y.append(y_b)\n result_y.append(y_a)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 2\n else:\n pass\n before_application = line.split(',')[0]\n",
"step-3": "<mask token>\nfile_open = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv',\n 'r', encoding='UTF8')\nsave_file = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv',\n 'w', encoding='UTF8', newline='')\nwrite = csv.writer(save_file)\nhidden_states = ['up', 'down']\npi = [0.5044, 0.4956]\nstate_space = pd.Series(pi, index=hidden_states, name='states')\nprint(state_space)\nprint('\\n', state_space.sum())\nstack = 0\nx_a = ''\nx_b = ''\ny_a = ''\ny_b = ''\nbefore_application = ''\nadd = []\n\n\ndef count(a, b):\n a = int(a)\n b = int(b)\n if a == 0 and b == 0:\n return 0\n elif a == 0 and b == 1:\n return 1\n elif a == 1 and b == 0:\n return 2\n elif a == 1 and b == 1:\n return 3\n\n\nwhile True:\n line = file_open.readline()\n if not line:\n break\n result_x = []\n result_y = []\n add = []\n if stack == 0:\n a = line.split(',')[0]\n a = a.strip()\n add.append(a)\n a = line.split(',')[1]\n a = a.strip()\n add.append(a)\n a = line.split(',')[2]\n a = a.strip()\n add.append(a)\n write.writerow(add)\n stack = 1\n elif stack == 1:\n before_application = line.split(',')[0]\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n stack = 2\n elif stack == 2:\n if before_application == line.split(',')[0]:\n x_b = line.split(',')[1]\n x_b = x_b.strip()\n y_b = line.split(',')[2]\n y_b = y_b.strip()\n result_x.append(x_a)\n result_x.append(x_b)\n result_y.append(y_a)\n result_y.append(y_b)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 3\n else:\n pass\n before_application = line.split(',')[0]\n elif stack == 3:\n if before_application == line.split(',')[0]:\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n result_x.append(x_b)\n result_x.append(x_a)\n result_y.append(y_b)\n result_y.append(y_a)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol 
= count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 2\n else:\n pass\n before_application = line.split(',')[0]\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport csv\nfile_open = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv',\n 'r', encoding='UTF8')\nsave_file = open('C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv',\n 'w', encoding='UTF8', newline='')\nwrite = csv.writer(save_file)\nhidden_states = ['up', 'down']\npi = [0.5044, 0.4956]\nstate_space = pd.Series(pi, index=hidden_states, name='states')\nprint(state_space)\nprint('\\n', state_space.sum())\nstack = 0\nx_a = ''\nx_b = ''\ny_a = ''\ny_b = ''\nbefore_application = ''\nadd = []\n\n\ndef count(a, b):\n a = int(a)\n b = int(b)\n if a == 0 and b == 0:\n return 0\n elif a == 0 and b == 1:\n return 1\n elif a == 1 and b == 0:\n return 2\n elif a == 1 and b == 1:\n return 3\n\n\nwhile True:\n line = file_open.readline()\n if not line:\n break\n result_x = []\n result_y = []\n add = []\n if stack == 0:\n a = line.split(',')[0]\n a = a.strip()\n add.append(a)\n a = line.split(',')[1]\n a = a.strip()\n add.append(a)\n a = line.split(',')[2]\n a = a.strip()\n add.append(a)\n write.writerow(add)\n stack = 1\n elif stack == 1:\n before_application = line.split(',')[0]\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n stack = 2\n elif stack == 2:\n if before_application == line.split(',')[0]:\n x_b = line.split(',')[1]\n x_b = x_b.strip()\n y_b = line.split(',')[2]\n y_b = y_b.strip()\n result_x.append(x_a)\n result_x.append(x_b)\n result_y.append(y_a)\n result_y.append(y_b)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 3\n else:\n pass\n before_application = line.split(',')[0]\n elif stack == 3:\n if before_application == line.split(',')[0]:\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n result_x.append(x_b)\n result_x.append(x_a)\n result_y.append(y_b)\n 
result_y.append(y_a)\n tol = count(result_x[0], result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 2\n else:\n pass\n before_application = line.split(',')[0]\n",
"step-5": "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport csv\n\nfile_open = open(\"C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM(Up,Down).csv\", 'r', encoding='UTF8')\nsave_file = open(\"C:/Users/DI_Lab/Desktop/20년도 Kisti 과제/HMM/HMM사후확률.csv\", 'w', encoding='UTF8',newline='')\nwrite = csv.writer(save_file)\n\nhidden_states = ['up', 'down']\npi = [0.5044, 0.4956]\nstate_space = pd.Series(pi, index=hidden_states, name='states')\nprint(state_space)\nprint('\\n', state_space.sum())\n\nstack = 0\nx_a = \"\"\nx_b = \"\"\n\ny_a = \"\"\ny_b = \"\"\nbefore_application = \"\"\nadd = []\ndef count(a,b):\n a = int(a)\n b = int(b)\n if a == 0 and b == 0:\n return 0\n elif a == 0 and b == 1:\n return 1\n elif a == 1 and b == 0:\n return 2\n elif a == 1 and b == 1:\n return 3\n\nwhile True:\n line = file_open.readline()\n if not line: break\n result_x = []\n result_y = []\n add = []\n if stack == 0:\n a = line.split(',')[0]\n a = a.strip()\n add.append(a)\n a = line.split(',')[1]\n a = a.strip()\n add.append(a)\n a = line.split(',')[2]\n a = a.strip()\n add.append(a)\n write.writerow(add)\n\n stack = 1\n elif stack == 1:\n before_application = line.split(',')[0]\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n stack = 2\n\n elif stack == 2:\n if before_application == line.split(',')[0]:\n x_b = line.split(',')[1]\n x_b = x_b.strip()\n y_b = line.split(',')[2]\n y_b = y_b.strip()\n result_x.append(x_a)\n result_x.append(x_b)\n result_y.append(y_a)\n result_y.append(y_b)\n tol = count(result_x[0],result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 3\n else:\n pass\n before_application = line.split(',')[0]\n\n elif stack == 3:\n if before_application == line.split(',')[0]:\n x_a = line.split(',')[1]\n x_a = x_a.strip()\n y_a = line.split(',')[2]\n y_a = y_a.strip()\n result_x.append(x_b)\n result_x.append(x_a)\n result_y.append(y_b)\n 
result_y.append(y_a)\n\n tol = count(result_x[0],result_x[1])\n add.append(tol)\n tol = count(result_y[0], result_y[1])\n add.append(tol)\n write.writerow(add)\n stack = 2\n else:\n pass\n before_application = line.split(',')[0]\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-21 00:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``analysis`` app (Django 1.10.1).

    Drops the obsolete ``expira`` field from ``relatoriocorrentezerada``
    and ``relatoriotensaozerada``, and replaces it on the latter with a
    nullable ``data_expira`` DateTimeField.
    """

    dependencies = [
        ('analysis', '0018_relatorioquedadeconsumo_justificado'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='relatoriocorrentezerada',
            name='expira',
        ),
        migrations.RemoveField(
            model_name='relatoriotensaozerada',
            name='expira',
        ),
        migrations.AddField(
            model_name='relatoriotensaozerada',
            name='data_expira',
            # Nullable so existing rows need no default value.
            field=models.DateTimeField(blank=True, null=True, verbose_name='data_expira'),
        ),
    ]
|
normal
|
{
"blob_id": "a58949d25a719dc9ce0626948ab0397814e9ea0e",
"index": 1574,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('analysis', '0018_relatorioquedadeconsumo_justificado')]\n operations = [migrations.RemoveField(model_name=\n 'relatoriocorrentezerada', name='expira'), migrations.RemoveField(\n model_name='relatoriotensaozerada', name='expira'), migrations.\n AddField(model_name='relatoriotensaozerada', name='data_expira',\n field=models.DateTimeField(blank=True, null=True, verbose_name=\n 'data_expira'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('analysis', '0018_relatorioquedadeconsumo_justificado')]\n operations = [migrations.RemoveField(model_name=\n 'relatoriocorrentezerada', name='expira'), migrations.RemoveField(\n model_name='relatoriotensaozerada', name='expira'), migrations.\n AddField(model_name='relatoriotensaozerada', name='data_expira',\n field=models.DateTimeField(blank=True, null=True, verbose_name=\n 'data_expira'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.1 on 2016-11-21 00:43\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('analysis', '0018_relatorioquedadeconsumo_justificado'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='relatoriocorrentezerada',\n name='expira',\n ),\n migrations.RemoveField(\n model_name='relatoriotensaozerada',\n name='expira',\n ),\n migrations.AddField(\n model_name='relatoriotensaozerada',\n name='data_expira',\n field=models.DateTimeField(blank=True, null=True, verbose_name='data_expira'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from time import perf_counter_ns
from anthony.utility.distance import compare, compare_info
from icecream import ic
# Time a single fuzzy string comparison, then dump detailed match info.
start = perf_counter_ns()
ic(compare("tranpsosed", "transposed"))
elapsed_seconds = (perf_counter_ns() - start) / 1e+9
print(f"Example Time: {elapsed_seconds} Seconds")
ic(compare_info("momther", "mother"))
|
normal
|
{
"blob_id": "98b0e42f3ed1a234f63c4d3aa76ceb9fce7c041d",
"index": 3631,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nic(compare('tranpsosed', 'transposed'))\nprint(f'Example Time: {(perf_counter_ns() - start) / 1000000000.0} Seconds')\nic(compare_info('momther', 'mother'))\n",
"step-3": "<mask token>\nstart = perf_counter_ns()\nic(compare('tranpsosed', 'transposed'))\nprint(f'Example Time: {(perf_counter_ns() - start) / 1000000000.0} Seconds')\nic(compare_info('momther', 'mother'))\n",
"step-4": "from time import perf_counter_ns\nfrom anthony.utility.distance import compare, compare_info\nfrom icecream import ic\nstart = perf_counter_ns()\nic(compare('tranpsosed', 'transposed'))\nprint(f'Example Time: {(perf_counter_ns() - start) / 1000000000.0} Seconds')\nic(compare_info('momther', 'mother'))\n",
"step-5": "from time import perf_counter_ns\n\nfrom anthony.utility.distance import compare, compare_info\nfrom icecream import ic\n\nstart = perf_counter_ns()\nic(compare(\"tranpsosed\", \"transposed\"))\nprint(f\"Example Time: {(perf_counter_ns() - start)/1e+9} Seconds\")\n\nic(compare_info(\"momther\", \"mother\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(last4)
<|reserved_special_token_1|>
# Read a card number from stdin and print its last four digits.
card = int(input())
last4 = card % 10000  # modulo 10**4 keeps the trailing four decimal digits
print(last4)
|
flexible
|
{
"blob_id": "7b920545a0241b30b66ff99f330dbb361f747f13",
"index": 8297,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(last4)\n",
"step-3": "card = int(input())\nlast4 = card % 10000\nprint(last4)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def data():
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
num_classes = 10
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
x_train = np.reshape(x_train, (50000, 3072))
x_test = np.reshape(x_test, (10000, 3072))
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
n_layer1 = {{choice([128, 256, 512])}}
n_layer2 = {{choice([128, 256, 512])}}
dropout_1 = {{uniform(0, 1)}}
dropout_2 = {{uniform(0, 1)}}
optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
n_batch = {{choice([64, 128, 256])}}
print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,
dropout_2, optim, n_batch)
model = Sequential()
model.add(Dense(n_layer1, activation='relu', input_dim=3072))
model.add(Dropout(dropout_1))
model.add(Dense(n_layer2, activation='relu'))
model.add(Dropout(dropout_2))
model.add(Dense(10, activation='softmax'))
model.compile(optimizer=optim, loss='categorical_crossentropy', metrics
=['accuracy'])
import datetime
current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime
.now())
print(current_date)
csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1
) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2
) + '_' + str(optim) + '_' + str(n_batch) + '.csv'
callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),
CSVLogger(csv_name, append=True, separator=';')]
result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,
verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,
shuffle=True)
validation_acc = np.amax(result.history['val_acc'])
print('Best validation acc of epoch:', validation_acc)
return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
set_session(tf.Session(config=config))
<|reserved_special_token_0|>
def data():
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
num_classes = 10
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
x_train = np.reshape(x_train, (50000, 3072))
x_test = np.reshape(x_test, (10000, 3072))
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
n_layer1 = {{choice([128, 256, 512])}}
n_layer2 = {{choice([128, 256, 512])}}
dropout_1 = {{uniform(0, 1)}}
dropout_2 = {{uniform(0, 1)}}
optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
n_batch = {{choice([64, 128, 256])}}
print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,
dropout_2, optim, n_batch)
model = Sequential()
model.add(Dense(n_layer1, activation='relu', input_dim=3072))
model.add(Dropout(dropout_1))
model.add(Dense(n_layer2, activation='relu'))
model.add(Dropout(dropout_2))
model.add(Dense(10, activation='softmax'))
model.compile(optimizer=optim, loss='categorical_crossentropy', metrics
=['accuracy'])
import datetime
current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime
.now())
print(current_date)
csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1
) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2
) + '_' + str(optim) + '_' + str(n_batch) + '.csv'
callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),
CSVLogger(csv_name, append=True, separator=';')]
result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,
verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,
shuffle=True)
validation_acc = np.amax(result.history['val_acc'])
print('Best validation acc of epoch:', validation_acc)
return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
<|reserved_special_token_0|>
print('Evalutation of best performing model:')
print(best_model.evaluate(x_test, y_test))
print('Best performing model chosen hyper-parameters:')
print(best_run)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
<|reserved_special_token_0|>
def data():
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
num_classes = 10
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
x_train = np.reshape(x_train, (50000, 3072))
x_test = np.reshape(x_test, (10000, 3072))
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
n_layer1 = {{choice([128, 256, 512])}}
n_layer2 = {{choice([128, 256, 512])}}
dropout_1 = {{uniform(0, 1)}}
dropout_2 = {{uniform(0, 1)}}
optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
n_batch = {{choice([64, 128, 256])}}
print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,
dropout_2, optim, n_batch)
model = Sequential()
model.add(Dense(n_layer1, activation='relu', input_dim=3072))
model.add(Dropout(dropout_1))
model.add(Dense(n_layer2, activation='relu'))
model.add(Dropout(dropout_2))
model.add(Dense(10, activation='softmax'))
model.compile(optimizer=optim, loss='categorical_crossentropy', metrics
=['accuracy'])
import datetime
current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime
.now())
print(current_date)
csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1
) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2
) + '_' + str(optim) + '_' + str(n_batch) + '.csv'
callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),
CSVLogger(csv_name, append=True, separator=';')]
result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,
verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,
shuffle=True)
validation_acc = np.amax(result.history['val_acc'])
print('Best validation acc of epoch:', validation_acc)
return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
<|reserved_special_token_0|>
best_run, best_model = optim.minimize(model=create_model, data=data, algo=
tpe.suggest, max_evals=5, trials=Trials())
x_train, y_train, x_test, y_test = data()
print('Evalutation of best performing model:')
print(best_model.evaluate(x_test, y_test))
print('Best performing model chosen hyper-parameters:')
print(best_run)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import hyperas
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, CSVLogger
import numpy as np
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
# Let TensorFlow grow GPU memory on demand instead of reserving it all up front.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.datasets import cifar10
def data():
    """Load CIFAR-10 for a fully-connected network.

    Returns ``(x_train, y_train, x_test, y_test)`` with one-hot labels and
    images flattened to 3072-dim float32 vectors scaled into [0, 1].
    """
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    n_classes = 10
    y_train = keras.utils.to_categorical(y_train, n_classes)
    y_test = keras.utils.to_categorical(y_test, n_classes)
    # Flatten the 32x32x3 images and rescale pixel values.
    x_train = x_train.reshape(50000, 3072).astype('float32') / 255
    x_test = x_test.reshape(10000, 3072).astype('float32') / 255
    return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
    """Build, train and score one FC network for a single hyperas trial.

    The double-brace expressions below are hyperas template placeholders
    that get substituted with sampled hyper-parameter values before this
    source is executed — do not "fix" them into plain Python.
    Returns the dict hyperopt expects (negated best val_acc as the loss).
    """
    n_layer1 = {{choice([128, 256, 512])}}
    n_layer2 = {{choice([128, 256, 512])}}
    dropout_1 = {{uniform(0, 1)}}
    dropout_2 = {{uniform(0, 1)}}
    optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
    n_batch = {{choice([64, 128, 256])}}
    print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,
        dropout_2, optim, n_batch)
    # Two hidden layers with dropout, softmax over the 10 CIFAR classes.
    model = Sequential()
    model.add(Dense(n_layer1, activation='relu', input_dim=3072))
    model.add(Dropout(dropout_1))
    model.add(Dense(n_layer2, activation='relu'))
    model.add(Dropout(dropout_2))
    model.add(Dense(10, activation='softmax'))
    model.compile(optimizer=optim, loss='categorical_crossentropy', metrics
        =['accuracy'])
    import datetime  # local import kept as in the original template
    # Timestamped CSV log name encoding this trial's hyper-parameters.
    current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime
        .now())
    print(current_date)
    csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1
        ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2
        ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'
    # Stop after 3 epochs without val_loss improvement; log every epoch.
    callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),
        CSVLogger(csv_name, append=True, separator=';')]
    result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,
        verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,
        shuffle=True)
    validation_acc = np.amax(result.history['val_acc'])
    print('Best validation acc of epoch:', validation_acc)
    return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform
# Run a TPE hyper-parameter search (5 trials) over create_model and keep
# the model that achieved the best validation accuracy.
best_run, best_model = optim.minimize(model=create_model, data=data, algo=
    tpe.suggest, max_evals=5, trials=Trials())
x_train, y_train, x_test, y_test = data()
# Report the winner's test-set [loss, accuracy] and the sampled values.
print('Evaluation of best performing model:')  # fixed typo: 'Evalutation'
print(best_model.evaluate(x_test, y_test))
print('Best performing model chosen hyper-parameters:')
print(best_run)
<|reserved_special_token_1|>
'''
Copyright
Jelen forráskód a Budapesti Műszaki és Gazdaságtudományi Egyetemen tartott
"Deep Learning a gyakorlatban Python és LUA alapon" tantárgy segédanyagaként készült.
A tantárgy honlapja: http://smartlab.tmit.bme.hu/oktatas-deep-learning
Deep Learning kutatás: http://smartlab.tmit.bme.hu/deep-learning
A forráskódot GPLv3 licensz védi. Újrafelhasználás esetén lehetőség szerint kérjük
az alábbi szerzőt értesíteni.
2018 (c) Csapó Tamás Gábor (csapot kukac tmit pont bme pont hu),
Gyires-Tóth Bálint, Zainkó Csaba
Links:
[hyperas] https://github.com/maxpumperla/hyperas
'''
# !pip3 install hyperas
# based on https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py
import hyperas
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, CSVLogger
import numpy as np
# do not use all GPU memory
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.datasets import cifar10
# hyper-parameter optimization with hyperas (https://github.com/maxpumperla/hyperas)
# hyperas requires two functions:
# -- data() : loads the data
# -- create_model() : defines the network model
def data():
    """Load CIFAR-10 and return (x_train, y_train, x_test, y_test).

    Labels are one-hot encoded; images are flattened to 3072-element
    float32 vectors (32x32x3) scaled into [0, 1] for the FC network.
    """
    (train_x, train_y), (test_x, test_y) = cifar10.load_data()
    n_classes = 10
    # One-hot encode the integer class labels.
    train_y = keras.utils.to_categorical(train_y, n_classes)
    test_y = keras.utils.to_categorical(test_y, n_classes)
    # Flatten each 32x32x3 image and rescale pixel values to [0, 1].
    train_x = train_x.reshape(50000, 3072).astype('float32') / 255
    test_x = test_x.reshape(10000, 3072).astype('float32') / 255
    return train_x, train_y, test_x, test_y
def create_model(x_train, y_train, x_test, y_test):
    """Build, train and score one fully connected CIFAR-10 classifier.

    NOTE: the double-brace expressions below are hyperas search-space
    templates, not plain Python; hyperas re-parses this function's source
    text, so the code must stay in template form.
    Returns the dict hyperopt expects, with loss = -best validation
    accuracy (hyperopt minimizes the loss).
    """
    # Hyper-parameters drawn by hyperas for each evaluation.
    n_layer1 = {{choice([128, 256, 512])}}
    n_layer2 = {{choice([128, 256, 512])}}
    dropout_1 = {{uniform(0, 1)}}
    dropout_2 = {{uniform(0, 1)}}
    optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
    n_batch = {{choice([64, 128, 256])}}
    print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1, dropout_2, optim, n_batch)
    # 3 x 3 x [0-1]x[0-1] x 3 x 3 = roughly 8100 combinations
    model = Sequential()
    model.add(Dense(n_layer1, activation='relu', input_dim=3072))
    model.add(Dropout(dropout_1))
    model.add(Dense(n_layer2, activation='relu'))
    model.add(Dropout(dropout_2))
    model.add(Dense(10, activation='softmax'))
    model.compile(optimizer=optim,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    # Timestamped CSV name so each trial logs its training history separately.
    import datetime
    current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime.now())
    print(current_date)
    csv_name = '13_hyperas_cifar10_' + current_date + '_' + \
               str(n_layer1) + '_' + str(n_layer2) + '_' + \
               str(dropout_1) + '_' + str(dropout_2) + '_' + \
               str(optim) + '_' + str(n_batch) + '.csv'
    # Stop after 3 epochs without val_loss improvement; log every epoch.
    callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0), \
                 CSVLogger(csv_name, append=True, separator=';')]
    result = model.fit(x_train, y_train,
                       batch_size=n_batch,
                       epochs=100,
                       verbose=2,
                       validation_data=(x_test, y_test),
                       callbacks=callbacks,
                       shuffle=True)
    validation_acc = np.amax(result.history['val_acc'])
    print('Best validation acc of epoch:', validation_acc)
    return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform

# Hyper-parameter search driver.
# Possible algorithms:
#   -- random.suggest -> random search
#   -- tpe.suggest    -> Tree-structured Parzen Estimator
search_trials = Trials()
best_run, best_model = optim.minimize(
    model=create_model,
    data=data,
    algo=tpe.suggest,
    max_evals=5,
    trials=search_trials,
)

# Evaluate the winning configuration on the test split.
x_train, y_train, x_test, y_test = data()
print("Evalutation of best performing model:")
print(best_model.evaluate(x_test, y_test))
print("Best performing model chosen hyper-parameters:")
print(best_run)
|
flexible
|
{
"blob_id": "cc097b4d2a5a521a0adb83ca1b58470b4ce84f39",
"index": 7143,
"step-1": "<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask 
token>\n",
"step-2": "<mask token>\nset_session(tf.Session(config=config))\n<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': 
-validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask token>\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-3": "<mask token>\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\n<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best 
validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask token>\nbest_run, best_model = optim.minimize(model=create_model, data=data, algo=\n tpe.suggest, max_evals=5, trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-4": "<mask token>\nimport hyperas\nimport keras\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.optimizers import SGD\nfrom keras.callbacks import EarlyStopping, CSVLogger\nimport numpy as np\nimport tensorflow as tf\nfrom keras.backend.tensorflow_backend import set_session\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\nfrom keras.datasets import cifar10\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = 
[EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\nfrom hyperopt import Trials, STATUS_OK, tpe\nfrom hyperas import optim\nfrom hyperas.distributions import choice, uniform\nbest_run, best_model = optim.minimize(model=create_model, data=data, algo=\n tpe.suggest, max_evals=5, trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-5": "'''\nCopyright\n\nJelen forráskód a Budapesti Műszaki és Gazdaságtudományi Egyetemen tartott\n\"Deep Learning a gyakorlatban Python és LUA alapon\" tantárgy segédanyagaként készült.\n\nA tantárgy honlapja: http://smartlab.tmit.bme.hu/oktatas-deep-learning\nDeep Learning kutatás: http://smartlab.tmit.bme.hu/deep-learning\n\nA forráskódot GPLv3 licensz védi. Újrafelhasználás esetén lehetőség szerint kérjük\naz alábbi szerzőt értesíteni.\n\n2018 (c) Csapó Tamás Gábor (csapot kukac tmit pont bme pont hu),\nGyires-Tóth Bálint, Zainkó Csaba\n\n\nLinks:\n [hyperas] https://github.com/maxpumperla/hyperas\n'''\n\n# !pip3 install hyperas\n\n# based on https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py\n\nimport hyperas\n\nimport keras\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.optimizers import SGD\nfrom keras.callbacks import EarlyStopping, CSVLogger\nimport numpy as np\n\n# do not use all GPU memory\nimport tensorflow as tf\nfrom keras.backend.tensorflow_backend import set_session\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\n\n\nfrom keras.datasets import cifar10\n\n\n# hiperparaméter optimalizálás hyperas-sal (https://github.com/maxpumperla/hyperas)\n\n# a hyperas-nak kell két függvény:\n# -- data() : adatok betöltése\n# -- create_model() : hálózat modell\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n\n num_classes = 10\n\n # Convert class vectors to binary class matrices.\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n\n # reshape for FC-DNN\n x_train = np.reshape(x_train,(50000,3072)) # 32x32x3\n x_test = np.reshape(x_test,(10000,3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n\n # Normalization of pixel values (to [0-1] range)\n\n x_train /= 255\n x_test /= 255\n\n return x_train, 
y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n \n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n \n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1, dropout_2, optim, n_batch)\n # 3 x 3 x [0-1]x[0-1] x 3 x 3 = kb 8100 kombináció\n \n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n \n model.compile(optimizer=optim,\n loss='categorical_crossentropy',\n metrics=['accuracy'])\n\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime.now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + \\\n str(n_layer1) + '_' + str(n_layer2) + '_' + \\\n str(dropout_1) + '_' + str(dropout_2) + '_' + \\\n str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0), \\\n CSVLogger(csv_name, append=True, separator=';')]\n \n result = model.fit(x_train, y_train,\n batch_size=n_batch,\n epochs=100,\n verbose=2,\n validation_data=(x_test, y_test),\n callbacks=callbacks,\n shuffle=True)\n \n validation_acc = np.amax(result.history['val_acc']) \n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\nfrom hyperopt import Trials, STATUS_OK, tpe\nfrom hyperas import optim\nfrom hyperas.distributions import choice, uniform\n\n# main hyperopt part\n# az algoritmus lehet:\n# -- random.suggest -> random search\n# -- tpe.suggest -> tree parsen estimator\nbest_run, best_model = optim.minimize(model=create_model,\n data=data,\n algo=tpe.suggest,\n max_evals=5,\n 
trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint(\"Evalutation of best performing model:\")\nprint(best_model.evaluate(x_test, y_test))\nprint(\"Best performing model chosen hyper-parameters:\")\nprint(best_run)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# 4x4 grid: row/column 0 hold the coordinate labels, '-' marks a free cell.
field = [['*', '1', '2', '3'], ['1', '-', '-', '-'], ['2', '-', '-', '-'], ['3', '-', '-', '-']]
def show(a):
    """Print the board: cells space-separated, one row per line."""
    for row in a:
        for cell in row:
            print(cell, end=' ')
        print()
def askUserZero():
    """Prompt until the player enters a free position for '0'; return [row, col]."""
    while True:
        row_raw = input('Введите номер строки нолика')
        col_raw = input('Введите номер столбца нолика')
        # Validation order: numeric first, then range, then occupancy.
        if not (row_raw.isdigit() and col_raw.isdigit()):
            print("Значение должно принимать значения от 1 до 3. Попробуйте снова")
            continue
        row, col = int(row_raw), int(col_raw)
        if row not in [1, 2, 3] or col not in [1, 2, 3]:
            print("Такой позиции не существует, попробуйте снова")
            continue
        if field[row][col] != '-':
            print("Позиция уже занята :( Попробуйте снова")
            continue
        return [row, col]
def askUserCross():
    """Prompt until the player enters a free position for 'X'; return [row, col]."""
    while True:
        row_raw = input('Введите номер строки крестика')
        col_raw = input('Введите номер столбца крестика')
        # Validation order: numeric first, then range, then occupancy.
        if not (row_raw.isdigit() and col_raw.isdigit()):
            print("Значение должно принимать значения от 1 до 3. Попробуйте снова")
            continue
        row, col = int(row_raw), int(col_raw)
        if row not in [1, 2, 3] or col not in [1, 2, 3]:
            print("Такой позиции не существует, попробуйте снова")
            continue
        if field[row][col] != '-':
            print("Позиция уже занята :(\nПопробуйте снова")
            continue
        return [row, col]
def winCombo(a):
    """Check the board `a` for a completed line of 'X' or '0'.

    `a` is a square grid whose row/column 0 hold coordinate labels; the
    playable cells are a[1..size-1][1..size-1]. Prints the winning symbol
    and returns "Congratulations!" when a full row, column or diagonal is
    occupied by one symbol; returns None otherwise.

    Bug fix vs. the original: the pair counters were never reset between
    rows (or between columns), so two unrelated adjacent pairs in
    *different* rows/columns added up to a false win.
    """
    size = len(a)            # header + playable lines; a win needs size-2 pairs
    win = size - 2

    # Rows: count adjacent equal-pair matches within each single row.
    for i in range(1, size):
        pairs = 0            # reset per row (the original accumulated across rows)
        for j in range(1, size - 1):
            if a[i][j] == a[i][j + 1] and a[i][j] in ('X', '0'):
                pairs += 1
        if pairs == win:
            print("Выйграл", a[i][1])
            return "Congratulations!"

    # Columns: same check down each single column.
    for j in range(1, size):
        pairs = 0            # reset per column
        for i in range(1, size - 1):
            if a[i][j] == a[i + 1][j] and a[i][j] in ('X', '0'):
                pairs += 1
        if pairs == win:
            print("Выйграл", a[1][j])
            return "Congratulations!"

    # Main diagonal (1,1) .. (size-1, size-1).
    pairs = 0
    for i in range(1, size - 1):
        if a[i][i] == a[i + 1][i + 1] and a[i][i] in ('X', '0'):
            pairs += 1
    if pairs == win:
        print("Выйграл", a[1][1])
        return "Congratulations!"

    # Anti-diagonal (1, size-1) .. (size-1, 1).
    pairs = 0
    for i in range(1, size - 1):
        if a[i][size - i] == a[i + 1][size - i - 1] and a[i][size - i] in ('X', '0'):
            pairs += 1
    if pairs == win:
        print("Выйграл", a[1][size - 1])
        return "Congratulations!"
# Game loop: X moves first, then 0; stop as soon as either side wins.
while True:
    show(field)
    crossPos = askUserCross()
    field[crossPos[0]][crossPos[1]]='X'
    show(field)
    result=winCombo(field)
    if result:
        show(field)
        break
    zeroPos = askUserZero()
    field[zeroPos[0]][zeroPos[1]]='0'
    result = winCombo(field)
    if result:
        show(field)
        break
    # NOTE(review): this prints "None" every undecided round -- it was
    # probably meant to sit after the loop. A drawn (full) board also
    # never exits the loop; confirm intended behavior before changing.
    print(result)
|
normal
|
{
"blob_id": "3f22bf954a8c4608ec4bd4a28bea3679a664a99a",
"index": 2364,
"step-1": "<mask token>\n\n\ndef show(a):\n for i in range(len(a)):\n for j in range(len(a[i])):\n print(a[i][j], end=' ')\n print()\n\n\ndef askUserZero():\n while True:\n inputX = input('Введите номер строки нолика')\n inputY = input('Введите номер столбца нолика')\n if inputX.isdigit() and inputY.isdigit():\n zeroPosX = int(inputX)\n zeroPosY = int(inputY)\n if zeroPosX in [1, 2, 3] and zeroPosY in [1, 2, 3]:\n if field[zeroPosX][zeroPosY] != '-':\n print('Позиция уже занята :( Попробуйте снова')\n else:\n return [zeroPosX, zeroPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. Попробуйте снова'\n )\n\n\ndef askUserCross():\n while True:\n inputX = input('Введите номер строки крестика')\n inputY = input('Введите номер столбца крестика')\n if inputX.isdigit() and inputY.isdigit():\n crossPosX = int(inputX)\n crossPosY = int(inputY)\n if crossPosX in [1, 2, 3] and crossPosY in [1, 2, 3]:\n if field[crossPosX][crossPosY] != '-':\n print('Позиция уже занята :(\\nПопробуйте снова')\n else:\n return [crossPosX, crossPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. Попробуйте снова'\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef show(a):\n for i in range(len(a)):\n for j in range(len(a[i])):\n print(a[i][j], end=' ')\n print()\n\n\ndef askUserZero():\n while True:\n inputX = input('Введите номер строки нолика')\n inputY = input('Введите номер столбца нолика')\n if inputX.isdigit() and inputY.isdigit():\n zeroPosX = int(inputX)\n zeroPosY = int(inputY)\n if zeroPosX in [1, 2, 3] and zeroPosY in [1, 2, 3]:\n if field[zeroPosX][zeroPosY] != '-':\n print('Позиция уже занята :( Попробуйте снова')\n else:\n return [zeroPosX, zeroPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. Попробуйте снова'\n )\n\n\ndef askUserCross():\n while True:\n inputX = input('Введите номер строки крестика')\n inputY = input('Введите номер столбца крестика')\n if inputX.isdigit() and inputY.isdigit():\n crossPosX = int(inputX)\n crossPosY = int(inputY)\n if crossPosX in [1, 2, 3] and crossPosY in [1, 2, 3]:\n if field[crossPosX][crossPosY] != '-':\n print('Позиция уже занята :(\\nПопробуйте снова')\n else:\n return [crossPosX, crossPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. 
Попробуйте снова'\n )\n\n\ndef winCombo(a):\n n = 0\n m = 0\n t = 0\n r = 0\n for i in range(1, len(a)):\n for j in range(1, len(a[i]) - 1):\n if a[i][j] == a[i][j + 1] and a[i][j] == 'X' or a[i][j] == a[i][\n j + 1] and a[i][j] == '0':\n n += 1\n s = a[i][j + 1]\n if n == len(a[i]) - 2:\n print('Выйграл', s)\n return 'Congratulations!'\n for i in range(1, len(a[1])):\n for j in range(1, len(a) - 1):\n if a[j][i] == a[j + 1][i] and a[j][i] == 'X' or a[j][i] == a[j + 1\n ][i] and a[j][i] == '0':\n m += 1\n k = a[j][i]\n if m == len(a) - 2:\n print('Выйграл', k)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][i] == a[i + 1][i + 1] and a[i][i] == 'X' or a[i][i] == a[i + 1\n ][i + 1] and a[i][i] == '0':\n t += 1\n z = a[i][i]\n if t == len(a) - 2:\n print('Выйграл', z)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][len(a) - i\n ] == 'X' or a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][\n len(a) - i] == '0':\n r += 1\n b = a[i][len(a) - i]\n if r == len(a) - 2:\n print('Выйграл', b)\n return 'Congratulations!'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef show(a):\n for i in range(len(a)):\n for j in range(len(a[i])):\n print(a[i][j], end=' ')\n print()\n\n\ndef askUserZero():\n while True:\n inputX = input('Введите номер строки нолика')\n inputY = input('Введите номер столбца нолика')\n if inputX.isdigit() and inputY.isdigit():\n zeroPosX = int(inputX)\n zeroPosY = int(inputY)\n if zeroPosX in [1, 2, 3] and zeroPosY in [1, 2, 3]:\n if field[zeroPosX][zeroPosY] != '-':\n print('Позиция уже занята :( Попробуйте снова')\n else:\n return [zeroPosX, zeroPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. Попробуйте снова'\n )\n\n\ndef askUserCross():\n while True:\n inputX = input('Введите номер строки крестика')\n inputY = input('Введите номер столбца крестика')\n if inputX.isdigit() and inputY.isdigit():\n crossPosX = int(inputX)\n crossPosY = int(inputY)\n if crossPosX in [1, 2, 3] and crossPosY in [1, 2, 3]:\n if field[crossPosX][crossPosY] != '-':\n print('Позиция уже занята :(\\nПопробуйте снова')\n else:\n return [crossPosX, crossPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. 
Попробуйте снова'\n )\n\n\ndef winCombo(a):\n n = 0\n m = 0\n t = 0\n r = 0\n for i in range(1, len(a)):\n for j in range(1, len(a[i]) - 1):\n if a[i][j] == a[i][j + 1] and a[i][j] == 'X' or a[i][j] == a[i][\n j + 1] and a[i][j] == '0':\n n += 1\n s = a[i][j + 1]\n if n == len(a[i]) - 2:\n print('Выйграл', s)\n return 'Congratulations!'\n for i in range(1, len(a[1])):\n for j in range(1, len(a) - 1):\n if a[j][i] == a[j + 1][i] and a[j][i] == 'X' or a[j][i] == a[j + 1\n ][i] and a[j][i] == '0':\n m += 1\n k = a[j][i]\n if m == len(a) - 2:\n print('Выйграл', k)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][i] == a[i + 1][i + 1] and a[i][i] == 'X' or a[i][i] == a[i + 1\n ][i + 1] and a[i][i] == '0':\n t += 1\n z = a[i][i]\n if t == len(a) - 2:\n print('Выйграл', z)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][len(a) - i\n ] == 'X' or a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][\n len(a) - i] == '0':\n r += 1\n b = a[i][len(a) - i]\n if r == len(a) - 2:\n print('Выйграл', b)\n return 'Congratulations!'\n\n\nwhile True:\n show(field)\n crossPos = askUserCross()\n field[crossPos[0]][crossPos[1]] = 'X'\n show(field)\n result = winCombo(field)\n if result:\n show(field)\n break\n zeroPos = askUserZero()\n field[zeroPos[0]][zeroPos[1]] = '0'\n result = winCombo(field)\n if result:\n show(field)\n break\n print(result)\n",
"step-4": "field = [['*', '1', '2', '3'], ['1', '-', '-', '-'], ['2', '-', '-', '-'],\n ['3', '-', '-', '-']]\n\n\ndef show(a):\n for i in range(len(a)):\n for j in range(len(a[i])):\n print(a[i][j], end=' ')\n print()\n\n\ndef askUserZero():\n while True:\n inputX = input('Введите номер строки нолика')\n inputY = input('Введите номер столбца нолика')\n if inputX.isdigit() and inputY.isdigit():\n zeroPosX = int(inputX)\n zeroPosY = int(inputY)\n if zeroPosX in [1, 2, 3] and zeroPosY in [1, 2, 3]:\n if field[zeroPosX][zeroPosY] != '-':\n print('Позиция уже занята :( Попробуйте снова')\n else:\n return [zeroPosX, zeroPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. Попробуйте снова'\n )\n\n\ndef askUserCross():\n while True:\n inputX = input('Введите номер строки крестика')\n inputY = input('Введите номер столбца крестика')\n if inputX.isdigit() and inputY.isdigit():\n crossPosX = int(inputX)\n crossPosY = int(inputY)\n if crossPosX in [1, 2, 3] and crossPosY in [1, 2, 3]:\n if field[crossPosX][crossPosY] != '-':\n print('Позиция уже занята :(\\nПопробуйте снова')\n else:\n return [crossPosX, crossPosY]\n else:\n print('Такой позиции не существует, попробуйте снова')\n else:\n print(\n 'Значение должно принимать значения от 1 до 3. 
Попробуйте снова'\n )\n\n\ndef winCombo(a):\n n = 0\n m = 0\n t = 0\n r = 0\n for i in range(1, len(a)):\n for j in range(1, len(a[i]) - 1):\n if a[i][j] == a[i][j + 1] and a[i][j] == 'X' or a[i][j] == a[i][\n j + 1] and a[i][j] == '0':\n n += 1\n s = a[i][j + 1]\n if n == len(a[i]) - 2:\n print('Выйграл', s)\n return 'Congratulations!'\n for i in range(1, len(a[1])):\n for j in range(1, len(a) - 1):\n if a[j][i] == a[j + 1][i] and a[j][i] == 'X' or a[j][i] == a[j + 1\n ][i] and a[j][i] == '0':\n m += 1\n k = a[j][i]\n if m == len(a) - 2:\n print('Выйграл', k)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][i] == a[i + 1][i + 1] and a[i][i] == 'X' or a[i][i] == a[i + 1\n ][i + 1] and a[i][i] == '0':\n t += 1\n z = a[i][i]\n if t == len(a) - 2:\n print('Выйграл', z)\n return 'Congratulations!'\n for i in range(1, len(a) - 1):\n if a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][len(a) - i\n ] == 'X' or a[i][len(a) - i] == a[i + 1][len(a) - i - 1] and a[i][\n len(a) - i] == '0':\n r += 1\n b = a[i][len(a) - i]\n if r == len(a) - 2:\n print('Выйграл', b)\n return 'Congratulations!'\n\n\nwhile True:\n show(field)\n crossPos = askUserCross()\n field[crossPos[0]][crossPos[1]] = 'X'\n show(field)\n result = winCombo(field)\n if result:\n show(field)\n break\n zeroPos = askUserZero()\n field[zeroPos[0]][zeroPos[1]] = '0'\n result = winCombo(field)\n if result:\n show(field)\n break\n print(result)\n",
"step-5": "field = [['*', '1', '2', '3'], ['1', '-', '-', '-'], ['2', '-', '-', '-'], ['3', '-', '-', '-']]\r\ndef show(a):\r\n for i in range(len(a)):\r\n for j in range(len(a[i])):\r\n print(a[i][j], end=' ')\r\n print()\r\ndef askUserZero():\r\n while True:\r\n inputX = input('Введите номер строки нолика')\r\n inputY = input('Введите номер столбца нолика')\r\n\r\n if inputX.isdigit() and inputY.isdigit():\r\n zeroPosX = int(inputX)\r\n zeroPosY = int(inputY)\r\n if zeroPosX in [1, 2, 3] and zeroPosY in [1, 2, 3]:\r\n if field[zeroPosX][zeroPosY] != '-':\r\n print(\"Позиция уже занята :( Попробуйте снова\")\r\n else:\r\n return [zeroPosX, zeroPosY]\r\n else:\r\n print(\"Такой позиции не существует, попробуйте снова\")\r\n else:\r\n print(\"Значение должно принимать значения от 1 до 3. Попробуйте снова\")\r\n\r\n\r\ndef askUserCross():\r\n while True:\r\n inputX = input('Введите номер строки крестика')\r\n inputY = input('Введите номер столбца крестика')\r\n if inputX.isdigit() and inputY.isdigit():\r\n crossPosX = int(inputX)\r\n crossPosY = int(inputY)\r\n if crossPosX in [1, 2, 3] and crossPosY in [1, 2, 3]:\r\n if field[crossPosX][crossPosY] != '-':\r\n print(\"Позиция уже занята :(\\nПопробуйте снова\")\r\n else:\r\n return [crossPosX, crossPosY]\r\n else:\r\n print(\"Такой позиции не существует, попробуйте снова\")\r\n else:\r\n print(\"Значение должно принимать значения от 1 до 3. 
Попробуйте снова\")\r\n\r\n\r\n\r\ndef winCombo(a):\r\n n=0\r\n m=0\r\n t=0\r\n r=0\r\n for i in range(1, len(a)):\r\n for j in range(1, len(a[i])-1):\r\n if a[i][j] == a[i][j+1] and a[i][j] == 'X' or a[i][j] == a[i][j+1] and a[i][j] == '0':\r\n n += 1\r\n s = a[i][j+1]\r\n if n == len(a[i])-2:\r\n print(\"Выйграл\", s)\r\n return \"Congratulations!\"\r\n\r\n for i in range(1, len(a[1])):\r\n for j in range (1,len(a)-1):\r\n if a[j][i] == a[j+1][i] and a[j][i] == 'X' or a[j][i] == a[j+1][i] and a[j][i] == '0':\r\n m += 1\r\n k = a[j][i]\r\n if m == len(a)-2:\r\n print(\"Выйграл\", k)\r\n return \"Congratulations!\"\r\n\r\n for i in range(1, len(a)-1):\r\n if a[i][i] == a[i+1][i+1] and a[i][i] == 'X' or a[i][i] == a[i+1][i+1] and a[i][i] == '0':\r\n t += 1\r\n z = a[i][i]\r\n if t == len(a)-2:\r\n print(\"Выйграл\", z)\r\n return \"Congratulations!\"\r\n\r\n for i in range(1, len(a)-1):\r\n\r\n if a[i][len(a)-i] == a[i+1][len(a)-i-1] and a[i][len(a)-i] == 'X' or a[i][len(a)-i] == a[i+1][len(a)-i-1] and a[i][len(a)-i] == '0':\r\n r += 1\r\n b = a[i][len(a)-i]\r\n\r\n if r == len(a)-2:\r\n print(\"Выйграл\", b)\r\n return \"Congratulations!\"\r\n\r\nwhile True:\r\n show(field)\r\n crossPos = askUserCross()\r\n field[crossPos[0]][crossPos[1]]='X'\r\n show(field)\r\n result=winCombo(field)\r\n if result:\r\n show(field)\r\n break\r\n zeroPos = askUserZero()\r\n field[zeroPos[0]][zeroPos[1]]='0'\r\n result = winCombo(field)\r\n if result:\r\n show(field)\r\n break\r\n print(result)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
time.sleep(1)
<|reserved_special_token_0|>
if len(sys.argv) < 6:
error_str = str(sys.argv[0]
) + ' led1_current led2_current led_stable_time int_time1 int_time2'
print(error_str)
else:
C12880.Setup()
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(pin_meas, GPIO.IN)
GPIO.setup(pin_black, GPIO.IN)
GPIO.setup(pin_led, GPIO.OUT)
GPIO.output(pin_led, GPIO.LOW)
GPIO.setup(GATE_LED_PIN1, GPIO.OUT)
GPIO.setup(GATE_LED_PIN2, GPIO.OUT)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH)
GPIO.output(GATE_LED_PIN2, GPIO.HIGH)
data1 = (c_uint * 288)()
data2 = (c_uint * 288)()
meas = 1
black = 1
fnameindex = 0
spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)
disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)
disp.begin()
disp.clear()
img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)
draw = ImageDraw.Draw(img)
font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'
fontout = ImageFont.truetype(font, 11)
draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)
draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)
draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)
disp.display(img)
led1_current = int(sys.argv[1])
led2_current = int(sys.argv[2])
led_stable_time = float(sys.argv[3])
int_time1 = int(sys.argv[4])
int_time2 = int(sys.argv[5])
if led1_current < LED_CURR_MIN:
led1_current = LED_CURR_MIN
elif led1_current > LED_CURR_MAX:
led1_current = LED_CURR_MAX
if led2_current < LED_CURR_MIN:
led2_current = LED_CURR_MIN
elif led2_current > LED_CURR_MAX:
led2_current = LED_CURR_MAX
print('led1_current = ' + str(led1_current))
print('led2_current = ' + str(led2_current))
led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST
led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST
print('led1_duty = ' + str(led1_duty))
print('led2_duty = ' + str(led2_duty))
pi = pigpio.pi()
while 1:
while meas and black:
if GPIO.input(pin_meas) == GPIO.LOW:
meas = 0
print('meas low')
if GPIO.input(pin_black) == GPIO.LOW:
black = 0
print('black low')
GPIO.output(pin_led, GPIO.HIGH)
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))
if led1_duty > 0:
GPIO.output(GATE_LED_PIN1, GPIO.LOW)
if led2_duty > 0:
GPIO.output(GATE_LED_PIN2, GPIO.LOW)
time.sleep(led_stable_time)
if black == 0:
fname = 'dual_black.txt'
else:
fname = 'dual_desktop_' + str(fnameindex) + '.txt'
fname = HOME_DIR + fname
C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)
draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)
fontout = ImageFont.truetype(font, 16)
draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)
draw.text((0, LINE5Y), ' 66%', font=fontout, fill=COLOR_RED)
fontout = ImageFont.truetype(font, 10)
draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,
fill=COLOR_BLUE)
disp.display(img)
fp = open(fname, 'w+')
for i in range(0, 288):
fp.write(str(data1[i]) + ', ' + str(data2[i]) + ', \n')
fp.close()
if meas == 0:
fnameindex = fnameindex + 1
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH)
GPIO.output(GATE_LED_PIN2, GPIO.HIGH)
meas = 1
black = 1
GPIO.output(pin_led, GPIO.LOW)
print('done')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pin_meas = 24
pin_black = 25
pin_led = 26
HOME_DIR = '/home/pi/QSS003_python/'
C12880_LIB = HOME_DIR + 'Dual_C12880.so'
GATE_LED_PIN1 = 4
GATE_LED_PIN2 = 22
PWM_LED_PIN1 = 18
PWM_LED_PIN2 = 13
PWM_FREQ = 500
DUTY_MIN = 0
DUTY_MAX = 900000
LED_CURR_MIN = 60
LED_CURR_MAX = 330
LED_DUTY_CONST = 10000 / 3
AOPIN = 23
RSTPIN = 12
SPI_PORT = 1
SPI_CH = 0
SPI_SPEED = 4000000
COLOR_RED = 255, 0, 0
COLOR_GREEN = 0, 255, 0
COLOR_BLUE = 0, 0, 255
COLOR_WHITE = 255, 255, 255
COLOR_BLACK = 0, 0, 0
COLOR_YELLOW = 255, 255, 0
COLOR_PURPLE = 255, 0, 255
COLOR_CYAN = 0, 255, 255
TFT_SIZE = 128, 128
LINE1Y = 15
LINE2Y = 30
LINE3Y = 45
LINE4Y = 65
LINE5Y = 80
LINE6Y = 100
SPACE1 = 15
SPACE2 = 20
time.sleep(1)
C12880 = cdll.LoadLibrary(C12880_LIB)
if len(sys.argv) < 6:
error_str = str(sys.argv[0]
) + ' led1_current led2_current led_stable_time int_time1 int_time2'
print(error_str)
else:
C12880.Setup()
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(pin_meas, GPIO.IN)
GPIO.setup(pin_black, GPIO.IN)
GPIO.setup(pin_led, GPIO.OUT)
GPIO.output(pin_led, GPIO.LOW)
GPIO.setup(GATE_LED_PIN1, GPIO.OUT)
GPIO.setup(GATE_LED_PIN2, GPIO.OUT)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH)
GPIO.output(GATE_LED_PIN2, GPIO.HIGH)
data1 = (c_uint * 288)()
data2 = (c_uint * 288)()
meas = 1
black = 1
fnameindex = 0
spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)
disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)
disp.begin()
disp.clear()
img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)
draw = ImageDraw.Draw(img)
font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'
fontout = ImageFont.truetype(font, 11)
draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)
draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)
draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)
disp.display(img)
led1_current = int(sys.argv[1])
led2_current = int(sys.argv[2])
led_stable_time = float(sys.argv[3])
int_time1 = int(sys.argv[4])
int_time2 = int(sys.argv[5])
if led1_current < LED_CURR_MIN:
led1_current = LED_CURR_MIN
elif led1_current > LED_CURR_MAX:
led1_current = LED_CURR_MAX
if led2_current < LED_CURR_MIN:
led2_current = LED_CURR_MIN
elif led2_current > LED_CURR_MAX:
led2_current = LED_CURR_MAX
print('led1_current = ' + str(led1_current))
print('led2_current = ' + str(led2_current))
led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST
led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST
print('led1_duty = ' + str(led1_duty))
print('led2_duty = ' + str(led2_duty))
pi = pigpio.pi()
while 1:
while meas and black:
if GPIO.input(pin_meas) == GPIO.LOW:
meas = 0
print('meas low')
if GPIO.input(pin_black) == GPIO.LOW:
black = 0
print('black low')
GPIO.output(pin_led, GPIO.HIGH)
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))
if led1_duty > 0:
GPIO.output(GATE_LED_PIN1, GPIO.LOW)
if led2_duty > 0:
GPIO.output(GATE_LED_PIN2, GPIO.LOW)
time.sleep(led_stable_time)
if black == 0:
fname = 'dual_black.txt'
else:
fname = 'dual_desktop_' + str(fnameindex) + '.txt'
fname = HOME_DIR + fname
C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)
draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)
fontout = ImageFont.truetype(font, 16)
draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)
draw.text((0, LINE5Y), ' 66%', font=fontout, fill=COLOR_RED)
fontout = ImageFont.truetype(font, 10)
draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,
fill=COLOR_BLUE)
disp.display(img)
fp = open(fname, 'w+')
for i in range(0, 288):
fp.write(str(data1[i]) + ', ' + str(data2[i]) + ', \n')
fp.close()
if meas == 0:
fnameindex = fnameindex + 1
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH)
GPIO.output(GATE_LED_PIN2, GPIO.HIGH)
meas = 1
black = 1
GPIO.output(pin_led, GPIO.LOW)
print('done')
<|reserved_special_token_1|>
from ctypes import *
import os
import sys
import time
import datetime
import subprocess
import RPi.GPIO as GPIO
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import Adafruit_GPIO.SPI as SPI
import ST7735 as TFT
import pigpio
# Dual-C12880 spectrometer measurement script for Raspberry Pi.
# Waits for a "measure" or "black" button press, drives the illumination
# LEDs via pigpio hardware PWM, reads both spectrometer channels through
# the Dual_C12880 C library, shows status on an ST7735 TFT, and dumps the
# two spectra to a text file.
#
# GPIO pin assignments use BCM numbering (see GPIO.setmode(GPIO.BCM) below).
pin_meas = 24  # "measure" button input (18 in BOARD numbering)
pin_black = 25  # "black" (dark reference) button input (22 in BOARD)
pin_led = 26  # measurement indicator LED output (37 in BOARD)
HOME_DIR = '/home/pi/QSS003_python/'
C12880_LIB = HOME_DIR + 'Dual_C12880.so'  # C driver for the two C12880 spectrometers
GATE_LED_PIN1 = 4  # LED1 gate, driven LOW to open (7 in BOARD)
GATE_LED_PIN2 = 22  # LED2 gate, driven LOW to open (15 in BOARD)
PWM_LED_PIN1 = 18  # hardware-PWM pin for LED1 (pigpio numbering)
PWM_LED_PIN2 = 13  # hardware-PWM pin for LED2 (pigpio numbering)
PWM_FREQ = 500  # PWM frequency in Hz
DUTY_MIN = 0
DUTY_MAX = 900000  # duty ceiling used here (pigpio full scale is 1000000)
LED_CURR_MIN = 60  # minimum LED current, mA
LED_CURR_MAX = 330  # maximum LED current, mA
LED_DUTY_CONST = 10000 / 3  # pigpio duty units per mA above LED_CURR_MIN
# ST7735 TFT wiring and SPI parameters.
AOPIN = 23  # data/command (A0) line (16 in BOARD)
RSTPIN = 12  # display reset line (32 in BOARD)
SPI_PORT = 1
SPI_CH = 0
SPI_SPEED = 4000000  # Hz
# RGB color tuples (written without parentheses; still tuples).
COLOR_RED = 255, 0, 0
COLOR_GREEN = 0, 255, 0
COLOR_BLUE = 0, 0, 255
COLOR_WHITE = 255, 255, 255
COLOR_BLACK = 0, 0, 0
COLOR_YELLOW = 255, 255, 0
COLOR_PURPLE = 255, 0, 255
COLOR_CYAN = 0, 255, 255
TFT_SIZE = 128, 128  # display resolution, pixels
# y coordinates of the text rows drawn on the 128x128 TFT.
LINE1Y = 15
LINE2Y = 30
LINE3Y = 45
LINE4Y = 65
LINE5Y = 80
LINE6Y = 100
SPACE1 = 15  # row heights used when blanking previously drawn text
SPACE2 = 20
time.sleep(1)
C12880 = cdll.LoadLibrary(C12880_LIB)
if len(sys.argv) < 6:
    # Usage: prog led1_current led2_current led_stable_time int_time1 int_time2
    error_str = str(sys.argv[0]
        ) + ' led1_current led2_current led_stable_time int_time1 int_time2'
    print(error_str)
else:
    # --- one-time hardware initialization ---
    C12880.Setup()  # init the spectrometer hardware via the C library
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
    GPIO.setup(pin_meas, GPIO.IN)
    GPIO.setup(pin_black, GPIO.IN)
    GPIO.setup(pin_led, GPIO.OUT)
    GPIO.output(pin_led, GPIO.LOW)
    GPIO.setup(GATE_LED_PIN1, GPIO.OUT)
    GPIO.setup(GATE_LED_PIN2, GPIO.OUT)
    GPIO.output(GATE_LED_PIN1, GPIO.HIGH)  # gate HIGH = LED closed/off
    GPIO.output(GATE_LED_PIN2, GPIO.HIGH)  # gate HIGH = LED closed/off
    data1 = (c_uint * 288)()  # 288-pixel spectrum buffer, channel 1
    data2 = (c_uint * 288)()  # 288-pixel spectrum buffer, channel 2
    meas = 1   # button flags: 1 = not pressed yet
    black = 1
    fnameindex = 0  # suffix counter for measurement output files
    # TFT display init and static labels.
    spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)
    disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)
    disp.begin()
    disp.clear()
    img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)
    draw = ImageDraw.Draw(img)
    font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'
    fontout = ImageFont.truetype(font, 11)
    draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)
    draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)
    draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)
    disp.display(img)
    # --- command-line parameters ---
    led1_current = int(sys.argv[1])    # mA
    led2_current = int(sys.argv[2])    # mA
    led_stable_time = float(sys.argv[3])  # seconds to wait after LEDs turn on
    int_time1 = int(sys.argv[4])  # integration time, channel 1
    int_time2 = int(sys.argv[5])  # integration time, channel 2
    # Clamp requested currents to the safe range.
    if led1_current < LED_CURR_MIN:
        led1_current = LED_CURR_MIN
    elif led1_current > LED_CURR_MAX:
        led1_current = LED_CURR_MAX
    if led2_current < LED_CURR_MIN:
        led2_current = LED_CURR_MIN
    elif led2_current > LED_CURR_MAX:
        led2_current = LED_CURR_MAX
    print('led1_current = ' + str(led1_current))
    print('led2_current = ' + str(led2_current))
    # Linear mA -> pigpio duty-cycle conversion (LED_CURR_MIN maps to 0).
    led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST
    led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST
    print('led1_duty = ' + str(led1_duty))
    print('led2_duty = ' + str(led2_duty))
    pi = pigpio.pi()
    while 1:
        # Busy-wait (no sleep) until either button pulls its input LOW.
        while meas and black:
            if GPIO.input(pin_meas) == GPIO.LOW:
                meas = 0
                print('meas low')
            if GPIO.input(pin_black) == GPIO.LOW:
                black = 0
                print('black low')
        # Indicator on, start PWM, then open the gates of the active LEDs.
        GPIO.output(pin_led, GPIO.HIGH)
        pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))
        pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))
        if led1_duty > 0:
            GPIO.output(GATE_LED_PIN1, GPIO.LOW)
        if led2_duty > 0:
            GPIO.output(GATE_LED_PIN2, GPIO.LOW)
        time.sleep(led_stable_time)  # let LED output stabilize
        # Dark reference always overwrites one file; measurements are numbered.
        if black == 0:
            fname = 'dual_black.txt'
        else:
            fname = 'dual_desktop_' + str(fnameindex) + '.txt'
        fname = HOME_DIR + fname
        # Acquire both channels with their respective integration times.
        C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)
        # Blank the dynamic TFT rows, then redraw the readings + timestamp.
        draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)
        draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)
        draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)
        fontout = ImageFont.truetype(font, 16)
        # NOTE(review): ' 12.1 mg/dL' / ' 66%' look like hard-coded demo
        # values rather than computed results — confirm before relying on them.
        draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)
        draw.text((0, LINE5Y), ' 66%', font=fontout, fill=COLOR_RED)
        fontout = ImageFont.truetype(font, 10)
        draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,
            fill=COLOR_BLUE)
        disp.display(img)
        # One "<ch1>, <ch2>, " line per spectrometer pixel.
        fp = open(fname, 'w+')
        for i in range(0, 288):
            fp.write(str(data1[i]) + ', ' + str(data2[i]) + ', \n')
        fp.close()
        if meas == 0:
            fnameindex = fnameindex + 1  # only measurement runs advance the index
        # Shut the LEDs down and re-arm the button flags for the next run.
        pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)
        pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)
        GPIO.output(GATE_LED_PIN1, GPIO.HIGH)
        GPIO.output(GATE_LED_PIN2, GPIO.HIGH)
        meas = 1
        black = 1
        GPIO.output(pin_led, GPIO.LOW)
        print('done')
<|reserved_special_token_1|>
from ctypes import *
import os
import sys
import time
import datetime
import subprocess
import RPi.GPIO as GPIO
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
#import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
import ST7735 as TFT
import pigpio
# use BCM pin define
pin_meas = 24 # 18 in BOARD
pin_black = 25 # 22 in BOARD
pin_led = 26 # 37 in BOARD
HOME_DIR = "/home/pi/QSS003_python/"
C12880_LIB = HOME_DIR + "Dual_C12880.so"
# use BCM pin define
GATE_LED_PIN1 = 4 # 7 in BOARD
GATE_LED_PIN2 = 22 # 15 in BOARD
PWM_LED_PIN1 = 18 # in pigpio
PWM_LED_PIN2 = 13 # in pigpio
PWM_FREQ = 500
DUTY_MIN = 0
DUTY_MAX = 900000 # original = 1000000
LED_CURR_MIN = 60 #mA
LED_CURR_MAX = 330 #mA
LED_DUTY_CONST = 10000/3
# use BCM pin define
AOPIN = 23 # 16 in BOARD
RSTPIN = 12 # 32 in BOARD
SPI_PORT = 1
SPI_CH = 0
SPI_SPEED = 4000000
COLOR_RED = (255,0,0)
COLOR_GREEN = (0,255,0)
COLOR_BLUE = (0,0,255)
COLOR_WHITE = (255,255,255)
COLOR_BLACK = (0,0,0)
COLOR_YELLOW = (255,255,0)
COLOR_PURPLE = (255,0, 255)
COLOR_CYAN = (0, 255,255)
TFT_SIZE = (128, 128)
LINE1Y = 15
LINE2Y = 30
LINE3Y = 45
LINE4Y = 65
LINE5Y = 80
LINE6Y = 100
SPACE1 = 15
SPACE2 = 20
time.sleep(1)
C12880 = cdll.LoadLibrary(C12880_LIB)
if len(sys.argv) < 6:
error_str = str(sys.argv[0]) + " led1_current led2_current led_stable_time int_time1 int_time2"
print(error_str)
else:
# board initialization
C12880.Setup() # init spectrometer
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(pin_meas, GPIO.IN)
GPIO.setup(pin_black, GPIO.IN)
GPIO.setup(pin_led, GPIO.OUT)
GPIO.output(pin_led, GPIO.LOW)
GPIO.setup(GATE_LED_PIN1, GPIO.OUT)
GPIO.setup(GATE_LED_PIN2, GPIO.OUT)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH) #close
GPIO.output(GATE_LED_PIN2, GPIO.HIGH) #close
data1 = (c_uint * 288)() # data to store spectrum data
data2 = (c_uint * 288)()
meas = 1
black = 1
fnameindex = 0
# Display init
spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz = SPI_SPEED)
disp = TFT.ST7735(dc = AOPIN, rst = RSTPIN, spi = spi, width = 128, height = 128)
disp.begin()
disp.clear()
img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)
draw = ImageDraw.Draw(img)
font = "/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf"
fontout = ImageFont.truetype(font,11)
draw.text((0,LINE1Y), " Mode: Measure", font = fontout, fill = COLOR_BLUE)
draw.text((0,LINE2Y), " Bilirubin", font = fontout, fill = COLOR_BLUE)
draw.text((0,LINE4Y), " SiO2", font = fontout, fill = COLOR_BLUE)
disp.display(img)
led1_current = int(sys.argv[1])
led2_current = int(sys.argv[2])
led_stable_time = float(sys.argv[3])
int_time1 = int(sys.argv[4])
int_time2 = int(sys.argv[5])
if (led1_current < LED_CURR_MIN):
led1_current = LED_CURR_MIN
elif (led1_current > LED_CURR_MAX):
led1_current = LED_CURR_MAX
if (led2_current < LED_CURR_MIN):
led2_current = LED_CURR_MIN
elif (led2_current > LED_CURR_MAX):
led2_current = LED_CURR_MAX
print("led1_current = "+ str(led1_current))
print("led2_current = "+ str(led2_current))
led1_duty = (led1_current - LED_CURR_MIN)*LED_DUTY_CONST
led2_duty = (led2_current - LED_CURR_MIN)*LED_DUTY_CONST
print("led1_duty = "+ str(led1_duty))
print("led2_duty = "+ str(led2_duty))
pi = pigpio.pi()
while (1):
#wait until black or meas buttom is pressed
while (meas and black):
if GPIO.input(pin_meas) == GPIO.LOW:
meas = 0
print("meas low")
if GPIO.input(pin_black) == GPIO.LOW:
black = 0
print("black low")
GPIO.output(pin_led, GPIO.HIGH)
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))
if (led1_duty > 0):
GPIO.output(GATE_LED_PIN1, GPIO.LOW) # open
if (led2_duty > 0):
GPIO.output(GATE_LED_PIN2, GPIO.LOW) # open
time.sleep(led_stable_time)
if (black == 0):
fname = "dual_black.txt"
else:
fname = "dual_desktop_" + str(fnameindex) + ".txt"
fname = HOME_DIR + fname
#C12880.ReadSpectrometer(int_time, data)
C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)
# print the data on tft screen
draw.rectangle((0, LINE3Y, 128, LINE3Y+SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE5Y, 128, LINE5Y+SPACE2), COLOR_WHITE)
draw.rectangle((0, LINE6Y, 128, LINE6Y+SPACE1), COLOR_WHITE)
fontout = ImageFont.truetype(font,16)
draw.text((0,LINE3Y)," 12.1 mg/dL", font = fontout, fill = COLOR_RED)
draw.text((0,LINE5Y)," 66%", font = fontout, fill = COLOR_RED)
fontout = ImageFont.truetype(font,10)
draw.text((0,LINE6Y),str(datetime.datetime.now()), font = fontout, fill = COLOR_BLUE)
disp.display(img)
#out = [str(line) + '\n' for line in data]
fp = open(fname, "w+")
#print(out)
#fp.writelines(out)
for i in range(0,288):
fp.write(str(data1[i]) + ", " + str(data2[i]) + ", \n")
fp.close()
if (meas == 0):
fnameindex = fnameindex + 1
pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)
pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)
GPIO.output(GATE_LED_PIN1, GPIO.HIGH) # close
GPIO.output(GATE_LED_PIN2, GPIO.HIGH) # close
# time.sleep(led_stable_time) # for LED test
meas = 1
black = 1
GPIO.output(pin_led, GPIO.LOW) #turn off measure LED
print("done")
|
flexible
|
{
"blob_id": "d250cc0aafdd48cb0eb56108d9c7148153cde002",
"index": 6840,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntime.sleep(1)\n<mask token>\nif len(sys.argv) < 6:\n error_str = str(sys.argv[0]\n ) + ' led1_current led2_current led_stable_time int_time1 int_time2'\n print(error_str)\nelse:\n C12880.Setup()\n GPIO.setmode(GPIO.BCM)\n GPIO.setwarnings(False)\n GPIO.setup(pin_meas, GPIO.IN)\n GPIO.setup(pin_black, GPIO.IN)\n GPIO.setup(pin_led, GPIO.OUT)\n GPIO.output(pin_led, GPIO.LOW)\n GPIO.setup(GATE_LED_PIN1, GPIO.OUT)\n GPIO.setup(GATE_LED_PIN2, GPIO.OUT)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n data1 = (c_uint * 288)()\n data2 = (c_uint * 288)()\n meas = 1\n black = 1\n fnameindex = 0\n spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)\n disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)\n disp.begin()\n disp.clear()\n img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)\n draw = ImageDraw.Draw(img)\n font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'\n fontout = ImageFont.truetype(font, 11)\n draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)\n disp.display(img)\n led1_current = int(sys.argv[1])\n led2_current = int(sys.argv[2])\n led_stable_time = float(sys.argv[3])\n int_time1 = int(sys.argv[4])\n int_time2 = int(sys.argv[5])\n if led1_current < LED_CURR_MIN:\n led1_current = LED_CURR_MIN\n elif led1_current > LED_CURR_MAX:\n led1_current = LED_CURR_MAX\n if led2_current < LED_CURR_MIN:\n led2_current = LED_CURR_MIN\n elif led2_current > LED_CURR_MAX:\n led2_current = LED_CURR_MAX\n print('led1_current = ' + str(led1_current))\n print('led2_current = ' + str(led2_current))\n led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST\n led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST\n print('led1_duty = ' + str(led1_duty))\n print('led2_duty = ' + str(led2_duty))\n pi = pigpio.pi()\n while 1:\n while meas and 
black:\n if GPIO.input(pin_meas) == GPIO.LOW:\n meas = 0\n print('meas low')\n if GPIO.input(pin_black) == GPIO.LOW:\n black = 0\n print('black low')\n GPIO.output(pin_led, GPIO.HIGH)\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))\n if led1_duty > 0:\n GPIO.output(GATE_LED_PIN1, GPIO.LOW)\n if led2_duty > 0:\n GPIO.output(GATE_LED_PIN2, GPIO.LOW)\n time.sleep(led_stable_time)\n if black == 0:\n fname = 'dual_black.txt'\n else:\n fname = 'dual_desktop_' + str(fnameindex) + '.txt'\n fname = HOME_DIR + fname\n C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)\n draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)\n fontout = ImageFont.truetype(font, 16)\n draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)\n draw.text((0, LINE5Y), ' 66%', font=fontout, fill=COLOR_RED)\n fontout = ImageFont.truetype(font, 10)\n draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,\n fill=COLOR_BLUE)\n disp.display(img)\n fp = open(fname, 'w+')\n for i in range(0, 288):\n fp.write(str(data1[i]) + ', ' + str(data2[i]) + ', \\n')\n fp.close()\n if meas == 0:\n fnameindex = fnameindex + 1\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n meas = 1\n black = 1\n GPIO.output(pin_led, GPIO.LOW)\n print('done')\n",
"step-3": "<mask token>\npin_meas = 24\npin_black = 25\npin_led = 26\nHOME_DIR = '/home/pi/QSS003_python/'\nC12880_LIB = HOME_DIR + 'Dual_C12880.so'\nGATE_LED_PIN1 = 4\nGATE_LED_PIN2 = 22\nPWM_LED_PIN1 = 18\nPWM_LED_PIN2 = 13\nPWM_FREQ = 500\nDUTY_MIN = 0\nDUTY_MAX = 900000\nLED_CURR_MIN = 60\nLED_CURR_MAX = 330\nLED_DUTY_CONST = 10000 / 3\nAOPIN = 23\nRSTPIN = 12\nSPI_PORT = 1\nSPI_CH = 0\nSPI_SPEED = 4000000\nCOLOR_RED = 255, 0, 0\nCOLOR_GREEN = 0, 255, 0\nCOLOR_BLUE = 0, 0, 255\nCOLOR_WHITE = 255, 255, 255\nCOLOR_BLACK = 0, 0, 0\nCOLOR_YELLOW = 255, 255, 0\nCOLOR_PURPLE = 255, 0, 255\nCOLOR_CYAN = 0, 255, 255\nTFT_SIZE = 128, 128\nLINE1Y = 15\nLINE2Y = 30\nLINE3Y = 45\nLINE4Y = 65\nLINE5Y = 80\nLINE6Y = 100\nSPACE1 = 15\nSPACE2 = 20\ntime.sleep(1)\nC12880 = cdll.LoadLibrary(C12880_LIB)\nif len(sys.argv) < 6:\n error_str = str(sys.argv[0]\n ) + ' led1_current led2_current led_stable_time int_time1 int_time2'\n print(error_str)\nelse:\n C12880.Setup()\n GPIO.setmode(GPIO.BCM)\n GPIO.setwarnings(False)\n GPIO.setup(pin_meas, GPIO.IN)\n GPIO.setup(pin_black, GPIO.IN)\n GPIO.setup(pin_led, GPIO.OUT)\n GPIO.output(pin_led, GPIO.LOW)\n GPIO.setup(GATE_LED_PIN1, GPIO.OUT)\n GPIO.setup(GATE_LED_PIN2, GPIO.OUT)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n data1 = (c_uint * 288)()\n data2 = (c_uint * 288)()\n meas = 1\n black = 1\n fnameindex = 0\n spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)\n disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)\n disp.begin()\n disp.clear()\n img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)\n draw = ImageDraw.Draw(img)\n font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'\n fontout = ImageFont.truetype(font, 11)\n draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)\n disp.display(img)\n led1_current = 
int(sys.argv[1])\n led2_current = int(sys.argv[2])\n led_stable_time = float(sys.argv[3])\n int_time1 = int(sys.argv[4])\n int_time2 = int(sys.argv[5])\n if led1_current < LED_CURR_MIN:\n led1_current = LED_CURR_MIN\n elif led1_current > LED_CURR_MAX:\n led1_current = LED_CURR_MAX\n if led2_current < LED_CURR_MIN:\n led2_current = LED_CURR_MIN\n elif led2_current > LED_CURR_MAX:\n led2_current = LED_CURR_MAX\n print('led1_current = ' + str(led1_current))\n print('led2_current = ' + str(led2_current))\n led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST\n led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST\n print('led1_duty = ' + str(led1_duty))\n print('led2_duty = ' + str(led2_duty))\n pi = pigpio.pi()\n while 1:\n while meas and black:\n if GPIO.input(pin_meas) == GPIO.LOW:\n meas = 0\n print('meas low')\n if GPIO.input(pin_black) == GPIO.LOW:\n black = 0\n print('black low')\n GPIO.output(pin_led, GPIO.HIGH)\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))\n if led1_duty > 0:\n GPIO.output(GATE_LED_PIN1, GPIO.LOW)\n if led2_duty > 0:\n GPIO.output(GATE_LED_PIN2, GPIO.LOW)\n time.sleep(led_stable_time)\n if black == 0:\n fname = 'dual_black.txt'\n else:\n fname = 'dual_desktop_' + str(fnameindex) + '.txt'\n fname = HOME_DIR + fname\n C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)\n draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)\n fontout = ImageFont.truetype(font, 16)\n draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)\n draw.text((0, LINE5Y), ' 66%', font=fontout, fill=COLOR_RED)\n fontout = ImageFont.truetype(font, 10)\n draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,\n fill=COLOR_BLUE)\n disp.display(img)\n fp = open(fname, 'w+')\n for i in range(0, 288):\n fp.write(str(data1[i]) + ', 
' + str(data2[i]) + ', \\n')\n fp.close()\n if meas == 0:\n fnameindex = fnameindex + 1\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n meas = 1\n black = 1\n GPIO.output(pin_led, GPIO.LOW)\n print('done')\n",
"step-4": "from ctypes import *\nimport os\nimport sys\nimport time\nimport datetime\nimport subprocess\nimport RPi.GPIO as GPIO\nfrom PIL import Image\nfrom PIL import ImageDraw\nfrom PIL import ImageFont\nimport Adafruit_GPIO.SPI as SPI\nimport ST7735 as TFT\nimport pigpio\npin_meas = 24\npin_black = 25\npin_led = 26\nHOME_DIR = '/home/pi/QSS003_python/'\nC12880_LIB = HOME_DIR + 'Dual_C12880.so'\nGATE_LED_PIN1 = 4\nGATE_LED_PIN2 = 22\nPWM_LED_PIN1 = 18\nPWM_LED_PIN2 = 13\nPWM_FREQ = 500\nDUTY_MIN = 0\nDUTY_MAX = 900000\nLED_CURR_MIN = 60\nLED_CURR_MAX = 330\nLED_DUTY_CONST = 10000 / 3\nAOPIN = 23\nRSTPIN = 12\nSPI_PORT = 1\nSPI_CH = 0\nSPI_SPEED = 4000000\nCOLOR_RED = 255, 0, 0\nCOLOR_GREEN = 0, 255, 0\nCOLOR_BLUE = 0, 0, 255\nCOLOR_WHITE = 255, 255, 255\nCOLOR_BLACK = 0, 0, 0\nCOLOR_YELLOW = 255, 255, 0\nCOLOR_PURPLE = 255, 0, 255\nCOLOR_CYAN = 0, 255, 255\nTFT_SIZE = 128, 128\nLINE1Y = 15\nLINE2Y = 30\nLINE3Y = 45\nLINE4Y = 65\nLINE5Y = 80\nLINE6Y = 100\nSPACE1 = 15\nSPACE2 = 20\ntime.sleep(1)\nC12880 = cdll.LoadLibrary(C12880_LIB)\nif len(sys.argv) < 6:\n error_str = str(sys.argv[0]\n ) + ' led1_current led2_current led_stable_time int_time1 int_time2'\n print(error_str)\nelse:\n C12880.Setup()\n GPIO.setmode(GPIO.BCM)\n GPIO.setwarnings(False)\n GPIO.setup(pin_meas, GPIO.IN)\n GPIO.setup(pin_black, GPIO.IN)\n GPIO.setup(pin_led, GPIO.OUT)\n GPIO.output(pin_led, GPIO.LOW)\n GPIO.setup(GATE_LED_PIN1, GPIO.OUT)\n GPIO.setup(GATE_LED_PIN2, GPIO.OUT)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n data1 = (c_uint * 288)()\n data2 = (c_uint * 288)()\n meas = 1\n black = 1\n fnameindex = 0\n spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz=SPI_SPEED)\n disp = TFT.ST7735(dc=AOPIN, rst=RSTPIN, spi=spi, width=128, height=128)\n disp.begin()\n disp.clear()\n img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)\n draw = ImageDraw.Draw(img)\n font = '/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf'\n fontout = ImageFont.truetype(font, 11)\n 
draw.text((0, LINE1Y), ' Mode: Measure', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE2Y), ' Bilirubin', font=fontout, fill=COLOR_BLUE)\n draw.text((0, LINE4Y), ' SiO2', font=fontout, fill=COLOR_BLUE)\n disp.display(img)\n led1_current = int(sys.argv[1])\n led2_current = int(sys.argv[2])\n led_stable_time = float(sys.argv[3])\n int_time1 = int(sys.argv[4])\n int_time2 = int(sys.argv[5])\n if led1_current < LED_CURR_MIN:\n led1_current = LED_CURR_MIN\n elif led1_current > LED_CURR_MAX:\n led1_current = LED_CURR_MAX\n if led2_current < LED_CURR_MIN:\n led2_current = LED_CURR_MIN\n elif led2_current > LED_CURR_MAX:\n led2_current = LED_CURR_MAX\n print('led1_current = ' + str(led1_current))\n print('led2_current = ' + str(led2_current))\n led1_duty = (led1_current - LED_CURR_MIN) * LED_DUTY_CONST\n led2_duty = (led2_current - LED_CURR_MIN) * LED_DUTY_CONST\n print('led1_duty = ' + str(led1_duty))\n print('led2_duty = ' + str(led2_duty))\n pi = pigpio.pi()\n while 1:\n while meas and black:\n if GPIO.input(pin_meas) == GPIO.LOW:\n meas = 0\n print('meas low')\n if GPIO.input(pin_black) == GPIO.LOW:\n black = 0\n print('black low')\n GPIO.output(pin_led, GPIO.HIGH)\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))\n if led1_duty > 0:\n GPIO.output(GATE_LED_PIN1, GPIO.LOW)\n if led2_duty > 0:\n GPIO.output(GATE_LED_PIN2, GPIO.LOW)\n time.sleep(led_stable_time)\n if black == 0:\n fname = 'dual_black.txt'\n else:\n fname = 'dual_desktop_' + str(fnameindex) + '.txt'\n fname = HOME_DIR + fname\n C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)\n draw.rectangle((0, LINE3Y, 128, LINE3Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE5Y, 128, LINE5Y + SPACE2), COLOR_WHITE)\n draw.rectangle((0, LINE6Y, 128, LINE6Y + SPACE1), COLOR_WHITE)\n fontout = ImageFont.truetype(font, 16)\n draw.text((0, LINE3Y), ' 12.1 mg/dL', font=fontout, fill=COLOR_RED)\n draw.text((0, LINE5Y), ' 66%', font=fontout, 
fill=COLOR_RED)\n fontout = ImageFont.truetype(font, 10)\n draw.text((0, LINE6Y), str(datetime.datetime.now()), font=fontout,\n fill=COLOR_BLUE)\n disp.display(img)\n fp = open(fname, 'w+')\n for i in range(0, 288):\n fp.write(str(data1[i]) + ', ' + str(data2[i]) + ', \\n')\n fp.close()\n if meas == 0:\n fnameindex = fnameindex + 1\n pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)\n pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)\n GPIO.output(GATE_LED_PIN1, GPIO.HIGH)\n GPIO.output(GATE_LED_PIN2, GPIO.HIGH)\n meas = 1\n black = 1\n GPIO.output(pin_led, GPIO.LOW)\n print('done')\n",
"step-5": "from ctypes import *\nimport os\nimport sys\nimport time\nimport datetime\nimport subprocess\nimport RPi.GPIO as GPIO\nfrom PIL import Image\nfrom PIL import ImageDraw\nfrom PIL import ImageFont\n#import Adafruit_GPIO as GPIO\nimport Adafruit_GPIO.SPI as SPI\nimport ST7735 as TFT\nimport pigpio\n\n# use BCM pin define\npin_meas = 24 \t# 18 in BOARD\npin_black = 25\t# 22 in BOARD\npin_led = 26 # 37 in BOARD\n\nHOME_DIR = \"/home/pi/QSS003_python/\"\nC12880_LIB = HOME_DIR + \"Dual_C12880.so\"\n\n# use BCM pin define\nGATE_LED_PIN1 = 4\t# 7 in BOARD\nGATE_LED_PIN2 = 22\t# 15 in BOARD\nPWM_LED_PIN1 = 18 # in pigpio\nPWM_LED_PIN2 = 13 # in pigpio\n\nPWM_FREQ = 500\nDUTY_MIN = 0\nDUTY_MAX = 900000\t# original = 1000000\nLED_CURR_MIN = 60\t#mA\nLED_CURR_MAX = 330\t#mA\nLED_DUTY_CONST = 10000/3\n\n# use BCM pin define\nAOPIN = 23\t# 16 in BOARD\nRSTPIN = 12\t# 32 in BOARD\n\nSPI_PORT = 1\nSPI_CH = 0\nSPI_SPEED = 4000000\n\nCOLOR_RED \t= (255,0,0)\nCOLOR_GREEN = (0,255,0)\nCOLOR_BLUE\t= (0,0,255)\nCOLOR_WHITE\t= (255,255,255)\nCOLOR_BLACK = (0,0,0)\nCOLOR_YELLOW = (255,255,0)\nCOLOR_PURPLE = (255,0, 255)\nCOLOR_CYAN = (0, 255,255)\nTFT_SIZE = (128, 128)\n\nLINE1Y = 15\nLINE2Y = 30\nLINE3Y = 45\nLINE4Y = 65\nLINE5Y = 80\nLINE6Y = 100\n\nSPACE1 = 15\nSPACE2 = 20\n\ntime.sleep(1)\nC12880 = cdll.LoadLibrary(C12880_LIB)\n\nif len(sys.argv) < 6:\n\terror_str = str(sys.argv[0]) + \" led1_current led2_current led_stable_time int_time1 int_time2\"\n\tprint(error_str)\nelse:\n\t# board initialization \n\tC12880.Setup() # init spectrometer\n\tGPIO.setmode(GPIO.BCM)\n\tGPIO.setwarnings(False)\n\tGPIO.setup(pin_meas, GPIO.IN)\n\tGPIO.setup(pin_black, GPIO.IN)\n\tGPIO.setup(pin_led, GPIO.OUT)\n\tGPIO.output(pin_led, GPIO.LOW)\n\tGPIO.setup(GATE_LED_PIN1, GPIO.OUT)\n\tGPIO.setup(GATE_LED_PIN2, GPIO.OUT)\n\tGPIO.output(GATE_LED_PIN1, GPIO.HIGH)\t#close\n\tGPIO.output(GATE_LED_PIN2, GPIO.HIGH)\t#close\n\n\tdata1 = (c_uint * 288)() # data to store spectrum data\n\tdata2 = (c_uint 
* 288)()\n\tmeas = 1\n\tblack = 1\n\tfnameindex = 0\n\n\t# Display init\n\tspi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz = SPI_SPEED)\n\tdisp = TFT.ST7735(dc = AOPIN, rst = RSTPIN, spi = spi, width = 128, height = 128)\n\tdisp.begin()\n\tdisp.clear()\n\timg = Image.new('RGB', TFT_SIZE, COLOR_WHITE)\n\tdraw = ImageDraw.Draw(img)\n\tfont = \"/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf\"\n\tfontout = ImageFont.truetype(font,11)\n\tdraw.text((0,LINE1Y), \" Mode: Measure\", font = fontout, fill = COLOR_BLUE)\n\tdraw.text((0,LINE2Y), \" Bilirubin\", font = fontout, fill = COLOR_BLUE)\n\tdraw.text((0,LINE4Y), \" SiO2\", font = fontout, fill = COLOR_BLUE)\n\tdisp.display(img)\n\n\tled1_current = int(sys.argv[1])\n\tled2_current = int(sys.argv[2])\n\tled_stable_time = float(sys.argv[3])\n\tint_time1 = int(sys.argv[4])\n\tint_time2 = int(sys.argv[5])\n\n\tif (led1_current < LED_CURR_MIN):\n\t\tled1_current = LED_CURR_MIN\n\telif (led1_current > LED_CURR_MAX):\n\t\tled1_current = LED_CURR_MAX\n\n\tif (led2_current < LED_CURR_MIN):\n\t\tled2_current = LED_CURR_MIN\n\telif (led2_current > LED_CURR_MAX):\n\t\tled2_current = LED_CURR_MAX\n\n\tprint(\"led1_current = \"+ str(led1_current))\n\tprint(\"led2_current = \"+ str(led2_current))\n\n\tled1_duty = (led1_current - LED_CURR_MIN)*LED_DUTY_CONST\n\tled2_duty = (led2_current - LED_CURR_MIN)*LED_DUTY_CONST\n\n\tprint(\"led1_duty = \"+ str(led1_duty))\n\tprint(\"led2_duty = \"+ str(led2_duty))\n\n\tpi = pigpio.pi()\n\n\twhile (1):\n\t\t#wait until black or meas buttom is pressed\n\t\twhile (meas and black):\n\t\t\tif GPIO.input(pin_meas) == GPIO.LOW:\n\t\t\t\tmeas = 0\n\t\t\t\tprint(\"meas low\")\n\t\t\tif GPIO.input(pin_black) == GPIO.LOW:\n\t\t\t\tblack = 0\n\t\t\t\tprint(\"black low\")\n\n\t\tGPIO.output(pin_led, GPIO.HIGH)\n\t\tpi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))\n\t\tpi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))\n\t\tif (led1_duty > 0):\n\t\t\tGPIO.output(GATE_LED_PIN1, GPIO.LOW)\t# 
open\n\t\tif (led2_duty > 0):\n\t\t\tGPIO.output(GATE_LED_PIN2, GPIO.LOW)\t# open\n\n\t\ttime.sleep(led_stable_time)\n\n\t\tif (black == 0):\n\t\t\tfname = \"dual_black.txt\"\n\t\telse:\n\t\t\tfname = \"dual_desktop_\" + str(fnameindex) + \".txt\"\n\t\tfname = HOME_DIR + fname\n\n\t\t#C12880.ReadSpectrometer(int_time, data)\n\t\tC12880.Read2Spectrometer(int_time1, int_time2, data1, data2)\n\n\t\t# print the data on tft screen \n\t\tdraw.rectangle((0, LINE3Y, 128, LINE3Y+SPACE2), COLOR_WHITE)\n\t\tdraw.rectangle((0, LINE5Y, 128, LINE5Y+SPACE2), COLOR_WHITE)\n\t\tdraw.rectangle((0, LINE6Y, 128, LINE6Y+SPACE1), COLOR_WHITE)\n\t\tfontout = ImageFont.truetype(font,16)\n\t\tdraw.text((0,LINE3Y),\" 12.1 mg/dL\", font = fontout, fill = COLOR_RED)\n\t\tdraw.text((0,LINE5Y),\" 66%\", font = fontout, fill = COLOR_RED)\n\t\tfontout = ImageFont.truetype(font,10)\n\t\tdraw.text((0,LINE6Y),str(datetime.datetime.now()), font = fontout, fill = COLOR_BLUE)\n\t\tdisp.display(img)\n\n\t\t#out = [str(line) + '\\n' for line in data]\n\t\tfp = open(fname, \"w+\")\n\t\t#print(out)\n\t\t#fp.writelines(out)\n\t\tfor i in range(0,288):\n\t\t\tfp.write(str(data1[i]) + \", \" + str(data2[i]) + \", \\n\")\n\t\tfp.close()\n\n\t\tif (meas == 0):\n\t\t\tfnameindex = fnameindex + 1\n\n\t\tpi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)\n\t\tpi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)\n\t\tGPIO.output(GATE_LED_PIN1, GPIO.HIGH) # close\n\t\tGPIO.output(GATE_LED_PIN2, GPIO.HIGH) # close\n\n\t\t# time.sleep(led_stable_time)\t# for LED test\n\n\t\tmeas = 1\n\t\tblack = 1\n\n\t\tGPIO.output(pin_led, GPIO.LOW) #turn off measure LED\n\t\tprint(\"done\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Metals(str, Enum):
gold = 'gold'
silver = 'silver'
class PriceFilter(BaseModel):
type: PriceSort
price: float
class ProductSearch(BaseModel):
price: Optional[PriceFilter]
metals: Optional[List[Metals]]
size: Optional[Sizes]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PriceSort(str, Enum):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Metals(str, Enum):
gold = 'gold'
silver = 'silver'
class PriceFilter(BaseModel):
type: PriceSort
price: float
class ProductSearch(BaseModel):
price: Optional[PriceFilter]
metals: Optional[List[Metals]]
size: Optional[Sizes]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Sizes(str, Enum):
one_gram = '1g'
two_and_half_gram = '2.5g'
one_ounce = '1oz'
five_ounce = '5oz'
ten_ounce = '10oz'
class PriceSort(str, Enum):
gte = 'gte'
lte = 'lte'
class Metals(str, Enum):
gold = 'gold'
silver = 'silver'
class PriceFilter(BaseModel):
type: PriceSort
price: float
class ProductSearch(BaseModel):
price: Optional[PriceFilter]
metals: Optional[List[Metals]]
size: Optional[Sizes]
<|reserved_special_token_1|>
from enum import Enum
from typing import List, Optional
from pydantic import BaseModel
class Sizes(str, Enum):
    """Product sizes that a search may filter on."""

    one_gram = "1g"
    two_and_half_gram = "2.5g"
    one_ounce = "1oz"
    five_ounce = "5oz"
    ten_ounce = "10oz"
class PriceSort(str, Enum):
    """Direction of a price comparison: at-least (gte) or at-most (lte)."""

    gte = "gte"
    lte = "lte"
class Metals(str, Enum):
    """Metal types a product can be made of."""

    gold = "gold"
    silver = "silver"
class PriceFilter(BaseModel):
    """A single price constraint: compare a product's price against ``price``
    in the direction given by ``type``."""
    # NOTE: the field name `type` shadows the builtin, but renaming it would
    # change the public request schema, so it is kept as-is.
    type: PriceSort
    price: float
class ProductSearch(BaseModel):
    """Top-level search request; every filter is optional, and a missing
    field means no constraint on that dimension."""
    price: Optional[PriceFilter]
    metals: Optional[List[Metals]]
    size: Optional[Sizes]
<|reserved_special_token_1|>
from enum import Enum
from typing import List, Optional
from pydantic import BaseModel
class Sizes(str, Enum):
    """Product sizes that a search may filter on."""
    one_gram = "1g"
    two_and_half_gram = "2.5g"
    one_ounce = "1oz"
    five_ounce = "5oz"
    ten_ounce = "10oz"
class PriceSort(str, Enum):
    """Direction of a price comparison: at-least (gte) or at-most (lte)."""
    gte = "gte"
    lte = "lte"
class Metals(str, Enum):
    """Metal types a product can be made of."""
    gold = "gold"
    silver = "silver"
class PriceFilter(BaseModel):
    """A single price constraint pairing a comparison direction with a threshold."""
    type: PriceSort
    price: float
class ProductSearch(BaseModel):
    """Top-level search request; every filter is optional (None means no constraint)."""
    price: Optional[PriceFilter]
    metals: Optional[List[Metals]]
    size: Optional[Sizes]
|
flexible
|
{
"blob_id": "442c6c4894fc01d0f8142f3dcedfd51ba57aedd1",
"index": 3304,
"step-1": "<mask token>\n\n\nclass Metals(str, Enum):\n gold = 'gold'\n silver = 'silver'\n\n\nclass PriceFilter(BaseModel):\n type: PriceSort\n price: float\n\n\nclass ProductSearch(BaseModel):\n price: Optional[PriceFilter]\n metals: Optional[List[Metals]]\n size: Optional[Sizes]\n",
"step-2": "<mask token>\n\n\nclass PriceSort(str, Enum):\n <mask token>\n <mask token>\n\n\nclass Metals(str, Enum):\n gold = 'gold'\n silver = 'silver'\n\n\nclass PriceFilter(BaseModel):\n type: PriceSort\n price: float\n\n\nclass ProductSearch(BaseModel):\n price: Optional[PriceFilter]\n metals: Optional[List[Metals]]\n size: Optional[Sizes]\n",
"step-3": "<mask token>\n\n\nclass Sizes(str, Enum):\n one_gram = '1g'\n two_and_half_gram = '2.5g'\n one_ounce = '1oz'\n five_ounce = '5oz'\n ten_ounce = '10oz'\n\n\nclass PriceSort(str, Enum):\n gte = 'gte'\n lte = 'lte'\n\n\nclass Metals(str, Enum):\n gold = 'gold'\n silver = 'silver'\n\n\nclass PriceFilter(BaseModel):\n type: PriceSort\n price: float\n\n\nclass ProductSearch(BaseModel):\n price: Optional[PriceFilter]\n metals: Optional[List[Metals]]\n size: Optional[Sizes]\n",
"step-4": "from enum import Enum\nfrom typing import List, Optional\nfrom pydantic import BaseModel\n\n\nclass Sizes(str, Enum):\n one_gram = '1g'\n two_and_half_gram = '2.5g'\n one_ounce = '1oz'\n five_ounce = '5oz'\n ten_ounce = '10oz'\n\n\nclass PriceSort(str, Enum):\n gte = 'gte'\n lte = 'lte'\n\n\nclass Metals(str, Enum):\n gold = 'gold'\n silver = 'silver'\n\n\nclass PriceFilter(BaseModel):\n type: PriceSort\n price: float\n\n\nclass ProductSearch(BaseModel):\n price: Optional[PriceFilter]\n metals: Optional[List[Metals]]\n size: Optional[Sizes]\n",
"step-5": "from enum import Enum\nfrom typing import List, Optional\nfrom pydantic import BaseModel\n\n\nclass Sizes(str, Enum):\n one_gram = \"1g\"\n two_and_half_gram = \"2.5g\"\n one_ounce = \"1oz\"\n five_ounce = \"5oz\"\n ten_ounce = \"10oz\"\n\n\nclass PriceSort(str, Enum):\n gte = \"gte\"\n lte = \"lte\"\n\n\nclass Metals(str, Enum):\n gold = \"gold\"\n silver = \"silver\"\n\n\nclass PriceFilter(BaseModel):\n type: PriceSort\n price: float\n\n\nclass ProductSearch(BaseModel):\n price: Optional[PriceFilter]\n metals: Optional[List[Metals]]\n size: Optional[Sizes]\n",
"step-ids": [
4,
5,
8,
9,
10
]
}
|
[
4,
5,
8,
9,
10
] |
<|reserved_special_token_0|>
class Rules:
def __init__(self):
self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.
rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':
self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}
<|reserved_special_token_0|>
def rule1(self, dom):
return dom.find_all(self._img_without_alt)
<|reserved_special_token_0|>
def rule3(self, dom):
clrcue_arr = []
for fnt in dom.find_all('font'):
if fnt.has_attr('color'):
clrcue_arr.append(fnt)
for spn in dom.find_all('span'):
if spn.has_attr('style'):
clrcue_arr.append(spn)
return clrcue_arr
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def rule6(self, dom):
lbl_arr = []
inputElems = []
inputElems.extend(dom.find_all(['textarea', 'select']))
inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',
'radio', 'file']))
labels = dom.find_all('label')
for input in inputElems:
hasLabel = False
if input.has_attr('id'):
id = input['id']
for lbl in labels:
if lbl.has_attr('for') and lbl['for'] == id:
hasLabel = True
break
if not hasLabel:
lbl_arr.append(input)
return lbl_arr
<|reserved_special_token_0|>
def rule8(self, dom):
title_arr = []
isTitle = dom.find('title')
if isTitle is None:
title_arr.append(dom.find('head'))
return title_arr
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _img_without_alt(self, tag):
return tag.name == 'img' and not tag.has_attr('alt')
def _tbl_without_summ(self, tag):
return tag.name == 'table' and not tag.has_attr('summary')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Rules:
def __init__(self):
self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.
rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':
self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}
def getRule(self, id):
return self.ruleCollection[id]
def rule1(self, dom):
return dom.find_all(self._img_without_alt)
<|reserved_special_token_0|>
def rule3(self, dom):
clrcue_arr = []
for fnt in dom.find_all('font'):
if fnt.has_attr('color'):
clrcue_arr.append(fnt)
for spn in dom.find_all('span'):
if spn.has_attr('style'):
clrcue_arr.append(spn)
return clrcue_arr
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def rule6(self, dom):
lbl_arr = []
inputElems = []
inputElems.extend(dom.find_all(['textarea', 'select']))
inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',
'radio', 'file']))
labels = dom.find_all('label')
for input in inputElems:
hasLabel = False
if input.has_attr('id'):
id = input['id']
for lbl in labels:
if lbl.has_attr('for') and lbl['for'] == id:
hasLabel = True
break
if not hasLabel:
lbl_arr.append(input)
return lbl_arr
def rule7(self, dom):
dblclk_arr = []
dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)
return dblclk_arr
def rule8(self, dom):
title_arr = []
isTitle = dom.find('title')
if isTitle is None:
title_arr.append(dom.find('head'))
return title_arr
def rule9(self, dom):
link_arr = []
url_tags = ['http', 'https', '://www.', 'www']
for link in dom.find_all('a'):
if not ('http' in link or 'https' in link or '://www.' in link or
'www' in link):
link_arr.append(link)
return link_arr
<|reserved_special_token_0|>
def _img_without_alt(self, tag):
return tag.name == 'img' and not tag.has_attr('alt')
def _tbl_without_summ(self, tag):
return tag.name == 'table' and not tag.has_attr('summary')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Rules:
def __init__(self):
self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.
rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':
self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}
def getRule(self, id):
return self.ruleCollection[id]
def rule1(self, dom):
return dom.find_all(self._img_without_alt)
def rule2(self, dom):
video_arr = []
for embed in dom.find_all('embed'):
if not embed.noembed:
video_arr.append(embed)
return video_arr
def rule3(self, dom):
clrcue_arr = []
for fnt in dom.find_all('font'):
if fnt.has_attr('color'):
clrcue_arr.append(fnt)
for spn in dom.find_all('span'):
if spn.has_attr('style'):
clrcue_arr.append(spn)
return clrcue_arr
<|reserved_special_token_0|>
def rule5(self, dom):
twcap_arr = []
for tb in dom.find_all('table'):
if not tb.caption:
twcap_arr.append(tb)
return twcap_arr
def rule6(self, dom):
lbl_arr = []
inputElems = []
inputElems.extend(dom.find_all(['textarea', 'select']))
inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',
'radio', 'file']))
labels = dom.find_all('label')
for input in inputElems:
hasLabel = False
if input.has_attr('id'):
id = input['id']
for lbl in labels:
if lbl.has_attr('for') and lbl['for'] == id:
hasLabel = True
break
if not hasLabel:
lbl_arr.append(input)
return lbl_arr
def rule7(self, dom):
dblclk_arr = []
dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)
return dblclk_arr
def rule8(self, dom):
title_arr = []
isTitle = dom.find('title')
if isTitle is None:
title_arr.append(dom.find('head'))
return title_arr
def rule9(self, dom):
link_arr = []
url_tags = ['http', 'https', '://www.', 'www']
for link in dom.find_all('a'):
if not ('http' in link or 'https' in link or '://www.' in link or
'www' in link):
link_arr.append(link)
return link_arr
<|reserved_special_token_0|>
def _img_without_alt(self, tag):
return tag.name == 'img' and not tag.has_attr('alt')
def _tbl_without_summ(self, tag):
return tag.name == 'table' and not tag.has_attr('summary')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Rules:
def __init__(self):
self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.
rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':
self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}
def getRule(self, id):
return self.ruleCollection[id]
def rule1(self, dom):
return dom.find_all(self._img_without_alt)
def rule2(self, dom):
video_arr = []
for embed in dom.find_all('embed'):
if not embed.noembed:
video_arr.append(embed)
return video_arr
def rule3(self, dom):
clrcue_arr = []
for fnt in dom.find_all('font'):
if fnt.has_attr('color'):
clrcue_arr.append(fnt)
for spn in dom.find_all('span'):
if spn.has_attr('style'):
clrcue_arr.append(spn)
return clrcue_arr
<|reserved_special_token_0|>
def rule5(self, dom):
twcap_arr = []
for tb in dom.find_all('table'):
if not tb.caption:
twcap_arr.append(tb)
return twcap_arr
def rule6(self, dom):
lbl_arr = []
inputElems = []
inputElems.extend(dom.find_all(['textarea', 'select']))
inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',
'radio', 'file']))
labels = dom.find_all('label')
for input in inputElems:
hasLabel = False
if input.has_attr('id'):
id = input['id']
for lbl in labels:
if lbl.has_attr('for') and lbl['for'] == id:
hasLabel = True
break
if not hasLabel:
lbl_arr.append(input)
return lbl_arr
def rule7(self, dom):
dblclk_arr = []
dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)
return dblclk_arr
def rule8(self, dom):
title_arr = []
isTitle = dom.find('title')
if isTitle is None:
title_arr.append(dom.find('head'))
return title_arr
def rule9(self, dom):
link_arr = []
url_tags = ['http', 'https', '://www.', 'www']
for link in dom.find_all('a'):
if not ('http' in link or 'https' in link or '://www.' in link or
'www' in link):
link_arr.append(link)
return link_arr
def rule10(self, dom):
tab_arr = []
for tab in dom.find_all('a', 'input', ondblclick=True, onkeydown=
True, onkeypress=True):
if not tab.has_attr('tabindex'):
tab_arr.append(tab)
return tab_arr
def _img_without_alt(self, tag):
return tag.name == 'img' and not tag.has_attr('alt')
def _tbl_without_summ(self, tag):
return tag.name == 'table' and not tag.has_attr('summary')
<|reserved_special_token_1|>
from bs4 import BeautifulSoup
import re
class Rules:
    """Registry of HTML accessibility-audit rules.

    Each ``ruleN`` method takes a parsed BeautifulSoup document (``dom``) and
    returns a list of elements that violate that rule.
    """
    def __init__(self):
        # Map string ids ("1".."10") to the corresponding rule callables.
        self.ruleCollection = {
            "1" : self.rule1,
            "2" : self.rule2,
            "3" : self.rule3,
            "4" : self.rule4,
            "5" : self.rule5,
            "6" : self.rule6,
            "7" : self.rule7,
            "8" : self.rule8,
            "9" : self.rule9,
            "10" : self.rule10,
            }
    def getRule(self, id):
        # Look up a rule callable by its string id; raises KeyError for unknown ids.
        return self.ruleCollection[id]
    # Image without text alternatives
    def rule1(self,dom):
        return dom.find_all(self._img_without_alt)
    # Embeded multimedia without noembed (text or audio)
    def rule2(self,dom):
        video_arr = []
        for embed in dom.find_all("embed"):
            if not embed.noembed:
                video_arr.append(embed)
        return video_arr
    #color cues
    #without the definitions in css
    #This rule needs to be improved
    def rule3(self,dom):
        # Flags <font color=...> and any <span style=...> as potential
        # color-only cues (styles are not parsed, so this over-approximates).
        clrcue_arr = []
        for fnt in dom.find_all('font'):
            if fnt.has_attr('color'):
                clrcue_arr.append(fnt)
        for spn in dom.find_all('span'):
            if spn.has_attr('style'):
                clrcue_arr.append(spn)
        return clrcue_arr
    #Table without summary
    def rule4(self,dom):
        return dom.find_all(self._tbl_without_summ)
    #Table without caption
    def rule5(self,dom):
        twcap_arr = [];
        for tb in dom.find_all("table"):
            if not tb.caption:
                twcap_arr.append(tb)
        return twcap_arr
    # Form controls without an associated <label for="..."> element
    def rule6(self,dom):
        lbl_arr = [];
        inputElems =[]
        inputElems.extend(dom.find_all(["textarea", "select"]))
        inputElems.extend(dom.find_all(type=["text","password", "checkbox", "radio", "file"]))
        labels = dom.find_all('label')
        for input in inputElems:
            hasLabel = False
            if input.has_attr('id'):
                id = input['id']
                for lbl in labels:
                    if lbl.has_attr("for") and lbl['for'] == id:
                        hasLabel = True
                        break
            if not hasLabel:
                lbl_arr.append(input)
        return lbl_arr
    # Elements with a double-click handler but no keyboard (keypress) handler
    def rule7(self,dom):
        dblclk_arr = []
        dblclk_arr = dom.find_all(ondblclick = True, onkeypress = False)
        return dblclk_arr
    # Document missing a <title>; flags the <head> element when absent
    def rule8(self,dom):
        title_arr = []
        isTitle = dom.find('title')
        if isTitle is None:
            title_arr.append(dom.find('head'))
        return title_arr
    def rule9(self,dom):
        link_arr = []
        # NOTE(review): url_tags is unused; the condition below repeats these
        # literals instead of iterating over it.
        url_tags = ['http', 'https', '://www.' , 'www' ]
        for link in dom.find_all('a'):
            # NOTE(review): `'http' in link` applies containment to the Tag
            # itself rather than to its href attribute or text — presumably
            # link['href'] or link.get_text() was intended; verify against
            # BeautifulSoup's Tag __contains__ semantics before relying on this.
            if not ('http' in link or 'https' in link or '://www.' in link or 'www' in link):
                link_arr.append(link)
        return link_arr
    # Scriptable elements lacking an explicit tabindex
    def rule10(self,dom):
        tab_arr = []
        # NOTE(review): in find_all('a', 'input', ...) the second positional
        # argument is not a second tag name (bs4 treats it as an attrs/class
        # filter) — confirm whether <input> elements were meant to be included.
        for tab in dom.find_all('a', 'input', ondblclick = True, onkeydown = True, onkeypress = True):
            if not tab.has_attr('tabindex'):
                tab_arr.append(tab)
        return tab_arr
    def _img_without_alt(self,tag):
        # Predicate: <img> elements lacking an alt attribute.
        return tag.name == "img" and not tag.has_attr("alt")
    def _tbl_without_summ(self,tag):
        # Predicate: <table> elements lacking a summary attribute.
        return tag.name == "table" and not tag.has_attr("summary")
#for testing
|
flexible
|
{
"blob_id": "7747cbb1a1ed191b616b0d1bcfd51cdea05067f5",
"index": 5954,
"step-1": "<mask token>\n\n\nclass Rules:\n\n def __init__(self):\n self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.\n rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':\n self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}\n <mask token>\n\n def rule1(self, dom):\n return dom.find_all(self._img_without_alt)\n <mask token>\n\n def rule3(self, dom):\n clrcue_arr = []\n for fnt in dom.find_all('font'):\n if fnt.has_attr('color'):\n clrcue_arr.append(fnt)\n for spn in dom.find_all('span'):\n if spn.has_attr('style'):\n clrcue_arr.append(spn)\n return clrcue_arr\n <mask token>\n <mask token>\n\n def rule6(self, dom):\n lbl_arr = []\n inputElems = []\n inputElems.extend(dom.find_all(['textarea', 'select']))\n inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',\n 'radio', 'file']))\n labels = dom.find_all('label')\n for input in inputElems:\n hasLabel = False\n if input.has_attr('id'):\n id = input['id']\n for lbl in labels:\n if lbl.has_attr('for') and lbl['for'] == id:\n hasLabel = True\n break\n if not hasLabel:\n lbl_arr.append(input)\n return lbl_arr\n <mask token>\n\n def rule8(self, dom):\n title_arr = []\n isTitle = dom.find('title')\n if isTitle is None:\n title_arr.append(dom.find('head'))\n return title_arr\n <mask token>\n <mask token>\n\n def _img_without_alt(self, tag):\n return tag.name == 'img' and not tag.has_attr('alt')\n\n def _tbl_without_summ(self, tag):\n return tag.name == 'table' and not tag.has_attr('summary')\n",
"step-2": "<mask token>\n\n\nclass Rules:\n\n def __init__(self):\n self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.\n rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':\n self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}\n\n def getRule(self, id):\n return self.ruleCollection[id]\n\n def rule1(self, dom):\n return dom.find_all(self._img_without_alt)\n <mask token>\n\n def rule3(self, dom):\n clrcue_arr = []\n for fnt in dom.find_all('font'):\n if fnt.has_attr('color'):\n clrcue_arr.append(fnt)\n for spn in dom.find_all('span'):\n if spn.has_attr('style'):\n clrcue_arr.append(spn)\n return clrcue_arr\n <mask token>\n <mask token>\n\n def rule6(self, dom):\n lbl_arr = []\n inputElems = []\n inputElems.extend(dom.find_all(['textarea', 'select']))\n inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',\n 'radio', 'file']))\n labels = dom.find_all('label')\n for input in inputElems:\n hasLabel = False\n if input.has_attr('id'):\n id = input['id']\n for lbl in labels:\n if lbl.has_attr('for') and lbl['for'] == id:\n hasLabel = True\n break\n if not hasLabel:\n lbl_arr.append(input)\n return lbl_arr\n\n def rule7(self, dom):\n dblclk_arr = []\n dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)\n return dblclk_arr\n\n def rule8(self, dom):\n title_arr = []\n isTitle = dom.find('title')\n if isTitle is None:\n title_arr.append(dom.find('head'))\n return title_arr\n\n def rule9(self, dom):\n link_arr = []\n url_tags = ['http', 'https', '://www.', 'www']\n for link in dom.find_all('a'):\n if not ('http' in link or 'https' in link or '://www.' in link or\n 'www' in link):\n link_arr.append(link)\n return link_arr\n <mask token>\n\n def _img_without_alt(self, tag):\n return tag.name == 'img' and not tag.has_attr('alt')\n\n def _tbl_without_summ(self, tag):\n return tag.name == 'table' and not tag.has_attr('summary')\n",
"step-3": "<mask token>\n\n\nclass Rules:\n\n def __init__(self):\n self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.\n rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':\n self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}\n\n def getRule(self, id):\n return self.ruleCollection[id]\n\n def rule1(self, dom):\n return dom.find_all(self._img_without_alt)\n\n def rule2(self, dom):\n video_arr = []\n for embed in dom.find_all('embed'):\n if not embed.noembed:\n video_arr.append(embed)\n return video_arr\n\n def rule3(self, dom):\n clrcue_arr = []\n for fnt in dom.find_all('font'):\n if fnt.has_attr('color'):\n clrcue_arr.append(fnt)\n for spn in dom.find_all('span'):\n if spn.has_attr('style'):\n clrcue_arr.append(spn)\n return clrcue_arr\n <mask token>\n\n def rule5(self, dom):\n twcap_arr = []\n for tb in dom.find_all('table'):\n if not tb.caption:\n twcap_arr.append(tb)\n return twcap_arr\n\n def rule6(self, dom):\n lbl_arr = []\n inputElems = []\n inputElems.extend(dom.find_all(['textarea', 'select']))\n inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',\n 'radio', 'file']))\n labels = dom.find_all('label')\n for input in inputElems:\n hasLabel = False\n if input.has_attr('id'):\n id = input['id']\n for lbl in labels:\n if lbl.has_attr('for') and lbl['for'] == id:\n hasLabel = True\n break\n if not hasLabel:\n lbl_arr.append(input)\n return lbl_arr\n\n def rule7(self, dom):\n dblclk_arr = []\n dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)\n return dblclk_arr\n\n def rule8(self, dom):\n title_arr = []\n isTitle = dom.find('title')\n if isTitle is None:\n title_arr.append(dom.find('head'))\n return title_arr\n\n def rule9(self, dom):\n link_arr = []\n url_tags = ['http', 'https', '://www.', 'www']\n for link in dom.find_all('a'):\n if not ('http' in link or 'https' in link or '://www.' 
in link or\n 'www' in link):\n link_arr.append(link)\n return link_arr\n <mask token>\n\n def _img_without_alt(self, tag):\n return tag.name == 'img' and not tag.has_attr('alt')\n\n def _tbl_without_summ(self, tag):\n return tag.name == 'table' and not tag.has_attr('summary')\n",
"step-4": "<mask token>\n\n\nclass Rules:\n\n def __init__(self):\n self.ruleCollection = {'1': self.rule1, '2': self.rule2, '3': self.\n rule3, '4': self.rule4, '5': self.rule5, '6': self.rule6, '7':\n self.rule7, '8': self.rule8, '9': self.rule9, '10': self.rule10}\n\n def getRule(self, id):\n return self.ruleCollection[id]\n\n def rule1(self, dom):\n return dom.find_all(self._img_without_alt)\n\n def rule2(self, dom):\n video_arr = []\n for embed in dom.find_all('embed'):\n if not embed.noembed:\n video_arr.append(embed)\n return video_arr\n\n def rule3(self, dom):\n clrcue_arr = []\n for fnt in dom.find_all('font'):\n if fnt.has_attr('color'):\n clrcue_arr.append(fnt)\n for spn in dom.find_all('span'):\n if spn.has_attr('style'):\n clrcue_arr.append(spn)\n return clrcue_arr\n <mask token>\n\n def rule5(self, dom):\n twcap_arr = []\n for tb in dom.find_all('table'):\n if not tb.caption:\n twcap_arr.append(tb)\n return twcap_arr\n\n def rule6(self, dom):\n lbl_arr = []\n inputElems = []\n inputElems.extend(dom.find_all(['textarea', 'select']))\n inputElems.extend(dom.find_all(type=['text', 'password', 'checkbox',\n 'radio', 'file']))\n labels = dom.find_all('label')\n for input in inputElems:\n hasLabel = False\n if input.has_attr('id'):\n id = input['id']\n for lbl in labels:\n if lbl.has_attr('for') and lbl['for'] == id:\n hasLabel = True\n break\n if not hasLabel:\n lbl_arr.append(input)\n return lbl_arr\n\n def rule7(self, dom):\n dblclk_arr = []\n dblclk_arr = dom.find_all(ondblclick=True, onkeypress=False)\n return dblclk_arr\n\n def rule8(self, dom):\n title_arr = []\n isTitle = dom.find('title')\n if isTitle is None:\n title_arr.append(dom.find('head'))\n return title_arr\n\n def rule9(self, dom):\n link_arr = []\n url_tags = ['http', 'https', '://www.', 'www']\n for link in dom.find_all('a'):\n if not ('http' in link or 'https' in link or '://www.' 
in link or\n 'www' in link):\n link_arr.append(link)\n return link_arr\n\n def rule10(self, dom):\n tab_arr = []\n for tab in dom.find_all('a', 'input', ondblclick=True, onkeydown=\n True, onkeypress=True):\n if not tab.has_attr('tabindex'):\n tab_arr.append(tab)\n return tab_arr\n\n def _img_without_alt(self, tag):\n return tag.name == 'img' and not tag.has_attr('alt')\n\n def _tbl_without_summ(self, tag):\n return tag.name == 'table' and not tag.has_attr('summary')\n",
"step-5": "from bs4 import BeautifulSoup\nimport re\n\nclass Rules:\n def __init__(self):\n self.ruleCollection = {\n \"1\" : self.rule1,\n \"2\" : self.rule2,\n \"3\" : self.rule3,\n \"4\" : self.rule4,\n \"5\" : self.rule5,\n \"6\" : self.rule6,\n \"7\" : self.rule7,\n \"8\" : self.rule8,\n \"9\" : self.rule9,\n \"10\" : self.rule10,\n }\n \n def getRule(self, id):\n return self.ruleCollection[id]\n # Image without text alternatives\n def rule1(self,dom):\n return dom.find_all(self._img_without_alt)\n \n # Embeded multimedia without noembed (text or audio)\n def rule2(self,dom):\n video_arr = []\n for embed in dom.find_all(\"embed\"):\n if not embed.noembed:\n video_arr.append(embed)\n return video_arr\n \n #color cues\n #without the definitions in css\n #This rule needs to be improved\n def rule3(self,dom):\n clrcue_arr = []\n for fnt in dom.find_all('font'):\n if fnt.has_attr('color'):\n clrcue_arr.append(fnt)\n for spn in dom.find_all('span'):\n if spn.has_attr('style'):\n clrcue_arr.append(spn)\n return clrcue_arr\n \n #Table without summary\n def rule4(self,dom):\n return dom.find_all(self._tbl_without_summ)\n \n \n #Table without caption\n def rule5(self,dom):\n twcap_arr = [];\n for tb in dom.find_all(\"table\"):\n if not tb.caption:\n twcap_arr.append(tb)\n return twcap_arr\n \n def rule6(self,dom):\n lbl_arr = [];\n inputElems =[]\n inputElems.extend(dom.find_all([\"textarea\", \"select\"]))\n inputElems.extend(dom.find_all(type=[\"text\",\"password\", \"checkbox\", \"radio\", \"file\"]))\n labels = dom.find_all('label')\n for input in inputElems:\n hasLabel = False\n if input.has_attr('id'):\n id = input['id']\n \n for lbl in labels:\n if lbl.has_attr(\"for\") and lbl['for'] == id:\n hasLabel = True\n break\n \n if not hasLabel:\n lbl_arr.append(input)\n\n return lbl_arr\n \n def rule7(self,dom):\n dblclk_arr = []\n dblclk_arr = dom.find_all(ondblclick = True, onkeypress = False)\n return dblclk_arr\n \n def rule8(self,dom):\n title_arr = []\n isTitle = 
dom.find('title')\n if isTitle is None:\n title_arr.append(dom.find('head'))\n return title_arr\n \n def rule9(self,dom):\n link_arr = []\n url_tags = ['http', 'https', '://www.' , 'www' ]\n for link in dom.find_all('a'):\n if not ('http' in link or 'https' in link or '://www.' in link or 'www' in link):\n link_arr.append(link)\n \n return link_arr\n \n def rule10(self,dom):\n tab_arr = []\n for tab in dom.find_all('a', 'input', ondblclick = True, onkeydown = True, onkeypress = True):\n if not tab.has_attr('tabindex'):\n tab_arr.append(tab)\n \n return tab_arr \n \n def _img_without_alt(self,tag):\n return tag.name == \"img\" and not tag.has_attr(\"alt\")\n \n def _tbl_without_summ(self,tag):\n return tag.name == \"table\" and not tag.has_attr(\"summary\")\n \n#for testing\n\n\n\n",
"step-ids": [
8,
11,
13,
14,
17
]
}
|
[
8,
11,
13,
14,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_search_track():
sp = Spotify()
t = sp.search_track('avocado')
assert_equal(t.id, '1UyzA43l3OIcJ6jd3hh3ac')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from spoetify.spotify import Spotify
from nose.tools import assert_equal
def test_search_track():
sp = Spotify()
t = sp.search_track('avocado')
assert_equal(t.id, '1UyzA43l3OIcJ6jd3hh3ac')
<|reserved_special_token_1|>
"""Test Spotify module"""
from spoetify.spotify import Spotify
from nose.tools import assert_equal
def test_search_track():
    """Searching for 'avocado' should resolve to the known Spotify track id."""
    client = Spotify()
    track = client.search_track("avocado")
    assert_equal(track.id, "1UyzA43l3OIcJ6jd3hh3ac")
|
flexible
|
{
"blob_id": "337309da79ce9d90010fef5c171b6b344e6dc63f",
"index": 5937,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_search_track():\n sp = Spotify()\n t = sp.search_track('avocado')\n assert_equal(t.id, '1UyzA43l3OIcJ6jd3hh3ac')\n",
"step-3": "<mask token>\nfrom spoetify.spotify import Spotify\nfrom nose.tools import assert_equal\n\n\ndef test_search_track():\n sp = Spotify()\n t = sp.search_track('avocado')\n assert_equal(t.id, '1UyzA43l3OIcJ6jd3hh3ac')\n",
"step-4": "\"\"\"Test Spotify module\"\"\"\nfrom spoetify.spotify import Spotify\nfrom nose.tools import assert_equal\n\n\ndef test_search_track():\n sp = Spotify()\n t = sp.search_track(\"avocado\")\n assert_equal(t.id, \"1UyzA43l3OIcJ6jd3hh3ac\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Packages
import PySimpleGUI as sg
import mysql.connector
import secrets
# TODO Add a view all button
# TODO Catch errors (specifically for TimeDate mismatches)
# TODO Add a downtime graph
# TODO Add a system feedback window instead of putting this in the out id textbox
error_sel_flag = False # Flag to check whether an error has been selected before performing logic requiring it
guest_user_flag = False # Flag to check whether the user is a guest, limiting which functions of the application (and database) they can use
unresolved_errors = [] # MEEP, could probably do without this in the refactor
current_error = { # Dictionary to hold all information about the current/selected error. This removes the need to hit the database for every bit of logic that requires an error
    'fault_id': 'Null',
    'fault_status': 'Null',
    'fault_description': 'Null',
    'voyage': 'Null',
    'time_of_fault': 'Null',
    'time_of_solution': 'Null',
    'fault_type': 'Null',
    'location': 'Null',
    'sensor_id': 'Null',
    'sensor_type': 'Null',
    'fault_message': 'Null',
    'log_date': 'Null'
}

# Dictionary for search parameters. NOTE: the deviation from this script's snake_case
# naming convention is deliberate — keys mirror the database column names.
search_dict = {
    'Voyage': '',
    'FaultStatus': '',
    'FaultType': '',
    'Location': '',
    'SensorID': '',
    'SensorType': '',
    'TimeOfFault': '',
    'TimeOfSolution': ''
}
class DatabaseConnection():
''' This class instantiates and maintains the database connection, and encapsulates all functions that work directly with that connection.'''
def __init__(self, host, user, password, database):
''' This function is called whenever a new instance of 'DatabaseConnection' is instantiated. It created the connection and cursor to the
database, both of which are used by other functions of this class.'''
try:
self.connection = mysql.connector.connect(
host=host,
user=user,
passwd=password,
database=database,
auth_plugin='mysql_native_password'
)
self.cursor = self.connection.cursor()
except mysql.connector.Error as e:
print("Error %d: %s" % (e.args[0], e.args[1]))
exit(69)
def save_to_errors(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message):
''' This function creates and carries out an 'INSERT' query for the 'errors' table. It forces null values for the time fields in the case that the GUI
returns blank values, this is to avoid a type mismatch with the database (This could probably be better handled somewhere else but it gets the job done for now).'''
if time_of_fault == '':
time_of_fault = "NULL"
if time_of_solution == '':
time_of_solution = "NULL"
if fault_status == '':
fault_status = "Unresolved"
insert_query = "INSERT INTO errors (FaultDescription, FaultMessage, FaultStatus, FaultType, Location, SensorID, SensorType, TimeOfFault, TimeOfSolution, Voyage) VALUES ('{}', '{}', '{}', '{}', '{}', '{}', '{}', {}, {}, '{}')".format(fault_desciption, fault_message, fault_status, fault_type, location, sensor_id, sensor_type, time_of_fault, time_of_solution, voyage)
print(insert_query)
self.cursor.execute(insert_query)
self.connection.commit()
def save_to_downtime(self, voyage, stop_time, start_time, reason, assosciated_error):
''' This function creates and carries out an 'INSERT' query for the 'downtime' table. It forces null values for the time fields in the case that the GUI
returns blank values, this is to avoid a type mismatch with the database (Again, this is not perfect but I'll relook it at a later stage).'''
insert_query = "INSERT INTO downtime (Voyage, StopTime, StartTime, Reason, AssosciatedError) VALUES ('{}', '{}', '{}', '{}', '{}')".format(voyage, stop_time, start_time, reason, assosciated_error)
print(insert_query)
self.cursor.execute(insert_query)
self.connection.commit()
pass
def fetch(self, fetch_query):
''' This function carries out a 'SELECT' query from the MySQL database and returns the result.'''
print("Fetch " + str(fetch_query))
_ = self.cursor.execute(fetch_query)
result = self.cursor.fetchall()
return result
def update(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message, fault_id):
# ToDo Test the robustness of this, seems like it doens't like updating with unchanged fields
if time_of_fault == 'None':
time_of_fault = "NULL"
if time_of_solution =='None':
time_of_solution = "NULL"
update_query = "UPDATE errors SET FaultStatus = '{}', FaultDescription = '{}', Voyage = '{}', TimeOfFault = {}, TimeOfSolution = {}, FaultType = '{}', Location = '{}', SensorID = '{}', SensorType = '{}', FaultMessage = '{}' WHERE FaultID = {}".format(fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message, fault_id)
print(update_query)
self.cursor.execute(update_query)
self.connection.commit()
print("Updated")
def search(self, voyage, status, fault_type, location, sensor_id, sensor_type, start_time, end_time):
    """Search the 'errors' table on any combination of the given fields.

    Builds a criteria dict, drops empty values, and requires at least two
    populated criteria — returning 0 otherwise, which is the sentinel the
    GUI already expects. Column names come only from the fixed keys below
    and values are bound as query parameters, so no user input ever
    reaches the SQL text (the previous version interpolated raw strings).
    """
    criteria = {
        'Voyage': voyage,
        'FaultStatus': status,
        'FaultType': fault_type,
        'Location': location,
        'SensorID': sensor_id,
        'SensorType': sensor_type,
        'TimeOfFault': start_time,
        'TimeOfSolution': end_time,
    }
    # Keep only the criteria the user actually filled in.
    criteria = {k: v for k, v in criteria.items() if v}
    if len(criteria) < 2:
        print("Please enter at least two search criteria (sorry, Nic rushed this section!)")
        return 0
    # Row-tuple comparison "(a, b) = (x, y)" is equivalent to ANDed equalities.
    where_clause = " AND ".join("{} = %s".format(column) for column in criteria)
    search_query = "SELECT * FROM errors WHERE " + where_clause
    print(search_query)
    self.cursor.execute(search_query, tuple(criteria.values()))
    return self.cursor.fetchall()
def shutdown(self):
    """Close the database cursor and connection.

    Best-effort: close errors are reported but never raised, since this
    runs while the application is quitting and there is nothing useful
    left to do with the failure.
    """
    try:
        self.cursor.close()
        self.connection.close()
    except Exception as e:  # broad on purpose: shutdown must not crash the app
        print("Error while closing database connection: " + str(e))
# Create window functions
def create_login_window():
    """Show the login window and return a connected DatabaseConnection.

    Blocks until the user logs in (normal or guest). A guest login sets
    the module-level guest_user_flag so the rest of the UI can disable
    privileged actions. Closing the window terminates the process with
    exit(69).
    """
    layout = [
        [sg.Text('Hostname: '), sg.In(size=(25, 0), key='-HOST-')],
        [sg.Text('Username: '), sg.In(size=(25, 0), key='-USER-')],
        [sg.Text('Password: '), sg.In(size=(25, 0), pad=(3, 0), password_char='*', key='-PASS-')],
        [sg.Button('Login', size=(14, 0), pad=((0, 10), (5, 0)), enable_events=True,
                   bind_return_key=True, key='-LOGIN-'),
         sg.Button('Guest Login.', size=(14, 0), pad=((10, 0), (5, 0)), enable_events=True,
                   key='-LOGIN GUEST-')],
    ]
    window = sg.Window("LLMSDID - Login",
                       layout=layout,
                       margins=(20, 10),
                       grab_anywhere=True,
                       default_button_element_size=(12, 1))
    while True:
        event, vals = window.read()
        if event == '-LOGIN-':
            # Connect with the credentials the user typed in.
            connection = DatabaseConnection(vals['-HOST-'], vals['-USER-'], vals['-PASS-'], "LLMSDID")
            window.close()
            return connection
        if event == '-LOGIN GUEST-':
            # Read-only guest account from the local secrets module.
            connection = DatabaseConnection('localhost', secrets.guestUsername, secrets.guestPassword, "LLMSDID")
            global guest_user_flag
            guest_user_flag = True
            window.close()
            return connection
        # If the user closes the window, stop the whole program.
        if event == sg.WIN_CLOSED:
            window.close()
            exit(69)
def create_update_window(selected_error, database):
    """Open a window for editing one logged error.

    Shows the current values read-only on the left and editable copies
    (pre-filled) on the right; pressing Update writes the edits back via
    database.update(). Blocks until the user saves or cancels.

    Args:
        selected_error: dict describing the selected fault (same keys as
            the module-level current_error dict).
        database: the live DatabaseConnection instance.
    """
    # Left column: read-only snapshot of the record as currently stored.
    update_col_1 = sg.Column([[sg.Frame('Current values', [[sg.Column([[sg.Text("Voyage: ", size=(12,1)), sg.Text(selected_error['voyage'])],
                              [sg.Text("Status: ", size=(12,1)), sg.Text(selected_error['fault_status'])],
                              [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40, 4))],
                              [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],
                              [sg.Text("Fault type: ", size=(12,1)), sg.Text(selected_error['fault_type'])],
                              [sg.Text("Fault location: ", size=(12,1)), sg.Text(selected_error['location'])],
                              [sg.Text("Sensor ID: ", size=(12,1)), sg.Text(selected_error['sensor_id'])],
                              [sg.Text("Sensor type: ", size=(12,1)), sg.Text(selected_error['sensor_type'])],
                              [sg.Text("From: ", size=(12,1)), sg.Text(selected_error['time_of_fault'])],
                              [sg.Text("To: ", size=(12,1)), sg.Text(selected_error['time_of_solution'])]],
                              )]])]])
    # Right column: editable inputs, pre-filled with the current values.
    update_col_2 = sg.Column([[sg.Frame('Updated values', [[sg.Column([[sg.Text("Voyage: ", size=(12,1)), sg.In(selected_error['voyage'], size=(40,1), key='-NEW VOYAGE-')],
                              [sg.Text("Status: ", size=(12,1)), sg.InputCombo(["Unresolved", "Resolved"], default_value=selected_error['fault_status'], key='-NEW STATUS-')],
                              [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4), key='-NEW DESC-')],
                              [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2), key='-NEW MESSAGE-')],
                              [sg.Text("Fault type: ", size=(12,1)), sg.In(selected_error['fault_type'], size=(40,1), key='-NEW FTYPE-')],
                              [sg.Text("Fault location: ", size=(12,1)), sg.In(selected_error['location'], size=(40,1), key='-NEW LOC-')],
                              [sg.Text("Sensor ID: ", size=(12,1)), sg.In(selected_error['sensor_id'], size=(40,1), key='-NEW ID-')],
                              [sg.Text("Sensor type: ", size=(12,1)), sg.In(selected_error['sensor_type'], size=(40,1), key='-NEW STYPE-')],
                              [sg.Text("From: ", size=(12,1)), sg.In(selected_error['time_of_fault'], size=(40,1), key='-NEW FROM-')],
                              [sg.Text("To: ", size=(12,1)), sg.In(selected_error['time_of_solution'], size=(40,1), key='-NEW TO-')]],
                              )]])]])
    # Bottom row: save / cancel actions.
    update_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Update", enable_events=True,
                                                                           tooltip="Press me if you'd like to update this fault.",
                                                                           key='-SAVE UPDATE-'),
                                                                 sg.Button("Cancel", enable_events=True,
                                                                           tooltip="Press me if you'd like to cancel this update.",
                                                                           key='-CANCEL UPDATE-')]])]])]])
    updateLayout = [[update_col_1, update_col_2], [update_col_3]]
    update_window = sg.Window("LLMSDID - Update",
                              layout=updateLayout,
                              margins=(200, 100),
                              grab_anywhere=True,
                              default_button_element_size=(12, 1)
                              )
    print("Updating " + str(selected_error['fault_id']))
    while True:
        update_event, update_value = update_window.read()
        if update_event == '-SAVE UPDATE-':
            # Argument order must match DatabaseConnection.update().
            database.update(update_value['-NEW STATUS-'], update_value['-NEW DESC-'], update_value['-NEW VOYAGE-'], update_value['-NEW FROM-'], update_value['-NEW TO-'], update_value['-NEW FTYPE-'], update_value['-NEW LOC-'], update_value['-NEW ID-'], update_value['-NEW STYPE-'], update_value['-NEW MESSAGE-'], selected_error['fault_id'])
            update_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if update_event == sg.WIN_CLOSED or update_event == '-CANCEL UPDATE-':
            update_window.close()
            break
def create_log_window(database):
    """Show the 'log a new error' form and save the result.

    Blocks until the user presses Save (which inserts a row via
    database.save_to_errors()) or cancels/closes the window.

    Args:
        database: the live DatabaseConnection instance.
    """
    log_layout = [
        [sg.Text("Fault description", size=(12,1)), sg.In(size=(40, 40), key='-DESCRIPTION-')],
        [sg.Text("Fault message", size=(12,1)), sg.In(size=(40, 40), key='-MESSAGE-')],
        [sg.Text("Status", size=(12,1)), sg.InputCombo(["Unresolved", "Resolved"], key='-STATUS-')],
        [sg.Text("Fault type", size=(12,1)), sg.In(size = (25, 1), key='-TYPE-')],
        [sg.Text("Location", size=(12,1)), sg.In(size=(25, 1), key='-LOCATION-')],
        [sg.Text("Sensor ID", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR ID-')],
        [sg.Text("Sensor type", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR TYPE-')],
        [sg.Text("Time of fault", tooltip = "dd-mm-yy hh:mm:ss", size=(12,1)), sg.In(size=(25, 1), key='-START-')],
        [sg.Text("Time of solution", tooltip = "dd-mm-yy hh:mm:ss", size=(12,1)), sg.In(size=(25, 1), key='-END-')],
        [sg.Text("Voyage", size=(12,1)), sg.In(size=(25, 1), key='-VOYAGE-')],
        [sg.Button("Save", enable_events=True, key='-LOG SAVE-'), sg.Button("Cancel", enable_events=True, key='-LOG CANCEL-')]
    ]
    log_window = sg.Window("LLMSDID - Log an error",
                           layout=log_layout,
                           margins=(200, 100),
                           grab_anywhere=True,
                           default_button_element_size=(12, 1)
                           )
    while True:
        log_event, log_values = log_window.read()
        if log_event == '-LOG SAVE-':
            # Argument order must match DatabaseConnection.save_to_errors().
            database.save_to_errors(log_values['-STATUS-'], log_values['-DESCRIPTION-'], log_values['-VOYAGE-'], log_values['-START-'], log_values['-END-'], log_values['-TYPE-'], log_values['-LOCATION-'], log_values['-SENSOR ID-'], log_values['-SENSOR TYPE-'], log_values['-MESSAGE-'])
            log_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if log_event == sg.WIN_CLOSED or log_event == '-LOG CANCEL-':
            log_window.close()
            break
def create_more_window(selected_error, database):
    """Show a read-only detail view of every field of the selected error.

    Blocks until the user presses Thanks or closes the window.

    Args:
        selected_error: dict describing the selected fault (same keys as
            the module-level current_error dict).
        database: the live DatabaseConnection instance (currently unused
            here; kept for a consistent window-function signature).
    """
    # NOTE(review): more_col_1 is built but never added to moreLayout below —
    # the labels column appears to be dead code; confirm before removing.
    more_col_1 = sg.Column([[sg.Frame('Parameter', [[sg.Column([[sg.Text("Fault ID: ")],
                            [sg.Text("Voyage: ")],
                            [sg.Text("Status: ")],
                            [sg.Text("Description: ")],
                            [sg.Text("Fault message: ")],
                            [sg.Text("Fault type: ")],
                            [sg.Text("Fault location: ")],
                            [sg.Text("Sensor ID: ")],
                            [sg.Text("Sensor type: ")],
                            [sg.Text("From: ")],
                            [sg.Text("To: ")],
                            [sg.Text("Log date: ")]],
                            )]])]])
    # Label + value rows for every stored field of the error.
    more_col_2 = sg.Column([[sg.Frame('Value', [[sg.Column([[sg.Text("Fault ID: ", size=(12,1)), sg.Text(selected_error['fault_id'], size=(40,1))],
                            [sg.Text("Voyage: ", size=(12,1)), sg.Text(selected_error['voyage'], size=(40,1))],
                            [sg.Text("Status: ", size=(12,1)), sg.Text(selected_error['fault_status'], size=(40,1))],
                            [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4))],
                            [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],
                            [sg.Text("Fault type: ", size=(12,1)), sg.Text(selected_error['fault_type'], size=(40,1))],
                            [sg.Text("Fault location: ", size=(12,1)), sg.Text(selected_error['location'], size=(40,1))],
                            [sg.Text("Sensor ID: ", size=(12,1)), sg.Text(selected_error['sensor_id'], size=(40,1))],
                            [sg.Text("Sensor type: ", size=(12,1)), sg.Text(selected_error['sensor_type'], size=(40,1))],
                            [sg.Text("From: ", size=(12,1)), sg.Text(selected_error['time_of_fault'], size=(40,1))],
                            [sg.Text("To: ", size=(12,1)), sg.Text(selected_error['time_of_solution'], size=(40,1))],
                            [sg.Text("Log date: ", size=(12,1)), sg.Text(selected_error['log_date'], size=(40,1))]],
                            )]])]])
    more_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Thanks", enable_events=True,
                                                                         tooltip="Press me if you're done having a look.",
                                                                         key='-THANKS-')
                                                               ]])]])]])
    moreLayout = [[more_col_2], [more_col_3]]
    more_window = sg.Window("LLMSDID - More",
                            layout=moreLayout,
                            margins=(200, 100),
                            grab_anywhere=True,
                            default_button_element_size=(12, 1)
                            )
    while True:
        more_event, more_value = more_window.read()
        # If the user closes the window, exit this loop so that the program can close
        if more_event == sg.WIN_CLOSED or more_event == '-THANKS-':
            more_window.close()
            break
def create_downtime_window(database):
    """Show the 'log some downtime' form and save the result.

    Blocks until the user presses Save (which inserts a row via
    database.save_to_downtime()) or cancels/closes the window.

    Args:
        database: the live DatabaseConnection instance.
    """
    downtime_layout = [
        [sg.Text("Voyage"), sg.In(size=(40, 40), key='-VOYAGE-')],
        [sg.Text("System Stop Time"), sg.In(size=(40, 40), key='-STOP-')],
        [sg.Text("System Restart Time", tooltip = "dd-mm-yy hh:mm:ss"), sg.In(size=(40, 40), key='-START-')],
        # NOTE(review): the date-format tooltip on the Reason field looks copy-pasted
        # from the time fields above — confirm before changing the string.
        [sg.Text("Reason for Downtime", tooltip = "dd-mm-yy hh:mm:ss"), sg.In(size=(25, 1), key='-REASON-')],
        [sg.Text("Assosciated Error"), sg.In(size=(25, 1), key='-ASSOSCIATED ERROR-')],
        [sg.Button("Save", enable_events=True, key='-LOG SAVE-'),
         sg.Button("Cancel", enable_events=True, key='-LOG CANCEL-')]
    ]
    downtime_window = sg.Window("LLMSDID - Log some downtime",
                                layout=downtime_layout,
                                margins=(200, 100),
                                grab_anywhere=True,
                                default_button_element_size=(12, 1)
                                )
    while True:
        downtime_event, downtime_values = downtime_window.read()
        if downtime_event == '-LOG SAVE-':
            # Argument order must match DatabaseConnection.save_to_downtime().
            database.save_to_downtime(downtime_values['-VOYAGE-'], downtime_values['-STOP-'], downtime_values['-START-'], downtime_values['-REASON-'], downtime_values['-ASSOSCIATED ERROR-'])
            downtime_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if downtime_event == sg.WIN_CLOSED or downtime_event == '-LOG CANCEL-':
            downtime_window.close()
            break
# Main window layout
# Column 1: the advanced-search form. Each input maps to one column of the
# 'errors' table; the Search button fires DatabaseConnection.search().
main_column_1 = sg.Column([[sg.Frame('Advanced search', [[sg.Column([[sg.Text("Voyage: ", tooltip = "Let me know which voyage you'd like to see the errors for."), sg.In(size = (15, 1), pad = ((34, 0), (0, 0)), key = '-VOYAGE SEARCH-')],
                          [sg.Text("Status: ", tooltip = "Would you like to look at errors we've already solved? Let me know here!"), sg.In(size = (15, 1), pad = ((40, 0), (0, 0)), key = '-STATUS SEARCH-')],
                          [sg.Text("Fault type: ", tooltip = "Here you can let me know what type of fault you'd like to search for."), sg.In(size = (15, 1), pad = ((20, 0), (0, 0)), right_click_menu = ("Cable", "Hardware", "Sensor", "Connector"), key = '-TYPE SEARCH-')],
                          [sg.Text("Fault location: ", tooltip = "If you suspect that your fault might be location-specific, say so here to see previous errors that have occurred in that location."), sg.In(size = (15, 1), pad = ((0, 0), (0, 0)), key = '-LOCATION SEARCH-')],
                          [sg.Text("Sensor ID: ", tooltip = "Think that your error could be sensor-specific? Find previous issues with your exact sensor by entering it's asset number here."), sg.In(size = (15, 1), pad = ((21, 0), (0, 0)), key = '-SENSOR ID SEARCH-')],
                          [sg.Text("Sensor type: ", tooltip = "Search for previous errors that have been encountered with your specific type of sensor."), sg.In(size = (15, 1), pad = ((8, 0), (0, 0)), key = '-SENSOR TYPE SEARCH-')],
                          [sg.Text("From: ", tooltip = "Enter the start date for your search."), sg.In(size = (15, 1), tooltip = "dd-mm-yy hh:mm:ss", pad = ((48, 0), (0, 0)), key = '-FROM SEARCH-')],
                          [sg.Text("To: ", tooltip = "Enter the end date for your search."), sg.In(size = (15, 1), tooltip = "dd-mm-yy hh:mm:ss", pad = ((64, 0), (0, 0)), key = '-TO SEARCH-')],
                          [sg.Button("Search errors", size = (12, 1), pad = ((93, 0), (7, 0)), enable_events=True, tooltip = "Press me if you'd like to search for specific error characteristics.",key = '-SEARCH ERROR-')]], pad = (3, 3))]])]])
# Column 2: the fault listbox plus the ID/description readout for whatever
# fault is selected. NOTE(review): unresolved_errors is empty at startup, so
# the listbox is created with height len(unresolved_errors) == 0 — confirm
# that is intentional.
main_column_2 = sg.Column([[sg.Frame('Faults:', [[sg.Column([[sg.Listbox(unresolved_errors, enable_events = True, size=(20, len(unresolved_errors)), key = '-ERROR LIST-')]]),
                          sg.Column([[sg.Text("Error ID: ", size=(14,1)), sg.Text("", size=(20,1), key='-OUT ID-')],
                          [sg.Text("Error Description: ", size=(14,15)), sg.Multiline("", size=(20,15), key='-OUT DESC-')],
                          ]) ],
                          [sg.Button("Update", enable_events = True, tooltip = "Press me if you'd like to update some of the information about the selected error.", key = '-UPDATE ERROR-'),
                          sg.Button("Give me more!", enable_events = True, tooltip = "Press me if you'd like to view all the information about this specific error.", key = '-SHOW ME MORE-'),
                          sg.Button("Show me unresolved errors", enable_events = True, tooltip="Press me if you'd like to see all the unresolved errors", key = '-UNRESOLVED-')]], pad=(0, 0))]])
# Column 3: global actions (log a new error / log downtime).
main_column_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Log a new error", enable_events=True, tooltip = "Press me if you'd like to log a new error.", key = '-LOG ERROR-'),
                          sg.Button("Log some downtime", enable_events=True, tooltip="Press me if you'd like to log system downtime as a result of a logged error.", key='-LOG DOWNTIME-')]])]])]])
main_layout = [[main_column_1, main_column_2], [main_column_3]]
# The window itself is created at import time; the event loop under
# __main__ below drives it.
main_window = sg.Window("LLMSDID - Home",
                        layout = main_layout,
                        margins = (200, 100),
                        grab_anywhere=True,
                        default_button_element_size=(12, 1))
if __name__ == "__main__":
db_object = create_login_window()
while True:
event, values = main_window.read()
if event == '-UNRESOLVED-':
update_query = "SELECT FaultID, FaultDescription FROM errors WHERE FaultStatus = 'Unresolved'"
unresolved_errors = db_object.fetch(update_query)
main_window['-ERROR LIST-'].update(unresolved_errors)
main_window.refresh()
if values['-ERROR LIST-']:
selected_error = values['-ERROR LIST-'][0]
error_sel_flag = True
fetch_query = "SELECT * FROM errors WHERE FaultId = " + str(selected_error[0])
current_error_list = db_object.fetch(fetch_query)
current_error['fault_id'] = current_error_list[0][0]
current_error['fault_status'] = current_error_list[0][1]
current_error['fault_description'] = current_error_list[0][2]
current_error['voyage'] = current_error_list[0][3]
current_error['time_of_fault'] = current_error_list[0][4]
current_error['time_of_solution'] = current_error_list[0][5]
current_error['fault_type'] = current_error_list[0][6]
current_error['location'] = current_error_list[0][7]
current_error['sensor_id'] = current_error_list[0][8]
current_error['sensor_type'] = current_error_list[0][9]
current_error['fault_message'] = current_error_list[0][10]
current_error['log_date'] = current_error_list[0][11]
main_window['-OUT ID-'].update(current_error['fault_id'])
main_window['-OUT DESC-'].update(current_error['fault_description'])
if event == '-UPDATE ERROR-':
if guest_user_flag:
print("User does not have privileges to update issues")
else:
if error_sel_flag:
create_update_window(current_error, db_object) # MEEP: point to db_object?
else:
main_window['-OUT ID-'].update("Please select a fault for us to update.")
print("No fault selected")
if event == '-LOG ERROR-':
if guest_user_flag:
print("User does not have privileges to log an error")
else:
create_log_window(db_object)
# TODO Set current issue as logged issue if it is unresolved
if event == '-SEARCH ERROR-':
unresolved_errors = db_object.search(values['-VOYAGE SEARCH-'], values['-STATUS SEARCH-'], values['-TYPE SEARCH-'], values['-LOCATION SEARCH-'], values['-SENSOR ID SEARCH-'], values['-SENSOR TYPE SEARCH-'], values['-FROM SEARCH-'], values['-TO SEARCH-'])
main_window['-ERROR LIST-'].update(unresolved_errors)
main_window.refresh()
if event == '-SHOW ME MORE-':
if error_sel_flag:
create_more_window(current_error, db_object)
else:
main_window['-OUT ID-'].update("Please select a fault for us to have a look at.")
print("No fault selected")
if event == '-LOG DOWNTIME-':
if(guest_user_flag):
print("User does not have privileges to log downtime")
else:
create_downtime_window(db_object)
if event == sg.WIN_CLOSED:
break
|
normal
|
{
"blob_id": "8fb5ef7244a8ca057f11cbcdf42d383665dade5e",
"index": 6884,
"step-1": "# Packages\nimport PySimpleGUI as sg\nimport mysql.connector\nimport secrets\n\n# TODO Add a view all button\n# TODO Catch errors (specifically for TimeDate mismatches)\n# TODO Add a downtime graph\n# TODO Add a system feedback window instead of putting this in the out id textbox\n\nerror_sel_flag = False\t# Flag to check whether an error has been selected before performing logic requiring it\nguest_user_flag = False\t# Flag to check whether the user is a guest, and limit which functions of the applciation (and database) they can use\nunresolved_errors = [] # MEEP, could probably do without this in the refactor\ncurrent_error = {\t# Dictionary to hold all information about the current/selected error. This removes the need to hit the database for every bit of logic that requires an error\n\t'fault_id': 'Null',\n\t'fault_status': 'Null',\n\t'fault_description': 'Null',\n\t'voyage': 'Null',\n\t'time_of_fault': 'Null',\n\t'time_of_solution': 'Null',\n\t'fault_type': 'Null',\n\t'location': 'Null',\n\t'sensor_id': 'Null',\n\t'sensor_type': 'Null',\n\t'fault_message': 'Null',\n\t'log_date': 'Null'\n}\t\n\n# Dictionary for search parameters. NOTE: deviation from script naming convention is due to the naming convention used in the database\nsearch_dict = {\n\t'Voyage': '',\n\t'FaultStatus': '',\n\t'FaultType': '',\n\t'Location': '',\n\t'SensorID': '',\n\t'SensorType': '',\n\t'TimeOfFault': '',\n\t'TimeOfSolution': ''\n}\n\nclass DatabaseConnection():\n\t''' This class instantiates and maintains the database connection, and encapsulates all functions that work directly with that connection.'''\n\t\n\tdef __init__(self, host, user, password, database):\n\t\t''' This function is called whenever a new instance of 'DatabaseConnection' is instantiated. 
It created the connection and cursor to the \n\t\tdatabase, both of which are used by other functions of this class.'''\n\t\ttry:\n\t\t\tself.connection = mysql.connector.connect(\n\t\t\t\thost=host,\n\t\t\t\tuser=user,\n\t\t\t\tpasswd=password,\n\t\t\t\tdatabase=database,\n\t\t\t\tauth_plugin='mysql_native_password'\n\t\t\t)\n\t\t\tself.cursor = self.connection.cursor()\n\t\texcept mysql.connector.Error as e:\n\t\t\tprint(\"Error %d: %s\" % (e.args[0], e.args[1]))\n\t\t\texit(69)\n\n\tdef save_to_errors(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message):\n\t\t''' This function creates and carries out an 'INSERT' query for the 'errors' table. It forces null values for the time fields in the case that the GUI \n\t\treturns blank values, this is to avoid a type mismatch with the database (This could probably be better handled somewhere else but it gets the job done for now).'''\n\t\t\n\t\tif time_of_fault == '':\n\t\t\ttime_of_fault = \"NULL\"\n\n\t\tif time_of_solution == '':\n\t\t\ttime_of_solution = \"NULL\"\n\n\t\tif fault_status == '':\n\t\t\tfault_status = \"Unresolved\"\n\n\t\tinsert_query = \"INSERT INTO errors (FaultDescription, FaultMessage, FaultStatus, FaultType, Location, SensorID, SensorType, TimeOfFault, TimeOfSolution, Voyage) VALUES ('{}', '{}', '{}', '{}', '{}', '{}', '{}', {}, {}, '{}')\".format(fault_desciption, fault_message, fault_status, fault_type, location, sensor_id, sensor_type, time_of_fault, time_of_solution, voyage)\n\t\tprint(insert_query)\n\n\t\tself.cursor.execute(insert_query)\n\n\t\tself.connection.commit()\n\n\tdef save_to_downtime(self, voyage, stop_time, start_time, reason, assosciated_error):\n\t\t''' This function creates and carries out an 'INSERT' query for the 'downtime' table. 
It forces null values for the time fields in the case that the GUI \n\t\treturns blank values, this is to avoid a type mismatch with the database (Again, this is not perfect but I'll relook it at a later stage).'''\n\t\t\n\t\tinsert_query = \"INSERT INTO downtime (Voyage, StopTime, StartTime, Reason, AssosciatedError) VALUES ('{}', '{}', '{}', '{}', '{}')\".format(voyage, stop_time, start_time, reason, assosciated_error)\n\t\tprint(insert_query)\n\n\t\tself.cursor.execute(insert_query)\n\n\t\tself.connection.commit()\n\t\tpass\n\n\tdef fetch(self, fetch_query):\n\t\t''' This function carries out a 'SELECT' query from the MySQL database and returns the result.'''\n\t\tprint(\"Fetch \" + str(fetch_query))\n\n\t\t_ = self.cursor.execute(fetch_query)\n\t\tresult = self.cursor.fetchall()\n\n\t\treturn result\n\n\tdef update(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message, fault_id):\n\t\t# ToDo Test the robustness of this, seems like it doens't like updating with unchanged fields\n\t\tif time_of_fault == 'None':\n\t\t\ttime_of_fault = \"NULL\"\n\n\t\tif time_of_solution =='None':\n\t\t\ttime_of_solution = \"NULL\"\n\n\t\tupdate_query = \"UPDATE errors SET FaultStatus = '{}', FaultDescription = '{}', Voyage = '{}', TimeOfFault = {}, TimeOfSolution = {}, FaultType = '{}', Location = '{}', SensorID = '{}', SensorType = '{}', FaultMessage = '{}' WHERE FaultID = {}\".format(fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message, fault_id)\n\n\t\tprint(update_query)\n\t\tself.cursor.execute(update_query)\n\n\t\tself.connection.commit()\n\n\t\tprint(\"Updated\")\n\n\tdef search(self, voyage, status, fault_type, location, sensor_id, sensor_type, start_time, end_time):\n\t\t''' This function creates and carries out a 'SELECT' query from the MySQL database and returns the result.\n\t\tIt fills a dictionary and 
reduces it to only include the provided search terms in the query.'''\n\n\t\tsearch_dict['Voyage'] = voyage\n\t\tsearch_dict['FaultStatus'] = status\n\t\tsearch_dict['FaultType'] = fault_type\n\t\tsearch_dict['Location'] = location\n\t\tsearch_dict['SensorID'] = sensor_id\n\t\tsearch_dict['SensorType'] = sensor_type\n\t\tsearch_dict['TimeOfFault'] = start_time\n\t\tsearch_dict['TimeOfSolution'] = end_time\n\n\t\t# Remove empty values so that only the required search parameters are included\n\t\treduced_search_dict = dict((k, v) for k, v in search_dict.items() if v) # New dictionary with all empty values removed\n\t\tif(len(reduced_search_dict) < 2):\n\t\t\tprint(\"Please enter at least two search criteria (sorry, Nic rushed this section!)\")\n\t\t\treturn 0\n\t\tkey_list = list(reduced_search_dict.keys())\n\t\tvalue_list = list(reduced_search_dict.values())\n\n\t\t# Remove enclosing apostrophes as is required in the MySQL syntax \n\t\tkey_tuple = tuple(key_list)\n\t\tseperator = \", \"\n\t\tusable_key_tuple = seperator.join(key_tuple)\n\n\t\tsearch_query = \"SELECT * FROM errors WHERE ({}) = {}\".format(usable_key_tuple, str(tuple(value_list)))\n\t\tprint(search_query)\n\n\t\t_ = self.cursor.execute(search_query)\n\t\tresult = self.cursor.fetchall()\n\n\t\treturn result\n\t\n\tdef shutdown(self):\n\t\t# Implement logic to close connection\n\t\tpass\n\t\n# Create window functions\ndef create_login_window():\n\t''' This function contains the layout for, invokes, and monitors the login window. When a user logs in, it creates an instance of \n\tthe 'DatabaseConnection' class, establishing a connection to the database for use by the main application. 
This function returns the \n\tcreated instance of 'DatabaseConnection' for use by other functions in the script.\n\t'''\n\n\t# Window setup\n\tlogin_layout = [[sg.Text('Hostname: '), sg.In(size = (25, 0), key = '-HOST-')],\n\t\t\t\t\t[sg.Text('Username: '), sg.In(size = (25, 0), key = '-USER-')],\n\t\t\t\t [sg.Text('Password: '), sg.In(size = (25, 0), pad = (3, 0), password_char = '*', key='-PASS-')],\n\t\t\t\t [sg.Button('Login', size = (14, 0), pad = ((0, 10), (5, 0)), enable_events = True, bind_return_key = True, key = '-LOGIN-'), sg.Button('Guest Login.', size = (14, 0), pad = ((10, 0), (5, 0)), enable_events = True, key = '-LOGIN GUEST-')]\n\t\t\t\t ]\n\n\tlogin_window = sg.Window(\"LLMSDID - Login\",\n\t\t\t\t\t\t\tlayout=login_layout,\n\t\t\t\t\t\t\tmargins=(20, 10),\n\t\t\t\t\t\t\tgrab_anywhere=True,\n\t\t\t\t\t\t\tdefault_button_element_size=(12, 1)\n\t\t\t\t\t\t\t)\n\n\t# Logic\n\twhile True:\n\t\tlogin_event, login_values = login_window.read()\n\n\t\tif login_event == '-LOGIN-':\n\t\t\tcurrent_db = DatabaseConnection(login_values['-HOST-'], login_values['-USER-'], login_values['-PASS-'], \"LLMSDID\")\t# Instantiate instance of 'DatabaseConnection'\n\t\t\tlogin_window.close()\n\t\t\treturn current_db\n\n\t\tif login_event == '-LOGIN GUEST-':\n\t\t\tcurrent_db = DatabaseConnection('localhost', secrets.guestUsername, secrets.guestPassword, \"LLMSDID\")\t# Instantiate instance of 'DatabaseConnection'\n\t\t\tglobal guest_user_flag\n\t\t\tguest_user_flag = True\n\t\t\tlogin_window.close()\n\t\t\treturn current_db\n\n\t\t# If the user closes the window, exit this loop so that the program can close\n\t\tif login_event == sg.WIN_CLOSED:\n\t\t\tlogin_window.close()\n\t\t\texit(69)\n\ndef create_update_window(selected_error, database):\n\tupdate_col_1 = sg.Column([[sg.Frame('Current values', [[sg.Column([[sg.Text(\"Voyage: \", size=(12,1)), sg.Text(selected_error['voyage'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Status: \", size=(12,1)), 
sg.Text(selected_error['fault_status'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Description: \", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40, 4))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault message: \", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault type: \", size=(12,1)), sg.Text(selected_error['fault_type'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault location: \", size=(12,1)), sg.Text(selected_error['location'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor ID: \", size=(12,1)), sg.Text(selected_error['sensor_id'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor type: \", size=(12,1)), sg.Text(selected_error['sensor_type'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"From: \", size=(12,1)), sg.Text(selected_error['time_of_fault'])],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"To: \", size=(12,1)), sg.Text(selected_error['time_of_solution'])]],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t )]])]])\n\n\tupdate_col_2 = sg.Column([[sg.Frame('Updated values', [[sg.Column([[sg.Text(\"Voyage: \", size=(12,1)), sg.In(selected_error['voyage'], size=(40,1), key='-NEW VOYAGE-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Status: \", size=(12,1)), sg.InputCombo([\"Unresolved\", \"Resolved\"], default_value=selected_error['fault_status'], key='-NEW STATUS-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Description: \", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4), key='-NEW DESC-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault message: \", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2), key='-NEW MESSAGE-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault type: \", size=(12,1)), sg.In(selected_error['fault_type'], size=(40,1), key='-NEW FTYPE-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault location: \", size=(12,1)), 
sg.In(selected_error['location'], size=(40,1), key='-NEW LOC-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor ID: \", size=(12,1)), sg.In(selected_error['sensor_id'], size=(40,1), key='-NEW ID-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor type: \", size=(12,1)), sg.In(selected_error['sensor_type'], size=(40,1), key='-NEW STYPE-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"From: \", size=(12,1)), sg.In(selected_error['time_of_fault'], size=(40,1), key='-NEW FROM-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"To: \", size=(12,1)), sg.In(selected_error['time_of_solution'], size=(40,1), key='-NEW TO-')]],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t)]])]])\n\n\tupdate_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button(\"Update\", enable_events=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttooltip=\"Press me if you'd like to update this fault.\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tkey='-SAVE UPDATE-'),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sg.Button(\"Cancel\", enable_events=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttooltip=\"Press me if you'd like to cancel this update.\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tkey='-CANCEL UPDATE-')]])]])]])\n\n\tupdateLayout = [[update_col_1, update_col_2], [update_col_3]]\n\n\tupdate_window = sg.Window(\"LLMSDID - Update\",\n\t\t\t\t\t\t\tlayout=updateLayout,\n\t\t\t\t\t\t\tmargins=(200, 100),\n\t\t\t\t\t\t\tgrab_anywhere=True,\n\t\t\t\t\t\t\tdefault_button_element_size=(12, 1)\n\t\t\t\t\t\t\t)\n\n\tprint(\"Updating \" + str(selected_error['fault_id']))\n\twhile True:\n\t\tupdate_event, update_value = update_window.read()\n\n\t\tif update_event == '-SAVE UPDATE-':\n\t\t\tdatabase.update(update_value['-NEW STATUS-'], update_value['-NEW DESC-'], update_value['-NEW VOYAGE-'], update_value['-NEW FROM-'], update_value['-NEW TO-'], update_value['-NEW FTYPE-'], update_value['-NEW LOC-'], update_value['-NEW ID-'], update_value['-NEW STYPE-'], update_value['-NEW MESSAGE-'], 
selected_error['fault_id'])\n\t\t\tupdate_window.close()\n\t\t\tbreak\n\n\t\t# If the user closes the window, exit this loop so that the program can close\n\t\tif update_event == sg.WIN_CLOSED or update_event == '-CANCEL UPDATE-':\n\t\t\tupdate_window.close()\n\t\t\tbreak\n\ndef create_log_window(database):\n\tlog_layout = [\n\t\t[sg.Text(\"Fault description\", size=(12,1)), sg.In(size=(40, 40), key='-DESCRIPTION-')],\n\t\t[sg.Text(\"Fault message\", size=(12,1)), sg.In(size=(40, 40), key='-MESSAGE-')],\n\t\t[sg.Text(\"Status\", size=(12,1)), sg.InputCombo([\"Unresolved\", \"Resolved\"], key='-STATUS-')],\n\t\t[sg.Text(\"Fault type\", size=(12,1)), sg.In(size = (25, 1), key='-TYPE-')],\n\t\t[sg.Text(\"Location\", size=(12,1)), sg.In(size=(25, 1), key='-LOCATION-')],\n\t\t[sg.Text(\"Sensor ID\", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR ID-')],\n\t\t[sg.Text(\"Sensor type\", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR TYPE-')],\n\t\t[sg.Text(\"Time of fault\", tooltip = \"dd-mm-yy hh:mm:ss\", size=(12,1)), sg.In(size=(25, 1), key='-START-')],\n\t\t[sg.Text(\"Time of solution\", tooltip = \"dd-mm-yy hh:mm:ss\", size=(12,1)), sg.In(size=(25, 1), key='-END-')],\n\t\t[sg.Text(\"Voyage\", size=(12,1)), sg.In(size=(25, 1), key='-VOYAGE-')],\n\t\t[sg.Button(\"Save\", enable_events=True, key='-LOG SAVE-'), sg.Button(\"Cancel\", enable_events=True, key='-LOG CANCEL-')]\n\t]\n\n\tlog_window = sg.Window(\"LLMSDID - Log an error\",\n\t\t\t\t\t\t\tlayout=log_layout,\n\t\t\t\t\t\t\tmargins=(200, 100),\n\t\t\t\t\t\t\tgrab_anywhere=True,\n\t\t\t\t\t\t\tdefault_button_element_size=(12, 1)\n\t\t\t\t\t\t\t)\n\n\twhile True:\n\t\tlog_event, log_values = log_window.read()\n\t\t\n\t\tif log_event == '-LOG SAVE-':\n\t\t\tdatabase.save_to_errors(log_values['-STATUS-'], log_values['-DESCRIPTION-'], log_values['-VOYAGE-'], log_values['-START-'], log_values['-END-'], log_values['-TYPE-'], log_values['-LOCATION-'], log_values['-SENSOR ID-'], log_values['-SENSOR TYPE-'], 
log_values['-MESSAGE-'])\n\t\t\tlog_window.close()\n\t\t\tbreak\n\n\t\t# If the user closes the window, exit this loop so that the program can close\n\t\tif log_event == sg.WIN_CLOSED or log_event == '-LOG CANCEL-':\n\t\t\tlog_window.close()\n\t\t\tbreak\n\ndef create_more_window(selected_error, database):\n\tmore_col_1 = sg.Column([[sg.Frame('Parameter', [[sg.Column([[sg.Text(\"Fault ID: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t\t[sg.Text(\"Voyage: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"Status: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Description: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Fault message: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Fault type: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"Fault location: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"Sensor ID: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"Sensor type: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"From: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t[sg.Text(\"To: \")],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \t\t[sg.Text(\"Log date: \")]],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t )]])]])\n\n\tmore_col_2 = sg.Column([[sg.Frame('Value', [[sg.Column([[sg.Text(\"Fault ID: \", size=(12,1)), sg.Text(selected_error['fault_id'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Voyage: \", size=(12,1)), sg.Text(selected_error['voyage'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Status: \", size=(12,1)), sg.Text(selected_error['fault_status'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Description: \", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault message: \", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault type: \", size=(12,1)), sg.Text(selected_error['fault_type'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Fault location: \", size=(12,1)), 
sg.Text(selected_error['location'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor ID: \", size=(12,1)), sg.Text(selected_error['sensor_id'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Sensor type: \", size=(12,1)), sg.Text(selected_error['sensor_type'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"From: \", size=(12,1)), sg.Text(selected_error['time_of_fault'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"To: \", size=(12,1)), sg.Text(selected_error['time_of_solution'], size=(40,1))],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Log date: \", size=(12,1)), sg.Text(selected_error['log_date'], size=(40,1))]],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t )]])]])\n\n\tmore_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button(\"Thanks\", enable_events=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttooltip=\"Press me if you're done having a look.\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tkey='-THANKS-')\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t ]])]])]])\n\n\tmoreLayout = [[more_col_2], [more_col_3]]\n\n\tmore_window = sg.Window(\"LLMSDID - More\",\n\t\t\t\t\t\t\t layout=moreLayout,\n\t\t\t\t\t\t\t margins=(200, 100),\n\t\t\t\t\t\t\t grab_anywhere=True,\n\t\t\t\t\t\t\t default_button_element_size=(12, 1)\n\t\t\t\t\t\t\t )\n\n\twhile True:\n\t\tmore_event, more_value = more_window.read()\n\n\t\t# If the user closes the window, exit this loop so that the program can close\n\t\tif more_event == sg.WIN_CLOSED or more_event == '-THANKS-':\n\t\t\tmore_window.close()\n\t\t\tbreak\n\ndef create_downtime_window(database):\n\t\tdowntime_layout = [\n\t\t[sg.Text(\"Voyage\"), sg.In(size=(40, 40), key='-VOYAGE-')],\n\t\t[sg.Text(\"System Stop Time\"), sg.In(size=(40, 40), key='-STOP-')],\n\t\t[sg.Text(\"System Restart Time\", tooltip = \"dd-mm-yy hh:mm:ss\"), sg.In(size=(40, 40), key='-START-')],\n\t\t[sg.Text(\"Reason for Downtime\", tooltip = \"dd-mm-yy hh:mm:ss\"), sg.In(size=(25, 1), key='-REASON-')],\n\t\t[sg.Text(\"Assosciated Error\"), 
sg.In(size=(25, 1), key='-ASSOSCIATED ERROR-')],\n\t\t[sg.Button(\"Save\", enable_events=True, key='-LOG SAVE-'),\n\t\t sg.Button(\"Cancel\", enable_events=True, key='-LOG CANCEL-')]\n\t]\n\n\tdowntime_window = sg.Window(\"LLMSDID - Log some downtime\",\n\t\t\t\t\t\t layout=downtime_layout,\n\t\t\t\t\t\t margins=(200, 100),\n\t\t\t\t\t\t grab_anywhere=True,\n\t\t\t\t\t\t default_button_element_size=(12, 1)\n\t\t\t\t\t\t )\n\twhile True:\n\t\tdowntime_event, downtime_values = downtime_window.read()\n\n\t\tif downtime_event == '-LOG SAVE-':\n\t\t\tdatabase.save_to_downtime(downtime_values['-VOYAGE-'], downtime_values['-STOP-'], downtime_values['-START-'], downtime_values['-REASON-'], downtime_values['-ASSOSCIATED ERROR-'])\n\t\t\tdowntime_window.close()\n\t\t\tbreak\n\n\t\t# If the user closes the window, exit this loop so that the program can close\n\t\tif downtime_event == sg.WIN_CLOSED or downtime_event == '-LOG CANCEL-':\n\t\t\tdowntime_window.close()\n\t\t\tbreak\n\n# Main window layout\nmain_column_1 = sg.Column([[sg.Frame('Advanced search', [[sg.Column([[sg.Text(\"Voyage: \", tooltip = \"Let me know which voyage you'd like to see the errors for.\"), sg.In(size = (15, 1), pad = ((34, 0), (0, 0)), key = '-VOYAGE SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Status: \", tooltip = \"Would you like to look at errors we've already solved? 
Let me know here!\"), sg.In(size = (15, 1), pad = ((40, 0), (0, 0)), key = '-STATUS SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Fault type: \", tooltip = \"Here you can let me know what type of fault you'd like to search for.\"), sg.In(size = (15, 1), pad = ((20, 0), (0, 0)), right_click_menu = (\"Cable\", \"Hardware\", \"Sensor\", \"Connector\"), key = '-TYPE SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Fault location: \", tooltip = \"If you suspect that your fault might be location-specific, say so here to see previous errors that have occurred in that location.\"), sg.In(size = (15, 1), pad = ((0, 0), (0, 0)), key = '-LOCATION SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Sensor ID: \", tooltip = \"Think that your error could be sensor-specific? Find previous issues with your exact sensor by entering it's asset number here.\"), sg.In(size = (15, 1), pad = ((21, 0), (0, 0)), key = '-SENSOR ID SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"Sensor type: \", tooltip = \"Search for previous errors that have been encountered with your specific type of sensor.\"), sg.In(size = (15, 1), pad = ((8, 0), (0, 0)), key = '-SENSOR TYPE SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"From: \", tooltip = \"Enter the start date for your search.\"), sg.In(size = (15, 1), tooltip = \"dd-mm-yy hh:mm:ss\", pad = ((48, 0), (0, 0)), key = '-FROM SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Text(\"To: \", tooltip = \"Enter the end date for your search.\"), sg.In(size = (15, 1), tooltip = \"dd-mm-yy hh:mm:ss\", pad = ((64, 0), (0, 0)), key = '-TO SEARCH-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t[sg.Button(\"Search errors\", size = (12, 1), pad = ((93, 0), (7, 0)), enable_events=True, tooltip = \"Press me if you'd like to search for specific error characteristics.\",key = '-SEARCH ERROR-')]], pad = (3, 3))]])]])\n\n\nmain_column_2 = sg.Column([[sg.Frame('Faults:', [[sg.Column([[sg.Listbox(unresolved_errors, enable_events = True, size=(20, 
len(unresolved_errors)), key = '-ERROR LIST-')]]),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tsg.Column([[sg.Text(\"Error ID: \", size=(14,1)), sg.Text(\"\", size=(20,1), key='-OUT ID-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Text(\"Error Description: \", size=(14,15)), sg.Multiline(\"\", size=(20,15), key='-OUT DESC-')],\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t ]) ],\n\t\t\t\t\t\t\t\t\t\t\t\t\t [sg.Button(\"Update\", enable_events = True, tooltip = \"Press me if you'd like to update some of the information about the selected error.\", key = '-UPDATE ERROR-'),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tsg.Button(\"Give me more!\", enable_events = True, tooltip = \"Press me if you'd like to view all the information about this specific error.\", key = '-SHOW ME MORE-'),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tsg.Button(\"Show me unresolved errors\", enable_events = True, tooltip=\"Press me if you'd like to see all the unresolved errors\", key = '-UNRESOLVED-')]], pad=(0, 0))]])\n\nmain_column_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button(\"Log a new error\", enable_events=True, tooltip = \"Press me if you'd like to log a new error.\", key = '-LOG ERROR-'),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t sg.Button(\"Log some downtime\", enable_events=True, tooltip=\"Press me if you'd like to log system downtime as a result of a logged error.\", key='-LOG DOWNTIME-')]])]])]])\n\nmain_layout = [[main_column_1, main_column_2], [main_column_3]]\n\nmain_window = sg.Window(\"LLMSDID - Home\",\n\t\t\t\t\t layout = main_layout,\n\t\t\t\t\t margins = (200, 100),\n\t\t\t\t\t grab_anywhere=True,\n\t\t\t\t\t default_button_element_size=(12, 1))\n\n\nif __name__ == \"__main__\":\n\n\tdb_object = create_login_window()\n\n\twhile True:\n\t\tevent, values = main_window.read()\n\n\t\tif event == '-UNRESOLVED-':\n\t\t\tupdate_query = \"SELECT FaultID, FaultDescription FROM errors WHERE FaultStatus = 'Unresolved'\"\n\t\t\tunresolved_errors = db_object.fetch(update_query)\n\t\t\tmain_window['-ERROR 
LIST-'].update(unresolved_errors)\n\t\t\tmain_window.refresh()\n\t\t\n\t\tif values['-ERROR LIST-']:\n\t\t\tselected_error = values['-ERROR LIST-'][0]\n\t\t\terror_sel_flag = True\n\t\t\tfetch_query = \"SELECT * FROM errors WHERE FaultId = \" + str(selected_error[0])\n\t\t\tcurrent_error_list = db_object.fetch(fetch_query)\n\t\t\t\n\t\t\tcurrent_error['fault_id'] = current_error_list[0][0]\n\t\t\tcurrent_error['fault_status'] = current_error_list[0][1]\n\t\t\tcurrent_error['fault_description'] = current_error_list[0][2]\n\t\t\tcurrent_error['voyage'] = current_error_list[0][3]\n\t\t\tcurrent_error['time_of_fault'] = current_error_list[0][4]\n\t\t\tcurrent_error['time_of_solution'] = current_error_list[0][5]\n\t\t\tcurrent_error['fault_type'] = current_error_list[0][6]\n\t\t\tcurrent_error['location'] = current_error_list[0][7]\n\t\t\tcurrent_error['sensor_id'] = current_error_list[0][8]\n\t\t\tcurrent_error['sensor_type'] = current_error_list[0][9]\n\t\t\tcurrent_error['fault_message'] = current_error_list[0][10]\n\t\t\tcurrent_error['log_date'] = current_error_list[0][11]\n\n\t\t\tmain_window['-OUT ID-'].update(current_error['fault_id'])\n\t\t\tmain_window['-OUT DESC-'].update(current_error['fault_description'])\n\n\t\tif event == '-UPDATE ERROR-':\n\t\t\tif guest_user_flag:\n\t\t\t\tprint(\"User does not have privileges to update issues\")\n\t\t\telse:\n\t\t\t\tif error_sel_flag:\n\t\t\t\t\tcreate_update_window(current_error, db_object) # MEEP: point to db_object?\n\t\t\t\telse:\n\t\t\t\t\tmain_window['-OUT ID-'].update(\"Please select a fault for us to update.\")\n\t\t\t\t\tprint(\"No fault selected\")\n\t\t\n\t\tif event == '-LOG ERROR-':\n\t\t\tif guest_user_flag:\n\t\t\t\tprint(\"User does not have privileges to log an error\")\n\t\t\telse:\n\t\t\t\tcreate_log_window(db_object)\n\t\t\t\t# TODO Set current issue as logged issue if it is unresolved\n\n\t\tif event == '-SEARCH ERROR-':\n\t\t\tunresolved_errors = db_object.search(values['-VOYAGE SEARCH-'], 
values['-STATUS SEARCH-'], values['-TYPE SEARCH-'], values['-LOCATION SEARCH-'], values['-SENSOR ID SEARCH-'], values['-SENSOR TYPE SEARCH-'], values['-FROM SEARCH-'], values['-TO SEARCH-'])\n\t\t\tmain_window['-ERROR LIST-'].update(unresolved_errors)\n\t\t\tmain_window.refresh()\n\n\t\tif event == '-SHOW ME MORE-':\n\t\t\tif error_sel_flag:\n\t\t\t\tcreate_more_window(current_error, db_object)\n\t\t\telse:\n\t\t\t\tmain_window['-OUT ID-'].update(\"Please select a fault for us to have a look at.\")\n\t\t\t\tprint(\"No fault selected\")\n\n\t\tif event == '-LOG DOWNTIME-':\n\t\t\tif(guest_user_flag):\n\t\t\t\tprint(\"User does not have privileges to log downtime\")\n\t\t\telse:\n\t\t\t\tcreate_downtime_window(db_object)\n\n\t\tif event == sg.WIN_CLOSED:\n\t\t\tbreak",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class KV11Z7(Kinetis):
<|reserved_special_token_0|>
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
FLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384,
103643149, 604520552, 3539992640, 509886552, 474599930, 704650834,
1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771,
1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684,
3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053,
3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425,
197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435,
188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307,
180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179,
172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051,
3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737,
1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129,
1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521,
1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913,
1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440,
1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043,
294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882,
163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923,
155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795,
147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667,
3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105,
274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763,
3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,
3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539,
3171967185, 1801807467, 4, 4026544128, 1275901296, 1145849349,
1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,
4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467,
4026544128, 1208595728, 2176928007, 2176928008, 139036673, 2147549257,
1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696,
55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320,
671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249,
4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,
1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657,
537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323,
3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304,
1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843,
543936513, 536889200, 18288, 1801807467, 561006602, 562065409,
2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185,
130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494,
4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210,
430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800,
511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160,
1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908,
3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079,
738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400,
1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571,
117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424,
1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016,
18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593,
543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569,
1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312,
1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719,
1175826561, 587482638, 2550220322, 4279891967, 3508076551, 3491113984,
1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,
2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872,
3186667525, 354, 1073872896, 3489802240, 536895873, 537151344, 18288,
2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001,
4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711,
465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579,
167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344,
406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920,
1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,
3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,
3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385,
1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488,
3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98,
1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152,
1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,
'pc_program_page': 536871601, 'pc_erase_sector': 536871485,
'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,
'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,
'page_buffers': [536873472, 536875520], 'min_program_length': 4,
'analyzer_supported': True, 'analyzer_address': 536868864}
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
<|reserved_special_token_1|>
from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import FlashRegion, RamRegion, MemoryMap
from ...debug.svd.loader import SVDFile
FLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384,
103643149, 604520552, 3539992640, 509886552, 474599930, 704650834,
1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771,
1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684,
3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053,
3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425,
197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435,
188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307,
180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179,
172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051,
3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737,
1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129,
1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521,
1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913,
1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440,
1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043,
294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882,
163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923,
155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795,
147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667,
3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105,
274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763,
3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,
3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539,
3171967185, 1801807467, 4, 4026544128, 1275901296, 1145849349,
1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,
4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467,
4026544128, 1208595728, 2176928007, 2176928008, 139036673, 2147549257,
1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696,
55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320,
671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249,
4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,
1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657,
537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323,
3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304,
1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843,
543936513, 536889200, 18288, 1801807467, 561006602, 562065409,
2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185,
130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494,
4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210,
430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800,
511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160,
1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908,
3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079,
738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400,
1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571,
117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424,
1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016,
18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593,
543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569,
1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312,
1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719,
1175826561, 587482638, 2550220322, 4279891967, 3508076551, 3491113984,
1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,
2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872,
3186667525, 354, 1073872896, 3489802240, 536895873, 537151344, 18288,
2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001,
4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711,
465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579,
167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344,
406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920,
1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,
3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,
3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385,
1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488,
3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98,
1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152,
1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,
'pc_program_page': 536871601, 'pc_erase_sector': 536871485,
'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,
'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,
'page_buffers': [536873472, 536875520], 'min_program_length': 4,
'analyzer_supported': True, 'analyzer_address': 536868864}
class KV11Z7(Kinetis):
MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=
1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=
Flash_Kinetis), RamRegion(start=536866816, length=16384))
def __init__(self, session):
super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')
<|reserved_special_token_1|>
# pyOCD debugger
# Copyright (c) 2006-2013,2018 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
FLASH_ALGO = { 'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x09032200, 0xd373428b, 0x428b0a03, 0x0b03d358, 0xd33c428b, 0x428b0c03, 0xe012d321, 0x430b4603,
0x2200d47f, 0x428b0843, 0x0903d374, 0xd35f428b, 0x428b0a03, 0x0b03d344, 0xd328428b, 0x428b0c03,
0x22ffd30d, 0xba120209, 0x428b0c03, 0x1212d302, 0xd0650209, 0x428b0b03, 0xe000d319, 0x0bc30a09,
0xd301428b, 0x1ac003cb, 0x0b834152, 0xd301428b, 0x1ac0038b, 0x0b434152, 0xd301428b, 0x1ac0034b,
0x0b034152, 0xd301428b, 0x1ac0030b, 0x0ac34152, 0xd301428b, 0x1ac002cb, 0x0a834152, 0xd301428b,
0x1ac0028b, 0x0a434152, 0xd301428b, 0x1ac0024b, 0x0a034152, 0xd301428b, 0x1ac0020b, 0xd2cd4152,
0x428b09c3, 0x01cbd301, 0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301,
0x41521ac0, 0x428b0903, 0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883,
0x008bd301, 0x41521ac0, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x41524601, 0x47704610,
0x0fcae05d, 0x4249d000, 0xd3001003, 0x40534240, 0x469c2200, 0x428b0903, 0x0a03d32d, 0xd312428b,
0x018922fc, 0x0a03ba12, 0xd30c428b, 0x11920189, 0xd308428b, 0x11920189, 0xd304428b, 0xd03a0189,
0xe0001192, 0x09c30989, 0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152,
0xd301428b, 0x1ac0014b, 0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb,
0x08834152, 0xd301428b, 0x1ac0008b, 0xd2d94152, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41,
0x46634601, 0x105b4152, 0xd3014610, 0x2b004240, 0x4249d500, 0x46634770, 0xd300105b, 0xb5014240,
0x46c02000, 0xbd0246c0, 0xb510480a, 0x44484908, 0xf8fcf000, 0xd1042800, 0x21004806, 0xf0004448,
0x4a05f9c9, 0x230168d1, 0x4319029b, 0xbd1060d1, 0x6b65666b, 0x00000004, 0xf0003000, 0x4c0cb570,
0x444c4605, 0x4b0b4601, 0x68e24620, 0xf8a4f000, 0xd1052800, 0x46292300, 0x68e24620, 0xf96ef000,
0x68ca4905, 0x029b2301, 0x60ca431a, 0x0000bd70, 0x00000004, 0x6b65666b, 0xf0003000, 0x4809b510,
0x81c14907, 0x81c14908, 0x08498801, 0x80010049, 0x44484806, 0xf8f2f000, 0xd0002800, 0xbd102001,
0x0000c520, 0x40052000, 0x0000d928, 0x00000004, 0x460cb570, 0x4606460b, 0x480d4601, 0x4615b084,
0xf0004448, 0x2800f903, 0x9001d10a, 0x21019002, 0x91004807, 0x4622462b, 0x44484631, 0xf978f000,
0x68ca4904, 0x029b2301, 0x60ca431a, 0xbd70b004, 0x00000004, 0xf0003000, 0x47702000, 0xd0082800,
0xd802290f, 0xd1042a04, 0x2913e005, 0x2a08d801, 0x2004d001, 0x20004770, 0x28004770, 0x2004d101,
0xb4104770, 0x460c1e5b, 0xd101421c, 0xd002421a, 0x2065bc10, 0x68034770, 0xd804428b, 0x18896840,
0x42881818, 0xbc10d202, 0x47702066, 0x2000bc10, 0x00004770, 0x42884903, 0x206bd001, 0x20004770,
0x00004770, 0x6b65666b, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501,
0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x4605b5f8, 0x460c4616,
0xf7ff4618, 0x2800ffd7, 0x2304d12b, 0x46214632, 0xf7ff4628, 0x0007ffb2, 0x19a6d123, 0x68e91e76,
0x91004630, 0xfe2cf7ff, 0xd0032900, 0x1c409e00, 0x1e764346, 0xd81342b4, 0x4478480a, 0x60046800,
0x20094909, 0xf7ff71c8, 0x4607ffbf, 0x280069a8, 0x4780d000, 0xd1032f00, 0x190468e8, 0xd9eb42b4,
0xbdf84638, 0x0000027a, 0x40020000, 0x4604b510, 0xf7ff4608, 0x2800ff9f, 0x2c00d106, 0x4904d005,
0x71c82044, 0xffa0f7ff, 0x2004bd10, 0x0000bd10, 0x40020000, 0xd00c2800, 0xd00a2a00, 0xd21a2908,
0x447b000b, 0x18db791b, 0x0705449f, 0x0d0b0907, 0x2004110f, 0x68c04770, 0x6840e00a, 0x6880e008,
0x6800e006, 0x2001e004, 0x6900e002, 0x6940e000, 0x20006010, 0x206a4770, 0x00004770, 0xd00a2800,
0x68c9490f, 0x0e094a0f, 0x447a0049, 0x03095a51, 0x2064d103, 0x20044770, 0xb4104770, 0x60032300,
0x21016041, 0x02896081, 0x490760c1, 0x158a7a0c, 0x610240a2, 0x61837ac9, 0xbc106141, 0x47704618,
0x40048040, 0x000001aa, 0x40020020, 0xd1012a00, 0x47702004, 0x461cb5ff, 0x4615b081, 0x2304460e,
0x98014622, 0xff19f7ff, 0xd1190007, 0xd0162c00, 0x4478480c, 0x600e6801, 0x6800cd02, 0x490a6041,
0x71c82006, 0xff30f7ff, 0x98014607, 0x28006980, 0x4780d000, 0xd1022f00, 0x1f241d36, 0x4638d1e8,
0xbdf0b005, 0x00000162, 0x40020000, 0xd0022800, 0x20006181, 0x20044770, 0x00004770, 0xb081b5ff,
0x460e4614, 0x23044605, 0xfee7f7ff, 0xd12a2800, 0x686868a9, 0xfd64f7ff, 0x42719000, 0x40014240,
0x42b7424f, 0x9800d101, 0x2c00183f, 0x1bbdd01a, 0xd90042a5, 0x490d4625, 0x447908a8, 0x600e6809,
0x2201490b, 0x0a0271ca, 0x728872ca, 0x72489804, 0xfeeaf7ff, 0xd1062800, 0x1b649800, 0x183f1976,
0xd1e42c00, 0xb0052000, 0x0000bdf0, 0x000000da, 0x40020000, 0xd1012800, 0x47702004, 0x4803b510,
0x71c22240, 0xf7ff7181, 0xbd10fecf, 0x40020000, 0xd1012b00, 0x47702004, 0x461cb5f8, 0x460e4615,
0x9f082304, 0xfe99f7ff, 0xd1192800, 0xd0172d00, 0x447a4a0f, 0x60066810, 0x2102480e, 0x990671c1,
0x681172c1, 0x60886820, 0xfeaef7ff, 0xd0082800, 0x29009907, 0x600ed000, 0xd0012f00, 0x60392100,
0x1f2dbdf8, 0x1d361d24, 0xd1e12d00, 0x0000bdf8, 0x00000062, 0x40020000, 0x00040002, 0x00080000,
0x00100000, 0x00200000, 0x00400000, 0x00000000, 0x00000000, 0x00200000, 0x40020004, 0x00000000,
],
'pc_init' : 0x2000027D,
'pc_unInit': 0x200002F9,
'pc_program_page': 0x200002B1,
'pc_erase_sector': 0x2000023D,
'pc_eraseAll' : 0x20000209,
'static_base' : 0x20000000 + 0x00000020 + 0x0000063c,
'begin_stack' : 0x20000000 + 0x00000800,
'begin_data' : 0x20000000 + 0x00000A00,
'page_buffers' : [0x20000a00, 0x20001200], # Enable double buffering
'min_program_length' : 4,
'analyzer_supported' : True,
'analyzer_address' : 0x1ffff800
}
class KV11Z7(Kinetis):
    """Target definition for the NXP Kinetis KV11Z7 device.

    Declares the device memory layout (0x20000 bytes of boot flash in
    0x400-byte sectors, 0x4000 bytes of RAM at 0x1ffff000), attaches the
    flash programming algorithm, and loads the built-in SVD description.
    """

    MEMORY_MAP = MemoryMap(
        FlashRegion(
            start=0,
            length=0x20000,
            blocksize=0x400,
            is_boot_memory=True,
            algo=FLASH_ALGO,
            flash_class=Flash_Kinetis,
        ),
        RamRegion(start=0x1ffff000, length=0x4000),
    )

    def __init__(self, session):
        # Hand the session and this device's memory map to the Kinetis base.
        super(KV11Z7, self).__init__(session, self.MEMORY_MAP)
        self._svd_location = SVDFile.from_builtin("MKV11Z7.svd")
|
flexible
|
{
"blob_id": "58aa72588357b18ab42391dfffbf2a1b66589edd",
"index": 552,
"step-1": "<mask token>\n\n\nclass KV11Z7(Kinetis):\n <mask token>\n\n def __init__(self, session):\n super(KV11Z7, self).__init__(session, self.MEMORY_MAP)\n self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')\n",
"step-2": "<mask token>\n\n\nclass KV11Z7(Kinetis):\n MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=\n 1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=\n Flash_Kinetis), RamRegion(start=536866816, length=16384))\n\n def __init__(self, session):\n super(KV11Z7, self).__init__(session, self.MEMORY_MAP)\n self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')\n",
"step-3": "<mask token>\nFLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384, \n 103643149, 604520552, 3539992640, 509886552, 474599930, 704650834, \n 1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771, \n 1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684, \n 3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053, \n 3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425, \n 197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435, \n 188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307, \n 180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179, \n 172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051, \n 3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737, \n 1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129, \n 1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521, \n 1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913, \n 1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440, \n 1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043, \n 294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882, \n 163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923, \n 155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795, \n 147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667, \n 3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105, \n 274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763, \n 3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,\n 3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539, \n 3171967185, 1801807467, 4, 4026544128, 1275901296, 1145849349, \n 1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,\n 4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467, \n 4026544128, 1208595728, 2176928007, 
2176928008, 139036673, 2147549257, \n 1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696, \n 55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320, \n 671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249, \n 4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,\n 1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657, \n 537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323, \n 3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304, \n 1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843, \n 543936513, 536889200, 18288, 1801807467, 561006602, 562065409, \n 2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185, \n 130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494, \n 4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210, \n 430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800, \n 511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160, \n 1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908, \n 3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079, \n 738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400, \n 1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571, \n 117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424, \n 1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016, \n 18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593, \n 543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569, \n 1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312, \n 1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719, \n 1175826561, 587482638, 2550220322, 4279891967, 3508076551, 3491113984, \n 1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,\n 2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872, \n 3186667525, 354, 1073872896, 
3489802240, 536895873, 537151344, 18288, \n 2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001, \n 4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711, \n 465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579, \n 167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344, \n 406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920, \n 1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,\n 3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,\n 3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385, \n 1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488, \n 3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98, \n 1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152, \n 1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,\n 'pc_program_page': 536871601, 'pc_erase_sector': 536871485,\n 'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,\n 'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,\n 'page_buffers': [536873472, 536875520], 'min_program_length': 4,\n 'analyzer_supported': True, 'analyzer_address': 536868864}\n\n\nclass KV11Z7(Kinetis):\n MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=\n 1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=\n Flash_Kinetis), RamRegion(start=536866816, length=16384))\n\n def __init__(self, session):\n super(KV11Z7, self).__init__(session, self.MEMORY_MAP)\n self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')\n",
"step-4": "from ..family.target_kinetis import Kinetis\nfrom ..family.flash_kinetis import Flash_Kinetis\nfrom ...core.memory_map import FlashRegion, RamRegion, MemoryMap\nfrom ...debug.svd.loader import SVDFile\nFLASH_ALGO = {'load_address': 536870912, 'instructions': [3758800384, \n 103643149, 604520552, 3539992640, 509886552, 474599930, 704650834, \n 1198576114, 151200256, 3547546251, 1116408323, 184800088, 3543941771, \n 1116408835, 3759330081, 1124812291, 570479743, 1116407875, 151245684, \n 3546235531, 1116408323, 184800068, 3542631051, 1116408835, 587191053, \n 3121742345, 1116408835, 303223554, 3496280585, 1116408579, 3758150425, \n 197331465, 3540075147, 448791499, 193151314, 3540075147, 448791435, \n 188957010, 3540075147, 448791371, 184762706, 3540075147, 448791307, \n 180568402, 3540075147, 448791243, 176374098, 3540075147, 448791179, \n 172179794, 3540075147, 448791115, 167985490, 3540075147, 448791051, \n 3536666962, 1116408259, 30135041, 1095899840, 1116408195, 25940737, \n 1095899840, 1116408131, 21746433, 1095899840, 1116408067, 17552129, \n 1095899840, 1116408003, 13357825, 1095899840, 1116407939, 9163521, \n 1095899840, 1116407875, 4969217, 1095899840, 3523222081, 1095910913, \n 1198540304, 264953949, 1112133632, 3539996675, 1079198272, 1184637440, \n 1116408067, 168022829, 3541189259, 25764604, 168016402, 3540796043, \n 294781321, 3540533899, 294781321, 3540271755, 3493462409, 3758100882, \n 163776905, 3540075147, 448790987, 159596882, 3540075147, 448790923, \n 155402578, 3540075147, 448790859, 151208274, 3540075147, 448790795, \n 147013970, 3540075147, 448790731, 142819666, 3540075147, 448790667, \n 3537453394, 1116407875, 4969217, 1095899840, 3523222081, 1180911105, \n 274415954, 3540076048, 721437248, 1112134912, 1180911472, 3539996763, \n 3036758592, 1186996224, 3171043008, 3037743114, 1145587976, 4177326080,\n 3506710528, 553666566, 4026549320, 1241905609, 587294929, 1125712539, \n 3171967185, 1801807467, 4, 4026544128, 1275901296, 
1145849349, \n 1259030017, 1759659552, 4171558912, 3506776064, 1177101056, 1759659552,\n 4184797184, 1758087429, 43721473, 1623868186, 48496, 4, 1801807467, \n 4026544128, 1208595728, 2176928007, 2176928008, 139036673, 2147549257, \n 1145587718, 4176670720, 3489671168, 3171950593, 50464, 1074077696, \n 55592, 4, 1175238000, 1174816267, 1208829441, 1175826564, 4026549320, \n 671152387, 2416038154, 553750530, 2432714759, 1176651307, 1145587249, \n 4185452544, 1758087428, 43721473, 1623868186, 3178278916, 4, 4026544128,\n 1198530560, 3490195456, 3624020239, 3506711044, 689168389, 705222657, \n 537186305, 536889200, 671106928, 537186561, 3020965744, 1175199323, \n 3506520604, 3489808922, 543538192, 1745045360, 3624157835, 411658304, \n 1116215320, 3155218946, 1198530662, 536919056, 18288, 1116227843, \n 543936513, 536889200, 18288, 1801807467, 561006602, 562065409, \n 2013360129, 3590063625, 109148160, 543675649, 113330032, 543741185, \n 130041712, 543805692, 18288, 1073872896, 1174779384, 1175209494, \n 4160701976, 671154135, 587518251, 1176585778, 4160701992, 524210, \n 430362915, 1760108150, 2432714288, 4264359935, 3489868032, 473996800, \n 511066950, 3625140916, 1148733450, 1610901504, 537479433, 4160713160, \n 1174929343, 671115688, 1199624192, 3506646784, 419719400, 3656073908, \n 3187164728, 634, 1073872896, 1174713616, 4160701960, 671154079, \n 738251014, 1225052165, 1908940868, 4288739327, 537181456, 48400, \n 1073872896, 3490457600, 3490327040, 3524929800, 1148911627, 417036571, \n 117785759, 218827015, 537137423, 1757431664, 1749082122, 1753276424, \n 1744887814, 536993796, 1761665026, 1765859328, 536895504, 543836016, \n 18288, 3490326528, 1758021903, 235489807, 1148846153, 50944593, \n 543478019, 537151344, 3020965744, 1610818304, 553738305, 42557569, \n 1225220289, 361396748, 1627537570, 1636006601, 3155190081, 1198540312, \n 1074036800, 426, 1073872928, 3506514432, 1198530564, 1176286719, \n 1175826561, 587482638, 2550220322, 4279891967, 3508076551, 
3491113984, \n 1148733452, 1611556865, 1744882946, 1225416769, 1908940806, 4281399295,\n 2550220295, 671115648, 1199624192, 3506581248, 522460470, 1178128872, \n 3186667525, 354, 1073872896, 3489802240, 536895873, 537151344, 18288, \n 2961290751, 1175340564, 587482629, 4276615167, 3509200896, 1751673001, \n 4251252735, 1114738688, 1073824320, 1119306319, 2550190337, 738203711, \n 465424410, 3640672933, 1225606693, 1148782760, 1611556873, 570509579, \n 167932362, 1921544906, 1917360132, 4276811775, 3506841600, 459577344, \n 406788470, 3521391616, 2953125888, 48624, 218, 1073872896, 3506513920, \n 1198530564, 1208202512, 1908548160, 4160713089, 3172007631, 1073872896,\n 3506514688, 1198530564, 1176286712, 1175340565, 2668110596, 4271503359,\n 3508086784, 3491179776, 1148865039, 1611032592, 553797646, 2567336385, \n 1745973953, 1619552288, 4272879615, 3490195456, 687905031, 1611583488, \n 3489738496, 1614356736, 523091448, 490085668, 3521195264, 48632, 98, \n 1073872896, 262146, 524288, 1048576, 2097152, 4194304, 0, 0, 2097152, \n 1073872900, 0], 'pc_init': 536871549, 'pc_unInit': 536871673,\n 'pc_program_page': 536871601, 'pc_erase_sector': 536871485,\n 'pc_eraseAll': 536871433, 'static_base': 536870912 + 32 + 1596,\n 'begin_stack': 536870912 + 2048, 'begin_data': 536870912 + 2560,\n 'page_buffers': [536873472, 536875520], 'min_program_length': 4,\n 'analyzer_supported': True, 'analyzer_address': 536868864}\n\n\nclass KV11Z7(Kinetis):\n MEMORY_MAP = MemoryMap(FlashRegion(start=0, length=131072, blocksize=\n 1024, is_boot_memory=True, algo=FLASH_ALGO, flash_class=\n Flash_Kinetis), RamRegion(start=536866816, length=16384))\n\n def __init__(self, session):\n super(KV11Z7, self).__init__(session, self.MEMORY_MAP)\n self._svd_location = SVDFile.from_builtin('MKV11Z7.svd')\n",
"step-5": "# pyOCD debugger\n# Copyright (c) 2006-2013,2018 Arm Limited\n# SPDX-License-Identifier: Apache-2.0\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom ..family.target_kinetis import Kinetis\nfrom ..family.flash_kinetis import Flash_Kinetis\nfrom ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)\nfrom ...debug.svd.loader import SVDFile\n\nFLASH_ALGO = { 'load_address' : 0x20000000,\n 'instructions' : [\n 0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,\n 0x09032200, 0xd373428b, 0x428b0a03, 0x0b03d358, 0xd33c428b, 0x428b0c03, 0xe012d321, 0x430b4603,\n 0x2200d47f, 0x428b0843, 0x0903d374, 0xd35f428b, 0x428b0a03, 0x0b03d344, 0xd328428b, 0x428b0c03,\n 0x22ffd30d, 0xba120209, 0x428b0c03, 0x1212d302, 0xd0650209, 0x428b0b03, 0xe000d319, 0x0bc30a09,\n 0xd301428b, 0x1ac003cb, 0x0b834152, 0xd301428b, 0x1ac0038b, 0x0b434152, 0xd301428b, 0x1ac0034b,\n 0x0b034152, 0xd301428b, 0x1ac0030b, 0x0ac34152, 0xd301428b, 0x1ac002cb, 0x0a834152, 0xd301428b,\n 0x1ac0028b, 0x0a434152, 0xd301428b, 0x1ac0024b, 0x0a034152, 0xd301428b, 0x1ac0020b, 0xd2cd4152,\n 0x428b09c3, 0x01cbd301, 0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301,\n 0x41521ac0, 0x428b0903, 0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883,\n 0x008bd301, 0x41521ac0, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x41524601, 0x47704610,\n 0x0fcae05d, 0x4249d000, 0xd3001003, 0x40534240, 0x469c2200, 0x428b0903, 
0x0a03d32d, 0xd312428b,\n 0x018922fc, 0x0a03ba12, 0xd30c428b, 0x11920189, 0xd308428b, 0x11920189, 0xd304428b, 0xd03a0189,\n 0xe0001192, 0x09c30989, 0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152,\n 0xd301428b, 0x1ac0014b, 0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb,\n 0x08834152, 0xd301428b, 0x1ac0008b, 0xd2d94152, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41,\n 0x46634601, 0x105b4152, 0xd3014610, 0x2b004240, 0x4249d500, 0x46634770, 0xd300105b, 0xb5014240,\n 0x46c02000, 0xbd0246c0, 0xb510480a, 0x44484908, 0xf8fcf000, 0xd1042800, 0x21004806, 0xf0004448,\n 0x4a05f9c9, 0x230168d1, 0x4319029b, 0xbd1060d1, 0x6b65666b, 0x00000004, 0xf0003000, 0x4c0cb570,\n 0x444c4605, 0x4b0b4601, 0x68e24620, 0xf8a4f000, 0xd1052800, 0x46292300, 0x68e24620, 0xf96ef000,\n 0x68ca4905, 0x029b2301, 0x60ca431a, 0x0000bd70, 0x00000004, 0x6b65666b, 0xf0003000, 0x4809b510,\n 0x81c14907, 0x81c14908, 0x08498801, 0x80010049, 0x44484806, 0xf8f2f000, 0xd0002800, 0xbd102001,\n 0x0000c520, 0x40052000, 0x0000d928, 0x00000004, 0x460cb570, 0x4606460b, 0x480d4601, 0x4615b084,\n 0xf0004448, 0x2800f903, 0x9001d10a, 0x21019002, 0x91004807, 0x4622462b, 0x44484631, 0xf978f000,\n 0x68ca4904, 0x029b2301, 0x60ca431a, 0xbd70b004, 0x00000004, 0xf0003000, 0x47702000, 0xd0082800,\n 0xd802290f, 0xd1042a04, 0x2913e005, 0x2a08d801, 0x2004d001, 0x20004770, 0x28004770, 0x2004d101,\n 0xb4104770, 0x460c1e5b, 0xd101421c, 0xd002421a, 0x2065bc10, 0x68034770, 0xd804428b, 0x18896840,\n 0x42881818, 0xbc10d202, 0x47702066, 0x2000bc10, 0x00004770, 0x42884903, 0x206bd001, 0x20004770,\n 0x00004770, 0x6b65666b, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501,\n 0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x4605b5f8, 0x460c4616,\n 0xf7ff4618, 0x2800ffd7, 0x2304d12b, 0x46214632, 0xf7ff4628, 0x0007ffb2, 0x19a6d123, 0x68e91e76,\n 0x91004630, 0xfe2cf7ff, 0xd0032900, 0x1c409e00, 0x1e764346, 0xd81342b4, 0x4478480a, 0x60046800,\n 0x20094909, 
0xf7ff71c8, 0x4607ffbf, 0x280069a8, 0x4780d000, 0xd1032f00, 0x190468e8, 0xd9eb42b4,\n 0xbdf84638, 0x0000027a, 0x40020000, 0x4604b510, 0xf7ff4608, 0x2800ff9f, 0x2c00d106, 0x4904d005,\n 0x71c82044, 0xffa0f7ff, 0x2004bd10, 0x0000bd10, 0x40020000, 0xd00c2800, 0xd00a2a00, 0xd21a2908,\n 0x447b000b, 0x18db791b, 0x0705449f, 0x0d0b0907, 0x2004110f, 0x68c04770, 0x6840e00a, 0x6880e008,\n 0x6800e006, 0x2001e004, 0x6900e002, 0x6940e000, 0x20006010, 0x206a4770, 0x00004770, 0xd00a2800,\n 0x68c9490f, 0x0e094a0f, 0x447a0049, 0x03095a51, 0x2064d103, 0x20044770, 0xb4104770, 0x60032300,\n 0x21016041, 0x02896081, 0x490760c1, 0x158a7a0c, 0x610240a2, 0x61837ac9, 0xbc106141, 0x47704618,\n 0x40048040, 0x000001aa, 0x40020020, 0xd1012a00, 0x47702004, 0x461cb5ff, 0x4615b081, 0x2304460e,\n 0x98014622, 0xff19f7ff, 0xd1190007, 0xd0162c00, 0x4478480c, 0x600e6801, 0x6800cd02, 0x490a6041,\n 0x71c82006, 0xff30f7ff, 0x98014607, 0x28006980, 0x4780d000, 0xd1022f00, 0x1f241d36, 0x4638d1e8,\n 0xbdf0b005, 0x00000162, 0x40020000, 0xd0022800, 0x20006181, 0x20044770, 0x00004770, 0xb081b5ff,\n 0x460e4614, 0x23044605, 0xfee7f7ff, 0xd12a2800, 0x686868a9, 0xfd64f7ff, 0x42719000, 0x40014240,\n 0x42b7424f, 0x9800d101, 0x2c00183f, 0x1bbdd01a, 0xd90042a5, 0x490d4625, 0x447908a8, 0x600e6809,\n 0x2201490b, 0x0a0271ca, 0x728872ca, 0x72489804, 0xfeeaf7ff, 0xd1062800, 0x1b649800, 0x183f1976,\n 0xd1e42c00, 0xb0052000, 0x0000bdf0, 0x000000da, 0x40020000, 0xd1012800, 0x47702004, 0x4803b510,\n 0x71c22240, 0xf7ff7181, 0xbd10fecf, 0x40020000, 0xd1012b00, 0x47702004, 0x461cb5f8, 0x460e4615,\n 0x9f082304, 0xfe99f7ff, 0xd1192800, 0xd0172d00, 0x447a4a0f, 0x60066810, 0x2102480e, 0x990671c1,\n 0x681172c1, 0x60886820, 0xfeaef7ff, 0xd0082800, 0x29009907, 0x600ed000, 0xd0012f00, 0x60392100,\n 0x1f2dbdf8, 0x1d361d24, 0xd1e12d00, 0x0000bdf8, 0x00000062, 0x40020000, 0x00040002, 0x00080000,\n 0x00100000, 0x00200000, 0x00400000, 0x00000000, 0x00000000, 0x00200000, 0x40020004, 0x00000000,\n\n ],\n\n 'pc_init' : 0x2000027D,\n 'pc_unInit': 
0x200002F9,\n 'pc_program_page': 0x200002B1,\n 'pc_erase_sector': 0x2000023D,\n 'pc_eraseAll' : 0x20000209,\n\n 'static_base' : 0x20000000 + 0x00000020 + 0x0000063c,\n 'begin_stack' : 0x20000000 + 0x00000800,\n 'begin_data' : 0x20000000 + 0x00000A00,\n 'page_buffers' : [0x20000a00, 0x20001200], # Enable double buffering\n 'min_program_length' : 4,\n 'analyzer_supported' : True,\n 'analyzer_address' : 0x1ffff800\n }\n\nclass KV11Z7(Kinetis):\n\n MEMORY_MAP = MemoryMap(\n FlashRegion( start=0, length=0x20000, blocksize=0x400, is_boot_memory=True,\n algo=FLASH_ALGO, flash_class=Flash_Kinetis),\n RamRegion( start=0x1ffff000, length=0x4000)\n )\n\n def __init__(self, session):\n super(KV11Z7, self).__init__(session, self.MEMORY_MAP)\n self._svd_location = SVDFile.from_builtin(\"MKV11Z7.svd\")\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Quantum half adder demo: 4 qubits (q0, q1 inputs; q2 sum; q3 carry),
# 2 classical bits for the readout.
from qiskit import QuantumCircuit,execute,Aer
from qiskit.visualization import plot_histogram
import matplotlib.pyplot as plt
qc_ha=QuantumCircuit(4,2)
# Set both inputs to |1>, so the expected result is 1 + 1 = binary 10.
qc_ha.x(0)
qc_ha.x(1)
qc_ha.barrier()
# Sum bit: q2 = q0 XOR q1, built from two CNOTs targeting q2.
qc_ha.cx(0,2)
qc_ha.cx(1,2)
# Carry bit: q3 = q0 AND q1, computed with a Toffoli gate.
qc_ha.ccx(0,1,3)
qc_ha.barrier()
# Measure sum into classical bit 0 and carry into classical bit 1.
qc_ha.measure(2,0)
qc_ha.measure(3,1)
#qc_ha.draw(output='mpl')
# Run on the QASM simulator and plot the measurement counts histogram.
counts = execute(qc_ha,Aer.get_backend('qasm_simulator')).result().get_counts()
plot_histogram(counts)
plt.show()
|
normal
|
{
"blob_id": "02381f28ef20aa0c2c235ef6563e1810a5931e35",
"index": 5556,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nqc_ha.x(0)\nqc_ha.x(1)\nqc_ha.barrier()\nqc_ha.cx(0, 2)\nqc_ha.cx(1, 2)\nqc_ha.ccx(0, 1, 3)\nqc_ha.barrier()\nqc_ha.measure(2, 0)\nqc_ha.measure(3, 1)\n<mask token>\nplot_histogram(counts)\nplt.show()\n",
"step-3": "<mask token>\nqc_ha = QuantumCircuit(4, 2)\nqc_ha.x(0)\nqc_ha.x(1)\nqc_ha.barrier()\nqc_ha.cx(0, 2)\nqc_ha.cx(1, 2)\nqc_ha.ccx(0, 1, 3)\nqc_ha.barrier()\nqc_ha.measure(2, 0)\nqc_ha.measure(3, 1)\ncounts = execute(qc_ha, Aer.get_backend('qasm_simulator')).result().get_counts(\n )\nplot_histogram(counts)\nplt.show()\n",
"step-4": "from qiskit import QuantumCircuit, execute, Aer\nfrom qiskit.visualization import plot_histogram\nimport matplotlib.pyplot as plt\nqc_ha = QuantumCircuit(4, 2)\nqc_ha.x(0)\nqc_ha.x(1)\nqc_ha.barrier()\nqc_ha.cx(0, 2)\nqc_ha.cx(1, 2)\nqc_ha.ccx(0, 1, 3)\nqc_ha.barrier()\nqc_ha.measure(2, 0)\nqc_ha.measure(3, 1)\ncounts = execute(qc_ha, Aer.get_backend('qasm_simulator')).result().get_counts(\n )\nplot_histogram(counts)\nplt.show()\n",
"step-5": "from qiskit import QuantumCircuit,execute,Aer\nfrom qiskit.visualization import plot_histogram\nimport matplotlib.pyplot as plt\n\nqc_ha=QuantumCircuit(4,2)\nqc_ha.x(0)\nqc_ha.x(1)\nqc_ha.barrier()\nqc_ha.cx(0,2)\nqc_ha.cx(1,2)\nqc_ha.ccx(0,1,3)\nqc_ha.barrier()\nqc_ha.measure(2,0)\nqc_ha.measure(3,1)\n#qc_ha.draw(output='mpl')\ncounts = execute(qc_ha,Aer.get_backend('qasm_simulator')).result().get_counts()\nplot_histogram(counts)\nplt.show()\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AnnotController:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AnnotController:
def get_annotations(self, project, page_index):
page = project.doc[page_index]
yield from page.flat_iter()
<|reserved_special_token_1|>
from redstork import PageObject
class AnnotController:
    """Controller that exposes the flattened objects of a project page."""

    def get_annotations(self, project, page_index):
        """Yield every object on one page of the project's document.

        :param project: object whose ``doc`` is indexable by page number
        :param page_index: zero-based index of the page to inspect
        """
        target_page = project.doc[page_index]
        for obj in target_page.flat_iter():
            yield obj
|
flexible
|
{
"blob_id": "6ca2a9040897e49c6407b9b0760240fec93b4df0",
"index": 3067,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AnnotController:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-4": "from redstork import PageObject\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def barStdNormal(bars, timeperiod=5):
"""Std Normal """
close = bars['close']
result = close.rolling(timeperiod).apply(std_normalized)
return result
<|reserved_special_token_1|>
import pandas as pd
import numpy as np
def barStdNormal(bars, timeperiod=5):
"""Std Normal """
close = bars['close']
result = close.rolling(timeperiod).apply(std_normalized)
return result
<|reserved_special_token_1|>
import pandas as pd
import numpy as np
#from ctaFunction import std_normalized
def barStdNormal(bars, timeperiod=5):
    '''Std Normal

    Rolling application of ``std_normalized`` over the close price.

    :param bars: mapping/DataFrame with a 'close' column of prices
    :param timeperiod: rolling window length in bars (default 5)
    :return: Series with ``std_normalized`` evaluated on each window

    NOTE(review): ``std_normalized`` is not defined in this module -- its
    import (``from ctaFunction import std_normalized``) is commented out
    above, so calling this raises NameError until that import is restored.
    '''
    close = bars['close']
    result = close.rolling(timeperiod).apply(std_normalized)
    return result
|
flexible
|
{
"blob_id": "6fa0e1dabd178507c32c62146b404bb42f8445d4",
"index": 9860,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef barStdNormal(bars, timeperiod=5):\n \"\"\"Std Normal \"\"\"\n close = bars['close']\n result = close.rolling(timeperiod).apply(std_normalized)\n return result\n",
"step-3": "import pandas as pd\nimport numpy as np\n\n\ndef barStdNormal(bars, timeperiod=5):\n \"\"\"Std Normal \"\"\"\n close = bars['close']\n result = close.rolling(timeperiod).apply(std_normalized)\n return result\n",
"step-4": "import pandas as pd \nimport numpy as np\n#from ctaFunction import std_normalized\n\ndef barStdNormal(bars, timeperiod=5):\n '''Std Normal '''\n close = bars['close']\n result = close.rolling(timeperiod).apply(std_normalized)\n\n return result",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from tkinter import *
import tkinter.messagebox
import apikey
import tinify
class Setting_GUI(Toplevel):
    """Top-level settings window for entering/updating the tinify API key."""

    def __init__(self,parent):
        """Build the settings dialog.

        :param parent: owning window; its ``cont`` StringVar is refreshed
            after a successful key update.
        """
        super().__init__()
        self.parent = parent
        # Placeholder shown when no stored key can be loaded.
        key = "Input your key here"
        self.keystringvar = StringVar()

        self.wm_title("Settings - TingImage")
        # Keep the dialog above other windows.
        self.wm_attributes("-topmost", 1)

        title = Label(self, text="Settings")
        try:
            # Load the previously saved key, if any.
            key = apikey.loadkey()
            statustext = "continue with this key"
        except Exception as e:
            # Label will display str() of the exception as the status text.
            statustext = e
        statuslabel = Label(self, text=statustext)
        self.keystringvar.set(key)
        keytext = Entry(self, textvariable=self.keystringvar, width=40)
        continuebutton = Button(self, text="Continue",command=self.loadkey, width=12)

        # Stack the widgets vertically, stretching to fill the window.
        title.grid(row=0, sticky=W + E + N + S)
        statuslabel.grid(row=1, sticky=W + E + N + S)
        keytext.grid(row=2, sticky=W + E + N + S)
        continuebutton.grid(row=3,padx=5,pady=5)

    def loadkey(self):
        """Validate and persist the entered API key, then close the dialog.

        Shows an error box on failure; on success refreshes the parent's
        compression counter and destroys this window.
        """
        key = self.keystringvar.get()
        try:
            apikey.inputkey(key)
        except Exception as e:
            tkinter.messagebox.showerror("Error", e)
        else:
            tkinter.messagebox.showinfo("Success", "Update API-Key successful!")
            # tinify.compression_count is set as a side effect of key validation.
            self.parent.cont.set(str(tinify.compression_count))
            self.destroy()
|
normal
|
{
"blob_id": "9340c9055a7e0d74d232d878b43d91a3e6cd32e5",
"index": 5785,
"step-1": "<mask token>\n\n\nclass Setting_GUI(Toplevel):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Setting_GUI(Toplevel):\n\n def __init__(self, parent):\n super().__init__()\n self.parent = parent\n key = 'Input your key here'\n self.keystringvar = StringVar()\n self.wm_title('Settings - TingImage')\n self.wm_attributes('-topmost', 1)\n title = Label(self, text='Settings')\n try:\n key = apikey.loadkey()\n statustext = 'continue with this key'\n except Exception as e:\n statustext = e\n statuslabel = Label(self, text=statustext)\n self.keystringvar.set(key)\n keytext = Entry(self, textvariable=self.keystringvar, width=40)\n continuebutton = Button(self, text='Continue', command=self.loadkey,\n width=12)\n title.grid(row=0, sticky=W + E + N + S)\n statuslabel.grid(row=1, sticky=W + E + N + S)\n keytext.grid(row=2, sticky=W + E + N + S)\n continuebutton.grid(row=3, padx=5, pady=5)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Setting_GUI(Toplevel):\n\n def __init__(self, parent):\n super().__init__()\n self.parent = parent\n key = 'Input your key here'\n self.keystringvar = StringVar()\n self.wm_title('Settings - TingImage')\n self.wm_attributes('-topmost', 1)\n title = Label(self, text='Settings')\n try:\n key = apikey.loadkey()\n statustext = 'continue with this key'\n except Exception as e:\n statustext = e\n statuslabel = Label(self, text=statustext)\n self.keystringvar.set(key)\n keytext = Entry(self, textvariable=self.keystringvar, width=40)\n continuebutton = Button(self, text='Continue', command=self.loadkey,\n width=12)\n title.grid(row=0, sticky=W + E + N + S)\n statuslabel.grid(row=1, sticky=W + E + N + S)\n keytext.grid(row=2, sticky=W + E + N + S)\n continuebutton.grid(row=3, padx=5, pady=5)\n\n def loadkey(self):\n key = self.keystringvar.get()\n try:\n apikey.inputkey(key)\n except Exception as e:\n tkinter.messagebox.showerror('Error', e)\n else:\n tkinter.messagebox.showinfo('Success', 'Update API-Key successful!'\n )\n self.parent.cont.set(str(tinify.compression_count))\n self.destroy()\n",
"step-4": "from tkinter import *\nimport tkinter.messagebox\nimport apikey\nimport tinify\n\n\nclass Setting_GUI(Toplevel):\n\n def __init__(self, parent):\n super().__init__()\n self.parent = parent\n key = 'Input your key here'\n self.keystringvar = StringVar()\n self.wm_title('Settings - TingImage')\n self.wm_attributes('-topmost', 1)\n title = Label(self, text='Settings')\n try:\n key = apikey.loadkey()\n statustext = 'continue with this key'\n except Exception as e:\n statustext = e\n statuslabel = Label(self, text=statustext)\n self.keystringvar.set(key)\n keytext = Entry(self, textvariable=self.keystringvar, width=40)\n continuebutton = Button(self, text='Continue', command=self.loadkey,\n width=12)\n title.grid(row=0, sticky=W + E + N + S)\n statuslabel.grid(row=1, sticky=W + E + N + S)\n keytext.grid(row=2, sticky=W + E + N + S)\n continuebutton.grid(row=3, padx=5, pady=5)\n\n def loadkey(self):\n key = self.keystringvar.get()\n try:\n apikey.inputkey(key)\n except Exception as e:\n tkinter.messagebox.showerror('Error', e)\n else:\n tkinter.messagebox.showinfo('Success', 'Update API-Key successful!'\n )\n self.parent.cont.set(str(tinify.compression_count))\n self.destroy()\n",
"step-5": "from tkinter import *\nimport tkinter.messagebox\nimport apikey\nimport tinify\n\nclass Setting_GUI(Toplevel):\n def __init__(self,parent):\n super().__init__()\n self.parent = parent\n key = \"Input your key here\"\n self.keystringvar = StringVar()\n\n self.wm_title(\"Settings - TingImage\")\n self.wm_attributes(\"-topmost\", 1)\n\n title = Label(self, text=\"Settings\")\n try:\n key = apikey.loadkey()\n statustext = \"continue with this key\"\n except Exception as e:\n statustext = e\n statuslabel = Label(self, text=statustext)\n self.keystringvar.set(key)\n keytext = Entry(self, textvariable=self.keystringvar, width=40)\n continuebutton = Button(self, text=\"Continue\",command=self.loadkey, width=12)\n\n title.grid(row=0, sticky=W + E + N + S)\n statuslabel.grid(row=1, sticky=W + E + N + S)\n keytext.grid(row=2, sticky=W + E + N + S)\n continuebutton.grid(row=3,padx=5,pady=5)\n\n def loadkey(self):\n key = self.keystringvar.get()\n try:\n apikey.inputkey(key)\n except Exception as e:\n tkinter.messagebox.showerror(\"Error\", e)\n else:\n tkinter.messagebox.showinfo(\"Success\", \"Update API-Key successful!\")\n self.parent.cont.set(str(tinify.compression_count))\n self.destroy()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def run_task(request, tid):
    """Mark a test task as running, dump its cases to JSON, and launch the runner.

    GET only: loads TestTask ``tid``, serializes all of its TestCase rows to
    ``cases_data.json`` under TASK_PATH, invokes the external runner script,
    then redirects back to the task list. Any other method returns '404'.
    """
    if request.method == 'GET':
        task_obj = TestTask.objects.get(id=tid)
        # ``cases`` is a comma-separated id string; pop(-1) drops the empty
        # element produced by the trailing comma (assumes it always ends
        # with ',' -- TODO confirm against the writer of this field).
        cases_list = task_obj.cases.split(',')
        cases_list.pop(-1)
        # status 1 presumably means "running" -- verify against the model.
        task_obj.status = 1
        task_obj.save()
        print(cases_list)
        all_cases_dict = {}
        # Build {case_id: request-spec} for every case in the task.
        for case_id in cases_list:
            case_obj = TestCase.objects.get(id=case_id)
            case_dict = {'url': case_obj.url, 'method': case_obj.req_method,
                'type_': case_obj.req_type, 'header': case_obj.req_header,
                'parameter': case_obj.req_parameter, 'assert_': case_obj.
                resp_assert}
            all_cases_dict[case_obj.id] = case_dict
        print(all_cases_dict)
        cases_str = json.dumps(all_cases_dict)
        cases_data_file = TASK_PATH + 'cases_data.json'
        print(cases_data_file)
        # Hand the case data to the external runner via a file on disk.
        with open(cases_data_file, 'w+') as f:
            f.write(cases_str)
        # NOTE(review): os.system runs through the shell; RUN_TASK_FILE is a
        # module constant (not user input), but subprocess.run([...]) would
        # be safer and checkable.
        os.system('python3 ' + RUN_TASK_FILE)
        return HttpResponseRedirect('/interface/task_manage')
    else:
        return HttpResponse('404')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def add_task(request):
    """Render the blank "add task" form; any non-GET method returns '404'."""
    if request.method != 'GET':
        return HttpResponse('404')
    return render(request, 'add_task.html', {'type': 'add'})
def run_task(request, tid):
if request.method == 'GET':
task_obj = TestTask.objects.get(id=tid)
cases_list = task_obj.cases.split(',')
cases_list.pop(-1)
task_obj.status = 1
task_obj.save()
print(cases_list)
all_cases_dict = {}
for case_id in cases_list:
case_obj = TestCase.objects.get(id=case_id)
case_dict = {'url': case_obj.url, 'method': case_obj.req_method,
'type_': case_obj.req_type, 'header': case_obj.req_header,
'parameter': case_obj.req_parameter, 'assert_': case_obj.
resp_assert}
all_cases_dict[case_obj.id] = case_dict
print(all_cases_dict)
cases_str = json.dumps(all_cases_dict)
cases_data_file = TASK_PATH + 'cases_data.json'
print(cases_data_file)
with open(cases_data_file, 'w+') as f:
f.write(cases_str)
os.system('python3 ' + RUN_TASK_FILE)
return HttpResponseRedirect('/interface/task_manage')
else:
return HttpResponse('404')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def task_manage(request):
    """Render the task list page with every TestTask; non-GET gets '404'."""
    testtasks = TestTask.objects.all()
    if request.method != 'GET':
        return HttpResponse('404')
    context = {'type': 'list', 'testtasks': testtasks}
    return render(request, 'task_manage.html', context)
def add_task(request):
if request.method == 'GET':
return render(request, 'add_task.html', {'type': 'add'})
else:
return HttpResponse('404')
def run_task(request, tid):
    """Execute test task *tid* (GET only).

    Marks the task as running, serializes every referenced TestCase into
    TASK_PATH/cases_data.json, launches the external runner script, and
    redirects back to the task list.  Non-GET requests get a plain '404'.
    """
    if request.method == 'GET':
        task_obj = TestTask.objects.get(id=tid)
        # `cases` appears to be a comma-separated id string with a trailing
        # separator, so the last (empty) element is discarded -- TODO confirm
        # against the TestTask model / the code that writes `cases`.
        cases_list = task_obj.cases.split(',')
        cases_list.pop(-1)
        task_obj.status = 1  # mark the task as running
        task_obj.save()
        print(cases_list)
        all_cases_dict = {}
        for case_id in cases_list:
            case_obj = TestCase.objects.get(id=case_id)
            # NOTE(review): keys 'type_'/'assert_' carry trailing underscores,
            # presumably to avoid the `type`/`assert` builtins/keywords on the
            # consumer side -- confirm against the runner script.
            case_dict = {'url': case_obj.url, 'method': case_obj.req_method,
                'type_': case_obj.req_type, 'header': case_obj.req_header,
                'parameter': case_obj.req_parameter, 'assert_': case_obj.resp_assert}
            all_cases_dict[case_obj.id] = case_dict
        print(all_cases_dict)
        cases_str = json.dumps(all_cases_dict)
        cases_data_file = TASK_PATH + 'cases_data.json'
        print(cases_data_file)
        with open(cases_data_file, 'w+') as f:
            f.write(cases_str)
        # Synchronously invoke the external test runner.
        os.system('python3 ' + RUN_TASK_FILE)
        return HttpResponseRedirect('/interface/task_manage')
    else:
        return HttpResponse('404')
<|reserved_special_token_1|>
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from interface_app.models import TestTask, TestCase
from interface_app.extend.task_run import run_cases
import os
import json
from interface_app.apps import TASK_PATH, RUN_TASK_FILE
<|reserved_special_token_0|>
def task_manage(request):
    """Render the task-list page; any non-GET request gets a '404' body."""
    tasks = TestTask.objects.all()
    if request.method != 'GET':
        return HttpResponse('404')
    context = {'type': 'list', 'testtasks': tasks}
    return render(request, 'task_manage.html', context)
def add_task(request):
    """Render the task-creation form; any non-GET request gets a '404' body."""
    if request.method != 'GET':
        return HttpResponse('404')
    return render(request, 'add_task.html', {'type': 'add'})
def run_task(request, tid):
    """Execute test task *tid* (GET only).

    Marks the task as running, serializes every referenced TestCase into
    TASK_PATH/cases_data.json, launches the external runner script, and
    redirects back to the task list.  Non-GET requests get a plain '404'.
    """
    if request.method == 'GET':
        task_obj = TestTask.objects.get(id=tid)
        # `cases` appears to be a comma-separated id string with a trailing
        # separator, so the last (empty) element is discarded -- TODO confirm
        # against the TestTask model / the code that writes `cases`.
        cases_list = task_obj.cases.split(',')
        cases_list.pop(-1)
        task_obj.status = 1  # mark the task as running
        task_obj.save()
        print(cases_list)
        all_cases_dict = {}
        for case_id in cases_list:
            case_obj = TestCase.objects.get(id=case_id)
            # NOTE(review): keys 'type_'/'assert_' carry trailing underscores,
            # presumably to avoid the `type`/`assert` builtins/keywords on the
            # consumer side -- confirm against the runner script.
            case_dict = {'url': case_obj.url, 'method': case_obj.req_method,
                'type_': case_obj.req_type, 'header': case_obj.req_header,
                'parameter': case_obj.req_parameter, 'assert_': case_obj.resp_assert}
            all_cases_dict[case_obj.id] = case_dict
        print(all_cases_dict)
        cases_str = json.dumps(all_cases_dict)
        cases_data_file = TASK_PATH + 'cases_data.json'
        print(cases_data_file)
        with open(cases_data_file, 'w+') as f:
            f.write(cases_str)
        # Synchronously invoke the external test runner.
        os.system('python3 ' + RUN_TASK_FILE)
        return HttpResponseRedirect('/interface/task_manage')
    else:
        return HttpResponse('404')
<|reserved_special_token_1|>
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from interface_app.models import TestTask, TestCase
from interface_app.extend.task_run import run_cases
import os
import json
from interface_app.apps import TASK_PATH, RUN_TASK_FILE
"""
说明:接口任务文件,返回HTML页面
"""
# Task list page
def task_manage(request):
    """Render the task-management list; any non-GET request gets a "404" body."""
    tasks = TestTask.objects.all()
    if request.method != "GET":
        return HttpResponse("404")
    context = {"type": "list", "testtasks": tasks}
    return render(request, "task_manage.html", context)
# Create a task
def add_task(request):
    """Render the task-creation form; any non-GET request gets a "404" body."""
    if request.method != "GET":
        return HttpResponse("404")
    return render(request, "add_task.html", {"type": "add"})
# Run a task
def run_task(request, tid):
    """Execute test task *tid* (GET only).

    Marks the task as running, serializes every referenced TestCase into
    TASK_PATH/cases_data.json, launches the external runner script, and
    redirects back to the task list.  Non-GET requests get a plain "404".
    """
    if request.method == "GET":
        task_obj = TestTask.objects.get(id=tid)
        # `cases` appears to be a comma-separated id string with a trailing
        # separator, so the last (empty) element is discarded -- TODO confirm
        # against the TestTask model / the code that writes `cases`.
        cases_list = task_obj.cases.split(",")
        cases_list.pop(-1)

        task_obj.status = 1  # update status: mark the task as running
        task_obj.save()

        print(cases_list)
        # run_cases()  # direct in-process runner (disabled; kept from an earlier revision)
        all_cases_dict = {}
        for case_id in cases_list:
            case_obj = TestCase.objects.get(id=case_id)
            # NOTE(review): keys "type_"/"assert_" carry trailing underscores,
            # presumably to avoid the `type`/`assert` builtins/keywords on the
            # consumer side -- confirm against the runner script.
            case_dict = {
                "url": case_obj.url,
                "method": case_obj.req_method,
                "type_": case_obj.req_type,
                "header": case_obj.req_header,
                "parameter": case_obj.req_parameter,
                "assert_": case_obj.resp_assert
            }
            all_cases_dict[case_obj.id] = case_dict

        print(all_cases_dict)

        cases_str = json.dumps(all_cases_dict)

        cases_data_file = TASK_PATH + "cases_data.json"
        print(cases_data_file)

        with open(cases_data_file, "w+") as f:
            f.write(cases_str)

        # Run the tests via the external runner script (blocking call).
        os.system("python3 " + RUN_TASK_FILE)

        return HttpResponseRedirect("/interface/task_manage")
    else:
        return HttpResponse("404")
# How do we run these cases? -- a unit-test framework plus data-driven testing

# unittest + ddt
|
flexible
|
{
"blob_id": "8be70543a7aa177d9ad48fb736228b1ffba5df16",
"index": 6179,
"step-1": "<mask token>\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-2": "<mask token>\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-3": "<mask token>\n\n\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n if request.method == 'GET':\n return render(request, 'task_manage.html', {'type': 'list',\n 'testtasks': testtasks})\n else:\n return HttpResponse('404')\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom interface_app.models import TestTask, TestCase\nfrom interface_app.extend.task_run import run_cases\nimport os\nimport json\nfrom interface_app.apps import TASK_PATH, RUN_TASK_FILE\n<mask token>\n\n\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n if request.method == 'GET':\n return render(request, 'task_manage.html', {'type': 'list',\n 'testtasks': testtasks})\n else:\n return HttpResponse('404')\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom interface_app.models import TestTask, TestCase\nfrom interface_app.extend.task_run import run_cases\nimport os \nimport json\nfrom interface_app.apps import TASK_PATH, RUN_TASK_FILE\n\n\n\"\"\"\n说明:接口任务文件,返回HTML页面\n\"\"\"\n\n# 获取任务列表\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n \n if request.method == \"GET\":\n return render(request, \"task_manage.html\", {\n \"type\": \"list\",\n \"testtasks\": testtasks,\n })\n else:\n return HttpResponse(\"404\")\n\n\n# 创建任务\ndef add_task(request):\n if request.method == \"GET\":\n return render(request, \"add_task.html\", {\n \"type\": \"add\",\n })\n else:\n return HttpResponse(\"404\")\n\n\n# 运行任务\ndef run_task(request, tid):\n if request.method == \"GET\":\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(\",\")\n cases_list.pop(-1)\n\n task_obj.status = 1 # 修改状态\n task_obj.save()\n\n \n print(cases_list)\n # run_cases() #运行函数\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {\n \"url\": case_obj.url,\n \"method\": case_obj.req_method,\n \"type_\": case_obj.req_type,\n \"header\": case_obj.req_header,\n \"parameter\": case_obj.req_parameter,\n \"assert_\": case_obj.resp_assert\n } \n all_cases_dict[case_obj.id] = case_dict\n\n print(all_cases_dict)\n\n cases_str = json.dumps(all_cases_dict)\n\n cases_data_file = TASK_PATH + \"cases_data.json\"\n print(cases_data_file)\n\n with open(cases_data_file, \"w+\") as f:\n f.write(cases_str)\n\n # 运行测试\n os.system(\"python3 \" + RUN_TASK_FILE)\n \n return HttpResponseRedirect(\"/interface/task_manage\")\n else:\n return HttpResponse(\"404\")\n\n\n# 如何去运行这些用例?--单元测试框架 + 数据驱动\n\n# unittest + ddt\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from proxmin import nmf
from proxmin.utils import Traceback
from proxmin import operators as po
from scipy.optimize import linear_sum_assignment
import numpy as np
import matplotlib.pyplot as plt
import time
from functools import partial
# Configure logging so proxmin's progress messages are visible at INFO level.
import logging
logging.basicConfig()
logger = logging.getLogger('proxmin')
logger.setLevel(logging.INFO)
def generateComponent(m):
    """Create one oscillating (squared-cosine) component of length *m*.

    Frequency and phase are drawn from the global NumPy RNG, so the result
    is reproducible under a fixed seed.  Values lie in [0, 1].
    """
    frequency = 25 * np.random.random()
    shift = 2 * np.pi * np.random.random()
    samples = np.arange(m)
    return np.cos(samples / frequency - shift) ** 2
def generateAmplitudes(k):
    """Draw *k* random mixing weights and normalize them to sum to one."""
    weights = np.array([np.random.random() for _ in range(k)])
    return weights / weights.sum()
def add_noise(Y, sigma):
    """Return *Y* perturbed by zero-mean Gaussian noise with stdev *sigma*."""
    perturbation = np.random.normal(0, sigma, Y.shape)
    return Y + perturbation
def match(A, S, trueS):
    """Permute the rows of S (and columns of A) to best align with trueS.

    Builds the correlation matrix between each true component and each
    found component, then solves a linear assignment problem maximizing
    the summed correlation.  Returns the reordered (A, S) pair; the
    product A @ S is unchanged by the reordering.
    """
    k = S.shape[0]
    cov = np.cov(trueS, S)
    corr = np.zeros((k, k))
    for i in range(k):
        for j in range(k):
            denom = np.sqrt(cov[i + k][i + k] * cov[j][j])
            corr[i][j] = cov[i + k][j] / denom
    # Negate: linear_sum_assignment minimizes, we want maximum correlation.
    src_idx, dst_idx = linear_sum_assignment(-corr)
    S_matched = np.zeros_like(S)
    AT_matched = np.zeros_like(A.T)
    for src, dst in zip(src_idx, dst_idx):
        S_matched[dst] = S[src]
        AT_matched[dst] = A.T[src]
    return AT_matched.T, S_matched
if __name__ == "__main__":
    n = 50        # component resolution (samples per component)
    k = 3         # number of components
    b = 100       # number of observations (mixtures)
    noise = 0.02  # stdev of added noise
    np.random.seed(101)  # fixed seed: the demo is reproducible

    # set up test data: noisy observations of a known factorization Y = A.S
    trueA = np.array([generateAmplitudes(k) for i in range(b)])
    trueS = np.array([generateComponent(n) for i in range(k)])
    trueY = np.dot(trueA, trueS)
    Y = add_noise(trueY, noise)
    # if noise is variable, specify variance matrix of the same shape as Y
    W = None

    # random starting guesses for both factors
    A = np.array([generateAmplitudes(k) for i in range(b)])
    S = np.array([generateComponent(n) for i in range(k)])
    # NOTE(review): presumably projects each row of A onto the unit simplex
    # (non-negative, summing to 1) -- confirm against po.prox_unity_plus.
    p1 = partial(po.prox_unity_plus, axis=1)
    proxs_g = [[p1], None]
    tr = Traceback(2)  # records the iterates of both factors (used below)
    nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-6, e_abs=1e-6/noise**2, traceback=tr)
    # sort components to best match inputs
    A, S = match(A, S, trueS)

    # show data and model: true components, observations, recovered components
    fig = plt.figure(figsize=(6, 7))
    ax = fig.add_subplot(311)
    ax.set_title("True Components S")
    ax.plot(trueS.T)
    ax2 = fig.add_subplot(312)
    ax2.set_title("Data Y")
    ax2.plot(Y.T)
    ax3 = fig.add_subplot(313)
    ax3.set_title("Found Components S")
    ax3.set_xlabel("Pixel")
    ax3.plot(S.T)
    fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)
    fig.show()

    # convergence plot from traceback: model residual at every iteration
    convergences = []
    As = tr['X', 0]
    Ss = tr['X', 1]
    for it in range(tr.it):
        Y = np.dot(As[it], Ss[it])
        convergences.append(((Y - trueY)**2).sum())
    fig2 = plt.figure(figsize=(6, 4))
    ax4 = fig2.add_subplot(111)
    ax4.set_title("Convergence")
    ax4.semilogy(convergences)
    ax4.set_ylabel("$||Y-AS||^2$")
    ax4.set_xlabel("Iterations")
    fig2.show()

    """
    # noise plot
    #noises = np.linspace(0,0.05,21)
    #repeat = 10
    noises = [noise]
    repeat = 1000
    A_chi_squared = np.empty((len(noises), repeat))
    S_chi_squared = np.empty((len(noises), repeat))
    for i in range(len(noises)):
        e = noises[i]
        for r in range(repeat):
            Y = add_noise(trueY, e)
            A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )
            A, S = match(A, S, trueS)
            A_chi_squared[i,r] = np.sum((A - trueA)**2)
            S_chi_squared[i,r] = np.sum((S - trueS)**2)
    fig3 = plt.figure(figsize=(6,4))
    ax5 = fig3.add_subplot(111)
    dof_A = A.shape[0]*A.shape[1]
    dof_S = S.shape[0]*S.shape[1]
    ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label="$\chi^2_S$ / DOF")
    ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label="$\chi^2_A$ / DOF")
    ax5.legend()
    ax5.set_ylabel("Chi-squared")
    ax5.set_xlabel("Standard deviation of noise")
    fig3.show()
    """
|
normal
|
{
"blob_id": "0edc0c2f86bda0122d4b231eed700d7a5b08ec1e",
"index": 8279,
"step-1": "<mask token>\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\n<mask token>\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\n<mask token>\n",
"step-3": "<mask token>\nlogging.basicConfig()\nlogger = logging.getLogger('proxmin')\nlogger.setLevel(logging.INFO)\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\nif __name__ == '__main__':\n n = 50\n k = 3\n b = 100\n noise = 0.02\n np.random.seed(101)\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\n trueS = np.array([generateComponent(n) for i in range(k)])\n trueY = np.dot(trueA, trueS)\n Y = add_noise(trueY, noise)\n W = None\n A = np.array([generateAmplitudes(k) for i in range(b)])\n S = np.array([generateComponent(n) for i in range(k)])\n p1 = partial(po.prox_unity_plus, axis=1)\n proxs_g = [[p1], None]\n tr = Traceback(2)\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,\n traceback=tr)\n A, S = match(A, S, trueS)\n fig = plt.figure(figsize=(6, 7))\n ax = fig.add_subplot(311)\n ax.set_title('True Components S')\n ax.plot(trueS.T)\n ax2 = fig.add_subplot(312)\n ax2.set_title('Data Y')\n ax2.plot(Y.T)\n ax3 = 
fig.add_subplot(313)\n ax3.set_title('Found Components S')\n ax3.set_xlabel('Pixel')\n ax3.plot(S.T)\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\n fig.show()\n convergences = []\n As = tr['X', 0]\n Ss = tr['X', 1]\n for it in range(tr.it):\n Y = np.dot(As[it], Ss[it])\n convergences.append(((Y - trueY) ** 2).sum())\n fig2 = plt.figure(figsize=(6, 4))\n ax4 = fig2.add_subplot(111)\n ax4.set_title('Convergence')\n ax4.semilogy(convergences)\n ax4.set_ylabel('$||Y-AS||^2$')\n ax4.set_xlabel('Iterations')\n fig2.show()\n \"\"\"\n # noise plot\n #noises = np.linspace(0,0.05,21)\n #repeat = 10\n noises = [noise]\n repeat = 1000\n A_chi_squared = np.empty((len(noises), repeat))\n S_chi_squared = np.empty((len(noises), repeat))\n for i in range(len(noises)):\n e = noises[i]\n for r in range(repeat):\n Y = add_noise(trueY, e)\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\n A, S = match(A, S, trueS)\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\n fig3 = plt.figure(figsize=(6,4))\n ax5 = fig3.add_subplot(111)\n dof_A = A.shape[0]*A.shape[1]\n dof_S = S.shape[0]*S.shape[1]\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\\\chi^2_S$ / DOF\")\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\\\chi^2_A$ / DOF\")\n ax5.legend()\n ax5.set_ylabel(\"Chi-squared\")\n ax5.set_xlabel(\"Standard deviation of noise\")\n fig3.show()\n \"\"\"\n",
"step-4": "from proxmin import nmf\nfrom proxmin.utils import Traceback\nfrom proxmin import operators as po\nfrom scipy.optimize import linear_sum_assignment\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nfrom functools import partial\nimport logging\nlogging.basicConfig()\nlogger = logging.getLogger('proxmin')\nlogger.setLevel(logging.INFO)\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\nif __name__ == '__main__':\n n = 50\n k = 3\n b = 100\n noise = 0.02\n np.random.seed(101)\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\n trueS = np.array([generateComponent(n) for i in range(k)])\n trueY = np.dot(trueA, trueS)\n Y = add_noise(trueY, noise)\n W = None\n A = np.array([generateAmplitudes(k) for i in range(b)])\n S = np.array([generateComponent(n) for i in range(k)])\n p1 = partial(po.prox_unity_plus, axis=1)\n proxs_g = [[p1], None]\n tr = Traceback(2)\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,\n 
traceback=tr)\n A, S = match(A, S, trueS)\n fig = plt.figure(figsize=(6, 7))\n ax = fig.add_subplot(311)\n ax.set_title('True Components S')\n ax.plot(trueS.T)\n ax2 = fig.add_subplot(312)\n ax2.set_title('Data Y')\n ax2.plot(Y.T)\n ax3 = fig.add_subplot(313)\n ax3.set_title('Found Components S')\n ax3.set_xlabel('Pixel')\n ax3.plot(S.T)\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\n fig.show()\n convergences = []\n As = tr['X', 0]\n Ss = tr['X', 1]\n for it in range(tr.it):\n Y = np.dot(As[it], Ss[it])\n convergences.append(((Y - trueY) ** 2).sum())\n fig2 = plt.figure(figsize=(6, 4))\n ax4 = fig2.add_subplot(111)\n ax4.set_title('Convergence')\n ax4.semilogy(convergences)\n ax4.set_ylabel('$||Y-AS||^2$')\n ax4.set_xlabel('Iterations')\n fig2.show()\n \"\"\"\n # noise plot\n #noises = np.linspace(0,0.05,21)\n #repeat = 10\n noises = [noise]\n repeat = 1000\n A_chi_squared = np.empty((len(noises), repeat))\n S_chi_squared = np.empty((len(noises), repeat))\n for i in range(len(noises)):\n e = noises[i]\n for r in range(repeat):\n Y = add_noise(trueY, e)\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\n A, S = match(A, S, trueS)\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\n fig3 = plt.figure(figsize=(6,4))\n ax5 = fig3.add_subplot(111)\n dof_A = A.shape[0]*A.shape[1]\n dof_S = S.shape[0]*S.shape[1]\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\\\chi^2_S$ / DOF\")\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\\\chi^2_A$ / DOF\")\n ax5.legend()\n ax5.set_ylabel(\"Chi-squared\")\n ax5.set_xlabel(\"Standard deviation of noise\")\n fig3.show()\n \"\"\"\n",
"step-5": "from proxmin import nmf\r\nfrom proxmin.utils import Traceback\r\nfrom proxmin import operators as po\r\nfrom scipy.optimize import linear_sum_assignment\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport time\r\nfrom functools import partial\r\n\r\n# initialize and run NMF\r\nimport logging\r\nlogging.basicConfig()\r\nlogger = logging.getLogger('proxmin')\r\nlogger.setLevel(logging.INFO)\r\n\r\ndef generateComponent(m):\r\n \"\"\"Creates oscillating components to be mixed\"\"\"\r\n freq = 25*np.random.random()\r\n phase = 2*np.pi*np.random.random()\r\n x = np.arange(m)\r\n return np.cos(x/freq-phase)**2\r\n\r\ndef generateAmplitudes(k):\r\n \"\"\"Makes mixing coefficients\"\"\"\r\n res = np.array([np.random.random() for i in range(k)])\r\n return res/res.sum()\r\n\r\ndef add_noise(Y, sigma):\r\n \"\"\"Adds noise to Y\"\"\"\r\n return Y + np.random.normal(0, sigma, Y.shape)\r\n\r\ndef match(A, S, trueS):\r\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\r\n cov = np.cov(trueS, S)\r\n k = S.shape[0]\r\n corr = np.zeros([k,k])\r\n for i in range(k):\r\n for j in range(k):\r\n corr[i][j] = cov[i + k][j]/np.sqrt(cov[i + k][i + k]*cov[j][j])\r\n arrangement = linear_sum_assignment(-corr)\r\n resS = np.zeros_like(S)\r\n resAT = np.zeros_like(A.T)\r\n for t in range(k):\r\n resS[arrangement[1][t]] = S[arrangement[0][t]]\r\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\r\n return resAT.T, resS\r\n\r\nif __name__ == \"__main__\":\r\n n = 50 \t\t\t# component resolution\r\n k = 3 \t\t\t# number of components\r\n b = 100\t\t\t# number of observations\r\n noise = 0.02 # stdev of added noise\r\n np.random.seed(101)\r\n\r\n # set up test data\r\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\r\n trueS = np.array([generateComponent(n) for i in range(k)])\r\n trueY = np.dot(trueA,trueS)\r\n Y = add_noise(trueY, noise)\r\n # if noise is variable, specify variance 
matrix of the same shape as Y\r\n W = None\r\n\r\n A = np.array([generateAmplitudes(k) for i in range(b)])\r\n S = np.array([generateComponent(n) for i in range(k)])\r\n p1 = partial(po.prox_unity_plus, axis=1)\r\n proxs_g=[[p1], None]\r\n tr = Traceback(2)\r\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-6, e_abs=1e-6/noise**2, traceback=tr)\r\n # sort components to best match inputs\r\n A, S = match(A, S, trueS)\r\n\r\n # show data and model\r\n fig = plt.figure(figsize=(6,7))\r\n ax = fig.add_subplot(311)\r\n ax.set_title(\"True Components S\")\r\n ax.plot(trueS.T)\r\n ax2 = fig.add_subplot(312)\r\n ax2.set_title(\"Data Y\")\r\n ax2.plot(Y.T)\r\n ax3 = fig.add_subplot(313)\r\n ax3.set_title(\"Found Components S\")\r\n ax3.set_xlabel(\"Pixel\")\r\n ax3.plot(S.T)\r\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\r\n fig.show()\r\n\r\n # convergence plot from traceback\r\n convergences = []\r\n As = tr['X',0]\r\n Ss = tr['X',1]\r\n for it in range(tr.it):\r\n Y = np.dot(As[it], Ss[it])\r\n convergences.append(((Y - trueY)**2).sum())\r\n fig2 = plt.figure(figsize=(6,4))\r\n ax4 = fig2.add_subplot(111)\r\n ax4.set_title(\"Convergence\")\r\n ax4.semilogy(convergences)\r\n ax4.set_ylabel(\"$||Y-AS||^2$\")\r\n ax4.set_xlabel(\"Iterations\")\r\n fig2.show()\r\n\r\n \"\"\"\r\n # noise plot\r\n #noises = np.linspace(0,0.05,21)\r\n #repeat = 10\r\n noises = [noise]\r\n repeat = 1000\r\n A_chi_squared = np.empty((len(noises), repeat))\r\n S_chi_squared = np.empty((len(noises), repeat))\r\n for i in range(len(noises)):\r\n e = noises[i]\r\n for r in range(repeat):\r\n Y = add_noise(trueY, e)\r\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\r\n A, S = match(A, S, trueS)\r\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\r\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\r\n fig3 = plt.figure(figsize=(6,4))\r\n ax5 = fig3.add_subplot(111)\r\n dof_A = A.shape[0]*A.shape[1]\r\n dof_S = S.shape[0]*S.shape[1]\r\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, 
yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\chi^2_S$ / DOF\")\r\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\chi^2_A$ / DOF\")\r\n ax5.legend()\r\n ax5.set_ylabel(\"Chi-squared\")\r\n ax5.set_xlabel(\"Standard deviation of noise\")\r\n fig3.show()\r\n \"\"\"\r\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
from flask import Flask, jsonify, abort, make_response
from matchtype import matchtyper
from db import db_handle
import sys
api = Flask(__name__)
@api.route('/get/<key_name>', methods=['GET'])
def get(key_name):
    """Look up *key_name* in the store and return its typed value as JSON.

    Aborts with a 404 when the key has no associated value.
    """
    record = db_handle(key_name)
    if record[1] is None:
        abort(404)
    payload = matchtyper(record)
    return make_response(jsonify(payload))
@api.errorhandler(404)
def not_found(error):
    """Return a JSON error body for 404s instead of Flask's default HTML."""
    body = jsonify({'error': 'Not found'})
    return make_response(body, 404)
if __name__ == '__main__':
    # Development entry point only: debug mode, loopback binding.
    api.debug = True
    api.run(host='localhost', port=8080)
|
normal
|
{
"blob_id": "44e9fd355bfab3f007c5428e8a5f0930c4011646",
"index": 3853,
"step-1": "<mask token>\n\n\n@api.route('/get/<key_name>', methods=['GET'])\ndef get(key_name):\n li = db_handle(key_name)\n if li[1] is None:\n abort(404)\n else:\n result = matchtyper(li)\n return make_response(jsonify(result))\n\n\n@api.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@api.route('/get/<key_name>', methods=['GET'])\ndef get(key_name):\n li = db_handle(key_name)\n if li[1] is None:\n abort(404)\n else:\n result = matchtyper(li)\n return make_response(jsonify(result))\n\n\n@api.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\nif __name__ == '__main__':\n api.debug = True\n api.run(host='localhost', port=8080)\n",
"step-3": "<mask token>\napi = Flask(__name__)\n\n\n@api.route('/get/<key_name>', methods=['GET'])\ndef get(key_name):\n li = db_handle(key_name)\n if li[1] is None:\n abort(404)\n else:\n result = matchtyper(li)\n return make_response(jsonify(result))\n\n\n@api.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\nif __name__ == '__main__':\n api.debug = True\n api.run(host='localhost', port=8080)\n",
"step-4": "from flask import Flask, jsonify, abort, make_response\nfrom matchtype import matchtyper\nfrom db import db_handle\nimport sys\napi = Flask(__name__)\n\n\n@api.route('/get/<key_name>', methods=['GET'])\ndef get(key_name):\n li = db_handle(key_name)\n if li[1] is None:\n abort(404)\n else:\n result = matchtyper(li)\n return make_response(jsonify(result))\n\n\n@api.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\nif __name__ == '__main__':\n api.debug = True\n api.run(host='localhost', port=8080)\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class HaakePhoenix(ToolWindow):
<|reserved_special_token_0|>
def __init__(self, *args, **wargs):
self.indicators = {}
super().__init__(*args, **wargs)
def init_gui(self, *args, **kwargs):
statusgrid = self.builder.get_object('statusgrid')
for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,
'setpoint', 'Target temperature'), (0, 2, 'temperature',
'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,
'control_on', 'Temperature control'), (1, 0, 'lowlimit',
'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,
'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),
(1, 4, 'diffcontrol_on', 'Differential control')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
statusgrid.attach(self.indicators[vn], column, row, 1, 1)
errorgrid = self.builder.get_object('errorgrid')
for row, column, vn, label in [(0, 0, 'external_pt100_error',
'External Pt100'), (0, 1, 'internal_pt100_error',
'Internal Pt100'), (0, 2, 'liquid_level_low_error',
'Liquid level'), (0, 3, 'liquid_level_alarm_error',
'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'
), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,
'external_alarm_error', 'External alarm'), (1, 2,
'overtemperature_error', 'Overtemperature'), (1, 3,
'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',
'Status flags')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
errorgrid.attach(self.indicators[vn], column, row, 1, 1)
othergrid = self.builder.get_object('othergrid')
for row, column, vn, label in [(0, 0, 'firmwareversion',
'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',
'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',
'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,
'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',
'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,
'watchdog_setpoint', 'Watchdog setpoint')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
othergrid.attach(self.indicators[vn], column, row, 1, 1)
self.update_indicators()
def on_mainwidget_map(self, window):
if super().on_mainwidget_map(window):
return True
self.update_indicators()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def on_fuzzyid_switch_state_set(self, switch, state):
self.instrument.get_device('haakephoenix').set_variable('fuzzyid',
state)
return True
def on_set_setpoint(self, button):
spinbutton = self.builder.get_object('setpoint_spin')
self.instrument.get_device('haakephoenix').set_variable('setpoint',
spinbutton.get_value())
def on_set_lowlimit(self, button):
spinbutton = self.builder.get_object('lowlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('lowlimit',
spinbutton.get_value())
<|reserved_special_token_0|>
def on_update_rtc(self, button):
now = datetime.datetime.now()
self.instrument.get_device('haakephoenix').set_variable('date', now
.date())
self.instrument.get_device('haakephoenix').set_variable('time', now
.time())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HaakePhoenix(ToolWindow):
<|reserved_special_token_0|>
def __init__(self, *args, **wargs):
self.indicators = {}
super().__init__(*args, **wargs)
def init_gui(self, *args, **kwargs):
statusgrid = self.builder.get_object('statusgrid')
for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,
'setpoint', 'Target temperature'), (0, 2, 'temperature',
'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,
'control_on', 'Temperature control'), (1, 0, 'lowlimit',
'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,
'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),
(1, 4, 'diffcontrol_on', 'Differential control')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
statusgrid.attach(self.indicators[vn], column, row, 1, 1)
errorgrid = self.builder.get_object('errorgrid')
for row, column, vn, label in [(0, 0, 'external_pt100_error',
'External Pt100'), (0, 1, 'internal_pt100_error',
'Internal Pt100'), (0, 2, 'liquid_level_low_error',
'Liquid level'), (0, 3, 'liquid_level_alarm_error',
'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'
), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,
'external_alarm_error', 'External alarm'), (1, 2,
'overtemperature_error', 'Overtemperature'), (1, 3,
'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',
'Status flags')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
errorgrid.attach(self.indicators[vn], column, row, 1, 1)
othergrid = self.builder.get_object('othergrid')
for row, column, vn, label in [(0, 0, 'firmwareversion',
'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',
'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',
'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,
'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',
'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,
'watchdog_setpoint', 'Watchdog setpoint')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
othergrid.attach(self.indicators[vn], column, row, 1, 1)
self.update_indicators()
def on_mainwidget_map(self, window):
if super().on_mainwidget_map(window):
return True
self.update_indicators()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def on_circulator_switch_state_set(self, switch, state):
dev = self.instrument.get_device('haakephoenix')
if state:
dev.execute_command('start')
else:
dev.execute_command('stop')
return True
def on_fuzzyid_switch_state_set(self, switch, state):
self.instrument.get_device('haakephoenix').set_variable('fuzzyid',
state)
return True
def on_set_setpoint(self, button):
spinbutton = self.builder.get_object('setpoint_spin')
self.instrument.get_device('haakephoenix').set_variable('setpoint',
spinbutton.get_value())
def on_set_lowlimit(self, button):
spinbutton = self.builder.get_object('lowlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('lowlimit',
spinbutton.get_value())
def on_set_highlimit(self, button):
spinbutton = self.builder.get_object('highlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('highlimit',
spinbutton.get_value())
def on_update_rtc(self, button):
now = datetime.datetime.now()
self.instrument.get_device('haakephoenix').set_variable('date', now
.date())
self.instrument.get_device('haakephoenix').set_variable('time', now
.time())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HaakePhoenix(ToolWindow):
required_devices = ['haakephoenix']
def __init__(self, *args, **wargs):
self.indicators = {}
super().__init__(*args, **wargs)
def init_gui(self, *args, **kwargs):
statusgrid = self.builder.get_object('statusgrid')
for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,
'setpoint', 'Target temperature'), (0, 2, 'temperature',
'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,
'control_on', 'Temperature control'), (1, 0, 'lowlimit',
'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,
'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),
(1, 4, 'diffcontrol_on', 'Differential control')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
statusgrid.attach(self.indicators[vn], column, row, 1, 1)
errorgrid = self.builder.get_object('errorgrid')
for row, column, vn, label in [(0, 0, 'external_pt100_error',
'External Pt100'), (0, 1, 'internal_pt100_error',
'Internal Pt100'), (0, 2, 'liquid_level_low_error',
'Liquid level'), (0, 3, 'liquid_level_alarm_error',
'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'
), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,
'external_alarm_error', 'External alarm'), (1, 2,
'overtemperature_error', 'Overtemperature'), (1, 3,
'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',
'Status flags')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
errorgrid.attach(self.indicators[vn], column, row, 1, 1)
othergrid = self.builder.get_object('othergrid')
for row, column, vn, label in [(0, 0, 'firmwareversion',
'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',
'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',
'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,
'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',
'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,
'watchdog_setpoint', 'Watchdog setpoint')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
othergrid.attach(self.indicators[vn], column, row, 1, 1)
self.update_indicators()
def on_mainwidget_map(self, window):
if super().on_mainwidget_map(window):
return True
self.update_indicators()
def update_indicators(self):
dev = self.instrument.get_device('haakephoenix')
for vn in self.indicators:
self.on_device_variable_change(dev, vn, dev.get_variable(vn))
self.builder.get_object('setpoint_adjustment').set_value(dev.
get_variable('setpoint'))
self.builder.get_object('lowlimit_adjustment').set_value(dev.
get_variable('lowlimit'))
self.builder.get_object('highlimit_adjustment').set_value(dev.
get_variable('highlimit'))
def on_device_variable_change(self, device, variablename, newvalue):
if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',
'date', 'time', 'faultstatus']:
self.indicators[variablename].set_value(str(newvalue),
IndicatorState.NEUTRAL)
elif variablename in ['setpoint', 'temperature', 'lowlimit',
'highlimit']:
self.indicators[variablename].set_value('%.2f°C' % newvalue,
IndicatorState.NEUTRAL)
elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',
'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:
self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(
newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(
bool(newvalue))])
elif variablename in ['pump_power']:
self.indicators[variablename].set_value('%.2f %%' % newvalue, [
IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])
elif variablename in ['external_pt100_error',
'internal_pt100_error', 'liquid_level_low_error',
'cooling_error', 'main_relay_missing_error']:
self.indicators[variablename].set_value(['OK', 'ERROR'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['liquid_level_alarm_error',
'external_alarm_error', 'overtemperature_error']:
self.indicators[variablename].set_value(['OK', 'ALARM'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['pump_overload_error']:
self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['watchdog_setpoint']:
self.indicators[variablename].set_value('%.2f sec' % newvalue,
IndicatorState.UNKNOWN)
elif variablename in ['control_external']:
self.indicators[variablename].set_value(['Internal', 'External'
][int(bool(newvalue))], IndicatorState.NEUTRAL)
if variablename == 'fuzzyid':
self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))
elif variablename == 'pump_power':
self.builder.get_object('circulator_switch').set_state(newvalue > 0
)
return False
def on_circulator_switch_state_set(self, switch, state):
dev = self.instrument.get_device('haakephoenix')
if state:
dev.execute_command('start')
else:
dev.execute_command('stop')
return True
def on_fuzzyid_switch_state_set(self, switch, state):
self.instrument.get_device('haakephoenix').set_variable('fuzzyid',
state)
return True
def on_set_setpoint(self, button):
spinbutton = self.builder.get_object('setpoint_spin')
self.instrument.get_device('haakephoenix').set_variable('setpoint',
spinbutton.get_value())
def on_set_lowlimit(self, button):
spinbutton = self.builder.get_object('lowlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('lowlimit',
spinbutton.get_value())
def on_set_highlimit(self, button):
spinbutton = self.builder.get_object('highlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('highlimit',
spinbutton.get_value())
def on_update_rtc(self, button):
now = datetime.datetime.now()
self.instrument.get_device('haakephoenix').set_variable('date', now
.date())
self.instrument.get_device('haakephoenix').set_variable('time', now
.time())
<|reserved_special_token_1|>
import datetime
from ..core.indicator import Indicator, IndicatorState
from ..core.toolwindow import ToolWindow
class HaakePhoenix(ToolWindow):
required_devices = ['haakephoenix']
def __init__(self, *args, **wargs):
self.indicators = {}
super().__init__(*args, **wargs)
def init_gui(self, *args, **kwargs):
statusgrid = self.builder.get_object('statusgrid')
for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,
'setpoint', 'Target temperature'), (0, 2, 'temperature',
'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,
'control_on', 'Temperature control'), (1, 0, 'lowlimit',
'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,
'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),
(1, 4, 'diffcontrol_on', 'Differential control')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
statusgrid.attach(self.indicators[vn], column, row, 1, 1)
errorgrid = self.builder.get_object('errorgrid')
for row, column, vn, label in [(0, 0, 'external_pt100_error',
'External Pt100'), (0, 1, 'internal_pt100_error',
'Internal Pt100'), (0, 2, 'liquid_level_low_error',
'Liquid level'), (0, 3, 'liquid_level_alarm_error',
'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'
), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,
'external_alarm_error', 'External alarm'), (1, 2,
'overtemperature_error', 'Overtemperature'), (1, 3,
'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',
'Status flags')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
errorgrid.attach(self.indicators[vn], column, row, 1, 1)
othergrid = self.builder.get_object('othergrid')
for row, column, vn, label in [(0, 0, 'firmwareversion',
'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',
'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',
'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,
'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',
'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,
'watchdog_setpoint', 'Watchdog setpoint')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN
)
othergrid.attach(self.indicators[vn], column, row, 1, 1)
self.update_indicators()
def on_mainwidget_map(self, window):
if super().on_mainwidget_map(window):
return True
self.update_indicators()
def update_indicators(self):
dev = self.instrument.get_device('haakephoenix')
for vn in self.indicators:
self.on_device_variable_change(dev, vn, dev.get_variable(vn))
self.builder.get_object('setpoint_adjustment').set_value(dev.
get_variable('setpoint'))
self.builder.get_object('lowlimit_adjustment').set_value(dev.
get_variable('lowlimit'))
self.builder.get_object('highlimit_adjustment').set_value(dev.
get_variable('highlimit'))
def on_device_variable_change(self, device, variablename, newvalue):
if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',
'date', 'time', 'faultstatus']:
self.indicators[variablename].set_value(str(newvalue),
IndicatorState.NEUTRAL)
elif variablename in ['setpoint', 'temperature', 'lowlimit',
'highlimit']:
self.indicators[variablename].set_value('%.2f°C' % newvalue,
IndicatorState.NEUTRAL)
elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',
'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:
self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(
newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(
bool(newvalue))])
elif variablename in ['pump_power']:
self.indicators[variablename].set_value('%.2f %%' % newvalue, [
IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])
elif variablename in ['external_pt100_error',
'internal_pt100_error', 'liquid_level_low_error',
'cooling_error', 'main_relay_missing_error']:
self.indicators[variablename].set_value(['OK', 'ERROR'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['liquid_level_alarm_error',
'external_alarm_error', 'overtemperature_error']:
self.indicators[variablename].set_value(['OK', 'ALARM'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['pump_overload_error']:
self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(
bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]
[int(bool(newvalue))])
elif variablename in ['watchdog_setpoint']:
self.indicators[variablename].set_value('%.2f sec' % newvalue,
IndicatorState.UNKNOWN)
elif variablename in ['control_external']:
self.indicators[variablename].set_value(['Internal', 'External'
][int(bool(newvalue))], IndicatorState.NEUTRAL)
if variablename == 'fuzzyid':
self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))
elif variablename == 'pump_power':
self.builder.get_object('circulator_switch').set_state(newvalue > 0
)
return False
def on_circulator_switch_state_set(self, switch, state):
dev = self.instrument.get_device('haakephoenix')
if state:
dev.execute_command('start')
else:
dev.execute_command('stop')
return True
def on_fuzzyid_switch_state_set(self, switch, state):
self.instrument.get_device('haakephoenix').set_variable('fuzzyid',
state)
return True
def on_set_setpoint(self, button):
spinbutton = self.builder.get_object('setpoint_spin')
self.instrument.get_device('haakephoenix').set_variable('setpoint',
spinbutton.get_value())
def on_set_lowlimit(self, button):
spinbutton = self.builder.get_object('lowlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('lowlimit',
spinbutton.get_value())
def on_set_highlimit(self, button):
spinbutton = self.builder.get_object('highlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('highlimit',
spinbutton.get_value())
def on_update_rtc(self, button):
now = datetime.datetime.now()
self.instrument.get_device('haakephoenix').set_variable('date', now
.date())
self.instrument.get_device('haakephoenix').set_variable('time', now
.time())
<|reserved_special_token_1|>
import datetime
from ..core.indicator import Indicator, IndicatorState
from ..core.toolwindow import ToolWindow
class HaakePhoenix(ToolWindow):
required_devices = ['haakephoenix']
def __init__(self, *args, **wargs):
self.indicators = {}
super().__init__(*args, **wargs)
def init_gui(self, *args, **kwargs):
statusgrid = self.builder.get_object('statusgrid')
for row, column, vn, label in [(0, 0, '_status', 'Status'),
(0, 1, 'setpoint', 'Target temperature'),
(0, 2, 'temperature', 'Temperature'),
(0, 3, 'pump_power', 'Pump speed'),
(0, 4, 'control_on', 'Temperature control'),
(1, 0, 'lowlimit', 'Low limit'),
(1, 1, 'highlimit', 'High limit'),
(1, 2, 'cooling_on', 'Cooling'),
(1, 3, 'control_external', 'Control'),
(1, 4, 'diffcontrol_on', 'Differential control')]:
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)
statusgrid.attach(self.indicators[vn], column, row, 1, 1)
errorgrid = self.builder.get_object('errorgrid')
for row, column, vn, label in [(0, 0, 'external_pt100_error', 'External Pt100'), #
(0, 1, 'internal_pt100_error', 'Internal Pt100'), #
(0, 2, 'liquid_level_low_error', 'Liquid level'), #
(0, 3, 'liquid_level_alarm_error', 'Liquid level alarm'), #
(0, 4, 'cooling_error', 'Cooling system'), #
(1, 0, 'pump_overload_error', 'Pump'), #
(1, 1, 'external_alarm_error', 'External alarm'), #
(1, 2, 'overtemperature_error', 'Overtemperature'), #
(1, 3, 'main_relay_missing_error', 'Main relay'), #
(1, 4, 'faultstatus', 'Status flags')]: #
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)
errorgrid.attach(self.indicators[vn], column, row, 1, 1)
othergrid = self.builder.get_object('othergrid')
for row, column, vn, label in [(0, 0, 'firmwareversion', 'Firmware version'), #
(0, 1, 'date', 'Date'), #
(0, 2, 'time', 'Time'), #
(0, 3, 'autostart', 'Autostart'), #
(0, 4, 'beep', 'Beep'), #
(1, 0, 'fuzzyid', 'Fuzzy identification'), #
(1, 1, 'fuzzycontrol', 'Fuzzy control'), #
(1, 2, 'fuzzystatus', 'Fuzzy status'), #
(1, 3, 'watchdog_on', 'Watchdog'), #
(1, 4, 'watchdog_setpoint', 'Watchdog setpoint')]: #
self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)
othergrid.attach(self.indicators[vn], column, row, 1, 1)
self.update_indicators()
def on_mainwidget_map(self, window):
if super().on_mainwidget_map(window):
return True
self.update_indicators()
def update_indicators(self):
dev = self.instrument.get_device('haakephoenix')
for vn in self.indicators:
self.on_device_variable_change(dev, vn, dev.get_variable(vn))
self.builder.get_object('setpoint_adjustment').set_value(
dev.get_variable('setpoint'))
self.builder.get_object('lowlimit_adjustment').set_value(
dev.get_variable('lowlimit'))
self.builder.get_object('highlimit_adjustment').set_value(
dev.get_variable('highlimit'))
def on_device_variable_change(self, device, variablename, newvalue):
if variablename in ['_status', 'firmwareversion', 'fuzzycontrol', 'date', 'time', 'faultstatus']:
self.indicators[variablename].set_value(str(newvalue), IndicatorState.NEUTRAL)
elif variablename in ['setpoint', 'temperature', 'lowlimit', 'highlimit']:
self.indicators[variablename].set_value('%.2f°C' % newvalue, IndicatorState.NEUTRAL)
elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on', 'watchdog_on', 'beep', 'fuzzyid',
'fuzzystatus',
'autostart']:
self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(newvalue))],
[IndicatorState.ERROR, IndicatorState.OK][int(bool(newvalue))])
elif variablename in ['pump_power']:
self.indicators[variablename].set_value('%.2f %%' % newvalue,
[IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])
elif variablename in ['external_pt100_error', 'internal_pt100_error', 'liquid_level_low_error', 'cooling_error',
'main_relay_missing_error']:
self.indicators[variablename].set_value(['OK', 'ERROR'][int(bool(newvalue))],
[IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])
elif variablename in ['liquid_level_alarm_error', 'external_alarm_error', 'overtemperature_error']:
self.indicators[variablename].set_value(['OK', 'ALARM'][int(bool(newvalue))],
[IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])
elif variablename in ['pump_overload_error']:
self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(bool(newvalue))],
[IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])
elif variablename in ['watchdog_setpoint']:
self.indicators[variablename].set_value('%.2f sec' % newvalue, IndicatorState.UNKNOWN)
elif variablename in ['control_external']:
self.indicators[variablename].set_value(['Internal', 'External'][int(bool(newvalue))],
IndicatorState.NEUTRAL)
if variablename == 'fuzzyid':
self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))
elif variablename == 'pump_power':
self.builder.get_object('circulator_switch').set_state(newvalue > 0)
return False
def on_circulator_switch_state_set(self, switch, state):
dev = self.instrument.get_device('haakephoenix')
if state:
dev.execute_command('start')
else:
dev.execute_command('stop')
return True
def on_fuzzyid_switch_state_set(self, switch, state):
self.instrument.get_device('haakephoenix').set_variable('fuzzyid', state)
return True
def on_set_setpoint(self, button):
spinbutton = self.builder.get_object('setpoint_spin')
self.instrument.get_device('haakephoenix').set_variable('setpoint', spinbutton.get_value())
def on_set_lowlimit(self, button):
spinbutton = self.builder.get_object('lowlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('lowlimit', spinbutton.get_value())
def on_set_highlimit(self, button):
spinbutton = self.builder.get_object('highlimit_spin')
self.instrument.get_device('haakephoenix').set_variable('highlimit', spinbutton.get_value())
def on_update_rtc(self, button):
now = datetime.datetime.now()
self.instrument.get_device('haakephoenix').set_variable('date', now.date())
self.instrument.get_device('haakephoenix').set_variable('time', now.time())
|
flexible
|
{
"blob_id": "25aa0766505b22588107d44e15c3596e9383d4e9",
"index": 486,
"step-1": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n <mask token>\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog 
setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n <mask token>\n <mask token>\n <mask token>\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n <mask token>\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n",
"step-2": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n <mask token>\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog 
setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n <mask token>\n <mask token>\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n",
"step-3": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 
'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(dev.\n get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(dev.\n get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(dev.\n get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',\n 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue),\n IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit',\n 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue,\n IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',\n 'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(\n newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(\n bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue, [\n IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error',\n 'internal_pt100_error', 'liquid_level_low_error',\n 'cooling_error', 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in 
['liquid_level_alarm_error',\n 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue,\n IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'\n ][int(bool(newvalue))], IndicatorState.NEUTRAL)\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0\n )\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n 
self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n",
"step-4": "import datetime\nfrom ..core.indicator import Indicator, IndicatorState\nfrom ..core.toolwindow import ToolWindow\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), 
(1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(dev.\n get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(dev.\n get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(dev.\n get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',\n 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue),\n IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit',\n 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue,\n IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',\n 'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(\n newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(\n bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue, [\n IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error',\n 'internal_pt100_error', 'liquid_level_low_error',\n 'cooling_error', 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(\n bool(newvalue))], [IndicatorState.OK, 
IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['liquid_level_alarm_error',\n 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue,\n IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'\n ][int(bool(newvalue))], IndicatorState.NEUTRAL)\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0\n )\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n 
now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n",
"step-5": "import datetime\n\nfrom ..core.indicator import Indicator, IndicatorState\nfrom ..core.toolwindow import ToolWindow\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'),\n (0, 1, 'setpoint', 'Target temperature'),\n (0, 2, 'temperature', 'Temperature'),\n (0, 3, 'pump_power', 'Pump speed'),\n (0, 4, 'control_on', 'Temperature control'),\n (1, 0, 'lowlimit', 'Low limit'),\n (1, 1, 'highlimit', 'High limit'),\n (1, 2, 'cooling_on', 'Cooling'),\n (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error', 'External Pt100'), #\n (0, 1, 'internal_pt100_error', 'Internal Pt100'), #\n (0, 2, 'liquid_level_low_error', 'Liquid level'), #\n (0, 3, 'liquid_level_alarm_error', 'Liquid level alarm'), #\n (0, 4, 'cooling_error', 'Cooling system'), #\n (1, 0, 'pump_overload_error', 'Pump'), #\n (1, 1, 'external_alarm_error', 'External alarm'), #\n (1, 2, 'overtemperature_error', 'Overtemperature'), #\n (1, 3, 'main_relay_missing_error', 'Main relay'), #\n (1, 4, 'faultstatus', 'Status flags')]: #\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion', 'Firmware version'), #\n (0, 1, 'date', 'Date'), #\n (0, 2, 'time', 'Time'), #\n (0, 3, 'autostart', 'Autostart'), #\n (0, 4, 'beep', 'Beep'), #\n (1, 0, 'fuzzyid', 'Fuzzy identification'), #\n (1, 
1, 'fuzzycontrol', 'Fuzzy control'), #\n (1, 2, 'fuzzystatus', 'Fuzzy status'), #\n (1, 3, 'watchdog_on', 'Watchdog'), #\n (1, 4, 'watchdog_setpoint', 'Watchdog setpoint')]: #\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(\n dev.get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(\n dev.get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(\n dev.get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol', 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue), IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit', 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue, IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on', 'watchdog_on', 'beep', 'fuzzyid',\n 'fuzzystatus',\n 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(newvalue))],\n [IndicatorState.ERROR, IndicatorState.OK][int(bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue,\n [IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error', 'internal_pt100_error', 'liquid_level_low_error', 'cooling_error',\n 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(bool(newvalue))],\n 
[IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['liquid_level_alarm_error', 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(bool(newvalue))],\n [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(bool(newvalue))],\n [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue, IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'][int(bool(newvalue))],\n IndicatorState.NEUTRAL)\n\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0)\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid', state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint', spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit', spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit', spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now 
= datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now.date())\n self.instrument.get_device('haakephoenix').set_variable('time', now.time())\n",
"step-ids": [
8,
10,
13,
14,
15
]
}
|
[
8,
10,
13,
14,
15
] |
# -*- coding: utf-8 -*-
"""
A flask session memcached store
"""
from datetime import timedelta, datetime
from uuid import uuid4
__author__ = 'zou'
import memcache
import pickle
from flask.sessions import SessionMixin, SessionInterface
from werkzeug.datastructures import CallbackDict
class MemcachedSession(CallbackDict, SessionMixin):
    """Server-side session object stored in memcached.

    Behaves like a dict; any mutating operation flips ``modified`` to
    True through the CallbackDict update hook, so the session interface
    knows the session must be written back to memcached.
    """

    def __init__(self, initial=None, sid=None, new=False):
        def _mark_modified(instance):
            # Invoked by CallbackDict on every mutating dict operation.
            instance.modified = True

        super().__init__(initial, _mark_modified)
        self.sid = sid
        self.new = new
        self.modified = False
class MemcachedSessionInterface(SessionInterface):
    """Flask session interface that persists sessions in memcached.

    Sessions are pickled dicts keyed by ``prefix + sid``; the sid is sent
    to the browser as the session cookie value.  Additional helpers map a
    CAS ticket string to a session id so the session can be invalidated
    on single-sign-out.

    NOTE(review): session payloads are deserialised with ``pickle.loads``;
    anyone who can write to the memcached instance can execute arbitrary
    code — the cache must be trusted/private.
    """
    # Serialisation backend for session payloads (pickle of a plain dict).
    serializer = pickle
    # Session object class instantiated by open_session().
    session_class = MemcachedSession
    def generate_sid(self):
        """Return a fresh random session id (UUID4 as a string)."""
        return str(uuid4())
    def get_memcache_expiration_time(self, app, session):
        """Return the memcached TTL for *session* as a timedelta.

        Permanent sessions use the app's configured lifetime; all others
        fall back to a fixed 8-day TTL.
        """
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=8)
    def __init__(self, client=None, prefix="session:"):
        # A default memcache.Client() with no servers is created when no
        # client is injected; callers normally pass a configured client.
        if client is None:
            client = memcache.Client()
        self.client = client
        self.prefix = prefix  # namespace prefix for all session keys
    def open_session(self, app, request):
        """Load (or create) the session for an incoming request.

        The sid is taken from the ``sessionid`` query parameter first,
        then from the session cookie.
        NOTE(review): accepting the sid from the query string looks like
        a deliberate CAS-integration hook, but it permits session
        fixation via crafted URLs — confirm this is intended.
        """
        sid = request.args.get("sessionid", None) or request.cookies.get(app.session_cookie_name)
        if not sid:
            # No sid presented at all: start a brand-new (not-yet-stored) session.
            sid = self.generate_sid()
            return self.session_class(sid=sid)
        val = self.client.get(str(self.prefix + sid))
        if val is not None:
            data = self.serializer.loads(val)
            # Sliding expiration: re-store the raw payload to refresh the TTL.
            # NOTE(review): hardcodes 8 days instead of reusing
            # get_memcache_expiration_time(); permanent sessions are
            # therefore refreshed with the default TTL here.
            self.client.set(self.prefix + str(sid), val, int(timedelta(days=8).total_seconds()))
            return self.session_class(data, sid=sid)
        # sid presented but unknown/expired in memcached: issue a new sid
        # rather than trusting the client-supplied one.
        new_sid = self.generate_sid()
        return self.session_class(sid=new_sid, new=True)
    def save_session(self, app, session, response):
        """Persist the session to memcached and set the session cookie.

        An empty session is deleted from the store; its cookie is removed
        only when the session was actually modified this request.
        """
        domain = self.get_cookie_domain(app)
        if not session:
            self.client.delete(str(self.prefix + session.sid))
            if session.modified:
                response.delete_cookie(app.session_cookie_name, domain=domain)
            return
        memcache_exp = self.get_memcache_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        self.client.set(self.prefix + str(session.sid), val, int(memcache_exp.total_seconds()))
        # Cookie max_age is fixed at 7 days; note this is shorter than the
        # 8-day memcached TTL used for non-permanent sessions.
        response.set_cookie(app.session_cookie_name, session.sid, expires=cookie_exp, httponly=True, domain=domain, max_age= 7*24*60*60)
    def set_cas_ticket_to_session_mapping(self, app, session, ticket):
        """Record ticket -> sid in memcached so SSO logout can find the session."""
        memcache_exp = self.get_memcache_expiration_time(app, session)
        val = str(session.sid)
        # The ticket key is stored un-prefixed, unlike session keys.
        self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))
    def del_ticket_session_mapping(self, ticket):
        """Delete the session referenced by *ticket*, then the mapping itself.

        Used on CAS single-sign-out: the ticket resolves to a sid, whose
        session entry is removed before the ticket mapping is dropped.
        """
        session_sid = self.client.get(str(ticket))
        if session_sid:
            r = self.client.delete(self.prefix + str(session_sid))
            # if r == 1:
            #     print 'already delete session id= ' + session_sid
            r = self.client.delete(str(ticket))
            # if r == 1:
            #     print 'already delete ticket = ' + ticket
|
normal
|
{
"blob_id": "e4761c925643417f4fe906e8dd2c9356ae970d52",
"index": 3706,
"step-1": "<mask token>\n\n\nclass MemcachedSessionInterface(SessionInterface):\n <mask token>\n <mask token>\n\n def generate_sid(self):\n return str(uuid4())\n\n def get_memcache_expiration_time(self, app, session):\n if session.permanent:\n return app.permanent_session_lifetime\n return timedelta(days=8)\n\n def __init__(self, client=None, prefix='session:'):\n if client is None:\n client = memcache.Client()\n self.client = client\n self.prefix = prefix\n\n def open_session(self, app, request):\n sid = request.args.get('sessionid', None) or request.cookies.get(app\n .session_cookie_name)\n if not sid:\n sid = self.generate_sid()\n return self.session_class(sid=sid)\n val = self.client.get(str(self.prefix + sid))\n if val is not None:\n data = self.serializer.loads(val)\n self.client.set(self.prefix + str(sid), val, int(timedelta(days\n =8).total_seconds()))\n return self.session_class(data, sid=sid)\n new_sid = self.generate_sid()\n return self.session_class(sid=new_sid, new=True)\n\n def save_session(self, app, session, response):\n domain = self.get_cookie_domain(app)\n if not session:\n self.client.delete(str(self.prefix + session.sid))\n if session.modified:\n response.delete_cookie(app.session_cookie_name, domain=domain)\n return\n memcache_exp = self.get_memcache_expiration_time(app, session)\n cookie_exp = self.get_expiration_time(app, session)\n val = self.serializer.dumps(dict(session))\n self.client.set(self.prefix + str(session.sid), val, int(\n memcache_exp.total_seconds()))\n response.set_cookie(app.session_cookie_name, session.sid, expires=\n cookie_exp, httponly=True, domain=domain, max_age=7 * 24 * 60 * 60)\n\n def set_cas_ticket_to_session_mapping(self, app, session, ticket):\n memcache_exp = self.get_memcache_expiration_time(app, session)\n val = str(session.sid)\n self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))\n\n def del_ticket_session_mapping(self, ticket):\n session_sid = self.client.get(str(ticket))\n if 
session_sid:\n r = self.client.delete(self.prefix + str(session_sid))\n r = self.client.delete(str(ticket))\n",
"step-2": "<mask token>\n\n\nclass MemcachedSession(CallbackDict, SessionMixin):\n <mask token>\n <mask token>\n\n\nclass MemcachedSessionInterface(SessionInterface):\n serializer = pickle\n session_class = MemcachedSession\n\n def generate_sid(self):\n return str(uuid4())\n\n def get_memcache_expiration_time(self, app, session):\n if session.permanent:\n return app.permanent_session_lifetime\n return timedelta(days=8)\n\n def __init__(self, client=None, prefix='session:'):\n if client is None:\n client = memcache.Client()\n self.client = client\n self.prefix = prefix\n\n def open_session(self, app, request):\n sid = request.args.get('sessionid', None) or request.cookies.get(app\n .session_cookie_name)\n if not sid:\n sid = self.generate_sid()\n return self.session_class(sid=sid)\n val = self.client.get(str(self.prefix + sid))\n if val is not None:\n data = self.serializer.loads(val)\n self.client.set(self.prefix + str(sid), val, int(timedelta(days\n =8).total_seconds()))\n return self.session_class(data, sid=sid)\n new_sid = self.generate_sid()\n return self.session_class(sid=new_sid, new=True)\n\n def save_session(self, app, session, response):\n domain = self.get_cookie_domain(app)\n if not session:\n self.client.delete(str(self.prefix + session.sid))\n if session.modified:\n response.delete_cookie(app.session_cookie_name, domain=domain)\n return\n memcache_exp = self.get_memcache_expiration_time(app, session)\n cookie_exp = self.get_expiration_time(app, session)\n val = self.serializer.dumps(dict(session))\n self.client.set(self.prefix + str(session.sid), val, int(\n memcache_exp.total_seconds()))\n response.set_cookie(app.session_cookie_name, session.sid, expires=\n cookie_exp, httponly=True, domain=domain, max_age=7 * 24 * 60 * 60)\n\n def set_cas_ticket_to_session_mapping(self, app, session, ticket):\n memcache_exp = self.get_memcache_expiration_time(app, session)\n val = str(session.sid)\n self.client.set(str(ticket), val, 
int(memcache_exp.total_seconds()))\n\n def del_ticket_session_mapping(self, ticket):\n session_sid = self.client.get(str(ticket))\n if session_sid:\n r = self.client.delete(self.prefix + str(session_sid))\n r = self.client.delete(str(ticket))\n",
"step-3": "<mask token>\n\n\nclass MemcachedSession(CallbackDict, SessionMixin):\n \"\"\"\"\"\"\n\n def __init__(self, initial=None, sid=None, new=False):\n\n def on_update(self):\n self.modified = True\n CallbackDict.__init__(self, initial, on_update)\n self.sid = sid\n self.new = new\n self.modified = False\n\n\nclass MemcachedSessionInterface(SessionInterface):\n serializer = pickle\n session_class = MemcachedSession\n\n def generate_sid(self):\n return str(uuid4())\n\n def get_memcache_expiration_time(self, app, session):\n if session.permanent:\n return app.permanent_session_lifetime\n return timedelta(days=8)\n\n def __init__(self, client=None, prefix='session:'):\n if client is None:\n client = memcache.Client()\n self.client = client\n self.prefix = prefix\n\n def open_session(self, app, request):\n sid = request.args.get('sessionid', None) or request.cookies.get(app\n .session_cookie_name)\n if not sid:\n sid = self.generate_sid()\n return self.session_class(sid=sid)\n val = self.client.get(str(self.prefix + sid))\n if val is not None:\n data = self.serializer.loads(val)\n self.client.set(self.prefix + str(sid), val, int(timedelta(days\n =8).total_seconds()))\n return self.session_class(data, sid=sid)\n new_sid = self.generate_sid()\n return self.session_class(sid=new_sid, new=True)\n\n def save_session(self, app, session, response):\n domain = self.get_cookie_domain(app)\n if not session:\n self.client.delete(str(self.prefix + session.sid))\n if session.modified:\n response.delete_cookie(app.session_cookie_name, domain=domain)\n return\n memcache_exp = self.get_memcache_expiration_time(app, session)\n cookie_exp = self.get_expiration_time(app, session)\n val = self.serializer.dumps(dict(session))\n self.client.set(self.prefix + str(session.sid), val, int(\n memcache_exp.total_seconds()))\n response.set_cookie(app.session_cookie_name, session.sid, expires=\n cookie_exp, httponly=True, domain=domain, max_age=7 * 24 * 60 * 60)\n\n def 
set_cas_ticket_to_session_mapping(self, app, session, ticket):\n memcache_exp = self.get_memcache_expiration_time(app, session)\n val = str(session.sid)\n self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))\n\n def del_ticket_session_mapping(self, ticket):\n session_sid = self.client.get(str(ticket))\n if session_sid:\n r = self.client.delete(self.prefix + str(session_sid))\n r = self.client.delete(str(ticket))\n",
"step-4": "<mask token>\nfrom datetime import timedelta, datetime\nfrom uuid import uuid4\n__author__ = 'zou'\nimport memcache\nimport pickle\nfrom flask.sessions import SessionMixin, SessionInterface\nfrom werkzeug.datastructures import CallbackDict\n\n\nclass MemcachedSession(CallbackDict, SessionMixin):\n \"\"\"\"\"\"\n\n def __init__(self, initial=None, sid=None, new=False):\n\n def on_update(self):\n self.modified = True\n CallbackDict.__init__(self, initial, on_update)\n self.sid = sid\n self.new = new\n self.modified = False\n\n\nclass MemcachedSessionInterface(SessionInterface):\n serializer = pickle\n session_class = MemcachedSession\n\n def generate_sid(self):\n return str(uuid4())\n\n def get_memcache_expiration_time(self, app, session):\n if session.permanent:\n return app.permanent_session_lifetime\n return timedelta(days=8)\n\n def __init__(self, client=None, prefix='session:'):\n if client is None:\n client = memcache.Client()\n self.client = client\n self.prefix = prefix\n\n def open_session(self, app, request):\n sid = request.args.get('sessionid', None) or request.cookies.get(app\n .session_cookie_name)\n if not sid:\n sid = self.generate_sid()\n return self.session_class(sid=sid)\n val = self.client.get(str(self.prefix + sid))\n if val is not None:\n data = self.serializer.loads(val)\n self.client.set(self.prefix + str(sid), val, int(timedelta(days\n =8).total_seconds()))\n return self.session_class(data, sid=sid)\n new_sid = self.generate_sid()\n return self.session_class(sid=new_sid, new=True)\n\n def save_session(self, app, session, response):\n domain = self.get_cookie_domain(app)\n if not session:\n self.client.delete(str(self.prefix + session.sid))\n if session.modified:\n response.delete_cookie(app.session_cookie_name, domain=domain)\n return\n memcache_exp = self.get_memcache_expiration_time(app, session)\n cookie_exp = self.get_expiration_time(app, session)\n val = self.serializer.dumps(dict(session))\n self.client.set(self.prefix + 
str(session.sid), val, int(\n memcache_exp.total_seconds()))\n response.set_cookie(app.session_cookie_name, session.sid, expires=\n cookie_exp, httponly=True, domain=domain, max_age=7 * 24 * 60 * 60)\n\n def set_cas_ticket_to_session_mapping(self, app, session, ticket):\n memcache_exp = self.get_memcache_expiration_time(app, session)\n val = str(session.sid)\n self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))\n\n def del_ticket_session_mapping(self, ticket):\n session_sid = self.client.get(str(ticket))\n if session_sid:\n r = self.client.delete(self.prefix + str(session_sid))\n r = self.client.delete(str(ticket))\n",
"step-5": "# encoding = utf-8\n\"\"\"\nA flask session memcached store\n\"\"\"\nfrom datetime import timedelta, datetime\nfrom uuid import uuid4\n\n__author__ = 'zou'\nimport memcache\nimport pickle\nfrom flask.sessions import SessionMixin, SessionInterface\nfrom werkzeug.datastructures import CallbackDict\n\n\nclass MemcachedSession(CallbackDict, SessionMixin):\n \"\"\"\"\"\"\n def __init__(self, initial=None, sid=None, new=False):\n def on_update(self):\n self.modified = True\n CallbackDict.__init__(self, initial, on_update)\n self.sid = sid\n self.new = new\n self.modified = False\n\n\nclass MemcachedSessionInterface(SessionInterface):\n serializer = pickle\n session_class = MemcachedSession\n\n def generate_sid(self):\n return str(uuid4())\n\n def get_memcache_expiration_time(self, app, session):\n if session.permanent:\n return app.permanent_session_lifetime\n return timedelta(days=8)\n\n def __init__(self, client=None, prefix=\"session:\"):\n if client is None:\n client = memcache.Client()\n self.client = client\n self.prefix = prefix\n\n def open_session(self, app, request):\n sid = request.args.get(\"sessionid\", None) or request.cookies.get(app.session_cookie_name)\n if not sid:\n sid = self.generate_sid()\n return self.session_class(sid=sid)\n val = self.client.get(str(self.prefix + sid))\n if val is not None:\n data = self.serializer.loads(val)\n self.client.set(self.prefix + str(sid), val, int(timedelta(days=8).total_seconds()))\n return self.session_class(data, sid=sid)\n new_sid = self.generate_sid()\n return self.session_class(sid=new_sid, new=True)\n\n def save_session(self, app, session, response):\n domain = self.get_cookie_domain(app)\n if not session:\n self.client.delete(str(self.prefix + session.sid))\n if session.modified:\n response.delete_cookie(app.session_cookie_name, domain=domain)\n return\n memcache_exp = self.get_memcache_expiration_time(app, session)\n cookie_exp = self.get_expiration_time(app, session)\n val = 
self.serializer.dumps(dict(session))\n self.client.set(self.prefix + str(session.sid), val, int(memcache_exp.total_seconds()))\n response.set_cookie(app.session_cookie_name, session.sid, expires=cookie_exp, httponly=True, domain=domain, max_age= 7*24*60*60)\n\n def set_cas_ticket_to_session_mapping(self, app, session, ticket):\n memcache_exp = self.get_memcache_expiration_time(app, session)\n val = str(session.sid)\n self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))\n\n def del_ticket_session_mapping(self, ticket):\n session_sid = self.client.get(str(ticket))\n if session_sid:\n r = self.client.delete(self.prefix + str(session_sid))\n# if r == 1:\n# print 'already delete session id= ' + session_sid\n r = self.client.delete(str(ticket))\n# if r == 1:\n# print 'already delete ticket = ' + ticket\n",
"step-ids": [
8,
10,
12,
14,
15
]
}
|
[
8,
10,
12,
14,
15
] |
def calc_fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Iterative with two rolling values: O(n) time and O(1) space,
    whereas the original dict-based memo retained every intermediate
    term for no benefit.
    """
    prev, curr = 0, 1
    for _ in range(n):
        prev, curr = curr, prev + curr
    return prev
# Script entry point: read one integer n from stdin, print fib(n).
n = int(input())
print(calc_fib(n))
|
normal
|
{
"blob_id": "426b711571d3b5c4f8c7b0bad3a613951902e60b",
"index": 4129,
"step-1": "<mask token>\n",
"step-2": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\n<mask token>\n",
"step-3": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\n<mask token>\nprint(calc_fib(n))\n",
"step-4": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\nn = int(input())\nprint(calc_fib(n))\n",
"step-5": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n+1):\n if (i <= 1):\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i-2] + fib_lis[i-1]\n return fib_lis[n]\nn = int(input())\nprint(calc_fib(n))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os, sys, datetime, csv, platform
####FUNCTIONS####
#Get Last Modification Time
def get_lastupdate_date(path):
    """Return the last-modification time of *path* as a POSIX timestamp."""
    return os.stat(path).st_mtime
#Get Date From String
def convertIntToTimestamp(timeint):
    """Render the POSIX timestamp *timeint* as a local-time datetime string."""
    moment = datetime.datetime.fromtimestamp(timeint)
    return f"{moment}"
#Get Filename
def getFilename(name):
    """Return the final path component of *name*."""
    _, tail = os.path.split(name)
    return tail
# Get File Creation Time
def creation_date(path):
    """Best-effort creation time of *path* as a POSIX timestamp.

    Windows exposes creation time via getctime; platforms with a birth
    time (e.g. macOS) expose ``st_birthtime``. Where neither applies
    (typically Linux) the last-modification time is returned instead.
    See http://stackoverflow.com/a/39501288/1709587 for explanation.
    """
    if platform.system() == 'Windows':
        return os.path.getctime(path)
    info = os.stat(path)
    # getattr-with-default is equivalent to catching AttributeError on
    # stat results that lack st_birthtime.
    return getattr(info, 'st_birthtime', info.st_mtime)
#Print List
def print_list(x):
    """Print each element of *x* on its own line and return *x* unchanged.

    Idiom fix: iterate the sequence directly instead of the original
    ``for i in range(0, len(x))`` index loop.
    """
    for item in x:
        print(item)
    return x
#Listing Files
def fileList(source, filetype='.als'):
    """Recursively collect paths under *source* whose names end with *filetype*."""
    return [
        os.path.join(top, name)
        for top, _dirs, names in os.walk(source)
        for name in names
        if name.endswith(filetype)
    ]
def mylistdir(directory):
    """List *directory* like os.listdir(), skipping dot-prefixed entries."""
    visible = []
    for entry in os.listdir(directory):
        if not entry.startswith('.'):
            visible.append(entry)
    return visible
def collectElements(dir):
    """Append the '.als' entries found in *dir* to the module-level
    ``thefiles`` accumulator and return it.

    Bug fix: the original nested loop did ``for filename in directory``
    where each *directory* is a string, which iterates its characters
    one by one -- no single character can end with ".als", so nothing
    was ever collected. Each element of *dir* is now treated as one
    entry name.
    """
    # NOTE(review): assumes *dir* is a flat listing of entry names (as
    # produced by mylistdir); confirm callers never pass nested lists.
    for entry in dir:
        if entry.endswith(".als"):
            thefiles.append(entry)
    return thefiles
## INPUTDIRECTORIES
subpath = []
subdirs = []
thefiles = []
thelist = []

## Examples of Directories
#/Users/blakenicholson/Documents/Personal/Projects/Music Production/Ableton Projects
#/Volumes/Samsung_T3/Old Ableton Projects/1.RELEASED/Neuromansah - DumbBlake Project

filePath = r"/Users/blakenicholson/Dropbox/Ableton Projects"
#filePath = raw_input('File path would you like to use: ')
dirs = mylistdir(filePath)
print(dirs)

print(collectElements(dirs))

# Write one "name, updated, created, path" line per .als file.
# Bug fix: the original ended with a bare ``file.close`` (attribute
# access, never called), so the handle was never closed; it also
# shadowed the builtin name ``file``. The ``with`` block guarantees
# the file is closed.
with open("testtext.txt", "w+") as report:
    for item in fileList(filePath):
        report.write(
            os.path.basename(item) + ", "
            + convertIntToTimestamp(get_lastupdate_date(item)) + ", "
            + convertIntToTimestamp(creation_date(item)) + ", "
            + os.path.abspath(item) + "\n"
        )

# Same listing as CSV with a header row. newline='' is required for
# files handed to csv.writer (avoids blank rows on Windows).
with open('testcsv.csv', 'w+', newline='') as fp:
    a = csv.writer(fp, delimiter=',')
    a.writerow(['File Name', 'Updated Date', 'Created Date', 'Path'])
    for item in fileList(filePath):
        a.writerow([
            os.path.basename(item),
            convertIntToTimestamp(get_lastupdate_date(item)),
            convertIntToTimestamp(creation_date(item)),
            os.path.abspath(item),
        ])
|
normal
|
{
"blob_id": "e83b6b1f4cb12fe3b932903eddddfb0dc0e7d98d",
"index": 2765,
"step-1": "<mask token>\n\n\ndef get_lastupdate_date(path):\n return os.path.getmtime(path)\n\n\ndef convertIntToTimestamp(timeint):\n return str(datetime.datetime.fromtimestamp(timeint))\n\n\ndef getFilename(name):\n return os.path.basename(name)\n\n\ndef creation_date(path):\n \"\"\"\n Try to get the date that a file was created, falling back to when it was\n last modified if that isn't possible.\n See http://stackoverflow.com/a/39501288/1709587 for explanation.\n \"\"\"\n if platform.system() == 'Windows':\n return os.path.getctime(path)\n else:\n stat = os.stat(path)\n try:\n return stat.st_birthtime\n except AttributeError:\n return stat.st_mtime\n\n\n<mask token>\n\n\ndef mylistdir(directory):\n \"\"\"A specialized version of os.listdir() that ignores files that\n start with a leading period.\"\"\"\n filelist = os.listdir(directory)\n return [x for x in filelist if not x.startswith('.')]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_lastupdate_date(path):\n return os.path.getmtime(path)\n\n\ndef convertIntToTimestamp(timeint):\n return str(datetime.datetime.fromtimestamp(timeint))\n\n\ndef getFilename(name):\n return os.path.basename(name)\n\n\ndef creation_date(path):\n \"\"\"\n Try to get the date that a file was created, falling back to when it was\n last modified if that isn't possible.\n See http://stackoverflow.com/a/39501288/1709587 for explanation.\n \"\"\"\n if platform.system() == 'Windows':\n return os.path.getctime(path)\n else:\n stat = os.stat(path)\n try:\n return stat.st_birthtime\n except AttributeError:\n return stat.st_mtime\n\n\ndef print_list(x):\n for i in range(0, len(x)):\n print(x[i])\n return x\n\n\ndef fileList(source, filetype='.als'):\n matches = []\n for root, dirnames, filenames in os.walk(source):\n for filename in filenames:\n if filename.endswith(filetype):\n matches.append(os.path.join(root, filename))\n return matches\n\n\ndef mylistdir(directory):\n \"\"\"A specialized version of os.listdir() that ignores files that\n start with a leading period.\"\"\"\n filelist = os.listdir(directory)\n return [x for x in filelist if not x.startswith('.')]\n\n\ndef collectElements(dir):\n for directory in dir:\n for filename in directory:\n if filename.endswith('.als'):\n thefiles.append(filename)\n return thefiles\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_lastupdate_date(path):\n return os.path.getmtime(path)\n\n\ndef convertIntToTimestamp(timeint):\n return str(datetime.datetime.fromtimestamp(timeint))\n\n\ndef getFilename(name):\n return os.path.basename(name)\n\n\ndef creation_date(path):\n \"\"\"\n Try to get the date that a file was created, falling back to when it was\n last modified if that isn't possible.\n See http://stackoverflow.com/a/39501288/1709587 for explanation.\n \"\"\"\n if platform.system() == 'Windows':\n return os.path.getctime(path)\n else:\n stat = os.stat(path)\n try:\n return stat.st_birthtime\n except AttributeError:\n return stat.st_mtime\n\n\ndef print_list(x):\n for i in range(0, len(x)):\n print(x[i])\n return x\n\n\ndef fileList(source, filetype='.als'):\n matches = []\n for root, dirnames, filenames in os.walk(source):\n for filename in filenames:\n if filename.endswith(filetype):\n matches.append(os.path.join(root, filename))\n return matches\n\n\ndef mylistdir(directory):\n \"\"\"A specialized version of os.listdir() that ignores files that\n start with a leading period.\"\"\"\n filelist = os.listdir(directory)\n return [x for x in filelist if not x.startswith('.')]\n\n\ndef collectElements(dir):\n for directory in dir:\n for filename in directory:\n if filename.endswith('.als'):\n thefiles.append(filename)\n return thefiles\n\n\n<mask token>\nprint(dirs)\nprint(collectElements(dirs))\n<mask token>\nfor item in fileList(filePath):\n file.write(os.path.basename(item) + ', ' + convertIntToTimestamp(\n get_lastupdate_date(item)) + ', ' + convertIntToTimestamp(\n creation_date(item)) + ', ' + os.path.abspath(item) + '\\n')\nfile.close\nwith open('testcsv.csv', 'w+') as fp:\n a = csv.writer(fp, delimiter=',')\n a.writerow(['File Name', 'Updated Date', 'Created Date', 'Path'])\n for item in fileList(filePath):\n a.writerow([os.path.basename(item), convertIntToTimestamp(\n get_lastupdate_date(item)), convertIntToTimestamp(creation_date\n (item)), 
os.path.abspath(item)])\n",
"step-4": "import os, sys, datetime, csv, platform\n\n\ndef get_lastupdate_date(path):\n return os.path.getmtime(path)\n\n\ndef convertIntToTimestamp(timeint):\n return str(datetime.datetime.fromtimestamp(timeint))\n\n\ndef getFilename(name):\n return os.path.basename(name)\n\n\ndef creation_date(path):\n \"\"\"\n Try to get the date that a file was created, falling back to when it was\n last modified if that isn't possible.\n See http://stackoverflow.com/a/39501288/1709587 for explanation.\n \"\"\"\n if platform.system() == 'Windows':\n return os.path.getctime(path)\n else:\n stat = os.stat(path)\n try:\n return stat.st_birthtime\n except AttributeError:\n return stat.st_mtime\n\n\ndef print_list(x):\n for i in range(0, len(x)):\n print(x[i])\n return x\n\n\ndef fileList(source, filetype='.als'):\n matches = []\n for root, dirnames, filenames in os.walk(source):\n for filename in filenames:\n if filename.endswith(filetype):\n matches.append(os.path.join(root, filename))\n return matches\n\n\ndef mylistdir(directory):\n \"\"\"A specialized version of os.listdir() that ignores files that\n start with a leading period.\"\"\"\n filelist = os.listdir(directory)\n return [x for x in filelist if not x.startswith('.')]\n\n\ndef collectElements(dir):\n for directory in dir:\n for filename in directory:\n if filename.endswith('.als'):\n thefiles.append(filename)\n return thefiles\n\n\nsubpath = []\nsubdirs = []\nthefiles = []\nthelist = []\nfilePath = '/Users/blakenicholson/Dropbox/Ableton Projects'\ndirs = mylistdir(filePath)\nprint(dirs)\nprint(collectElements(dirs))\nfile = open('testtext.txt', 'w+')\nfor item in fileList(filePath):\n file.write(os.path.basename(item) + ', ' + convertIntToTimestamp(\n get_lastupdate_date(item)) + ', ' + convertIntToTimestamp(\n creation_date(item)) + ', ' + os.path.abspath(item) + '\\n')\nfile.close\nwith open('testcsv.csv', 'w+') as fp:\n a = csv.writer(fp, delimiter=',')\n a.writerow(['File Name', 'Updated Date', 'Created Date', 
'Path'])\n for item in fileList(filePath):\n a.writerow([os.path.basename(item), convertIntToTimestamp(\n get_lastupdate_date(item)), convertIntToTimestamp(creation_date\n (item)), os.path.abspath(item)])\n",
"step-5": "import os, sys, datetime, csv, platform\n\n####FUNCTIONS####\n\n#Get Creation Time\ndef get_lastupdate_date(path):\n return os.path.getmtime(path)\n \n#Get Date From String\ndef convertIntToTimestamp(timeint):\n return str(datetime.datetime.fromtimestamp(timeint))\n\n#Get Filename\ndef getFilename(name):\n return os.path.basename(name)\n\n# Get File Creation Time\ndef creation_date(path):\n \"\"\"\n Try to get the date that a file was created, falling back to when it was\n last modified if that isn't possible.\n See http://stackoverflow.com/a/39501288/1709587 for explanation.\n \"\"\"\n if platform.system() == 'Windows':\n return os.path.getctime(path)\n else:\n stat = os.stat(path)\n try:\n return stat.st_birthtime\n except AttributeError:\n # We're probably on Linux. No easy way to get creation dates here,\n # so we'll settle for when its content was last modified.\n return stat.st_mtime\n\n#Print List\ndef print_list(x):\n\tfor i in range(0,len(x)):\n\t\tprint(x[i])\n\treturn x\n\n#Listing Files\ndef fileList(source, filetype='.als'):\n matches = []\n for root, dirnames, filenames in os.walk(source):\n for filename in filenames:\n if filename.endswith((filetype)):\n matches.append(os.path.join(root, filename))\n return matches\n\t\ndef mylistdir(directory):\n \"\"\"A specialized version of os.listdir() that ignores files that\n start with a leading period.\"\"\"\n filelist = os.listdir(directory)\n return [x for x in filelist\n if not (x.startswith('.'))]\n\ndef collectElements(dir):\n ## collecting elements into a list\n for directory in dir:\n for filename in directory:\n if filename.endswith(\".als\"):\n thefiles.append(filename) \n return thefiles\n\n\n## INPUTDIRECTORIES\nsubpath = []\nsubdirs = []\nthefiles = []\nthelist = []\n\n## Examples of Directories\n#/Users/blakenicholson/Documents/Personal/Projects/Music Production/Ableton Projects\n#/Volumes/Samsung_T3/Old Ableton Projects/1.RELEASED/Neuromansah - DumbBlake Project\n\nfilePath = 
r\"/Users/blakenicholson/Dropbox/Ableton Projects\"\n#filePath = raw_input('File path would you like to use: ')\ndirs = mylistdir(filePath)\nprint(dirs)\n\n\nprint(collectElements(dirs))\n\n#Writes contents of filePath to a txt file\nfile = open(\"testtext.txt\",\"w+\")\nfor item in fileList(filePath):\n file.write(os.path.basename(item) +\", \"+convertIntToTimestamp(get_lastupdate_date(item))+\", \"+convertIntToTimestamp(creation_date(item))+\", \"+os.path.abspath(item)+\"\\n\") \nfile.close\n\n#convert txt -> csv\nwith open('testcsv.csv', 'w+') as fp:\n a = csv.writer(fp, delimiter=',')\n a.writerow(['File Name','Updated Date','Created Date','Path'])\n for item in fileList(filePath):\n a.writerow([ os.path.basename(item) , convertIntToTimestamp(get_lastupdate_date(item)), convertIntToTimestamp(creation_date(item)), os.path.abspath(item)])\n ",
"step-ids": [
5,
8,
9,
11,
12
]
}
|
[
5,
8,
9,
11,
12
] |
<|reserved_special_token_0|>
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
def put(self, id):
if repo.baja(id):
repoLep.baja_by_linea(id)
lineaeqplan = repoLep.buscar_by_linea(id)
repoEquipo.baja(lineaeqplan.equipo_id)
repoLepCliente.bajalep(lineaeqplan.id)
return 'Linea dada de baja', 200
abort(400)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@nsLinea.route('/')
class LineasResource(Resource):
@nsLinea.marshal_list_with(modeloLinea)
def get(self):
return repo.get_all()
<|reserved_special_token_0|>
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
@nsLinea.marshal_with(modeloLinea)
def get(self, id):
f = repo.get_by_numero(id)
if f:
return f, 200
abort(404)
@nsLinea.expect(modeloLinea)
def put(self, numero):
data = editarLineaParser.parse_args()
if repo.modificar(numero, data):
return 'Linea modificada', 200
abort(404)
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
def put(self, id):
if repo.baja(id):
repoLep.baja_by_linea(id)
lineaeqplan = repoLep.buscar_by_linea(id)
repoEquipo.baja(lineaeqplan.equipo_id)
repoLepCliente.bajalep(lineaeqplan.id)
return 'Linea dada de baja', 200
abort(400)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@nsLinea.route('/')
class LineasResource(Resource):
@nsLinea.marshal_list_with(modeloLinea)
def get(self):
return repo.get_all()
@nsLinea.expect(modeloLineaSinN)
@nsLinea.marshal_with(modeloLinea)
def post(self):
data = nuevaLineaParser.parse_args()
if data.estado == 'Activada':
data.activa = True
else:
data.activa = False
f = repo.agregar(data)
if f:
return f, 201
abort(500)
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
@nsLinea.marshal_with(modeloLinea)
def get(self, id):
f = repo.get_by_numero(id)
if f:
return f, 200
abort(404)
@nsLinea.expect(modeloLinea)
def put(self, numero):
data = editarLineaParser.parse_args()
if repo.modificar(numero, data):
return 'Linea modificada', 200
abort(404)
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
def put(self, id):
if repo.baja(id):
repoLep.baja_by_linea(id)
lineaeqplan = repoLep.buscar_by_linea(id)
repoEquipo.baja(lineaeqplan.equipo_id)
repoLepCliente.bajalep(lineaeqplan.id)
return 'Linea dada de baja', 200
abort(400)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
nuevaLineaParser.add_argument('numero', type=str, required=True)
nuevaLineaParser.add_argument('estado', type=str, required=True)
nuevaLineaParser.add_argument('activa', type=bool, required=False)
<|reserved_special_token_0|>
editarLineaParser.add_argument('id', type=int, required=True)
<|reserved_special_token_0|>
buscarLineasParser.add_argument('desde', type=str, required=True)
buscarLineasParser.add_argument('hasta', type=str, required=True)
@nsLinea.route('/')
class LineasResource(Resource):
@nsLinea.marshal_list_with(modeloLinea)
def get(self):
return repo.get_all()
@nsLinea.expect(modeloLineaSinN)
@nsLinea.marshal_with(modeloLinea)
def post(self):
data = nuevaLineaParser.parse_args()
if data.estado == 'Activada':
data.activa = True
else:
data.activa = False
f = repo.agregar(data)
if f:
return f, 201
abort(500)
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
@nsLinea.marshal_with(modeloLinea)
def get(self, id):
f = repo.get_by_numero(id)
if f:
return f, 200
abort(404)
@nsLinea.expect(modeloLinea)
def put(self, numero):
data = editarLineaParser.parse_args()
if repo.modificar(numero, data):
return 'Linea modificada', 200
abort(404)
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
def put(self, id):
if repo.baja(id):
repoLep.baja_by_linea(id)
lineaeqplan = repoLep.buscar_by_linea(id)
repoEquipo.baja(lineaeqplan.equipo_id)
repoLepCliente.bajalep(lineaeqplan.id)
return 'Linea dada de baja', 200
abort(400)
<|reserved_special_token_1|>
from flask import abort
from flask_restx import Resource, Namespace, Model, fields, reqparse
from infraestructura.lineas_repo import LineasRepo
from infraestructura.equipos_repo import EquiposRepo
from infraestructura.clientes_lep_repo import ClientesLepRepo
from infraestructura.lineaequipoplan_repo import LineaEquipoPlanRepo
repoLep= LineaEquipoPlanRepo()
repoLepCliente = ClientesLepRepo()
repo = LineasRepo()
repoEquipo = EquiposRepo()
nsLinea = Namespace('lineas', description='Administrador de lineas')
modeloLineaSinN = Model('LineaSinNumero',{
'numero': fields.String(),
'estado': fields.String(),
'activa': fields.Boolean()
})
modeloLinea = modeloLineaSinN.clone('Linea', {
'id': fields.Integer()
})
modeloBusqueda = Model('BusquedaFechas', {
'desde': fields.Date(),
'hasta': fields.Date()
})
nsLinea.models[modeloLinea.name] = modeloLinea
nsLinea.models[modeloLineaSinN.name] = modeloLineaSinN
nsLinea.models[modeloBusqueda.name] = modeloBusqueda
nuevaLineaParser = reqparse.RequestParser(bundle_errors=True)
nuevaLineaParser.add_argument('numero', type=str, required=True)
nuevaLineaParser.add_argument('estado', type=str, required=True)
##PEDRO LOOK AT THIS
##PEDRO LOOK AT THIS
nuevaLineaParser.add_argument('activa', type=bool, required=False)
editarLineaParser = nuevaLineaParser.copy()
editarLineaParser.add_argument('id', type=int, required=True)
buscarLineasParser = reqparse.RequestParser(bundle_errors=True)
buscarLineasParser.add_argument('desde', type=str, required=True)
buscarLineasParser.add_argument('hasta', type=str, required=True)
@nsLinea.route('/')
class LineasResource(Resource):
    """Collection endpoint: list all lines or register a new one."""

    @nsLinea.marshal_list_with(modeloLinea)
    def get(self):
        """Return every known line."""
        return repo.get_all()

    @nsLinea.expect(modeloLineaSinN)
    @nsLinea.marshal_with(modeloLinea)
    def post(self):
        """Create a line; an 'estado' of 'Activada' maps to activa=True."""
        data = nuevaLineaParser.parse_args()
        # Derive the boolean flag directly from the textual state.
        data.activa = data.estado == "Activada"
        creada = repo.agregar(data)
        if creada:
            return creada, 201
        abort(500)
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
    """Single-line endpoint: fetch or edit one line by id."""

    @nsLinea.marshal_with(modeloLinea)
    def get(self, id):
        """Return the line matching *id*, or 404 if unknown."""
        f = repo.get_by_numero(id)
        if f:
            return f, 200
        abort(404)

    @nsLinea.expect(modeloLinea)
    def put(self, id):
        """Update line *id* with the parsed request body.

        Bug fix: the URL rule captures ``<int:id>`` and Flask passes it
        to the view as the keyword argument ``id``; the original
        signature named the parameter ``numero``, so every PUT raised a
        TypeError before the handler body ran.
        """
        data = editarLineaParser.parse_args()
        if repo.modificar(id, data):
            return 'Linea modificada', 200
        abort(404)
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
    """Deactivation endpoint: soft-deletes a line and its related records."""

    def put(self, id):
        """Soft-delete ('baja') line *id* plus its equipment/plan links."""
        if repo.baja(id):
            # deactivate the line/equipment/plan link row
            repoLep.baja_by_linea(id)
            # fetch the link row: needed both to deactivate the equipment
            # and to obtain the id used by the cliente_lep table
            lineaeqplan = repoLep.buscar_by_linea(id)
            # deactivate the equipment
            repoEquipo.baja(lineaeqplan.equipo_id)
            # deactivate the cliente_lep row
            repoLepCliente.bajalep(lineaeqplan.id)
            return 'Linea dada de baja', 200
        abort(400)
|
flexible
|
{
"blob_id": "821e89730fde2e12b24b52b04701c1f3501e0d57",
"index": 8771,
"step-1": "<mask token>\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n <mask token>\n <mask token>\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-2": "<mask token>\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n <mask token>\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-3": "<mask token>\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n if data.estado == 'Activada':\n data.activa = True\n else:\n data.activa = False\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-4": "<mask token>\nnuevaLineaParser.add_argument('numero', type=str, required=True)\nnuevaLineaParser.add_argument('estado', type=str, required=True)\nnuevaLineaParser.add_argument('activa', type=bool, required=False)\n<mask token>\neditarLineaParser.add_argument('id', type=int, required=True)\n<mask token>\nbuscarLineasParser.add_argument('desde', type=str, required=True)\nbuscarLineasParser.add_argument('hasta', type=str, required=True)\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n if data.estado == 'Activada':\n data.activa = True\n else:\n data.activa = False\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-5": "from flask import abort\nfrom flask_restx import Resource, Namespace, Model, fields, reqparse\nfrom infraestructura.lineas_repo import LineasRepo\nfrom infraestructura.equipos_repo import EquiposRepo\nfrom infraestructura.clientes_lep_repo import ClientesLepRepo\nfrom infraestructura.lineaequipoplan_repo import LineaEquipoPlanRepo\nrepoLep= LineaEquipoPlanRepo()\nrepoLepCliente = ClientesLepRepo()\nrepo = LineasRepo()\nrepoEquipo = EquiposRepo()\nnsLinea = Namespace('lineas', description='Administrador de lineas')\nmodeloLineaSinN = Model('LineaSinNumero',{\n 'numero': fields.String(),\n 'estado': fields.String(),\n 'activa': fields.Boolean()\n})\n\nmodeloLinea = modeloLineaSinN.clone('Linea', {\n 'id': fields.Integer()\n})\n\nmodeloBusqueda = Model('BusquedaFechas', {\n 'desde': fields.Date(),\n 'hasta': fields.Date()\n})\n\nnsLinea.models[modeloLinea.name] = modeloLinea\nnsLinea.models[modeloLineaSinN.name] = modeloLineaSinN\nnsLinea.models[modeloBusqueda.name] = modeloBusqueda\n\nnuevaLineaParser = reqparse.RequestParser(bundle_errors=True)\nnuevaLineaParser.add_argument('numero', type=str, required=True)\nnuevaLineaParser.add_argument('estado', type=str, required=True)\n ##PEDRO LOOK AT THIS\n ##PEDRO LOOK AT THIS\n\nnuevaLineaParser.add_argument('activa', type=bool, required=False)\n\neditarLineaParser = nuevaLineaParser.copy()\neditarLineaParser.add_argument('id', type=int, required=True)\n\nbuscarLineasParser = reqparse.RequestParser(bundle_errors=True)\nbuscarLineasParser.add_argument('desde', type=str, required=True)\nbuscarLineasParser.add_argument('hasta', type=str, required=True)\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n\n ##PEDRO LOOK AT THIS\n if(data.estado ==\"Activada\"):\n data.activa = True\n else:\n 
data.activa = False\n\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n \n \n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n # doy de baja en lineaEquipoPlan\n\n repoLep.baja_by_linea(id)\n\n # busco para darle de baja al equipo \n # y tener tmb el id pa la tabla cliente_lep\n lineaeqplan = repoLep.buscar_by_linea(id)\n\n #doy de baja el equipo\n repoEquipo.baja(lineaeqplan.equipo_id)\n #doy de baja en tabla cliente_lep\n repoLepCliente.bajalep(lineaeqplan.id)\n\n return 'Linea dada de baja', 200\n abort(400) \n\n",
"step-ids": [
3,
7,
8,
9,
12
]
}
|
[
3,
7,
8,
9,
12
] |
#
# tests/middleware/test_static.py
#
import pytest
import growler
from pathlib import Path
from unittest import mock
from sys import version_info
from growler.middleware.static import Static
@pytest.fixture
def static(tmpdir):
    """Provide a Static middleware rooted at pytest's tmpdir."""
    root = str(tmpdir)
    return Static(root)
def test_static_fixture(static, tmpdir):
    """The fixture yields a Static whose served path is the tmpdir."""
    assert str(static.path) == str(tmpdir)
    assert isinstance(static, Static)
def test_construct_with_list(tmpdir):
    """Static also accepts the root path as a list of components."""
    components = str(tmpdir).split('/')
    served = Static(['/'] + components)
    assert str(served.path) == str(tmpdir)
def test_error_on_missing_dir():
    """A nonexistent root raises; the exception type depends on Python version."""
    if version_info < (3, 6):
        expected = FileNotFoundError
    else:
        expected = NotADirectoryError
    with pytest.raises(expected):
        Static("/does/not/exist")
def test_static_construct_requires_directory(tmpdir):
    """Pointing Static at a regular file must raise NotADirectoryError."""
    regular_file = tmpdir / "foo"
    regular_file.write('')
    with pytest.raises(NotADirectoryError):
        Static(str(regular_file))
def test_call(static, tmpdir):
    """Serving an existing file sets its MIME type and sends it.

    Fix: removed the unused local `etag` (computed but never used here;
    etag behaviour is covered by test_call_with_etag).
    """
    req, res = mock.MagicMock(), mock.MagicMock()

    file_contents = b'This is some text in teh file'
    f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'
    f.write(file_contents)
    file_path = Path(str(f))

    req.path = '/foo/bar/file.txt'
    static(req, res)

    res.set_type.assert_called_with('text/plain')
    res.send_file.assert_called_with(file_path)
def test_call_invalid_path(static):
    """A path containing '..' is rejected without touching the response."""
    req = mock.Mock()
    res = mock.Mock()
    req.path = '/foo/../bar'

    static(req, res)

    for action in (res.set_type, res.send_file, res.end):
        assert not action.called
def test_call_with_etag(static, tmpdir):
    """A matching IF-NONE-MATCH header yields 304 with no body sent."""
    req = mock.MagicMock()
    res = mock.MagicMock()

    payload = b'This is some text in teh file'
    target = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'
    target.write(payload)
    target_path = Path(str(target))

    req.path = '/foo/bar/file.txt'
    req.headers = {'IF-NONE-MATCH': static.calculate_etag(target_path)}

    static(req, res)

    assert res.status_code == 304
    assert not res.set_type.called
    assert not res.send_file.called
|
normal
|
{
"blob_id": "9a7994a1e51c9cf7fe7d8b50ab26fa3d789fc8e5",
"index": 1012,
"step-1": "<mask token>\n\n\n@pytest.fixture\ndef static(tmpdir):\n return Static(str(tmpdir))\n\n\ndef test_static_fixture(static, tmpdir):\n assert isinstance(static, Static)\n assert str(static.path) == str(tmpdir)\n\n\n<mask token>\n\n\ndef test_error_on_missing_dir():\n err = FileNotFoundError if version_info < (3, 6) else NotADirectoryError\n with pytest.raises(err):\n Static('/does/not/exist')\n\n\ndef test_static_construct_requires_directory(tmpdir):\n name = 'foo'\n foo = tmpdir / name\n foo.write('')\n with pytest.raises(NotADirectoryError):\n Static(str(foo))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@pytest.fixture\ndef static(tmpdir):\n return Static(str(tmpdir))\n\n\ndef test_static_fixture(static, tmpdir):\n assert isinstance(static, Static)\n assert str(static.path) == str(tmpdir)\n\n\ndef test_construct_with_list(tmpdir):\n s = Static(['/'] + str(tmpdir).split('/'))\n assert str(s.path) == str(tmpdir)\n\n\ndef test_error_on_missing_dir():\n err = FileNotFoundError if version_info < (3, 6) else NotADirectoryError\n with pytest.raises(err):\n Static('/does/not/exist')\n\n\ndef test_static_construct_requires_directory(tmpdir):\n name = 'foo'\n foo = tmpdir / name\n foo.write('')\n with pytest.raises(NotADirectoryError):\n Static(str(foo))\n\n\ndef test_call(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n static(req, res)\n res.set_type.assert_called_with('text/plain')\n res.send_file.assert_called_with(file_path)\n\n\n<mask token>\n\n\ndef test_call_with_etag(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n req.headers = {'IF-NONE-MATCH': etag}\n static(req, res)\n assert res.status_code == 304\n assert not res.set_type.called\n assert not res.send_file.called\n",
"step-3": "<mask token>\n\n\n@pytest.fixture\ndef static(tmpdir):\n return Static(str(tmpdir))\n\n\ndef test_static_fixture(static, tmpdir):\n assert isinstance(static, Static)\n assert str(static.path) == str(tmpdir)\n\n\ndef test_construct_with_list(tmpdir):\n s = Static(['/'] + str(tmpdir).split('/'))\n assert str(s.path) == str(tmpdir)\n\n\ndef test_error_on_missing_dir():\n err = FileNotFoundError if version_info < (3, 6) else NotADirectoryError\n with pytest.raises(err):\n Static('/does/not/exist')\n\n\ndef test_static_construct_requires_directory(tmpdir):\n name = 'foo'\n foo = tmpdir / name\n foo.write('')\n with pytest.raises(NotADirectoryError):\n Static(str(foo))\n\n\ndef test_call(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n static(req, res)\n res.set_type.assert_called_with('text/plain')\n res.send_file.assert_called_with(file_path)\n\n\ndef test_call_invalid_path(static):\n req, res = mock.Mock(), mock.Mock()\n req.path = '/foo/../bar'\n static(req, res)\n assert not res.set_type.called\n assert not res.send_file.called\n assert not res.end.called\n\n\ndef test_call_with_etag(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n req.headers = {'IF-NONE-MATCH': etag}\n static(req, res)\n assert res.status_code == 304\n assert not res.set_type.called\n assert not res.send_file.called\n",
"step-4": "import pytest\nimport growler\nfrom pathlib import Path\nfrom unittest import mock\nfrom sys import version_info\nfrom growler.middleware.static import Static\n\n\n@pytest.fixture\ndef static(tmpdir):\n return Static(str(tmpdir))\n\n\ndef test_static_fixture(static, tmpdir):\n assert isinstance(static, Static)\n assert str(static.path) == str(tmpdir)\n\n\ndef test_construct_with_list(tmpdir):\n s = Static(['/'] + str(tmpdir).split('/'))\n assert str(s.path) == str(tmpdir)\n\n\ndef test_error_on_missing_dir():\n err = FileNotFoundError if version_info < (3, 6) else NotADirectoryError\n with pytest.raises(err):\n Static('/does/not/exist')\n\n\ndef test_static_construct_requires_directory(tmpdir):\n name = 'foo'\n foo = tmpdir / name\n foo.write('')\n with pytest.raises(NotADirectoryError):\n Static(str(foo))\n\n\ndef test_call(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n static(req, res)\n res.set_type.assert_called_with('text/plain')\n res.send_file.assert_called_with(file_path)\n\n\ndef test_call_invalid_path(static):\n req, res = mock.Mock(), mock.Mock()\n req.path = '/foo/../bar'\n static(req, res)\n assert not res.set_type.called\n assert not res.send_file.called\n assert not res.end.called\n\n\ndef test_call_with_etag(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n file_contents = b'This is some text in teh file'\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n etag = static.calculate_etag(file_path)\n req.path = '/foo/bar/file.txt'\n req.headers = {'IF-NONE-MATCH': etag}\n static(req, res)\n assert res.status_code == 304\n assert not res.set_type.called\n assert not res.send_file.called\n",
"step-5": "#\n# tests/middleware/test_static.py\n#\n\nimport pytest\nimport growler\nfrom pathlib import Path\nfrom unittest import mock\nfrom sys import version_info\nfrom growler.middleware.static import Static\n\n\n@pytest.fixture\ndef static(tmpdir):\n return Static(str(tmpdir))\n\n\ndef test_static_fixture(static, tmpdir):\n assert isinstance(static, Static)\n assert str(static.path) == str(tmpdir)\n\n\ndef test_construct_with_list(tmpdir):\n s = Static(['/'] + str(tmpdir).split('/'))\n assert str(s.path) == str(tmpdir)\n\n\ndef test_error_on_missing_dir():\n err = FileNotFoundError if version_info < (3, 6) else NotADirectoryError\n with pytest.raises(err):\n Static(\"/does/not/exist\")\n\n\ndef test_static_construct_requires_directory(tmpdir):\n name = \"foo\"\n foo = tmpdir / name\n foo.write('')\n with pytest.raises(NotADirectoryError):\n Static(str(foo))\n\n\ndef test_call(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n\n file_contents = b'This is some text in teh file'\n\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n\n file_path = Path(str(f))\n\n etag = static.calculate_etag(file_path)\n\n req.path = '/foo/bar/file.txt'\n\n static(req, res)\n\n res.set_type.assert_called_with('text/plain')\n res.send_file.assert_called_with(file_path)\n\n\ndef test_call_invalid_path(static):\n req, res = mock.Mock(), mock.Mock()\n\n req.path = '/foo/../bar'\n static(req, res)\n\n assert not res.set_type.called\n assert not res.send_file.called\n assert not res.end.called\n\n\ndef test_call_with_etag(static, tmpdir):\n req, res = mock.MagicMock(), mock.MagicMock()\n\n file_contents = b'This is some text in teh file'\n\n f = tmpdir.mkdir('foo').mkdir('bar') / 'file.txt'\n f.write(file_contents)\n file_path = Path(str(f))\n\n etag = static.calculate_etag(file_path)\n\n req.path = '/foo/bar/file.txt'\n\n req.headers = {'IF-NONE-MATCH': etag}\n\n static(req, res)\n\n assert res.status_code == 304\n\n assert not 
res.set_type.called\n assert not res.send_file.called\n",
"step-ids": [
4,
7,
8,
9,
10
]
}
|
[
4,
7,
8,
9,
10
] |
# -*- encoding: utf-8 -*-
##############################################################################
#
# ServerPLM, Open Source Product Lifcycle Management System
# Copyright (C) 2020-2020 Didotech srl (<http://www.didotech.com>). All Rights Reserved
#
# Created on : 2018-03-01
# Author : Fabio Colognesi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from datetime import datetime
from odoo import models, fields, api, _, osv
from odoo.exceptions import UserError
from .common import getListIDs, getCleanList, packDictionary, unpackDictionary, getCleanBytesDictionary, \
move_workflow, wf_message_post, isVoid, isAdministrator, isWritable, isReleased, \
isObsoleted, isUnderModify, isAnyReleased, isDraft, getUpdTime
# USED_STATES=[('draft','Draft'),('confirmed','Confirmed'),('released','Released'),('undermodify','UnderModify'),('obsoleted','Obsoleted')]
# STATEFORRELEASE=['confirmed']
# STATESRELEASABLE=['confirmed','transmitted','released','undermodify','obsoleted']
class plm_component(models.Model):
_name = 'product.product'
_inherit = 'product.product'
create_date = fields.Datetime(_('Date Created'), readonly=True)
write_date = fields.Datetime(_('Date Modified'), readonly=True)
@property
def _default_rev(self):
field = self.env['product.template']._fields.get('engineering_revision', None)
default = field.default('product.template') if not(field == None) else 0
return default
# Internal methods
    def _insertlog(self, ids, changes={}, note={}):
        """Create a plm.logging row for each product in *ids*.

        The operation is described either by *note* (a dict with 'type'
        and 'reason' keys, takes precedence) or by *changes* (a values
        dict diffed via plm.logging.getchanges). Returns True if at
        least one log row was created.

        NOTE(review): *changes* and *note* are mutable default arguments,
        shared across calls — safe only while callers never mutate them.
        """
        ret=False
        op_type, op_note=["unknown",""]
        for objID in self.browse(getListIDs(ids)):
            if note:
                op_type="{type}".format(type=note['type'])
                op_note="{reason}".format(reason=note['reason'])
            elif changes:
                op_type='change value'
                op_note=self.env['plm.logging'].getchanges(objID, changes)
            # Rows with an empty op_note are deliberately skipped.
            if op_note:
                values={
                    'name': objID.name,
                    'revision': "{major}".format(major=objID.engineering_revision),
                    'type': self._name,
                    'op_type': op_type,
                    'op_note': op_note,
                    'op_date': datetime.now(),
                    'userid': self._uid,
                }
                objectItem=self.env['plm.logging'].create(values)
                if objectItem:
                    ret=True
        return ret
def _getbyrevision(self, name, revision):
return self.search([('engineering_code', '=', name), ('engineering_revision', '=', revision)])
# def _getExplodedBom(self, ids, level=0, currlevel=0):
# """
# Returns a flat list of all children in a Bom ( level = 0 one level only, level = 1 all levels)
# """
# result = []
#
# if level == 0 and currlevel > 1:
# return result
# components = self.browse(ids)
# relType = self.env['mrp.bom']
# for component in components:
# for bomid in component.bom_ids:
# children = relType.GetExplodedBom([bomid.id], level, currlevel)
# result.extend(children)
# return result
    def _getChildrenBom(self, component, level=0, currlevel=0):
        """
        Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)
        """
        result = []
        bufferdata = []
        # Depth guard: with level == 0 only the first level is expanded;
        # currlevel > 1 means the recursion has gone past it.
        if level == 0 and currlevel > 1:
            return bufferdata
        for bomid in component.product_tmpl_id.bom_ids:
            for bomline in bomid.bom_line_ids:
                # Recurse first so descendants precede the direct child id.
                children=self._getChildrenBom(bomline.product_id, level, currlevel+1)
                bufferdata.extend(children)
                bufferdata.append(bomline.product_id.id)
        result.extend(bufferdata)
        # getCleanList presumably de-duplicates the ids (see common module).
        return getCleanList(result)
@api.model
def RegMessage(self, request=[], default=None):
"""
Registers a message for requested component
"""
oid, message = request
wf_message_post(self, [oid], body=message)
return False
def getUserName(self):
"""
Gets the user name
"""
userType = self.env['res.users']
uiUser = userType.browse(self._uid)
return uiUser.name
def getFromTemplateID(self, oid):
ret=False
if oid:
for prodItem in self.search([('product_tmpl_id', '=', oid)]):
ret=prodItem
break
return ret
def getTemplateItem(self, oid):
ret=False
if oid:
for prodItem in self.browse(getListIDs(oid)):
ret=prodItem.product_tmpl_id
break
return ret
## Customized Automations
def on_change_name(self, oid, name=False, engineering_code=False):
if name:
results = self.search([('name', '=', name)])
if len(results) > 0:
raise UserError(_("Update Part Error.\n\nPart {} already exists.\nClose with OK to reuse, with Cancel to discharge.".format(name)))
if not engineering_code:
return {'value': {'engineering_code': name}}
return {}
## External methods
    @api.model
    def CleanStructure(self, request=[], default=None):
        """
        Cleans relations having sourceID (in mrp.bom.line)

        *request* is a list of (parentID, sourceID) pairs. For each
        writable parent product, removes the EBOM lines generated from
        *sourceID* (if that document is checked out by the current user)
        and deletes any BoM left empty afterwards. Returns True when at
        least one pair was processed.
        """
        ret=False
        # NOTE(review): `type` shadows the builtin; kept as-is here.
        type = "ebom"
        bomLType = self.env['mrp.bom.line']
        bomType = self.env['mrp.bom']
        docType=self.env['plm.document']
        # Start from an empty recordset and accumulate lines to remove.
        bl_to_delete = bomLType
        for parentID, sourceID in request:
            if not parentID==None:
                if isWritable(self, parentID):
                    for bom_id in bomType.search([('type','=',type),('product_id','=',parentID)]):
                        if not sourceID==None:
                            if docType.IsCheckedOutForMe(sourceID):
                                for bomLine in bomLType.search([('source_id','=',sourceID),('bom_id','=',bom_id.id)]):
                                    bl_to_delete |= bomLine
                                bl_to_delete.unlink() # Cleans mrp.bom.lines
                        if not bom_id.bom_line_ids:
                            bom_id.unlink() # Cleans void mrp.bom
            ret = True
        return ret
    @api.model
    def Clone(self, ids=[], default=None):
        """
        Creates a new copy of the component

        Only the first record in *ids* is cloned. Returns a packed dict
        with the new record's id, name, engineering code/revision and a
        fresh 'draft' state; empty when nothing was cloned.
        """
        default = {}
        exitValues = {}
        for tmpObject in self.browse(getListIDs(ids)):
            note={
                'type': 'clone object',
                'reason': "Creating new cloned entity starting from '{old}'.".format(old=tmpObject.name),
            }
            self._insertlog(tmpObject.id, note=note)
            # NOTE(review): old-API call shape `copy(id, default)` — on the
            # new API copy() takes only defaults; confirm this still works.
            newID = self.copy(tmpObject.id, default)
            if newID:
                newEnt = self.browse(newID)
                exitValues = {
                    '_id': newID,
                    'name': newEnt.name,
                    'engineering_code': newEnt.engineering_code,
                    'engineering_revision': newEnt.engineering_revision,
                    'engineering_writable': True,
                    'state': 'draft',
                }
                break
        return packDictionary(exitValues)
@api.model
def CloneVirtual(self, ids=[], default=None):
"""
Creates a "false" new copy of the component.
Really returns only new values avoiding creation of new object.
"""
exitValues = {}
for tmpObject in self.browse(getListIDs(ids)):
new_name = "Copy of {name}".format(name=tmpObject.name)
exitValues = {
'_id': False,
'name': new_name,
'engineering_code': new_name,
'description': "{desc}".format(desc=tmpObject.description),
'engineering_revision': self._default_rev,
'engineering_writable': True,
'state': 'draft',
}
break
return packDictionary(exitValues)
@api.model
def GetUpdated(self, vals=[], default=None):
"""
Gets Last/Requested revision of given items (by name, revision, update time)
"""
partData, attribNames = vals
ids = self.GetLatestIds(partData)
return packDictionary(self.read(getCleanList(ids), attribNames))
@api.model
def GetStdPartName(self, vals=[], default=None):
"""
Gets new P/N reading from entity chosen (taking it from new index on sequence).
"""
ret=""
entID, objectName = vals
if entID and objectName:
userType=self.env[objectName] if (objectName in self.env) else None
if not(userType==None):
for objID in userType.browse(getListIDs(entID)):
ret=self.GetNewPNfromSeq(objID.sequence_id)
break
return ret
@api.model
def GetNewPNfromSeq(self, seqID=None, default=None):
"""
Gets new P/N from sequence (checks for P/N existence).
"""
ret=""
if seqID:
count=0
while ret=="":
chkname=self.env['ir.sequence'].browse(seqID.id)._next()
count+=1
criteria=[('name', '=', chkname)]
partIds = self.search(criteria)
if (partIds==None) or (len(partIds)==0):
ret=chkname
if count>1000:
logging.error("GetNewPNfromSeq : Unable to get a new P/N from sequence '{name}'."\
.format(name=seqID.name))
break
return ret
@api.model
def GetLatestIds(self, vals=[], default=None):
"""
Gets Last/Requested revision of given items (by name, revision, update time)
"""
ids = []
for request in vals:
partName, _, updateDate = request
if updateDate:
criteria=[('engineering_code', '=', partName), ('write_date', '>', updateDate)]
else:
criteria=[('engineering_code', '=', partName)]
partIds = self.search(criteria, order='engineering_revision')
if len(partIds) > 0:
ids.append(partIds[len(partIds) - 1].id)
return getCleanList(ids)
@api.model
def GetId(self, request=[], default=None):
"""
Gets Last/Requested revision of given items (by name, revision, update time)
"""
idd = False
partName, partRev, _ = request
# partName, partRev, updateDate = request
# if updateDate:
# if partRev:
# criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', partRev),
# ('write_date', '>', updateDate)]
# else:
# criteria=[('engineering_code', '=', partName), ('write_date', '>', updateDate)]
# else:
# if partRev:
# criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', partRev)]
# else:
# criteria=[('engineering_code', '=', partName)]
if isinstance(partRev, int):
criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', partRev)]
else:
criteria=[('engineering_code', '=', partName)]
partIds = self.search(criteria, order='engineering_revision')
if len(partIds) > 0:
idd=partIds[len(partIds) - 1].id
return idd
@api.model
def IsSaveable(self, ids=[], default=None):
"""
Answers about capability to save requested product
"""
ret=True
for tmpObject in self.browse(getListIDs(ids)):
ret=ret and tmpObject._iswritable()
return ret
@api.model
def IsRevisable(self, ids=[], default=None):
"""
Gets if a product is revisable or not.
"""
ret=False
for tmpObject in self.browse(getListIDs(ids)):
if isAnyReleased(self, tmpObject.id):
ret=True
break
return ret
    @api.model
    def NewRevision(self, ids=[], default=None):
        """
        Creates a new revision of current product

        For the first product in *ids*: finds its latest revision, and if
        that revision has been released, freezes it ('undermodify') and
        copies it into a new draft with revision + 1, carrying the BoM
        over. Returns (new record id or False, new revision index or 0).
        """
        newID, newIndex = [ False, 0 ]
        # internal_writing bypasses the usual write restrictions;
        # new_revision flags the copy for downstream hooks.
        thisContext={ 'internal_writing':True, 'new_revision':True, }
        for tmpObject in self.browse(getListIDs(ids)):
            latestIDs = self.GetLatestIds( [(tmpObject.engineering_code, tmpObject.engineering_revision, False)] )
            for oldObject in self.browse(latestIDs):
                # Only released products may be revised.
                if isAnyReleased(self, oldObject.id):
                    note={
                        'type': 'revision process',
                        'reason': "Creating new revision for '{old}'.".format(old=oldObject.name),
                    }
                    self._insertlog(oldObject.id, note=note)
                    newIndex = int(oldObject.engineering_revision) + 1
                    # Freeze the old revision first.
                    default = {
                        'engineering_writable': False,
                        'state': 'undermodify',
                    }
                    oldObject.with_context(thisContext).write(default)
                    default={
                        'name': oldObject.name,
                        'engineering_revision': newIndex,
                        'engineering_writable': True,
                        'state': 'draft',
                    }
                    # Creates a new "old revision" object
                    tmpID = oldObject.with_context(thisContext).copy(default)
                    if tmpID:
                        wf_message_post(self, [oldObject.id], body='Created : New Revision.')
                        newID = tmpID.id
                        # Re-assert the name: copy() may have decorated it.
                        tmpID.write({'name': oldObject.name, })
                        note={
                            'type': 'revision process',
                            'reason': "Created new revision '{index}' for product '{name}'.".format(index=newIndex,name=oldObject.name),
                        }
                        self._insertlog(newID, note=note)
                        # Carry the normal/spare-part BoMs over to the new revision.
                        oldObject.with_context(thisContext)._copy_productBom(newID, ["normal","spbom"])
                        tmpID.with_context(thisContext).write( {'name': oldObject.name, } )
                        note={
                            'type': 'revision process',
                            'reason': "Copied BoM to new revision '{index}' for product '{name}'.".format(index=newIndex,name=oldObject.name),
                        }
                        self._insertlog(newID, note=note)
                break
        return (newID, newIndex)
    @api.model
    def CheckProductsToSave(self, request="", default=None):
        """
        Checks if given products has to be saved.

        *request* is a packed list of part dicts. For each engineering
        code (first occurrence wins) returns {'componentID': id-or-False,
        'hasSaved': bool}: True when the part is new, or when the client's
        '_lastupdate' is newer than the DB record and it is writable.
        """
        listedParts = []
        retValues = {}
        for part in unpackDictionary(request):
            part=getCleanBytesDictionary(part)
            hasSaved = True
            existingID=False
            order = None
            if not('engineering_code' in part):
                continue
            # Each engineering code is evaluated only once per request.
            if part['engineering_code'] in listedParts:
                continue
            if ('engineering_code' in part) and ('engineering_revision' in part):
                criteria = [
                    ('engineering_code', '=', part['engineering_code']),
                    ('engineering_revision', '=', part['engineering_revision'])
                ]
            elif ('engineering_code' in part) and not('engineering_revision' in part):
                criteria = [
                    ('engineering_code', '=', part['engineering_code'])
                ]
                # No revision given: order so the last id is the highest revision.
                order='engineering_revision'
            existingIDs = self.search( criteria, order=order )
            if existingIDs:
                ids=sorted(existingIDs.ids)
                existingID = ids[len(ids) - 1]
            if existingID:
                hasSaved = False
                objPart = self.browse(existingID)
                part['engineering_revision']=objPart.engineering_revision
                if ('_lastupdate' in part) and part['_lastupdate']:
                    # Client copy newer than DB and record writable -> must save.
                    if (getUpdTime(objPart) < datetime.strptime(part['_lastupdate'], '%Y-%m-%d %H:%M:%S')):
                        if objPart._iswritable():
                            hasSaved = True
            retValues[part['engineering_code']]={
                'componentID':existingID,
                'hasSaved':hasSaved}
            listedParts.append(part['engineering_code'])
        return packDictionary(retValues)
    @api.model
    def SaveOrUpdate(self, request=[], default=None):
        """
        Saves or Updates Parts

        *request* is a packed list of part dicts. Creates parts that do
        not exist yet and updates existing ones when the client copy is
        newer ('_lastupdate') and the record is writable. Returns, per
        engineering code, {'componentID': id, 'hasSaved': bool}.
        """
        listedParts = []
        retValues = {}
        # Only fields declared on this model survive into create/write.
        modelFields=self.env['plm.config.settings'].GetFieldsModel(self._name)
        for part in unpackDictionary(request):
            part=getCleanBytesDictionary(part)
            hasSaved = False
            existingID=False
            order=None
            if not ('engineering_code' in part) or (not 'engineering_revision' in part):
                part['componentID'] = False
                part['hasSaved'] = hasSaved
                continue
            # Fall back to the engineering code when no name was sent.
            if not ('name' in part) and (('engineering_code' in part) and part['engineering_code']):
                part['name'] = part['engineering_code']
            if (('name' in part) and not(part['name'])) and (('engineering_code' in part) and part['engineering_code']):
                part['name'] = part['engineering_code']
            # Each engineering code is processed only once per request.
            if part['engineering_code'] in listedParts:
                continue
            if not('componentID' in part) or not(part['componentID']):
                if ('engineering_code' in part) and ('engineering_revision' in part):
                    criteria = [
                        ('engineering_code', '=', part['engineering_code']),
                        ('engineering_revision', '=', part['engineering_revision'])
                    ]
                elif ('engineering_code' in part) and not('engineering_revision' in part):
                    criteria = [
                        ('engineering_code', '=', part['engineering_code'])
                    ]
                    order = 'engineering_revision'
                existingIDs = self.search( criteria, order=order)
                if existingIDs:
                    ids=sorted(existingIDs.ids)
                    existingID = ids[len(ids) - 1]
            else:
                existingID=part['componentID']
            lastupdate=datetime.strptime(str(part['_lastupdate']),'%Y-%m-%d %H:%M:%S') if ('_lastupdate' in part) else datetime.now()
            # Strip any keys that are not model fields before writing.
            for fieldName in list(set(part.keys()).difference(set(modelFields))):
                del (part[fieldName])
            if not existingID:
                logging.debug("[SaveOrUpdate] Part {name} is creating.".format(name=part['engineering_code']))
                objectItem=self.with_context({'internal_writing':True}).create(part)
                if objectItem:
                    existingID=objectItem.id
                    hasSaved = True
            else:
                objPart = self.browse(existingID)
                if objPart:
                    part['name'] = objPart.name
                    part['engineering_revision']=objPart.engineering_revision
                    # Update only when the client copy is newer and writable.
                    if (getUpdTime(objPart) < lastupdate):
                        if objPart._iswritable():
                            logging.debug("[SaveOrUpdate] Part {name}/{revi} is updating.".format(name=part['engineering_code'],revi=part['engineering_revision']))
                            hasSaved = True
                            if not objPart.with_context({'internal_writing':False}).write(part):
                                logging.error("[SaveOrUpdate] Part {name}/{revi} cannot be updated.".format(name=part['engineering_code'],revi=part['engineering_revision']))
                                hasSaved = False
                else:
                    logging.error("[SaveOrUpdate] Part {name}/{revi} doesn't exist anymore.".format(name=part['engineering_code'],revi=part['engineering_revision']))
            retValues[part['engineering_code']]={
                'componentID':existingID,
                'hasSaved':hasSaved}
            listedParts.append(part['engineering_code'])
        return packDictionary(retValues)
    @api.model
    def QueryLast(self, request=([], []), default=None):
        """
        Returns exported column values for the latest revision matching the filter.

        :param request: tuple (queryFilter, columns): the search filter and the
                        field names to export
        :param default: unused; kept for external API compatibility
        :return: the exported rows (the 'datas' entry of export_data), or []
                 when no columns were requested or nothing matched
        """
        objId = False
        expData = []
        queryFilter, columns = request
        # Nothing to export when no columns were requested.
        if len(columns) < 1:
            return expData
        # Drop any revision constraint so the search spans all revisions and
        # the highest one can be picked below.
        # NOTE(review): this treats queryFilter as a dict (key deletion), while
        # self.search() expects a domain list — confirm the format callers send.
        if 'engineering_revision' in queryFilter:
            del queryFilter['engineering_revision']
        allIDs = self.search(queryFilter, order='engineering_revision')
        if len(allIDs) > 0:
            # Last record in 'engineering_revision' order = latest revision.
            objId = allIDs[len(allIDs) - 1]
        if objId:
            tmpData = objId.export_data(columns)
            if 'datas' in tmpData:
                expData = tmpData['datas']
        return expData
## Menu action Methods
def _create_normalBom(self, idd, processedIds=[]):
"""
Creates a new Normal Bom (recursive on all EBom children)
"""
default = {}
if idd in processedIds:
return False
checkObj=self.browse(idd)
if not checkObj:
return False
bomType = self.env['mrp.bom']
objBoms = bomType.search([('product_tmpl_id', '=', checkObj.product_tmpl_id.id), ('type', '=', 'normal'), ('active', '=', True)])
idBoms = bomType.search([('product_tmpl_id', '=', checkObj.product_tmpl_id.id), ('type', '=', 'ebom'), ('active', '=', True)])
if not objBoms:
if idBoms:
default={'product_tmpl_id': idBoms[0].product_tmpl_id.id,
'type': 'normal', 'active': True, }
if idBoms[0].product_id:
default.update({'product_id': idBoms[0].product_id.id})
processedIds.append(idd)
newidBom = idBoms[0].with_context({'internal_writing':True}).copy(default)
if newidBom:
newidBom.with_context({'internal_writing':True}).write(default)
ok_rows = self._summarizeBom(newidBom.bom_line_ids)
for bom_line in list(set(newidBom.bom_line_ids) ^ set(ok_rows)):
bom_line.unlink()
for bom_line in ok_rows:
bom_line.with_context({'internal_writing':True}).write(
{ 'type': 'normal', 'source_id': False,
'product_qty': bom_line.product_qty, } )
self._create_normalBom(bom_line.product_id.id, processedIds=processedIds)
else:
for bom_line in bomType.browse(objBoms[0].id).bom_line_ids:
self._create_normalBom(bom_line.product_id.id, processedIds=processedIds)
return False
def _copy_productBom(self, idStart, idDest=None, bomTypes=["normal"]):
"""
Creates a new 'bomType' BoM (arrested at first level BoM children).
"""
default = {}
if not idDest:
idDest=idStart
checkObjDest = self.browse(idDest)
if checkObjDest:
objBomType = self.env['mrp.bom']
for bomType in bomTypes:
objBoms = objBomType.search([('product_id', '=', idDest), ('type', '=', bomType), ('active', '=', True)])
idBoms = objBomType.search([('product_id', '=', idStart), ('type', '=', bomType), ('active', '=', True)])
if not objBoms:
for oldObj in idBoms:
newidBom = oldObj.with_context({'internal_writing':True}).copy(default)
if newidBom:
newidBom.with_context({'internal_writing':True}).write(
{'name': checkObjDest.name,
'product_tmpl_id': checkObjDest.product_tmpl_id.id,
'type': bomType, 'active': True, })
ok_rows = self._summarizeBom(newidBom.bom_line_ids)
for bom_line in list(set(newidBom.bom_line_ids) ^ set(ok_rows)):
bom_line.unlink()
for bom_line in ok_rows:
bom_line.with_context({'internal_writing':True}).write(
{'type': bomType, 'source_id': False,
'name': bom_line.product_id.name,
'product_qty': bom_line.product_qty, })
return False
def _summarizeBom(self, datarows):
dic = {}
for datarow in datarows:
key = datarow.product_id.name
if key in dic:
dic[key].product_qty = float(dic[key].product_qty) + float(datarow.product_qty)
else:
dic[key] = datarow
retd = dic.values()
return retd
## Work Flow Internal Methods
    def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses, release=False):
        """
        Collects the given ids plus all first-level BoM children whose state is
        in includeStatuses, flagging whether the movement has to stop.

        :param ids: root component id(s)
        :param excludeStatuses: child states excluded from the blocking check
        :param includeStatuses: child states whose ids are added to the result
        :param release: True when called for a release movement
        :return: tuple (stopFlag, ids): stopFlag is True when at least one
                 child is in a state that forbids the movement
        """
        stopFlag = False
        tobeReleasedIDs = getListIDs(ids)
        options=self.env['plm.config.settings'].GetOptions()
        children = []
        for oic in self.browse(ids):
            # Only the first BoM level of each root (level argument = 1).
            children = self.browse(self._getChildrenBom(oic, 1))
            for child in children:
                # NOTE(review): with release=False the right operand is always
                # False, so blocking only ever triggers on release moves when
                # 'opt_obsoletedinbom' is disabled — confirm this and/or
                # precedence matches the intended rule.
                if ((not child.state in excludeStatuses) and (not child.state in includeStatuses)) \
                        and (release and not(options.get('opt_obsoletedinbom', False))):
                    logging.warning("Part (%r - %d) is in a status '%s' not allowed."
                                    %(child.engineering_code, child.engineering_revision, child.state))
                    stopFlag = True
                    continue
                if child.state in includeStatuses:
                    if not child.id in tobeReleasedIDs:
                        tobeReleasedIDs.append(child.id)
        return (stopFlag, getCleanList(tobeReleasedIDs))
def create_normalBom_WF(self, ids):
"""
Creates a new Normal Bom if doesn't exist (action callable from code)
"""
for idd in ids:
processedIds = []
self._create_normalBom(idd, processedIds=processedIds)
wf_message_post(self, ids, body='Created Normal Bom.')
return False
def _action_ondocuments(self, ids, action, status):
"""
Moves workflow on documents having the same state of component
"""
docIDs = []
# documents=[]
documentType = self.env['plm.document']
check=self._context.get('no_move_documents', False)
if not check:
for oldObject in self.browse(ids):
for document in oldObject.linkeddocuments:
if (document.id not in docIDs):
if documentType.ischecked_in(document.id):
docIDs.append(document.id)
idMoves=move_workflow(documentType, docIDs, action, status)
documentType.logging_workflow(idMoves, action, status)
return docIDs
@api.model
def _iswritable(self):
if self:
checkState = ('draft')
if not self.engineering_writable:
logging.warning(
"_iswritable : Part (%r - %d) is not writable." % (self.engineering_code, self.engineering_revision))
return False
if not self.state in checkState:
logging.warning("_iswritable : Part (%r - %d) is in status %r." % (self.engineering_code, self.engineering_revision, self.state))
return False
if self.engineering_code == False:
logging.warning(
"_iswritable : Part (%r - %d) is without Engineering P/N." % (self.name, self.engineering_revision))
return False
return True
    @api.model
    def ActionUpload(self, request=[], default=None):
        """
        Action to be executed after automatic upload.

        :param request: unused; kept for external API compatibility
        :param default: unused; kept for external API compatibility
        :return: always False
        """
        signal='upload'
        # NOTE(review): move_workflow is called elsewhere in this file with a
        # fourth 'status' argument — confirm the 3-argument form is intended.
        move_workflow(self, self._ids, signal)
        return False
def action_upload(self):
"""
Action to be executed for Uploaded state
"""
options=self.env['plm.config.settings'].GetOptions()
status = 'uploaded'
action = 'upload'
default = {
'state': status,
'engineering_writable': False,
}
doc_default = {
'state': status,
'writable': False,
}
operationParams = {
'status': status,
'statusName': _('Uploaded'),
'action': action,
'docaction': 'uploaddoc',
'excludeStatuses': ['uploaded', 'confirmed', 'transmitted','released', 'undermodify', 'obsoleted'],
'includeStatuses': ['draft'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
ids=self._ids
self.logging_workflow(ids, action, status)
return self._action_to_perform(ids, operationParams, default)
def action_draft(self):
"""
Action to be executed for Draft state
"""
options=self.env['plm.config.settings'].GetOptions()
status = 'draft'
action = 'draft'
default = {
'state': status,
'engineering_writable': True,
}
doc_default = {
'state': status,
'writable': True,
}
operationParams = {
'status': status,
'statusName': _('Draft'),
'action': action,
'docaction': 'draft',
'excludeStatuses': ['draft', 'released', 'undermodify', 'obsoleted'],
'includeStatuses': ['confirmed', 'uploaded', 'transmitted'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
ids=self._ids
self.logging_workflow(ids, action, status)
return self._action_to_perform(ids, operationParams, default)
def action_confirm(self):
"""
Action to be executed for Confirmed state
"""
options=self.env['plm.config.settings'].GetOptions()
status = 'confirmed'
action = 'confirm'
default = {
'state': status,
'engineering_writable': False,
}
doc_default = {
'state': status,
'writable': False,
}
operationParams = {
'status': status,
'statusName': _('Confirmed'),
'action': action,
'docaction': 'confirm',
'excludeStatuses': ['confirmed', 'transmitted', 'released', 'undermodify', 'obsoleted'],
'includeStatuses': ['draft'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
ids=self._ids
self.logging_workflow(ids, action, status)
return self._action_to_perform(ids, operationParams, default)
def action_correct(self):
"""
Action to be executed for Draft state (signal "correct")
"""
options=self.env['plm.config.settings'].GetOptions()
status='draft'
action = 'correct'
default = {
'state': status,
'engineering_writable': True,
}
doc_default = {
'state': status,
'writable': True,
}
operationParams = {
'status': status,
'statusName': _('Draft'),
'action': action,
'docaction': 'correct',
'excludeStatuses': ['draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],
'includeStatuses': ['confirmed'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
ids=self._ids
self.logging_workflow(ids, action, status)
return self._action_to_perform(ids, operationParams, default)
def action_release(self):
options=self.env['plm.config.settings'].GetOptions()
status='released'
action = 'release'
default = {
'state': status,
'engineering_writable': False,
}
doc_default = {
'state': status,
'writable': False,
}
excludeStatuses = ['released', 'undermodify', 'obsoleted']
includeStatuses = ['confirmed']
operationParams = {
'status': status,
'statusName': _('Released'),
'action': action,
'docaction': 'release',
'excludeStatuses': excludeStatuses,
'includeStatuses': includeStatuses,
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
return self._action_to_release(self._ids, excludeStatuses, includeStatuses)
def action_obsolete(self):
"""
Action to be executed for Obsoleted state
"""
options=self.env['plm.config.settings'].GetOptions()
status = 'obsoleted'
action = 'obsolete'
default={
'engineering_writable': False,
'state': status,
}
doc_default = {
'state': status,
'writable': False,
}
operationParams = {
'status': status,
'statusName': _('Obsoleted'),
'action': action,
'docaction': 'obsolete',
'excludeStatuses': ['draft', 'confirmed', 'transmitted', 'obsoleted'],
'includeStatuses': ['undermodify', 'released'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
return self._action_to_perform(self._ids, operationParams, default)
def action_reactivate(self):
"""
action to be executed for Released state (signal "reactivate")
"""
options=self.env['plm.config.settings'].GetOptions()
status = 'released'
action = 'reactivate'
default={
'engineering_writable': False,
'state': status,
}
doc_default = {
'state': status,
'writable': False,
}
operationParams = {
'status': status,
'statusName': _('Released'),
'action': action,
'docaction': 'reactivate',
'excludeStatuses': ['draft', 'confirmed', 'transmitted', 'released'],
'includeStatuses': ['undermodify', 'obsoleted'],
'default': default,
'doc_default': doc_default,
}
if options.get('opt_showWFanalysis', False):
return self.action_check_workflow(operationParams)
else:
return self._action_to_perform(self._ids, operationParams, default)
def logging_workflow(self, ids, action, status):
note={
'type': 'workflow movement',
'reason': "Applying workflow action '{action}', moving to status '{status}.".format(action=action, status=status),
}
self._insertlog(ids, note=note)
def _action_to_perform(self, ids, operationParams , default={}):
"""
Executes on cascade to children products the required workflow operations.
"""
full_ids=[]
status=operationParams['status']
action=operationParams['action']
docaction=operationParams['docaction']
excludeStatuses=operationParams['excludeStatuses']
includeStatuses=operationParams['includeStatuses']
stopFlag,allIDs=self._get_recursive_parts(ids, excludeStatuses, includeStatuses)
self._action_ondocuments(allIDs,docaction, status)
if action:
idMoves=move_workflow(self, allIDs, action, status)
self.logging_workflow(idMoves, action, status)
objId=self.browse(allIDs).with_context({'internal_writing':True}).write(default)
if objId:
wf_message_post(self, allIDs, body='Status moved to: {status}.'.format(status=status))
return objId
    def _action_to_release(self, ids, excludeStatuses, includeStatuses):
        """
        Action to be executed for Released state.

        Releases the given parts and all eligible children, obsoleting the
        previous revision of each released part and moving the linked
        documents accordingly.

        :param ids: component ids the release was requested on
        :param excludeStatuses: states forbidding the release of a child
        :param includeStatuses: child states included in the release
        :raises UserError: when a part is in a state forbidding the release
        :return: the write() result on the moved parts
        """
        full_ids = []
        last_ids=[]
        status='released'
        action='release'
        default={
            'engineering_writable': False,
            'state': status
        }
        stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses, includeStatuses, release=True)
        if len(allIDs) < 1 or stopFlag:
            raise UserError(_("WorkFlow Error.\n\nOne or more parts cannot be released."))
        allProdObjs = self.browse(allIDs)
        for oldObject in allProdObjs:
            # The immediately previous revision (if any) has to be obsoleted.
            objObsolete=self._getbyrevision(oldObject.engineering_code, oldObject.engineering_revision - 1)
            if objObsolete and objObsolete.id:
                last_ids.append(objObsolete.id)
        idMoves=move_workflow(self, last_ids, 'obsolete', 'obsoleted')
        self.logging_workflow(idMoves, 'obsolete', 'obsoleted')
        self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')
        self._action_ondocuments(allIDs, action, status)
        for currId in allProdObjs:
            # NOTE(review): full_ids is collected but never used afterwards.
            if not (currId.id in ids):
                full_ids.append(currId.id)
        idMoves=move_workflow(self, allIDs, action, status)
        self.logging_workflow(idMoves, action, status)
        objId=self.browse(idMoves).with_context({'internal_writing':True}).write(default)
        if objId and idMoves:
            wf_message_post(self, allIDs, body='Status moved to: {status}.'.format(status=status))
        return objId
#######################################################################################################################################33
# Overridden methods for this entity
@api.model
def create(self, vals):
ret=False
if vals and vals.get('name', False):
existingIDs = self.search([('name', '=', vals['name'])],
order='engineering_revision')
if (vals.get('engineering_code', False)==False) or (vals['engineering_code'] == ''):
vals['engineering_code'] = vals['name']
major = vals.get('engineering_revision', None)
major= self._default_rev if isVoid(major) else major
vals['engineering_revision'] = major
if existingIDs:
existingID = existingIDs[len(existingIDs) - 1]
if ('engineering_revision' in vals):
existObj = existingID
if existObj:
if (vals['engineering_revision'] > existObj.engineering_revision):
vals['name'] = existObj.name
else:
return existingID
else:
return existingID
try:
objectItem=super(plm_component, self).create(vals)
if objectItem:
ret=objectItem # Returns the objectItem instead the id to be coherent
values={
'name': objectItem.name,
'revision': objectItem.engineering_revision,
'type': self._name,
'op_type': 'creation',
'op_note': 'Create new entity on database',
'op_date': datetime.now(),
'userid': self._uid,
}
self.env['plm.logging'].create(values)
except Exception as ex:
raise Exception(" (%r). It has tried to create with values : (%r)." % (ex, vals))
elif not(self.env.context.get('create_from_tmpl') == None):
objectItem=super(plm_component, self).create(vals)
if objectItem:
ret=objectItem # Returns the objectItem instead the id to be coherent
values={
'name': objectItem.name,
'revision': objectItem.engineering_revision,
'type': self._name,
'op_type': 'creation',
'op_note': 'Create new entity on database',
'op_date': datetime.now(),
'userid': self._uid,
}
self.env['plm.logging'].create(values)
return ret
def write(self, vals):
ret=True
if vals:
if not isAdministrator(self):
check=self._context.get('internal_writing', False)
thisprocess=self._context.get('internal_process', False) # Avoids messages during internal processes.
if not check:
for prodItem in self.browse(self._ids):
if not isDraft(self,prodItem.id):
if not thisprocess:
logging.error("The entity '{name}-{rev}' is in a status that does not allow you to make save action".format(name=prodItem.name,rev=prodItem.engineering_revision))
ret=False
break
if not prodItem.engineering_writable:
if not thisprocess:
logging.error("The entity '{name}-{rev}' cannot be written.".format(name=prodItem.name,rev=prodItem.engineering_revision))
ret=False
break
if ret:
self._insertlog(self._ids, changes=vals)
ret=super(plm_component, self).write(vals)
return ret
def copy(self, default={}):
newID=False
override=False
previous_name=False
oid=self.id
if not self._context.get('new_revision', False):
previous_name = self.browse(oid).name
new_name=default.get('name', 'Copy of %s'%previous_name)
if 'name' in default:
tmpIds = self.search([('name', 'like', new_name)])
if len(tmpIds) > 0:
new_name = '%s (%s)' % (new_name, len(tmpIds) + 1)
default.update({
'name': new_name,
'engineering_code': new_name,
'engineering_revision': self._default_rev,
})
override=True
default.update({
'state': 'draft',
'engineering_writable': True,
'write_date': None,
'linkeddocuments': []
})
note={
'type': 'copy object',
'reason': "Previous name was '{old} new one is '{new}'.".format(old=previous_name,new=new_name),
}
self._insertlog(oid, note=note)
tmpID=super(plm_component, self.browse(oid).with_context({'internal_writing':True})).copy(default)
if tmpID!=None:
newID=tmpID
if override:
values={
'name': new_name,
'engineering_code': new_name,
'engineering_revision': self._default_rev,
'linkeddocuments': []
}
newID.write(values)
else:
tmpID=super(plm_component, self.browse(oid).with_context({'internal_writing':True})).copy(default)
if tmpID:
newID=tmpID
default.update({
'linkeddocuments': []
})
newID.with_context({'internal_writing':True}).write(default)
if newID and previous_name:
wf_message_post(self, getListIDs(newID), body='Copied starting from : {value}.'.format(value=previous_name))
return newID
    def unlink(self):
        """
        Delete the selected products, enforcing PLM rules.

        A record is deleted only when it is not used as a child in any BoM
        and it is either released (administrators only) or in draft.  When a
        deleted record has a previous revision, that older revision is moved
        back to 'released' via the 'reactivate' workflow action.  Returns the
        OR-accumulation of the individual unlink results (False when nothing
        was deleted).
        """
        ret=False
        ids=self._ids
        # NOTE(review): 'values' is never used in this method — presumably a
        # leftover from an earlier implementation; confirm before removing.
        values = {'state': 'released', }
        isAdmin = isAdministrator(self)
        # Refuse the whole operation if any target is used as a BoM child.
        if not self.env['mrp.bom'].IsChild(ids):
            for checkObj in self.browse(ids):
                # Deletion rules: released records only for administrators,
                # draft records for everyone; all other states are skipped.
                checkApply=False
                if isReleased(self, checkObj.id):
                    if isAdmin:
                        checkApply=True
                elif isDraft(self, checkObj.id):
                    checkApply=True
                if not checkApply:
                    continue # Apply unlink only if have respected rules.
                # Look for the immediately previous revision of this record;
                # document workflow moves are suppressed during this search.
                existingIDs = self.with_context({'no_move_documents':True}).search([
                    ('engineering_code', '=', checkObj.engineering_code),
                    ('engineering_revision', '=', checkObj.engineering_revision - 1)])
                if len(existingIDs) > 0:
                    obsoletedIds=[]
                    undermodifyIds=[]
                    for existID in getListIDs(existingIDs):
                        if isObsoleted(self, existID.id):
                            obsoletedIds.append(existID.id)
                        elif isUnderModify(self, existID.id):
                            undermodifyIds.append(existID.id)
                    # Reactivate the previous revision back to 'released'.
                    move_workflow (self, obsoletedIds, 'reactivate', 'released')
                    if undermodifyIds:
                        move_workflow (self, undermodifyIds, 'reactivate', 'released')
                note={
                    'type': 'unlink object',
                    'reason': "Removed entity from database.",
                }
                self._insertlog(checkObj.id, note=note)
                # Actual deletion: re-enable document workflow moves for the
                # super() call by flipping 'no_move_documents' back to False.
                item = super(plm_component, checkObj.with_context({'no_move_documents':False})).unlink()
                if item:
                    ret=ret | item
        return ret
# Overridden methods for this entity
|
normal
|
{
"blob_id": "06643bf4b1bded757078b0974c21ddec814f5889",
"index": 1762,
"step-1": "<mask token>\n\n\nclass plm_component(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _insertlog(self, ids, changes={}, note={}):\n ret = False\n op_type, op_note = ['unknown', '']\n for objID in self.browse(getListIDs(ids)):\n if note:\n op_type = '{type}'.format(type=note['type'])\n op_note = '{reason}'.format(reason=note['reason'])\n elif changes:\n op_type = 'change value'\n op_note = self.env['plm.logging'].getchanges(objID, changes)\n if op_note:\n values = {'name': objID.name, 'revision': '{major}'.format(\n major=objID.engineering_revision), 'type': self._name,\n 'op_type': op_type, 'op_note': op_note, 'op_date':\n datetime.now(), 'userid': self._uid}\n objectItem = self.env['plm.logging'].create(values)\n if objectItem:\n ret = True\n return ret\n <mask token>\n\n def _getChildrenBom(self, component, level=0, currlevel=0):\n \"\"\"\n Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)\n \"\"\"\n result = []\n bufferdata = []\n if level == 0 and currlevel > 1:\n return bufferdata\n for bomid in component.product_tmpl_id.bom_ids:\n for bomline in bomid.bom_line_ids:\n children = self._getChildrenBom(bomline.product_id, level, \n currlevel + 1)\n bufferdata.extend(children)\n bufferdata.append(bomline.product_id.id)\n result.extend(bufferdata)\n return getCleanList(result)\n <mask token>\n\n def getUserName(self):\n \"\"\"\n Gets the user name\n \"\"\"\n userType = self.env['res.users']\n uiUser = userType.browse(self._uid)\n return uiUser.name\n <mask token>\n <mask token>\n <mask token>\n\n @api.model\n def CleanStructure(self, request=[], default=None):\n \"\"\"\n Cleans relations having sourceID (in mrp.bom.line)\n \"\"\"\n ret = False\n type = 'ebom'\n bomLType = self.env['mrp.bom.line']\n bomType = self.env['mrp.bom']\n docType = self.env['plm.document']\n bl_to_delete = bomLType\n for parentID, sourceID in request:\n if not parentID == 
None:\n if isWritable(self, parentID):\n for bom_id in bomType.search([('type', '=', type), (\n 'product_id', '=', parentID)]):\n if not sourceID == None:\n if docType.IsCheckedOutForMe(sourceID):\n for bomLine in bomLType.search([(\n 'source_id', '=', sourceID), ('bom_id',\n '=', bom_id.id)]):\n bl_to_delete |= bomLine\n bl_to_delete.unlink()\n if not bom_id.bom_line_ids:\n bom_id.unlink()\n ret = True\n return ret\n\n @api.model\n def Clone(self, ids=[], default=None):\n \"\"\"\n Creates a new copy of the component\n \"\"\"\n default = {}\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n note = {'type': 'clone object', 'reason':\n \"Creating new cloned entity starting from '{old}'.\".format(\n old=tmpObject.name)}\n self._insertlog(tmpObject.id, note=note)\n newID = self.copy(tmpObject.id, default)\n if newID:\n newEnt = self.browse(newID)\n exitValues = {'_id': newID, 'name': newEnt.name,\n 'engineering_code': newEnt.engineering_code,\n 'engineering_revision': newEnt.engineering_revision,\n 'engineering_writable': True, 'state': 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def CloneVirtual(self, ids=[], default=None):\n \"\"\"\n Creates a \"false\" new copy of the component.\n Really returns only new values avoiding creation of new object.\n \"\"\"\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n new_name = 'Copy of {name}'.format(name=tmpObject.name)\n exitValues = {'_id': False, 'name': new_name,\n 'engineering_code': new_name, 'description': '{desc}'.\n format(desc=tmpObject.description), 'engineering_revision':\n self._default_rev, 'engineering_writable': True, 'state':\n 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def GetUpdated(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n partData, attribNames = vals\n ids = self.GetLatestIds(partData)\n return 
packDictionary(self.read(getCleanList(ids), attribNames))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @api.model\n def NewRevision(self, ids=[], default=None):\n \"\"\"\n Creates a new revision of current product\n \"\"\"\n newID, newIndex = [False, 0]\n thisContext = {'internal_writing': True, 'new_revision': True}\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds([(tmpObject.engineering_code,\n tmpObject.engineering_revision, False)])\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note = {'type': 'revision process', 'reason':\n \"Creating new revision for '{old}'.\".format(old=\n oldObject.name)}\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {'engineering_writable': False, 'state':\n 'undermodify'}\n oldObject.with_context(thisContext).write(default)\n default = {'name': oldObject.name,\n 'engineering_revision': newIndex,\n 'engineering_writable': True, 'state': 'draft'}\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body=\n 'Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Created new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(\n newID, ['normal', 'spbom'])\n tmpID.with_context(thisContext).write({'name':\n oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Copied BoM to new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n break\n return newID, newIndex\n <mask token>\n <mask token>\n\n @api.model\n def QueryLast(self, request=([], []), default=None):\n \"\"\"\n Queries to return values based on 
columns selected.\n \"\"\"\n objId = False\n expData = []\n queryFilter, columns = request\n if len(columns) < 1:\n return expData\n if 'engineering_revision' in queryFilter:\n del queryFilter['engineering_revision']\n allIDs = self.search(queryFilter, order='engineering_revision')\n if len(allIDs) > 0:\n objId = allIDs[len(allIDs) - 1]\n if objId:\n tmpData = objId.export_data(columns)\n if 'datas' in tmpData:\n expData = tmpData['datas']\n return expData\n <mask token>\n <mask token>\n <mask token>\n\n def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses,\n release=False):\n \"\"\"\n Gets all ids related to current one as children\n \"\"\"\n stopFlag = False\n tobeReleasedIDs = getListIDs(ids)\n options = self.env['plm.config.settings'].GetOptions()\n children = []\n for oic in self.browse(ids):\n children = self.browse(self._getChildrenBom(oic, 1))\n for child in children:\n if (not child.state in excludeStatuses and not child.state in\n includeStatuses) and (release and not options.get(\n 'opt_obsoletedinbom', False)):\n logging.warning(\n \"Part (%r - %d) is in a status '%s' not allowed.\" %\n (child.engineering_code, child.engineering_revision,\n child.state))\n stopFlag = True\n continue\n if child.state in includeStatuses:\n if not child.id in tobeReleasedIDs:\n tobeReleasedIDs.append(child.id)\n return stopFlag, getCleanList(tobeReleasedIDs)\n <mask token>\n\n def _action_ondocuments(self, ids, action, status):\n \"\"\"\n Moves workflow on documents having the same state of component \n \"\"\"\n docIDs = []\n documentType = self.env['plm.document']\n check = self._context.get('no_move_documents', False)\n if not check:\n for oldObject in self.browse(ids):\n for document in oldObject.linkeddocuments:\n if document.id not in docIDs:\n if documentType.ischecked_in(document.id):\n docIDs.append(document.id)\n idMoves = move_workflow(documentType, docIDs, action, status)\n documentType.logging_workflow(idMoves, action, status)\n return docIDs\n 
<mask token>\n\n @api.model\n def ActionUpload(self, request=[], default=None):\n \"\"\"\n Action to be executed after automatic upload\n \"\"\"\n signal = 'upload'\n move_workflow(self, self._ids, signal)\n return False\n <mask token>\n <mask token>\n\n def action_confirm(self):\n \"\"\"\n Action to be executed for Confirmed state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'confirmed'\n action = 'confirm'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Confirmed'),\n 'action': action, 'docaction': 'confirm', 'excludeStatuses': [\n 'confirmed', 'transmitted', 'released', 'undermodify',\n 'obsoleted'], 'includeStatuses': ['draft'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_correct(self):\n \"\"\"\n Action to be executed for Draft state (signal \"correct\")\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'correct'\n default = {'state': status, 'engineering_writable': True}\n doc_default = {'state': status, 'writable': True}\n operationParams = {'status': status, 'statusName': _('Draft'),\n 'action': action, 'docaction': 'correct', 'excludeStatuses': [\n 'draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_release(self):\n options = 
self.env['plm.config.settings'].GetOptions()\n status = 'released'\n action = 'release'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n excludeStatuses = ['released', 'undermodify', 'obsoleted']\n includeStatuses = ['confirmed']\n operationParams = {'status': status, 'statusName': _('Released'),\n 'action': action, 'docaction': 'release', 'excludeStatuses':\n excludeStatuses, 'includeStatuses': includeStatuses, 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_release(self._ids, excludeStatuses,\n includeStatuses)\n <mask token>\n <mask token>\n\n def logging_workflow(self, ids, action, status):\n note = {'type': 'workflow movement', 'reason':\n \"Applying workflow action '{action}', moving to status '{status}.\"\n .format(action=action, status=status)}\n self._insertlog(ids, note=note)\n\n def _action_to_perform(self, ids, operationParams, default={}):\n \"\"\"\n Executes on cascade to children products the required workflow operations.\n \"\"\"\n full_ids = []\n status = operationParams['status']\n action = operationParams['action']\n docaction = operationParams['docaction']\n excludeStatuses = operationParams['excludeStatuses']\n includeStatuses = operationParams['includeStatuses']\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses)\n self._action_ondocuments(allIDs, docaction, status)\n if action:\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(allIDs).with_context({'internal_writing': True}\n ).write(default)\n if objId:\n wf_message_post(self, allIDs, body=\n 'Status moved to: {status}.'.format(status=status))\n return objId\n\n def _action_to_release(self, ids, excludeStatuses, includeStatuses):\n \"\"\"\n Action to be executed for Released state\n 
\"\"\"\n full_ids = []\n last_ids = []\n status = 'released'\n action = 'release'\n default = {'engineering_writable': False, 'state': status}\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses, release=True)\n if len(allIDs) < 1 or stopFlag:\n raise UserError(_(\n 'WorkFlow Error.\\n\\nOne or more parts cannot be released.'))\n allProdObjs = self.browse(allIDs)\n for oldObject in allProdObjs:\n objObsolete = self._getbyrevision(oldObject.engineering_code, \n oldObject.engineering_revision - 1)\n if objObsolete and objObsolete.id:\n last_ids.append(objObsolete.id)\n idMoves = move_workflow(self, last_ids, 'obsolete', 'obsoleted')\n self.logging_workflow(idMoves, 'obsolete', 'obsoleted')\n self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')\n self._action_ondocuments(allIDs, action, status)\n for currId in allProdObjs:\n if not currId.id in ids:\n full_ids.append(currId.id)\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(idMoves).with_context({'internal_writing': True}\n ).write(default)\n if objId and idMoves:\n wf_message_post(self, allIDs, body='Status moved to: {status}.'\n .format(status=status))\n return objId\n <mask token>\n\n def write(self, vals):\n ret = True\n if vals:\n if not isAdministrator(self):\n check = self._context.get('internal_writing', False)\n thisprocess = self._context.get('internal_process', False)\n if not check:\n for prodItem in self.browse(self._ids):\n if not isDraft(self, prodItem.id):\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' is in a status that does not allow you to make save action\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if not prodItem.engineering_writable:\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' cannot be written.\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = 
False\n break\n if ret:\n self._insertlog(self._ids, changes=vals)\n ret = super(plm_component, self).write(vals)\n return ret\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass plm_component(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _insertlog(self, ids, changes={}, note={}):\n ret = False\n op_type, op_note = ['unknown', '']\n for objID in self.browse(getListIDs(ids)):\n if note:\n op_type = '{type}'.format(type=note['type'])\n op_note = '{reason}'.format(reason=note['reason'])\n elif changes:\n op_type = 'change value'\n op_note = self.env['plm.logging'].getchanges(objID, changes)\n if op_note:\n values = {'name': objID.name, 'revision': '{major}'.format(\n major=objID.engineering_revision), 'type': self._name,\n 'op_type': op_type, 'op_note': op_note, 'op_date':\n datetime.now(), 'userid': self._uid}\n objectItem = self.env['plm.logging'].create(values)\n if objectItem:\n ret = True\n return ret\n\n def _getbyrevision(self, name, revision):\n return self.search([('engineering_code', '=', name), (\n 'engineering_revision', '=', revision)])\n\n def _getChildrenBom(self, component, level=0, currlevel=0):\n \"\"\"\n Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)\n \"\"\"\n result = []\n bufferdata = []\n if level == 0 and currlevel > 1:\n return bufferdata\n for bomid in component.product_tmpl_id.bom_ids:\n for bomline in bomid.bom_line_ids:\n children = self._getChildrenBom(bomline.product_id, level, \n currlevel + 1)\n bufferdata.extend(children)\n bufferdata.append(bomline.product_id.id)\n result.extend(bufferdata)\n return getCleanList(result)\n\n @api.model\n def RegMessage(self, request=[], default=None):\n \"\"\"\n Registers a message for requested component\n \"\"\"\n oid, message = request\n wf_message_post(self, [oid], body=message)\n return False\n\n def getUserName(self):\n \"\"\"\n Gets the user name\n \"\"\"\n userType = self.env['res.users']\n uiUser = userType.browse(self._uid)\n return uiUser.name\n <mask token>\n\n def getTemplateItem(self, oid):\n ret = False\n if oid:\n 
for prodItem in self.browse(getListIDs(oid)):\n ret = prodItem.product_tmpl_id\n break\n return ret\n <mask token>\n\n @api.model\n def CleanStructure(self, request=[], default=None):\n \"\"\"\n Cleans relations having sourceID (in mrp.bom.line)\n \"\"\"\n ret = False\n type = 'ebom'\n bomLType = self.env['mrp.bom.line']\n bomType = self.env['mrp.bom']\n docType = self.env['plm.document']\n bl_to_delete = bomLType\n for parentID, sourceID in request:\n if not parentID == None:\n if isWritable(self, parentID):\n for bom_id in bomType.search([('type', '=', type), (\n 'product_id', '=', parentID)]):\n if not sourceID == None:\n if docType.IsCheckedOutForMe(sourceID):\n for bomLine in bomLType.search([(\n 'source_id', '=', sourceID), ('bom_id',\n '=', bom_id.id)]):\n bl_to_delete |= bomLine\n bl_to_delete.unlink()\n if not bom_id.bom_line_ids:\n bom_id.unlink()\n ret = True\n return ret\n\n @api.model\n def Clone(self, ids=[], default=None):\n \"\"\"\n Creates a new copy of the component\n \"\"\"\n default = {}\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n note = {'type': 'clone object', 'reason':\n \"Creating new cloned entity starting from '{old}'.\".format(\n old=tmpObject.name)}\n self._insertlog(tmpObject.id, note=note)\n newID = self.copy(tmpObject.id, default)\n if newID:\n newEnt = self.browse(newID)\n exitValues = {'_id': newID, 'name': newEnt.name,\n 'engineering_code': newEnt.engineering_code,\n 'engineering_revision': newEnt.engineering_revision,\n 'engineering_writable': True, 'state': 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def CloneVirtual(self, ids=[], default=None):\n \"\"\"\n Creates a \"false\" new copy of the component.\n Really returns only new values avoiding creation of new object.\n \"\"\"\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n new_name = 'Copy of {name}'.format(name=tmpObject.name)\n exitValues = {'_id': False, 'name': new_name,\n 'engineering_code': new_name, 
'description': '{desc}'.\n format(desc=tmpObject.description), 'engineering_revision':\n self._default_rev, 'engineering_writable': True, 'state':\n 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def GetUpdated(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n partData, attribNames = vals\n ids = self.GetLatestIds(partData)\n return packDictionary(self.read(getCleanList(ids), attribNames))\n\n @api.model\n def GetStdPartName(self, vals=[], default=None):\n \"\"\"\n Gets new P/N reading from entity chosen (taking it from new index on sequence).\n \"\"\"\n ret = ''\n entID, objectName = vals\n if entID and objectName:\n userType = self.env[objectName] if objectName in self.env else None\n if not userType == None:\n for objID in userType.browse(getListIDs(entID)):\n ret = self.GetNewPNfromSeq(objID.sequence_id)\n break\n return ret\n\n @api.model\n def GetNewPNfromSeq(self, seqID=None, default=None):\n \"\"\"\n Gets new P/N from sequence (checks for P/N existence).\n \"\"\"\n ret = ''\n if seqID:\n count = 0\n while ret == '':\n chkname = self.env['ir.sequence'].browse(seqID.id)._next()\n count += 1\n criteria = [('name', '=', chkname)]\n partIds = self.search(criteria)\n if partIds == None or len(partIds) == 0:\n ret = chkname\n if count > 1000:\n logging.error(\n \"GetNewPNfromSeq : Unable to get a new P/N from sequence '{name}'.\"\n .format(name=seqID.name))\n break\n return ret\n <mask token>\n\n @api.model\n def GetId(self, request=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n idd = False\n partName, partRev, _ = request\n if isinstance(partRev, int):\n criteria = [('engineering_code', '=', partName), (\n 'engineering_revision', '=', partRev)]\n else:\n criteria = [('engineering_code', '=', partName)]\n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n idd 
= partIds[len(partIds) - 1].id\n return idd\n\n @api.model\n def IsSaveable(self, ids=[], default=None):\n \"\"\"\n Answers about capability to save requested product\n \"\"\"\n ret = True\n for tmpObject in self.browse(getListIDs(ids)):\n ret = ret and tmpObject._iswritable()\n return ret\n\n @api.model\n def IsRevisable(self, ids=[], default=None):\n \"\"\"\n Gets if a product is revisable or not.\n \"\"\"\n ret = False\n for tmpObject in self.browse(getListIDs(ids)):\n if isAnyReleased(self, tmpObject.id):\n ret = True\n break\n return ret\n\n @api.model\n def NewRevision(self, ids=[], default=None):\n \"\"\"\n Creates a new revision of current product\n \"\"\"\n newID, newIndex = [False, 0]\n thisContext = {'internal_writing': True, 'new_revision': True}\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds([(tmpObject.engineering_code,\n tmpObject.engineering_revision, False)])\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note = {'type': 'revision process', 'reason':\n \"Creating new revision for '{old}'.\".format(old=\n oldObject.name)}\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {'engineering_writable': False, 'state':\n 'undermodify'}\n oldObject.with_context(thisContext).write(default)\n default = {'name': oldObject.name,\n 'engineering_revision': newIndex,\n 'engineering_writable': True, 'state': 'draft'}\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body=\n 'Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Created new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(\n newID, ['normal', 'spbom'])\n 
tmpID.with_context(thisContext).write({'name':\n oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Copied BoM to new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n break\n return newID, newIndex\n <mask token>\n <mask token>\n\n @api.model\n def QueryLast(self, request=([], []), default=None):\n \"\"\"\n Queries to return values based on columns selected.\n \"\"\"\n objId = False\n expData = []\n queryFilter, columns = request\n if len(columns) < 1:\n return expData\n if 'engineering_revision' in queryFilter:\n del queryFilter['engineering_revision']\n allIDs = self.search(queryFilter, order='engineering_revision')\n if len(allIDs) > 0:\n objId = allIDs[len(allIDs) - 1]\n if objId:\n tmpData = objId.export_data(columns)\n if 'datas' in tmpData:\n expData = tmpData['datas']\n return expData\n\n def _create_normalBom(self, idd, processedIds=[]):\n \"\"\"\n Creates a new Normal Bom (recursive on all EBom children)\n \"\"\"\n default = {}\n if idd in processedIds:\n return False\n checkObj = self.browse(idd)\n if not checkObj:\n return False\n bomType = self.env['mrp.bom']\n objBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'normal'), ('active', '=', \n True)])\n idBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'ebom'), ('active', '=', True)])\n if not objBoms:\n if idBoms:\n default = {'product_tmpl_id': idBoms[0].product_tmpl_id.id,\n 'type': 'normal', 'active': True}\n if idBoms[0].product_id:\n default.update({'product_id': idBoms[0].product_id.id})\n processedIds.append(idd)\n newidBom = idBoms[0].with_context({'internal_writing': True}\n ).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}).write(\n default)\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^ set(\n ok_rows)):\n 
bom_line.unlink()\n for bom_line in ok_rows:\n bom_line.with_context({'internal_writing': True}\n ).write({'type': 'normal', 'source_id': False,\n 'product_qty': bom_line.product_qty})\n self._create_normalBom(bom_line.product_id.id,\n processedIds=processedIds)\n else:\n for bom_line in bomType.browse(objBoms[0].id).bom_line_ids:\n self._create_normalBom(bom_line.product_id.id, processedIds\n =processedIds)\n return False\n\n def _copy_productBom(self, idStart, idDest=None, bomTypes=['normal']):\n \"\"\"\n Creates a new 'bomType' BoM (arrested at first level BoM children).\n \"\"\"\n default = {}\n if not idDest:\n idDest = idStart\n checkObjDest = self.browse(idDest)\n if checkObjDest:\n objBomType = self.env['mrp.bom']\n for bomType in bomTypes:\n objBoms = objBomType.search([('product_id', '=', idDest), (\n 'type', '=', bomType), ('active', '=', True)])\n idBoms = objBomType.search([('product_id', '=', idStart), (\n 'type', '=', bomType), ('active', '=', True)])\n if not objBoms:\n for oldObj in idBoms:\n newidBom = oldObj.with_context({'internal_writing':\n True}).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}\n ).write({'name': checkObjDest.name,\n 'product_tmpl_id': checkObjDest.\n product_tmpl_id.id, 'type': bomType,\n 'active': True})\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^\n set(ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n bom_line.with_context({'internal_writing': \n True}).write({'type': bomType,\n 'source_id': False, 'name': bom_line.\n product_id.name, 'product_qty':\n bom_line.product_qty})\n return False\n <mask token>\n\n def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses,\n release=False):\n \"\"\"\n Gets all ids related to current one as children\n \"\"\"\n stopFlag = False\n tobeReleasedIDs = getListIDs(ids)\n options = self.env['plm.config.settings'].GetOptions()\n children = []\n for oic in self.browse(ids):\n 
children = self.browse(self._getChildrenBom(oic, 1))\n for child in children:\n if (not child.state in excludeStatuses and not child.state in\n includeStatuses) and (release and not options.get(\n 'opt_obsoletedinbom', False)):\n logging.warning(\n \"Part (%r - %d) is in a status '%s' not allowed.\" %\n (child.engineering_code, child.engineering_revision,\n child.state))\n stopFlag = True\n continue\n if child.state in includeStatuses:\n if not child.id in tobeReleasedIDs:\n tobeReleasedIDs.append(child.id)\n return stopFlag, getCleanList(tobeReleasedIDs)\n <mask token>\n\n def _action_ondocuments(self, ids, action, status):\n \"\"\"\n Moves workflow on documents having the same state of component \n \"\"\"\n docIDs = []\n documentType = self.env['plm.document']\n check = self._context.get('no_move_documents', False)\n if not check:\n for oldObject in self.browse(ids):\n for document in oldObject.linkeddocuments:\n if document.id not in docIDs:\n if documentType.ischecked_in(document.id):\n docIDs.append(document.id)\n idMoves = move_workflow(documentType, docIDs, action, status)\n documentType.logging_workflow(idMoves, action, status)\n return docIDs\n <mask token>\n\n @api.model\n def ActionUpload(self, request=[], default=None):\n \"\"\"\n Action to be executed after automatic upload\n \"\"\"\n signal = 'upload'\n move_workflow(self, self._ids, signal)\n return False\n\n def action_upload(self):\n \"\"\"\n Action to be executed for Uploaded state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'uploaded'\n action = 'upload'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Uploaded'),\n 'action': action, 'docaction': 'uploaddoc', 'excludeStatuses':\n ['uploaded', 'confirmed', 'transmitted', 'released',\n 'undermodify', 'obsoleted'], 'includeStatuses': ['draft'],\n 'default': default, 'doc_default': doc_default}\n if 
options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n <mask token>\n\n def action_confirm(self):\n \"\"\"\n Action to be executed for Confirmed state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'confirmed'\n action = 'confirm'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Confirmed'),\n 'action': action, 'docaction': 'confirm', 'excludeStatuses': [\n 'confirmed', 'transmitted', 'released', 'undermodify',\n 'obsoleted'], 'includeStatuses': ['draft'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_correct(self):\n \"\"\"\n Action to be executed for Draft state (signal \"correct\")\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'correct'\n default = {'state': status, 'engineering_writable': True}\n doc_default = {'state': status, 'writable': True}\n operationParams = {'status': status, 'statusName': _('Draft'),\n 'action': action, 'docaction': 'correct', 'excludeStatuses': [\n 'draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_release(self):\n options = self.env['plm.config.settings'].GetOptions()\n 
status = 'released'\n action = 'release'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n excludeStatuses = ['released', 'undermodify', 'obsoleted']\n includeStatuses = ['confirmed']\n operationParams = {'status': status, 'statusName': _('Released'),\n 'action': action, 'docaction': 'release', 'excludeStatuses':\n excludeStatuses, 'includeStatuses': includeStatuses, 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_release(self._ids, excludeStatuses,\n includeStatuses)\n\n def action_obsolete(self):\n \"\"\"\n Action to be executed for Obsoleted state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'obsoleted'\n action = 'obsolete'\n default = {'engineering_writable': False, 'state': status}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Obsoleted'),\n 'action': action, 'docaction': 'obsolete', 'excludeStatuses': [\n 'draft', 'confirmed', 'transmitted', 'obsoleted'],\n 'includeStatuses': ['undermodify', 'released'], 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)\n <mask token>\n\n def logging_workflow(self, ids, action, status):\n note = {'type': 'workflow movement', 'reason':\n \"Applying workflow action '{action}', moving to status '{status}.\"\n .format(action=action, status=status)}\n self._insertlog(ids, note=note)\n\n def _action_to_perform(self, ids, operationParams, default={}):\n \"\"\"\n Executes on cascade to children products the required workflow operations.\n \"\"\"\n full_ids = []\n status = operationParams['status']\n action = operationParams['action']\n docaction = 
operationParams['docaction']\n excludeStatuses = operationParams['excludeStatuses']\n includeStatuses = operationParams['includeStatuses']\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses)\n self._action_ondocuments(allIDs, docaction, status)\n if action:\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(allIDs).with_context({'internal_writing': True}\n ).write(default)\n if objId:\n wf_message_post(self, allIDs, body=\n 'Status moved to: {status}.'.format(status=status))\n return objId\n\n def _action_to_release(self, ids, excludeStatuses, includeStatuses):\n \"\"\"\n Action to be executed for Released state\n \"\"\"\n full_ids = []\n last_ids = []\n status = 'released'\n action = 'release'\n default = {'engineering_writable': False, 'state': status}\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses, release=True)\n if len(allIDs) < 1 or stopFlag:\n raise UserError(_(\n 'WorkFlow Error.\\n\\nOne or more parts cannot be released.'))\n allProdObjs = self.browse(allIDs)\n for oldObject in allProdObjs:\n objObsolete = self._getbyrevision(oldObject.engineering_code, \n oldObject.engineering_revision - 1)\n if objObsolete and objObsolete.id:\n last_ids.append(objObsolete.id)\n idMoves = move_workflow(self, last_ids, 'obsolete', 'obsoleted')\n self.logging_workflow(idMoves, 'obsolete', 'obsoleted')\n self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')\n self._action_ondocuments(allIDs, action, status)\n for currId in allProdObjs:\n if not currId.id in ids:\n full_ids.append(currId.id)\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(idMoves).with_context({'internal_writing': True}\n ).write(default)\n if objId and idMoves:\n wf_message_post(self, allIDs, body='Status moved to: {status}.'\n .format(status=status))\n return objId\n <mask 
token>\n\n def write(self, vals):\n ret = True\n if vals:\n if not isAdministrator(self):\n check = self._context.get('internal_writing', False)\n thisprocess = self._context.get('internal_process', False)\n if not check:\n for prodItem in self.browse(self._ids):\n if not isDraft(self, prodItem.id):\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' is in a status that does not allow you to make save action\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if not prodItem.engineering_writable:\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' cannot be written.\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if ret:\n self._insertlog(self._ids, changes=vals)\n ret = super(plm_component, self).write(vals)\n return ret\n\n def copy(self, default={}):\n newID = False\n override = False\n previous_name = False\n oid = self.id\n if not self._context.get('new_revision', False):\n previous_name = self.browse(oid).name\n new_name = default.get('name', 'Copy of %s' % previous_name)\n if 'name' in default:\n tmpIds = self.search([('name', 'like', new_name)])\n if len(tmpIds) > 0:\n new_name = '%s (%s)' % (new_name, len(tmpIds) + 1)\n default.update({'name': new_name, 'engineering_code':\n new_name, 'engineering_revision': self._default_rev})\n override = True\n default.update({'state': 'draft', 'engineering_writable': True,\n 'write_date': None, 'linkeddocuments': []})\n note = {'type': 'copy object', 'reason':\n \"Previous name was '{old} new one is '{new}'.\".format(old=\n previous_name, new=new_name)}\n self._insertlog(oid, note=note)\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': True})).copy(default)\n if tmpID != None:\n newID = tmpID\n if override:\n values = {'name': new_name, 'engineering_code':\n new_name, 'engineering_revision': self._default_rev,\n 'linkeddocuments': []}\n newID.write(values)\n 
else:\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': True})).copy(default)\n if tmpID:\n newID = tmpID\n default.update({'linkeddocuments': []})\n newID.with_context({'internal_writing': True}).write(default)\n if newID and previous_name:\n wf_message_post(self, getListIDs(newID), body=\n 'Copied starting from : {value}.'.format(value=previous_name))\n return newID\n\n def unlink(self):\n ret = False\n ids = self._ids\n values = {'state': 'released'}\n isAdmin = isAdministrator(self)\n if not self.env['mrp.bom'].IsChild(ids):\n for checkObj in self.browse(ids):\n checkApply = False\n if isReleased(self, checkObj.id):\n if isAdmin:\n checkApply = True\n elif isDraft(self, checkObj.id):\n checkApply = True\n if not checkApply:\n continue\n existingIDs = self.with_context({'no_move_documents': True}\n ).search([('engineering_code', '=', checkObj.\n engineering_code), ('engineering_revision', '=', \n checkObj.engineering_revision - 1)])\n if len(existingIDs) > 0:\n obsoletedIds = []\n undermodifyIds = []\n for existID in getListIDs(existingIDs):\n if isObsoleted(self, existID.id):\n obsoletedIds.append(existID.id)\n elif isUnderModify(self, existID.id):\n undermodifyIds.append(existID.id)\n move_workflow(self, obsoletedIds, 'reactivate', 'released')\n if undermodifyIds:\n move_workflow(self, undermodifyIds, 'reactivate',\n 'released')\n note = {'type': 'unlink object', 'reason':\n 'Removed entity from database.'}\n self._insertlog(checkObj.id, note=note)\n item = super(plm_component, checkObj.with_context({\n 'no_move_documents': False})).unlink()\n if item:\n ret = ret | item\n return ret\n",
"step-3": "<mask token>\n\n\nclass plm_component(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def _default_rev(self):\n field = self.env['product.template']._fields.get('engineering_revision'\n , None)\n default = field.default('product.template') if not field == None else 0\n return default\n\n def _insertlog(self, ids, changes={}, note={}):\n ret = False\n op_type, op_note = ['unknown', '']\n for objID in self.browse(getListIDs(ids)):\n if note:\n op_type = '{type}'.format(type=note['type'])\n op_note = '{reason}'.format(reason=note['reason'])\n elif changes:\n op_type = 'change value'\n op_note = self.env['plm.logging'].getchanges(objID, changes)\n if op_note:\n values = {'name': objID.name, 'revision': '{major}'.format(\n major=objID.engineering_revision), 'type': self._name,\n 'op_type': op_type, 'op_note': op_note, 'op_date':\n datetime.now(), 'userid': self._uid}\n objectItem = self.env['plm.logging'].create(values)\n if objectItem:\n ret = True\n return ret\n\n def _getbyrevision(self, name, revision):\n return self.search([('engineering_code', '=', name), (\n 'engineering_revision', '=', revision)])\n\n def _getChildrenBom(self, component, level=0, currlevel=0):\n \"\"\"\n Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)\n \"\"\"\n result = []\n bufferdata = []\n if level == 0 and currlevel > 1:\n return bufferdata\n for bomid in component.product_tmpl_id.bom_ids:\n for bomline in bomid.bom_line_ids:\n children = self._getChildrenBom(bomline.product_id, level, \n currlevel + 1)\n bufferdata.extend(children)\n bufferdata.append(bomline.product_id.id)\n result.extend(bufferdata)\n return getCleanList(result)\n\n @api.model\n def RegMessage(self, request=[], default=None):\n \"\"\"\n Registers a message for requested component\n \"\"\"\n oid, message = request\n wf_message_post(self, [oid], body=message)\n return False\n\n def getUserName(self):\n \"\"\"\n 
Gets the user name\n \"\"\"\n userType = self.env['res.users']\n uiUser = userType.browse(self._uid)\n return uiUser.name\n <mask token>\n\n def getTemplateItem(self, oid):\n ret = False\n if oid:\n for prodItem in self.browse(getListIDs(oid)):\n ret = prodItem.product_tmpl_id\n break\n return ret\n\n def on_change_name(self, oid, name=False, engineering_code=False):\n if name:\n results = self.search([('name', '=', name)])\n if len(results) > 0:\n raise UserError(_(\n \"\"\"Update Part Error.\n\nPart {} already exists.\nClose with OK to reuse, with Cancel to discharge.\"\"\"\n .format(name)))\n if not engineering_code:\n return {'value': {'engineering_code': name}}\n return {}\n\n @api.model\n def CleanStructure(self, request=[], default=None):\n \"\"\"\n Cleans relations having sourceID (in mrp.bom.line)\n \"\"\"\n ret = False\n type = 'ebom'\n bomLType = self.env['mrp.bom.line']\n bomType = self.env['mrp.bom']\n docType = self.env['plm.document']\n bl_to_delete = bomLType\n for parentID, sourceID in request:\n if not parentID == None:\n if isWritable(self, parentID):\n for bom_id in bomType.search([('type', '=', type), (\n 'product_id', '=', parentID)]):\n if not sourceID == None:\n if docType.IsCheckedOutForMe(sourceID):\n for bomLine in bomLType.search([(\n 'source_id', '=', sourceID), ('bom_id',\n '=', bom_id.id)]):\n bl_to_delete |= bomLine\n bl_to_delete.unlink()\n if not bom_id.bom_line_ids:\n bom_id.unlink()\n ret = True\n return ret\n\n @api.model\n def Clone(self, ids=[], default=None):\n \"\"\"\n Creates a new copy of the component\n \"\"\"\n default = {}\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n note = {'type': 'clone object', 'reason':\n \"Creating new cloned entity starting from '{old}'.\".format(\n old=tmpObject.name)}\n self._insertlog(tmpObject.id, note=note)\n newID = self.copy(tmpObject.id, default)\n if newID:\n newEnt = self.browse(newID)\n exitValues = {'_id': newID, 'name': newEnt.name,\n 'engineering_code': 
newEnt.engineering_code,\n 'engineering_revision': newEnt.engineering_revision,\n 'engineering_writable': True, 'state': 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def CloneVirtual(self, ids=[], default=None):\n \"\"\"\n Creates a \"false\" new copy of the component.\n Really returns only new values avoiding creation of new object.\n \"\"\"\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n new_name = 'Copy of {name}'.format(name=tmpObject.name)\n exitValues = {'_id': False, 'name': new_name,\n 'engineering_code': new_name, 'description': '{desc}'.\n format(desc=tmpObject.description), 'engineering_revision':\n self._default_rev, 'engineering_writable': True, 'state':\n 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def GetUpdated(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n partData, attribNames = vals\n ids = self.GetLatestIds(partData)\n return packDictionary(self.read(getCleanList(ids), attribNames))\n\n @api.model\n def GetStdPartName(self, vals=[], default=None):\n \"\"\"\n Gets new P/N reading from entity chosen (taking it from new index on sequence).\n \"\"\"\n ret = ''\n entID, objectName = vals\n if entID and objectName:\n userType = self.env[objectName] if objectName in self.env else None\n if not userType == None:\n for objID in userType.browse(getListIDs(entID)):\n ret = self.GetNewPNfromSeq(objID.sequence_id)\n break\n return ret\n\n @api.model\n def GetNewPNfromSeq(self, seqID=None, default=None):\n \"\"\"\n Gets new P/N from sequence (checks for P/N existence).\n \"\"\"\n ret = ''\n if seqID:\n count = 0\n while ret == '':\n chkname = self.env['ir.sequence'].browse(seqID.id)._next()\n count += 1\n criteria = [('name', '=', chkname)]\n partIds = self.search(criteria)\n if partIds == None or len(partIds) == 0:\n ret = chkname\n if count > 1000:\n logging.error(\n \"GetNewPNfromSeq : Unable to get a new 
P/N from sequence '{name}'.\"\n .format(name=seqID.name))\n break\n return ret\n <mask token>\n\n @api.model\n def GetId(self, request=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n idd = False\n partName, partRev, _ = request\n if isinstance(partRev, int):\n criteria = [('engineering_code', '=', partName), (\n 'engineering_revision', '=', partRev)]\n else:\n criteria = [('engineering_code', '=', partName)]\n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n idd = partIds[len(partIds) - 1].id\n return idd\n\n @api.model\n def IsSaveable(self, ids=[], default=None):\n \"\"\"\n Answers about capability to save requested product\n \"\"\"\n ret = True\n for tmpObject in self.browse(getListIDs(ids)):\n ret = ret and tmpObject._iswritable()\n return ret\n\n @api.model\n def IsRevisable(self, ids=[], default=None):\n \"\"\"\n Gets if a product is revisable or not.\n \"\"\"\n ret = False\n for tmpObject in self.browse(getListIDs(ids)):\n if isAnyReleased(self, tmpObject.id):\n ret = True\n break\n return ret\n\n @api.model\n def NewRevision(self, ids=[], default=None):\n \"\"\"\n Creates a new revision of current product\n \"\"\"\n newID, newIndex = [False, 0]\n thisContext = {'internal_writing': True, 'new_revision': True}\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds([(tmpObject.engineering_code,\n tmpObject.engineering_revision, False)])\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note = {'type': 'revision process', 'reason':\n \"Creating new revision for '{old}'.\".format(old=\n oldObject.name)}\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {'engineering_writable': False, 'state':\n 'undermodify'}\n oldObject.with_context(thisContext).write(default)\n default = {'name': oldObject.name,\n 'engineering_revision': newIndex,\n 
'engineering_writable': True, 'state': 'draft'}\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body=\n 'Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Created new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(\n newID, ['normal', 'spbom'])\n tmpID.with_context(thisContext).write({'name':\n oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Copied BoM to new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n break\n return newID, newIndex\n\n @api.model\n def CheckProductsToSave(self, request='', default=None):\n \"\"\"\n Checks if given products has to be saved. \n \"\"\"\n listedParts = []\n retValues = {}\n for part in unpackDictionary(request):\n part = getCleanBytesDictionary(part)\n hasSaved = True\n existingID = False\n order = None\n if not 'engineering_code' in part:\n continue\n if part['engineering_code'] in listedParts:\n continue\n if 'engineering_code' in part and 'engineering_revision' in part:\n criteria = [('engineering_code', '=', part[\n 'engineering_code']), ('engineering_revision', '=',\n part['engineering_revision'])]\n elif 'engineering_code' in part and not 'engineering_revision' in part:\n criteria = [('engineering_code', '=', part['engineering_code'])\n ]\n order = 'engineering_revision'\n existingIDs = self.search(criteria, order=order)\n if existingIDs:\n ids = sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n if existingID:\n hasSaved = False\n objPart = self.browse(existingID)\n part['engineering_revision'] = objPart.engineering_revision\n if '_lastupdate' in part and part['_lastupdate']:\n if getUpdTime(objPart) < datetime.strptime(part[\n 
'_lastupdate'], '%Y-%m-%d %H:%M:%S'):\n if objPart._iswritable():\n hasSaved = True\n retValues[part['engineering_code']] = {'componentID':\n existingID, 'hasSaved': hasSaved}\n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)\n <mask token>\n\n @api.model\n def QueryLast(self, request=([], []), default=None):\n \"\"\"\n Queries to return values based on columns selected.\n \"\"\"\n objId = False\n expData = []\n queryFilter, columns = request\n if len(columns) < 1:\n return expData\n if 'engineering_revision' in queryFilter:\n del queryFilter['engineering_revision']\n allIDs = self.search(queryFilter, order='engineering_revision')\n if len(allIDs) > 0:\n objId = allIDs[len(allIDs) - 1]\n if objId:\n tmpData = objId.export_data(columns)\n if 'datas' in tmpData:\n expData = tmpData['datas']\n return expData\n\n def _create_normalBom(self, idd, processedIds=[]):\n \"\"\"\n Creates a new Normal Bom (recursive on all EBom children)\n \"\"\"\n default = {}\n if idd in processedIds:\n return False\n checkObj = self.browse(idd)\n if not checkObj:\n return False\n bomType = self.env['mrp.bom']\n objBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'normal'), ('active', '=', \n True)])\n idBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'ebom'), ('active', '=', True)])\n if not objBoms:\n if idBoms:\n default = {'product_tmpl_id': idBoms[0].product_tmpl_id.id,\n 'type': 'normal', 'active': True}\n if idBoms[0].product_id:\n default.update({'product_id': idBoms[0].product_id.id})\n processedIds.append(idd)\n newidBom = idBoms[0].with_context({'internal_writing': True}\n ).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}).write(\n default)\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^ set(\n ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n 
bom_line.with_context({'internal_writing': True}\n ).write({'type': 'normal', 'source_id': False,\n 'product_qty': bom_line.product_qty})\n self._create_normalBom(bom_line.product_id.id,\n processedIds=processedIds)\n else:\n for bom_line in bomType.browse(objBoms[0].id).bom_line_ids:\n self._create_normalBom(bom_line.product_id.id, processedIds\n =processedIds)\n return False\n\n def _copy_productBom(self, idStart, idDest=None, bomTypes=['normal']):\n \"\"\"\n Creates a new 'bomType' BoM (arrested at first level BoM children).\n \"\"\"\n default = {}\n if not idDest:\n idDest = idStart\n checkObjDest = self.browse(idDest)\n if checkObjDest:\n objBomType = self.env['mrp.bom']\n for bomType in bomTypes:\n objBoms = objBomType.search([('product_id', '=', idDest), (\n 'type', '=', bomType), ('active', '=', True)])\n idBoms = objBomType.search([('product_id', '=', idStart), (\n 'type', '=', bomType), ('active', '=', True)])\n if not objBoms:\n for oldObj in idBoms:\n newidBom = oldObj.with_context({'internal_writing':\n True}).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}\n ).write({'name': checkObjDest.name,\n 'product_tmpl_id': checkObjDest.\n product_tmpl_id.id, 'type': bomType,\n 'active': True})\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^\n set(ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n bom_line.with_context({'internal_writing': \n True}).write({'type': bomType,\n 'source_id': False, 'name': bom_line.\n product_id.name, 'product_qty':\n bom_line.product_qty})\n return False\n\n def _summarizeBom(self, datarows):\n dic = {}\n for datarow in datarows:\n key = datarow.product_id.name\n if key in dic:\n dic[key].product_qty = float(dic[key].product_qty) + float(\n datarow.product_qty)\n else:\n dic[key] = datarow\n retd = dic.values()\n return retd\n\n def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses,\n release=False):\n \"\"\"\n Gets 
all ids related to current one as children\n \"\"\"\n stopFlag = False\n tobeReleasedIDs = getListIDs(ids)\n options = self.env['plm.config.settings'].GetOptions()\n children = []\n for oic in self.browse(ids):\n children = self.browse(self._getChildrenBom(oic, 1))\n for child in children:\n if (not child.state in excludeStatuses and not child.state in\n includeStatuses) and (release and not options.get(\n 'opt_obsoletedinbom', False)):\n logging.warning(\n \"Part (%r - %d) is in a status '%s' not allowed.\" %\n (child.engineering_code, child.engineering_revision,\n child.state))\n stopFlag = True\n continue\n if child.state in includeStatuses:\n if not child.id in tobeReleasedIDs:\n tobeReleasedIDs.append(child.id)\n return stopFlag, getCleanList(tobeReleasedIDs)\n <mask token>\n\n def _action_ondocuments(self, ids, action, status):\n \"\"\"\n Moves workflow on documents having the same state of component \n \"\"\"\n docIDs = []\n documentType = self.env['plm.document']\n check = self._context.get('no_move_documents', False)\n if not check:\n for oldObject in self.browse(ids):\n for document in oldObject.linkeddocuments:\n if document.id not in docIDs:\n if documentType.ischecked_in(document.id):\n docIDs.append(document.id)\n idMoves = move_workflow(documentType, docIDs, action, status)\n documentType.logging_workflow(idMoves, action, status)\n return docIDs\n <mask token>\n\n @api.model\n def ActionUpload(self, request=[], default=None):\n \"\"\"\n Action to be executed after automatic upload\n \"\"\"\n signal = 'upload'\n move_workflow(self, self._ids, signal)\n return False\n\n def action_upload(self):\n \"\"\"\n Action to be executed for Uploaded state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'uploaded'\n action = 'upload'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Uploaded'),\n 'action': action, 
'docaction': 'uploaddoc', 'excludeStatuses':\n ['uploaded', 'confirmed', 'transmitted', 'released',\n 'undermodify', 'obsoleted'], 'includeStatuses': ['draft'],\n 'default': default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n <mask token>\n\n def action_confirm(self):\n \"\"\"\n Action to be executed for Confirmed state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'confirmed'\n action = 'confirm'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Confirmed'),\n 'action': action, 'docaction': 'confirm', 'excludeStatuses': [\n 'confirmed', 'transmitted', 'released', 'undermodify',\n 'obsoleted'], 'includeStatuses': ['draft'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_correct(self):\n \"\"\"\n Action to be executed for Draft state (signal \"correct\")\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'correct'\n default = {'state': status, 'engineering_writable': True}\n doc_default = {'state': status, 'writable': True}\n operationParams = {'status': status, 'statusName': _('Draft'),\n 'action': action, 'docaction': 'correct', 'excludeStatuses': [\n 'draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n 
else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_release(self):\n options = self.env['plm.config.settings'].GetOptions()\n status = 'released'\n action = 'release'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n excludeStatuses = ['released', 'undermodify', 'obsoleted']\n includeStatuses = ['confirmed']\n operationParams = {'status': status, 'statusName': _('Released'),\n 'action': action, 'docaction': 'release', 'excludeStatuses':\n excludeStatuses, 'includeStatuses': includeStatuses, 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_release(self._ids, excludeStatuses,\n includeStatuses)\n\n def action_obsolete(self):\n \"\"\"\n Action to be executed for Obsoleted state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'obsoleted'\n action = 'obsolete'\n default = {'engineering_writable': False, 'state': status}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Obsoleted'),\n 'action': action, 'docaction': 'obsolete', 'excludeStatuses': [\n 'draft', 'confirmed', 'transmitted', 'obsoleted'],\n 'includeStatuses': ['undermodify', 'released'], 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)\n <mask token>\n\n def logging_workflow(self, ids, action, status):\n note = {'type': 'workflow movement', 'reason':\n \"Applying workflow action '{action}', moving to status '{status}.\"\n .format(action=action, status=status)}\n self._insertlog(ids, note=note)\n\n def _action_to_perform(self, ids, operationParams, 
default={}):\n \"\"\"\n Executes on cascade to children products the required workflow operations.\n \"\"\"\n full_ids = []\n status = operationParams['status']\n action = operationParams['action']\n docaction = operationParams['docaction']\n excludeStatuses = operationParams['excludeStatuses']\n includeStatuses = operationParams['includeStatuses']\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses)\n self._action_ondocuments(allIDs, docaction, status)\n if action:\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(allIDs).with_context({'internal_writing': True}\n ).write(default)\n if objId:\n wf_message_post(self, allIDs, body=\n 'Status moved to: {status}.'.format(status=status))\n return objId\n\n def _action_to_release(self, ids, excludeStatuses, includeStatuses):\n \"\"\"\n Action to be executed for Released state\n \"\"\"\n full_ids = []\n last_ids = []\n status = 'released'\n action = 'release'\n default = {'engineering_writable': False, 'state': status}\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses, release=True)\n if len(allIDs) < 1 or stopFlag:\n raise UserError(_(\n 'WorkFlow Error.\\n\\nOne or more parts cannot be released.'))\n allProdObjs = self.browse(allIDs)\n for oldObject in allProdObjs:\n objObsolete = self._getbyrevision(oldObject.engineering_code, \n oldObject.engineering_revision - 1)\n if objObsolete and objObsolete.id:\n last_ids.append(objObsolete.id)\n idMoves = move_workflow(self, last_ids, 'obsolete', 'obsoleted')\n self.logging_workflow(idMoves, 'obsolete', 'obsoleted')\n self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')\n self._action_ondocuments(allIDs, action, status)\n for currId in allProdObjs:\n if not currId.id in ids:\n full_ids.append(currId.id)\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = 
self.browse(idMoves).with_context({'internal_writing': True}\n ).write(default)\n if objId and idMoves:\n wf_message_post(self, allIDs, body='Status moved to: {status}.'\n .format(status=status))\n return objId\n <mask token>\n\n def write(self, vals):\n ret = True\n if vals:\n if not isAdministrator(self):\n check = self._context.get('internal_writing', False)\n thisprocess = self._context.get('internal_process', False)\n if not check:\n for prodItem in self.browse(self._ids):\n if not isDraft(self, prodItem.id):\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' is in a status that does not allow you to make save action\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if not prodItem.engineering_writable:\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' cannot be written.\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if ret:\n self._insertlog(self._ids, changes=vals)\n ret = super(plm_component, self).write(vals)\n return ret\n\n def copy(self, default={}):\n newID = False\n override = False\n previous_name = False\n oid = self.id\n if not self._context.get('new_revision', False):\n previous_name = self.browse(oid).name\n new_name = default.get('name', 'Copy of %s' % previous_name)\n if 'name' in default:\n tmpIds = self.search([('name', 'like', new_name)])\n if len(tmpIds) > 0:\n new_name = '%s (%s)' % (new_name, len(tmpIds) + 1)\n default.update({'name': new_name, 'engineering_code':\n new_name, 'engineering_revision': self._default_rev})\n override = True\n default.update({'state': 'draft', 'engineering_writable': True,\n 'write_date': None, 'linkeddocuments': []})\n note = {'type': 'copy object', 'reason':\n \"Previous name was '{old} new one is '{new}'.\".format(old=\n previous_name, new=new_name)}\n self._insertlog(oid, note=note)\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': 
True})).copy(default)\n if tmpID != None:\n newID = tmpID\n if override:\n values = {'name': new_name, 'engineering_code':\n new_name, 'engineering_revision': self._default_rev,\n 'linkeddocuments': []}\n newID.write(values)\n else:\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': True})).copy(default)\n if tmpID:\n newID = tmpID\n default.update({'linkeddocuments': []})\n newID.with_context({'internal_writing': True}).write(default)\n if newID and previous_name:\n wf_message_post(self, getListIDs(newID), body=\n 'Copied starting from : {value}.'.format(value=previous_name))\n return newID\n\n def unlink(self):\n ret = False\n ids = self._ids\n values = {'state': 'released'}\n isAdmin = isAdministrator(self)\n if not self.env['mrp.bom'].IsChild(ids):\n for checkObj in self.browse(ids):\n checkApply = False\n if isReleased(self, checkObj.id):\n if isAdmin:\n checkApply = True\n elif isDraft(self, checkObj.id):\n checkApply = True\n if not checkApply:\n continue\n existingIDs = self.with_context({'no_move_documents': True}\n ).search([('engineering_code', '=', checkObj.\n engineering_code), ('engineering_revision', '=', \n checkObj.engineering_revision - 1)])\n if len(existingIDs) > 0:\n obsoletedIds = []\n undermodifyIds = []\n for existID in getListIDs(existingIDs):\n if isObsoleted(self, existID.id):\n obsoletedIds.append(existID.id)\n elif isUnderModify(self, existID.id):\n undermodifyIds.append(existID.id)\n move_workflow(self, obsoletedIds, 'reactivate', 'released')\n if undermodifyIds:\n move_workflow(self, undermodifyIds, 'reactivate',\n 'released')\n note = {'type': 'unlink object', 'reason':\n 'Removed entity from database.'}\n self._insertlog(checkObj.id, note=note)\n item = super(plm_component, checkObj.with_context({\n 'no_move_documents': False})).unlink()\n if item:\n ret = ret | item\n return ret\n",
"step-4": "<mask token>\n\n\nclass plm_component(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def _default_rev(self):\n field = self.env['product.template']._fields.get('engineering_revision'\n , None)\n default = field.default('product.template') if not field == None else 0\n return default\n\n def _insertlog(self, ids, changes={}, note={}):\n ret = False\n op_type, op_note = ['unknown', '']\n for objID in self.browse(getListIDs(ids)):\n if note:\n op_type = '{type}'.format(type=note['type'])\n op_note = '{reason}'.format(reason=note['reason'])\n elif changes:\n op_type = 'change value'\n op_note = self.env['plm.logging'].getchanges(objID, changes)\n if op_note:\n values = {'name': objID.name, 'revision': '{major}'.format(\n major=objID.engineering_revision), 'type': self._name,\n 'op_type': op_type, 'op_note': op_note, 'op_date':\n datetime.now(), 'userid': self._uid}\n objectItem = self.env['plm.logging'].create(values)\n if objectItem:\n ret = True\n return ret\n\n def _getbyrevision(self, name, revision):\n return self.search([('engineering_code', '=', name), (\n 'engineering_revision', '=', revision)])\n\n def _getChildrenBom(self, component, level=0, currlevel=0):\n \"\"\"\n Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)\n \"\"\"\n result = []\n bufferdata = []\n if level == 0 and currlevel > 1:\n return bufferdata\n for bomid in component.product_tmpl_id.bom_ids:\n for bomline in bomid.bom_line_ids:\n children = self._getChildrenBom(bomline.product_id, level, \n currlevel + 1)\n bufferdata.extend(children)\n bufferdata.append(bomline.product_id.id)\n result.extend(bufferdata)\n return getCleanList(result)\n\n @api.model\n def RegMessage(self, request=[], default=None):\n \"\"\"\n Registers a message for requested component\n \"\"\"\n oid, message = request\n wf_message_post(self, [oid], body=message)\n return False\n\n def getUserName(self):\n \"\"\"\n 
Gets the user name\n \"\"\"\n userType = self.env['res.users']\n uiUser = userType.browse(self._uid)\n return uiUser.name\n\n def getFromTemplateID(self, oid):\n ret = False\n if oid:\n for prodItem in self.search([('product_tmpl_id', '=', oid)]):\n ret = prodItem\n break\n return ret\n\n def getTemplateItem(self, oid):\n ret = False\n if oid:\n for prodItem in self.browse(getListIDs(oid)):\n ret = prodItem.product_tmpl_id\n break\n return ret\n\n def on_change_name(self, oid, name=False, engineering_code=False):\n if name:\n results = self.search([('name', '=', name)])\n if len(results) > 0:\n raise UserError(_(\n \"\"\"Update Part Error.\n\nPart {} already exists.\nClose with OK to reuse, with Cancel to discharge.\"\"\"\n .format(name)))\n if not engineering_code:\n return {'value': {'engineering_code': name}}\n return {}\n\n @api.model\n def CleanStructure(self, request=[], default=None):\n \"\"\"\n Cleans relations having sourceID (in mrp.bom.line)\n \"\"\"\n ret = False\n type = 'ebom'\n bomLType = self.env['mrp.bom.line']\n bomType = self.env['mrp.bom']\n docType = self.env['plm.document']\n bl_to_delete = bomLType\n for parentID, sourceID in request:\n if not parentID == None:\n if isWritable(self, parentID):\n for bom_id in bomType.search([('type', '=', type), (\n 'product_id', '=', parentID)]):\n if not sourceID == None:\n if docType.IsCheckedOutForMe(sourceID):\n for bomLine in bomLType.search([(\n 'source_id', '=', sourceID), ('bom_id',\n '=', bom_id.id)]):\n bl_to_delete |= bomLine\n bl_to_delete.unlink()\n if not bom_id.bom_line_ids:\n bom_id.unlink()\n ret = True\n return ret\n\n @api.model\n def Clone(self, ids=[], default=None):\n \"\"\"\n Creates a new copy of the component\n \"\"\"\n default = {}\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n note = {'type': 'clone object', 'reason':\n \"Creating new cloned entity starting from '{old}'.\".format(\n old=tmpObject.name)}\n self._insertlog(tmpObject.id, note=note)\n newID = 
self.copy(tmpObject.id, default)\n if newID:\n newEnt = self.browse(newID)\n exitValues = {'_id': newID, 'name': newEnt.name,\n 'engineering_code': newEnt.engineering_code,\n 'engineering_revision': newEnt.engineering_revision,\n 'engineering_writable': True, 'state': 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def CloneVirtual(self, ids=[], default=None):\n \"\"\"\n Creates a \"false\" new copy of the component.\n Really returns only new values avoiding creation of new object.\n \"\"\"\n exitValues = {}\n for tmpObject in self.browse(getListIDs(ids)):\n new_name = 'Copy of {name}'.format(name=tmpObject.name)\n exitValues = {'_id': False, 'name': new_name,\n 'engineering_code': new_name, 'description': '{desc}'.\n format(desc=tmpObject.description), 'engineering_revision':\n self._default_rev, 'engineering_writable': True, 'state':\n 'draft'}\n break\n return packDictionary(exitValues)\n\n @api.model\n def GetUpdated(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n partData, attribNames = vals\n ids = self.GetLatestIds(partData)\n return packDictionary(self.read(getCleanList(ids), attribNames))\n\n @api.model\n def GetStdPartName(self, vals=[], default=None):\n \"\"\"\n Gets new P/N reading from entity chosen (taking it from new index on sequence).\n \"\"\"\n ret = ''\n entID, objectName = vals\n if entID and objectName:\n userType = self.env[objectName] if objectName in self.env else None\n if not userType == None:\n for objID in userType.browse(getListIDs(entID)):\n ret = self.GetNewPNfromSeq(objID.sequence_id)\n break\n return ret\n\n @api.model\n def GetNewPNfromSeq(self, seqID=None, default=None):\n \"\"\"\n Gets new P/N from sequence (checks for P/N existence).\n \"\"\"\n ret = ''\n if seqID:\n count = 0\n while ret == '':\n chkname = self.env['ir.sequence'].browse(seqID.id)._next()\n count += 1\n criteria = [('name', '=', chkname)]\n partIds = 
self.search(criteria)\n if partIds == None or len(partIds) == 0:\n ret = chkname\n if count > 1000:\n logging.error(\n \"GetNewPNfromSeq : Unable to get a new P/N from sequence '{name}'.\"\n .format(name=seqID.name))\n break\n return ret\n\n @api.model\n def GetLatestIds(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n ids = []\n for request in vals:\n partName, _, updateDate = request\n if updateDate:\n criteria = [('engineering_code', '=', partName), (\n 'write_date', '>', updateDate)]\n else:\n criteria = [('engineering_code', '=', partName)]\n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n ids.append(partIds[len(partIds) - 1].id)\n return getCleanList(ids)\n\n @api.model\n def GetId(self, request=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n idd = False\n partName, partRev, _ = request\n if isinstance(partRev, int):\n criteria = [('engineering_code', '=', partName), (\n 'engineering_revision', '=', partRev)]\n else:\n criteria = [('engineering_code', '=', partName)]\n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n idd = partIds[len(partIds) - 1].id\n return idd\n\n @api.model\n def IsSaveable(self, ids=[], default=None):\n \"\"\"\n Answers about capability to save requested product\n \"\"\"\n ret = True\n for tmpObject in self.browse(getListIDs(ids)):\n ret = ret and tmpObject._iswritable()\n return ret\n\n @api.model\n def IsRevisable(self, ids=[], default=None):\n \"\"\"\n Gets if a product is revisable or not.\n \"\"\"\n ret = False\n for tmpObject in self.browse(getListIDs(ids)):\n if isAnyReleased(self, tmpObject.id):\n ret = True\n break\n return ret\n\n @api.model\n def NewRevision(self, ids=[], default=None):\n \"\"\"\n Creates a new revision of current product\n \"\"\"\n newID, newIndex = [False, 0]\n thisContext = 
{'internal_writing': True, 'new_revision': True}\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds([(tmpObject.engineering_code,\n tmpObject.engineering_revision, False)])\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note = {'type': 'revision process', 'reason':\n \"Creating new revision for '{old}'.\".format(old=\n oldObject.name)}\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {'engineering_writable': False, 'state':\n 'undermodify'}\n oldObject.with_context(thisContext).write(default)\n default = {'name': oldObject.name,\n 'engineering_revision': newIndex,\n 'engineering_writable': True, 'state': 'draft'}\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body=\n 'Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Created new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(\n newID, ['normal', 'spbom'])\n tmpID.with_context(thisContext).write({'name':\n oldObject.name})\n note = {'type': 'revision process', 'reason':\n \"Copied BoM to new revision '{index}' for product '{name}'.\"\n .format(index=newIndex, name=oldObject.name)}\n self._insertlog(newID, note=note)\n break\n return newID, newIndex\n\n @api.model\n def CheckProductsToSave(self, request='', default=None):\n \"\"\"\n Checks if given products has to be saved. 
\n \"\"\"\n listedParts = []\n retValues = {}\n for part in unpackDictionary(request):\n part = getCleanBytesDictionary(part)\n hasSaved = True\n existingID = False\n order = None\n if not 'engineering_code' in part:\n continue\n if part['engineering_code'] in listedParts:\n continue\n if 'engineering_code' in part and 'engineering_revision' in part:\n criteria = [('engineering_code', '=', part[\n 'engineering_code']), ('engineering_revision', '=',\n part['engineering_revision'])]\n elif 'engineering_code' in part and not 'engineering_revision' in part:\n criteria = [('engineering_code', '=', part['engineering_code'])\n ]\n order = 'engineering_revision'\n existingIDs = self.search(criteria, order=order)\n if existingIDs:\n ids = sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n if existingID:\n hasSaved = False\n objPart = self.browse(existingID)\n part['engineering_revision'] = objPart.engineering_revision\n if '_lastupdate' in part and part['_lastupdate']:\n if getUpdTime(objPart) < datetime.strptime(part[\n '_lastupdate'], '%Y-%m-%d %H:%M:%S'):\n if objPart._iswritable():\n hasSaved = True\n retValues[part['engineering_code']] = {'componentID':\n existingID, 'hasSaved': hasSaved}\n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)\n\n @api.model\n def SaveOrUpdate(self, request=[], default=None):\n \"\"\"\n Saves or Updates Parts\n \"\"\"\n listedParts = []\n retValues = {}\n modelFields = self.env['plm.config.settings'].GetFieldsModel(self._name\n )\n for part in unpackDictionary(request):\n part = getCleanBytesDictionary(part)\n hasSaved = False\n existingID = False\n order = None\n if (not 'engineering_code' in part or not \n 'engineering_revision' in part):\n part['componentID'] = False\n part['hasSaved'] = hasSaved\n continue\n if not 'name' in part and ('engineering_code' in part and part[\n 'engineering_code']):\n part['name'] = part['engineering_code']\n if ('name' in part and not part['name']) and (\n 
'engineering_code' in part and part['engineering_code']):\n part['name'] = part['engineering_code']\n if part['engineering_code'] in listedParts:\n continue\n if not 'componentID' in part or not part['componentID']:\n if ('engineering_code' in part and 'engineering_revision' in\n part):\n criteria = [('engineering_code', '=', part[\n 'engineering_code']), ('engineering_revision', '=',\n part['engineering_revision'])]\n elif 'engineering_code' in part and not 'engineering_revision' in part:\n criteria = [('engineering_code', '=', part[\n 'engineering_code'])]\n order = 'engineering_revision'\n existingIDs = self.search(criteria, order=order)\n if existingIDs:\n ids = sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n else:\n existingID = part['componentID']\n lastupdate = datetime.strptime(str(part['_lastupdate']),\n '%Y-%m-%d %H:%M:%S'\n ) if '_lastupdate' in part else datetime.now()\n for fieldName in list(set(part.keys()).difference(set(modelFields))\n ):\n del part[fieldName]\n if not existingID:\n logging.debug('[SaveOrUpdate] Part {name} is creating.'.\n format(name=part['engineering_code']))\n objectItem = self.with_context({'internal_writing': True}\n ).create(part)\n if objectItem:\n existingID = objectItem.id\n hasSaved = True\n else:\n objPart = self.browse(existingID)\n if objPart:\n part['name'] = objPart.name\n part['engineering_revision'] = objPart.engineering_revision\n if getUpdTime(objPart) < lastupdate:\n if objPart._iswritable():\n logging.debug(\n '[SaveOrUpdate] Part {name}/{revi} is updating.'\n .format(name=part['engineering_code'], revi\n =part['engineering_revision']))\n hasSaved = True\n if not objPart.with_context({'internal_writing':\n False}).write(part):\n logging.error(\n '[SaveOrUpdate] Part {name}/{revi} cannot be updated.'\n .format(name=part['engineering_code'],\n revi=part['engineering_revision']))\n hasSaved = False\n else:\n logging.error(\n \"[SaveOrUpdate] Part {name}/{revi} doesn't exist anymore.\"\n 
.format(name=part['engineering_code'], revi=part[\n 'engineering_revision']))\n retValues[part['engineering_code']] = {'componentID':\n existingID, 'hasSaved': hasSaved}\n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)\n\n @api.model\n def QueryLast(self, request=([], []), default=None):\n \"\"\"\n Queries to return values based on columns selected.\n \"\"\"\n objId = False\n expData = []\n queryFilter, columns = request\n if len(columns) < 1:\n return expData\n if 'engineering_revision' in queryFilter:\n del queryFilter['engineering_revision']\n allIDs = self.search(queryFilter, order='engineering_revision')\n if len(allIDs) > 0:\n objId = allIDs[len(allIDs) - 1]\n if objId:\n tmpData = objId.export_data(columns)\n if 'datas' in tmpData:\n expData = tmpData['datas']\n return expData\n\n def _create_normalBom(self, idd, processedIds=[]):\n \"\"\"\n Creates a new Normal Bom (recursive on all EBom children)\n \"\"\"\n default = {}\n if idd in processedIds:\n return False\n checkObj = self.browse(idd)\n if not checkObj:\n return False\n bomType = self.env['mrp.bom']\n objBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'normal'), ('active', '=', \n True)])\n idBoms = bomType.search([('product_tmpl_id', '=', checkObj.\n product_tmpl_id.id), ('type', '=', 'ebom'), ('active', '=', True)])\n if not objBoms:\n if idBoms:\n default = {'product_tmpl_id': idBoms[0].product_tmpl_id.id,\n 'type': 'normal', 'active': True}\n if idBoms[0].product_id:\n default.update({'product_id': idBoms[0].product_id.id})\n processedIds.append(idd)\n newidBom = idBoms[0].with_context({'internal_writing': True}\n ).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}).write(\n default)\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^ set(\n ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n 
bom_line.with_context({'internal_writing': True}\n ).write({'type': 'normal', 'source_id': False,\n 'product_qty': bom_line.product_qty})\n self._create_normalBom(bom_line.product_id.id,\n processedIds=processedIds)\n else:\n for bom_line in bomType.browse(objBoms[0].id).bom_line_ids:\n self._create_normalBom(bom_line.product_id.id, processedIds\n =processedIds)\n return False\n\n def _copy_productBom(self, idStart, idDest=None, bomTypes=['normal']):\n \"\"\"\n Creates a new 'bomType' BoM (arrested at first level BoM children).\n \"\"\"\n default = {}\n if not idDest:\n idDest = idStart\n checkObjDest = self.browse(idDest)\n if checkObjDest:\n objBomType = self.env['mrp.bom']\n for bomType in bomTypes:\n objBoms = objBomType.search([('product_id', '=', idDest), (\n 'type', '=', bomType), ('active', '=', True)])\n idBoms = objBomType.search([('product_id', '=', idStart), (\n 'type', '=', bomType), ('active', '=', True)])\n if not objBoms:\n for oldObj in idBoms:\n newidBom = oldObj.with_context({'internal_writing':\n True}).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing': True}\n ).write({'name': checkObjDest.name,\n 'product_tmpl_id': checkObjDest.\n product_tmpl_id.id, 'type': bomType,\n 'active': True})\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^\n set(ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n bom_line.with_context({'internal_writing': \n True}).write({'type': bomType,\n 'source_id': False, 'name': bom_line.\n product_id.name, 'product_qty':\n bom_line.product_qty})\n return False\n\n def _summarizeBom(self, datarows):\n dic = {}\n for datarow in datarows:\n key = datarow.product_id.name\n if key in dic:\n dic[key].product_qty = float(dic[key].product_qty) + float(\n datarow.product_qty)\n else:\n dic[key] = datarow\n retd = dic.values()\n return retd\n\n def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses,\n release=False):\n \"\"\"\n Gets 
all ids related to current one as children\n \"\"\"\n stopFlag = False\n tobeReleasedIDs = getListIDs(ids)\n options = self.env['plm.config.settings'].GetOptions()\n children = []\n for oic in self.browse(ids):\n children = self.browse(self._getChildrenBom(oic, 1))\n for child in children:\n if (not child.state in excludeStatuses and not child.state in\n includeStatuses) and (release and not options.get(\n 'opt_obsoletedinbom', False)):\n logging.warning(\n \"Part (%r - %d) is in a status '%s' not allowed.\" %\n (child.engineering_code, child.engineering_revision,\n child.state))\n stopFlag = True\n continue\n if child.state in includeStatuses:\n if not child.id in tobeReleasedIDs:\n tobeReleasedIDs.append(child.id)\n return stopFlag, getCleanList(tobeReleasedIDs)\n\n def create_normalBom_WF(self, ids):\n \"\"\"\n Creates a new Normal Bom if doesn't exist (action callable from code)\n \"\"\"\n for idd in ids:\n processedIds = []\n self._create_normalBom(idd, processedIds=processedIds)\n wf_message_post(self, ids, body='Created Normal Bom.')\n return False\n\n def _action_ondocuments(self, ids, action, status):\n \"\"\"\n Moves workflow on documents having the same state of component \n \"\"\"\n docIDs = []\n documentType = self.env['plm.document']\n check = self._context.get('no_move_documents', False)\n if not check:\n for oldObject in self.browse(ids):\n for document in oldObject.linkeddocuments:\n if document.id not in docIDs:\n if documentType.ischecked_in(document.id):\n docIDs.append(document.id)\n idMoves = move_workflow(documentType, docIDs, action, status)\n documentType.logging_workflow(idMoves, action, status)\n return docIDs\n\n @api.model\n def _iswritable(self):\n if self:\n checkState = 'draft'\n if not self.engineering_writable:\n logging.warning(\n '_iswritable : Part (%r - %d) is not writable.' 
% (self\n .engineering_code, self.engineering_revision))\n return False\n if not self.state in checkState:\n logging.warning(\n '_iswritable : Part (%r - %d) is in status %r.' % (self\n .engineering_code, self.engineering_revision, self.state))\n return False\n if self.engineering_code == False:\n logging.warning(\n '_iswritable : Part (%r - %d) is without Engineering P/N.'\n % (self.name, self.engineering_revision))\n return False\n return True\n\n @api.model\n def ActionUpload(self, request=[], default=None):\n \"\"\"\n Action to be executed after automatic upload\n \"\"\"\n signal = 'upload'\n move_workflow(self, self._ids, signal)\n return False\n\n def action_upload(self):\n \"\"\"\n Action to be executed for Uploaded state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'uploaded'\n action = 'upload'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Uploaded'),\n 'action': action, 'docaction': 'uploaddoc', 'excludeStatuses':\n ['uploaded', 'confirmed', 'transmitted', 'released',\n 'undermodify', 'obsoleted'], 'includeStatuses': ['draft'],\n 'default': default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_draft(self):\n \"\"\"\n Action to be executed for Draft state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'draft'\n default = {'state': status, 'engineering_writable': True}\n doc_default = {'state': status, 'writable': True}\n operationParams = {'status': status, 'statusName': _('Draft'),\n 'action': action, 'docaction': 'draft', 'excludeStatuses': [\n 'draft', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': 
['confirmed', 'uploaded', 'transmitted'],\n 'default': default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_confirm(self):\n \"\"\"\n Action to be executed for Confirmed state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'confirmed'\n action = 'confirm'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Confirmed'),\n 'action': action, 'docaction': 'confirm', 'excludeStatuses': [\n 'confirmed', 'transmitted', 'released', 'undermodify',\n 'obsoleted'], 'includeStatuses': ['draft'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_correct(self):\n \"\"\"\n Action to be executed for Draft state (signal \"correct\")\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'correct'\n default = {'state': status, 'engineering_writable': True}\n doc_default = {'state': status, 'writable': True}\n operationParams = {'status': status, 'statusName': _('Draft'),\n 'action': action, 'docaction': 'correct', 'excludeStatuses': [\n 'draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed'], 'default': default,\n 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids = self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def 
action_release(self):\n options = self.env['plm.config.settings'].GetOptions()\n status = 'released'\n action = 'release'\n default = {'state': status, 'engineering_writable': False}\n doc_default = {'state': status, 'writable': False}\n excludeStatuses = ['released', 'undermodify', 'obsoleted']\n includeStatuses = ['confirmed']\n operationParams = {'status': status, 'statusName': _('Released'),\n 'action': action, 'docaction': 'release', 'excludeStatuses':\n excludeStatuses, 'includeStatuses': includeStatuses, 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_release(self._ids, excludeStatuses,\n includeStatuses)\n\n def action_obsolete(self):\n \"\"\"\n Action to be executed for Obsoleted state\n \"\"\"\n options = self.env['plm.config.settings'].GetOptions()\n status = 'obsoleted'\n action = 'obsolete'\n default = {'engineering_writable': False, 'state': status}\n doc_default = {'state': status, 'writable': False}\n operationParams = {'status': status, 'statusName': _('Obsoleted'),\n 'action': action, 'docaction': 'obsolete', 'excludeStatuses': [\n 'draft', 'confirmed', 'transmitted', 'obsoleted'],\n 'includeStatuses': ['undermodify', 'released'], 'default':\n default, 'doc_default': doc_default}\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)\n <mask token>\n\n def logging_workflow(self, ids, action, status):\n note = {'type': 'workflow movement', 'reason':\n \"Applying workflow action '{action}', moving to status '{status}.\"\n .format(action=action, status=status)}\n self._insertlog(ids, note=note)\n\n def _action_to_perform(self, ids, operationParams, default={}):\n \"\"\"\n Executes on cascade to children products the required workflow operations.\n \"\"\"\n full_ids = []\n status = 
operationParams['status']\n action = operationParams['action']\n docaction = operationParams['docaction']\n excludeStatuses = operationParams['excludeStatuses']\n includeStatuses = operationParams['includeStatuses']\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses)\n self._action_ondocuments(allIDs, docaction, status)\n if action:\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(allIDs).with_context({'internal_writing': True}\n ).write(default)\n if objId:\n wf_message_post(self, allIDs, body=\n 'Status moved to: {status}.'.format(status=status))\n return objId\n\n def _action_to_release(self, ids, excludeStatuses, includeStatuses):\n \"\"\"\n Action to be executed for Released state\n \"\"\"\n full_ids = []\n last_ids = []\n status = 'released'\n action = 'release'\n default = {'engineering_writable': False, 'state': status}\n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses,\n includeStatuses, release=True)\n if len(allIDs) < 1 or stopFlag:\n raise UserError(_(\n 'WorkFlow Error.\\n\\nOne or more parts cannot be released.'))\n allProdObjs = self.browse(allIDs)\n for oldObject in allProdObjs:\n objObsolete = self._getbyrevision(oldObject.engineering_code, \n oldObject.engineering_revision - 1)\n if objObsolete and objObsolete.id:\n last_ids.append(objObsolete.id)\n idMoves = move_workflow(self, last_ids, 'obsolete', 'obsoleted')\n self.logging_workflow(idMoves, 'obsolete', 'obsoleted')\n self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')\n self._action_ondocuments(allIDs, action, status)\n for currId in allProdObjs:\n if not currId.id in ids:\n full_ids.append(currId.id)\n idMoves = move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId = self.browse(idMoves).with_context({'internal_writing': True}\n ).write(default)\n if objId and idMoves:\n wf_message_post(self, allIDs, 
body='Status moved to: {status}.'\n .format(status=status))\n return objId\n <mask token>\n\n def write(self, vals):\n ret = True\n if vals:\n if not isAdministrator(self):\n check = self._context.get('internal_writing', False)\n thisprocess = self._context.get('internal_process', False)\n if not check:\n for prodItem in self.browse(self._ids):\n if not isDraft(self, prodItem.id):\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' is in a status that does not allow you to make save action\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if not prodItem.engineering_writable:\n if not thisprocess:\n logging.error(\n \"The entity '{name}-{rev}' cannot be written.\"\n .format(name=prodItem.name, rev=\n prodItem.engineering_revision))\n ret = False\n break\n if ret:\n self._insertlog(self._ids, changes=vals)\n ret = super(plm_component, self).write(vals)\n return ret\n\n def copy(self, default={}):\n newID = False\n override = False\n previous_name = False\n oid = self.id\n if not self._context.get('new_revision', False):\n previous_name = self.browse(oid).name\n new_name = default.get('name', 'Copy of %s' % previous_name)\n if 'name' in default:\n tmpIds = self.search([('name', 'like', new_name)])\n if len(tmpIds) > 0:\n new_name = '%s (%s)' % (new_name, len(tmpIds) + 1)\n default.update({'name': new_name, 'engineering_code':\n new_name, 'engineering_revision': self._default_rev})\n override = True\n default.update({'state': 'draft', 'engineering_writable': True,\n 'write_date': None, 'linkeddocuments': []})\n note = {'type': 'copy object', 'reason':\n \"Previous name was '{old} new one is '{new}'.\".format(old=\n previous_name, new=new_name)}\n self._insertlog(oid, note=note)\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': True})).copy(default)\n if tmpID != None:\n newID = tmpID\n if override:\n values = {'name': new_name, 'engineering_code':\n new_name, 
'engineering_revision': self._default_rev,\n 'linkeddocuments': []}\n newID.write(values)\n else:\n tmpID = super(plm_component, self.browse(oid).with_context({\n 'internal_writing': True})).copy(default)\n if tmpID:\n newID = tmpID\n default.update({'linkeddocuments': []})\n newID.with_context({'internal_writing': True}).write(default)\n if newID and previous_name:\n wf_message_post(self, getListIDs(newID), body=\n 'Copied starting from : {value}.'.format(value=previous_name))\n return newID\n\n def unlink(self):\n ret = False\n ids = self._ids\n values = {'state': 'released'}\n isAdmin = isAdministrator(self)\n if not self.env['mrp.bom'].IsChild(ids):\n for checkObj in self.browse(ids):\n checkApply = False\n if isReleased(self, checkObj.id):\n if isAdmin:\n checkApply = True\n elif isDraft(self, checkObj.id):\n checkApply = True\n if not checkApply:\n continue\n existingIDs = self.with_context({'no_move_documents': True}\n ).search([('engineering_code', '=', checkObj.\n engineering_code), ('engineering_revision', '=', \n checkObj.engineering_revision - 1)])\n if len(existingIDs) > 0:\n obsoletedIds = []\n undermodifyIds = []\n for existID in getListIDs(existingIDs):\n if isObsoleted(self, existID.id):\n obsoletedIds.append(existID.id)\n elif isUnderModify(self, existID.id):\n undermodifyIds.append(existID.id)\n move_workflow(self, obsoletedIds, 'reactivate', 'released')\n if undermodifyIds:\n move_workflow(self, undermodifyIds, 'reactivate',\n 'released')\n note = {'type': 'unlink object', 'reason':\n 'Removed entity from database.'}\n self._insertlog(checkObj.id, note=note)\n item = super(plm_component, checkObj.with_context({\n 'no_move_documents': False})).unlink()\n if item:\n ret = ret | item\n return ret\n",
"step-5": "# -*- encoding: utf-8 -*-\n##############################################################################\n#\n# ServerPLM, Open Source Product Lifcycle Management System \n# Copyright (C) 2020-2020 Didotech srl (<http://www.didotech.com>). All Rights Reserved\n# \n# Created on : 2018-03-01\n# Author : Fabio Colognesi\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n##############################################################################\n\nimport logging\nfrom datetime import datetime\n\nfrom odoo import models, fields, api, _, osv\nfrom odoo.exceptions import UserError\n\nfrom .common import getListIDs, getCleanList, packDictionary, unpackDictionary, getCleanBytesDictionary, \\\n move_workflow, wf_message_post, isVoid, isAdministrator, isWritable, isReleased, \\\n isObsoleted, isUnderModify, isAnyReleased, isDraft, getUpdTime\n\n\n# USED_STATES=[('draft','Draft'),('confirmed','Confirmed'),('released','Released'),('undermodify','UnderModify'),('obsoleted','Obsoleted')]\n# STATEFORRELEASE=['confirmed']\n# STATESRELEASABLE=['confirmed','transmitted','released','undermodify','obsoleted']\n\nclass plm_component(models.Model):\n _name = 'product.product'\n _inherit = 'product.product'\n\n create_date = fields.Datetime(_('Date Created'), readonly=True)\n write_date = fields.Datetime(_('Date Modified'), readonly=True)\n\n @property\n def _default_rev(self):\n field 
= self.env['product.template']._fields.get('engineering_revision', None)\n default = field.default('product.template') if not(field == None) else 0\n return default\n\n # Internal methods\n def _insertlog(self, ids, changes={}, note={}):\n ret=False\n op_type, op_note=[\"unknown\",\"\"]\n for objID in self.browse(getListIDs(ids)):\n if note:\n op_type=\"{type}\".format(type=note['type'])\n op_note=\"{reason}\".format(reason=note['reason'])\n elif changes:\n op_type='change value'\n op_note=self.env['plm.logging'].getchanges(objID, changes)\n if op_note:\n values={\n 'name': objID.name,\n 'revision': \"{major}\".format(major=objID.engineering_revision),\n 'type': self._name,\n 'op_type': op_type,\n 'op_note': op_note,\n 'op_date': datetime.now(),\n 'userid': self._uid,\n }\n objectItem=self.env['plm.logging'].create(values)\n if objectItem:\n ret=True\n return ret\n\n def _getbyrevision(self, name, revision):\n return self.search([('engineering_code', '=', name), ('engineering_revision', '=', revision)])\n\n# def _getExplodedBom(self, ids, level=0, currlevel=0):\n# \"\"\"\n# Returns a flat list of all children in a Bom ( level = 0 one level only, level = 1 all levels)\n# \"\"\"\n# result = []\n# \n# if level == 0 and currlevel > 1:\n# return result\n# components = self.browse(ids)\n# relType = self.env['mrp.bom']\n# for component in components:\n# for bomid in component.bom_ids:\n# children = relType.GetExplodedBom([bomid.id], level, currlevel)\n# result.extend(children)\n# return result\n\n def _getChildrenBom(self, component, level=0, currlevel=0):\n \"\"\"\n Returns a flat list of each child, listed once, in a Bom ( level = 0 one level only, level = 1 all levels)\n \"\"\"\n result = []\n bufferdata = []\n if level == 0 and currlevel > 1:\n return bufferdata\n for bomid in component.product_tmpl_id.bom_ids:\n for bomline in bomid.bom_line_ids:\n children=self._getChildrenBom(bomline.product_id, level, currlevel+1)\n bufferdata.extend(children)\n 
bufferdata.append(bomline.product_id.id)\n result.extend(bufferdata)\n return getCleanList(result)\n\n @api.model\n def RegMessage(self, request=[], default=None):\n \"\"\"\n Registers a message for requested component\n \"\"\"\n oid, message = request\n wf_message_post(self, [oid], body=message)\n return False\n\n def getUserName(self):\n \"\"\"\n Gets the user name\n \"\"\"\n userType = self.env['res.users']\n \n uiUser = userType.browse(self._uid)\n return uiUser.name\n\n def getFromTemplateID(self, oid):\n ret=False\n if oid:\n for prodItem in self.search([('product_tmpl_id', '=', oid)]):\n ret=prodItem\n break\n return ret\n\n def getTemplateItem(self, oid):\n ret=False\n if oid:\n \n for prodItem in self.browse(getListIDs(oid)):\n ret=prodItem.product_tmpl_id\n break\n return ret\n\n ## Customized Automations\n def on_change_name(self, oid, name=False, engineering_code=False):\n \n if name:\n results = self.search([('name', '=', name)])\n if len(results) > 0:\n raise UserError(_(\"Update Part Error.\\n\\nPart {} already exists.\\nClose with OK to reuse, with Cancel to discharge.\".format(name)))\n if not engineering_code:\n return {'value': {'engineering_code': name}}\n return {}\n\n ## External methods\n @api.model\n def CleanStructure(self, request=[], default=None):\n \"\"\"\n Cleans relations having sourceID (in mrp.bom.line)\n \"\"\"\n ret=False\n type = \"ebom\"\n bomLType = self.env['mrp.bom.line']\n bomType = self.env['mrp.bom']\n docType=self.env['plm.document']\n bl_to_delete = bomLType\n for parentID, sourceID in request:\n if not parentID==None:\n if isWritable(self, parentID):\n for bom_id in bomType.search([('type','=',type),('product_id','=',parentID)]):\n if not sourceID==None:\n if docType.IsCheckedOutForMe(sourceID):\n for bomLine in bomLType.search([('source_id','=',sourceID),('bom_id','=',bom_id.id)]):\n bl_to_delete |= bomLine\n bl_to_delete.unlink() # Cleans mrp.bom.lines\n if not bom_id.bom_line_ids:\n bom_id.unlink() # Cleans void 
mrp.bom\n ret = True\n return ret \n\n @api.model\n def Clone(self, ids=[], default=None):\n \"\"\"\n Creates a new copy of the component\n \"\"\"\n default = {}\n exitValues = {} \n \n for tmpObject in self.browse(getListIDs(ids)):\n note={\n 'type': 'clone object',\n 'reason': \"Creating new cloned entity starting from '{old}'.\".format(old=tmpObject.name),\n }\n self._insertlog(tmpObject.id, note=note)\n newID = self.copy(tmpObject.id, default)\n if newID:\n newEnt = self.browse(newID)\n exitValues = {\n '_id': newID,\n 'name': newEnt.name,\n 'engineering_code': newEnt.engineering_code,\n 'engineering_revision': newEnt.engineering_revision,\n 'engineering_writable': True,\n 'state': 'draft',\n }\n break\n return packDictionary(exitValues)\n\n @api.model\n def CloneVirtual(self, ids=[], default=None):\n \"\"\"\n Creates a \"false\" new copy of the component.\n Really returns only new values avoiding creation of new object.\n \"\"\"\n exitValues = {}\n \n for tmpObject in self.browse(getListIDs(ids)):\n new_name = \"Copy of {name}\".format(name=tmpObject.name)\n exitValues = {\n '_id': False,\n 'name': new_name,\n 'engineering_code': new_name,\n 'description': \"{desc}\".format(desc=tmpObject.description),\n 'engineering_revision': self._default_rev,\n 'engineering_writable': True,\n 'state': 'draft',\n }\n break\n return packDictionary(exitValues)\n\n @api.model\n def GetUpdated(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n partData, attribNames = vals\n \n ids = self.GetLatestIds(partData)\n return packDictionary(self.read(getCleanList(ids), attribNames))\n\n @api.model\n def GetStdPartName(self, vals=[], default=None):\n \"\"\"\n Gets new P/N reading from entity chosen (taking it from new index on sequence).\n \"\"\"\n ret=\"\"\n entID, objectName = vals\n if entID and objectName:\n \n userType=self.env[objectName] if (objectName in self.env) else None\n if not(userType==None):\n 
for objID in userType.browse(getListIDs(entID)):\n ret=self.GetNewPNfromSeq(objID.sequence_id)\n break\n return ret\n\n @api.model\n def GetNewPNfromSeq(self, seqID=None, default=None):\n \"\"\"\n Gets new P/N from sequence (checks for P/N existence).\n \"\"\"\n ret=\"\"\n if seqID:\n count=0\n while ret==\"\":\n chkname=self.env['ir.sequence'].browse(seqID.id)._next()\n count+=1\n criteria=[('name', '=', chkname)]\n partIds = self.search(criteria)\n if (partIds==None) or (len(partIds)==0):\n ret=chkname\n if count>1000:\n logging.error(\"GetNewPNfromSeq : Unable to get a new P/N from sequence '{name}'.\"\\\n .format(name=seqID.name))\n break\n return ret\n\n @api.model\n def GetLatestIds(self, vals=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n ids = []\n \n for request in vals:\n partName, _, updateDate = request\n if updateDate:\n criteria=[('engineering_code', '=', partName), ('write_date', '>', updateDate)]\n else:\n criteria=[('engineering_code', '=', partName)]\n \n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n ids.append(partIds[len(partIds) - 1].id)\n return getCleanList(ids)\n\n @api.model\n def GetId(self, request=[], default=None):\n \"\"\"\n Gets Last/Requested revision of given items (by name, revision, update time)\n \"\"\"\n idd = False\n \n partName, partRev, _ = request\n# partName, partRev, updateDate = request\n# if updateDate:\n# if partRev:\n# criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', partRev),\n# ('write_date', '>', updateDate)]\n# else:\n# criteria=[('engineering_code', '=', partName), ('write_date', '>', updateDate)]\n# else:\n# if partRev:\n# criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', partRev)]\n# else:\n# criteria=[('engineering_code', '=', partName)]\n if isinstance(partRev, int):\n criteria=[('engineering_code', '=', partName), ('engineering_revision', '=', 
partRev)]\n else:\n criteria=[('engineering_code', '=', partName)]\n\n partIds = self.search(criteria, order='engineering_revision')\n if len(partIds) > 0:\n idd=partIds[len(partIds) - 1].id\n return idd\n\n @api.model\n def IsSaveable(self, ids=[], default=None):\n \"\"\"\n Answers about capability to save requested product\n \"\"\"\n ret=True\n \n for tmpObject in self.browse(getListIDs(ids)):\n ret=ret and tmpObject._iswritable()\n return ret\n\n @api.model\n def IsRevisable(self, ids=[], default=None):\n \"\"\"\n Gets if a product is revisable or not.\n \"\"\"\n ret=False\n \n for tmpObject in self.browse(getListIDs(ids)):\n if isAnyReleased(self, tmpObject.id):\n ret=True\n break\n return ret\n\n \n @api.model\n def NewRevision(self, ids=[], default=None):\n \"\"\"\n Creates a new revision of current product\n \"\"\"\n newID, newIndex = [ False, 0 ]\n \n thisContext={ 'internal_writing':True, 'new_revision':True, }\n for tmpObject in self.browse(getListIDs(ids)):\n latestIDs = self.GetLatestIds( [(tmpObject.engineering_code, tmpObject.engineering_revision, False)] )\n for oldObject in self.browse(latestIDs):\n if isAnyReleased(self, oldObject.id):\n note={\n 'type': 'revision process',\n 'reason': \"Creating new revision for '{old}'.\".format(old=oldObject.name),\n }\n self._insertlog(oldObject.id, note=note)\n newIndex = int(oldObject.engineering_revision) + 1\n default = {\n 'engineering_writable': False,\n 'state': 'undermodify',\n }\n oldObject.with_context(thisContext).write(default)\n default={\n 'name': oldObject.name,\n 'engineering_revision': newIndex,\n 'engineering_writable': True,\n 'state': 'draft',\n }\n \n # Creates a new \"old revision\" object\n tmpID = oldObject.with_context(thisContext).copy(default)\n if tmpID:\n wf_message_post(self, [oldObject.id], body='Created : New Revision.')\n newID = tmpID.id\n tmpID.write({'name': oldObject.name, })\n note={\n 'type': 'revision process',\n 'reason': \"Created new revision '{index}' for product 
'{name}'.\".format(index=newIndex,name=oldObject.name),\n }\n self._insertlog(newID, note=note)\n oldObject.with_context(thisContext)._copy_productBom(newID, [\"normal\",\"spbom\"])\n tmpID.with_context(thisContext).write( {'name': oldObject.name, } )\n note={\n 'type': 'revision process',\n 'reason': \"Copied BoM to new revision '{index}' for product '{name}'.\".format(index=newIndex,name=oldObject.name),\n }\n self._insertlog(newID, note=note)\n break\n return (newID, newIndex)\n\n @api.model\n def CheckProductsToSave(self, request=\"\", default=None):\n \"\"\"\n Checks if given products has to be saved. \n \"\"\"\n listedParts = []\n retValues = {}\n \n for part in unpackDictionary(request):\n part=getCleanBytesDictionary(part)\n hasSaved = True\n existingID=False\n order = None\n if not('engineering_code' in part):\n continue\n if part['engineering_code'] in listedParts:\n continue\n\n if ('engineering_code' in part) and ('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code']),\n ('engineering_revision', '=', part['engineering_revision'])\n ]\n elif ('engineering_code' in part) and not('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code'])\n ]\n order='engineering_revision'\n existingIDs = self.search( criteria, order=order )\n if existingIDs:\n ids=sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n if existingID:\n hasSaved = False\n objPart = self.browse(existingID)\n part['engineering_revision']=objPart.engineering_revision\n if ('_lastupdate' in part) and part['_lastupdate']:\n if (getUpdTime(objPart) < datetime.strptime(part['_lastupdate'], '%Y-%m-%d %H:%M:%S')):\n if objPart._iswritable():\n hasSaved = True\n\n retValues[part['engineering_code']]={\n 'componentID':existingID,\n 'hasSaved':hasSaved} \n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)\n\n \n @api.model\n def SaveOrUpdate(self, request=[], default=None):\n 
\"\"\"\n Saves or Updates Parts\n \"\"\"\n listedParts = []\n retValues = {}\n modelFields=self.env['plm.config.settings'].GetFieldsModel(self._name)\n \n for part in unpackDictionary(request):\n part=getCleanBytesDictionary(part)\n hasSaved = False\n existingID=False\n order=None\n \n if not ('engineering_code' in part) or (not 'engineering_revision' in part):\n part['componentID'] = False\n part['hasSaved'] = hasSaved\n continue\n\n if not ('name' in part) and (('engineering_code' in part) and part['engineering_code']):\n part['name'] = part['engineering_code'] \n\n if (('name' in part) and not(part['name'])) and (('engineering_code' in part) and part['engineering_code']):\n part['name'] = part['engineering_code'] \n \n if part['engineering_code'] in listedParts:\n continue\n\n if not('componentID' in part) or not(part['componentID']):\n if ('engineering_code' in part) and ('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code']),\n ('engineering_revision', '=', part['engineering_revision'])\n ]\n elif ('engineering_code' in part) and not('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code']) \n ]\n order = 'engineering_revision'\n existingIDs = self.search( criteria, order=order)\n if existingIDs:\n ids=sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n else:\n existingID=part['componentID']\n \n lastupdate=datetime.strptime(str(part['_lastupdate']),'%Y-%m-%d %H:%M:%S') if ('_lastupdate' in part) else datetime.now()\n for fieldName in list(set(part.keys()).difference(set(modelFields))):\n del (part[fieldName])\n if not existingID:\n logging.debug(\"[SaveOrUpdate] Part {name} is creating.\".format(name=part['engineering_code']))\n objectItem=self.with_context({'internal_writing':True}).create(part)\n if objectItem:\n existingID=objectItem.id\n hasSaved = True\n else:\n objPart = self.browse(existingID)\n if objPart:\n part['name'] = objPart.name\n 
part['engineering_revision']=objPart.engineering_revision\n if (getUpdTime(objPart) < lastupdate):\n if objPart._iswritable():\n logging.debug(\"[SaveOrUpdate] Part {name}/{revi} is updating.\".format(name=part['engineering_code'],revi=part['engineering_revision']))\n hasSaved = True\n if not objPart.with_context({'internal_writing':False}).write(part):\n logging.error(\"[SaveOrUpdate] Part {name}/{revi} cannot be updated.\".format(name=part['engineering_code'],revi=part['engineering_revision']))\n hasSaved = False\n else:\n logging.error(\"[SaveOrUpdate] Part {name}/{revi} doesn't exist anymore.\".format(name=part['engineering_code'],revi=part['engineering_revision']))\n\n retValues[part['engineering_code']]={\n 'componentID':existingID,\n 'hasSaved':hasSaved} \n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)\n\n @api.model\n def QueryLast(self, request=([], []), default=None):\n \"\"\"\n Queries to return values based on columns selected.\n \"\"\"\n objId = False\n expData = []\n \n queryFilter, columns = request\n if len(columns) < 1:\n return expData\n if 'engineering_revision' in queryFilter:\n del queryFilter['engineering_revision']\n allIDs = self.search(queryFilter, order='engineering_revision')\n if len(allIDs) > 0:\n objId = allIDs[len(allIDs) - 1]\n if objId:\n tmpData = objId.export_data(columns)\n if 'datas' in tmpData:\n expData = tmpData['datas']\n return expData\n\n ## Menu action Methods\n def _create_normalBom(self, idd, processedIds=[]):\n \"\"\"\n Creates a new Normal Bom (recursive on all EBom children)\n \"\"\"\n default = {}\n \n if idd in processedIds:\n return False\n checkObj=self.browse(idd)\n if not checkObj:\n return False\n bomType = self.env['mrp.bom']\n objBoms = bomType.search([('product_tmpl_id', '=', checkObj.product_tmpl_id.id), ('type', '=', 'normal'), ('active', '=', True)])\n idBoms = bomType.search([('product_tmpl_id', '=', checkObj.product_tmpl_id.id), ('type', '=', 'ebom'), ('active', '=', 
True)])\n\n if not objBoms:\n if idBoms:\n default={'product_tmpl_id': idBoms[0].product_tmpl_id.id,\n 'type': 'normal', 'active': True, }\n if idBoms[0].product_id:\n default.update({'product_id': idBoms[0].product_id.id})\n processedIds.append(idd)\n newidBom = idBoms[0].with_context({'internal_writing':True}).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing':True}).write(default)\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^ set(ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n bom_line.with_context({'internal_writing':True}).write(\n { 'type': 'normal', 'source_id': False, \n 'product_qty': bom_line.product_qty, } )\n self._create_normalBom(bom_line.product_id.id, processedIds=processedIds)\n else:\n for bom_line in bomType.browse(objBoms[0].id).bom_line_ids:\n self._create_normalBom(bom_line.product_id.id, processedIds=processedIds)\n return False\n\n def _copy_productBom(self, idStart, idDest=None, bomTypes=[\"normal\"]):\n \"\"\"\n Creates a new 'bomType' BoM (arrested at first level BoM children).\n \"\"\"\n default = {}\n if not idDest:\n idDest=idStart\n \n checkObjDest = self.browse(idDest)\n if checkObjDest:\n objBomType = self.env['mrp.bom']\n for bomType in bomTypes:\n objBoms = objBomType.search([('product_id', '=', idDest), ('type', '=', bomType), ('active', '=', True)])\n idBoms = objBomType.search([('product_id', '=', idStart), ('type', '=', bomType), ('active', '=', True)])\n if not objBoms:\n for oldObj in idBoms:\n newidBom = oldObj.with_context({'internal_writing':True}).copy(default)\n if newidBom:\n newidBom.with_context({'internal_writing':True}).write( \n {'name': checkObjDest.name, \n 'product_tmpl_id': checkObjDest.product_tmpl_id.id, \n 'type': bomType, 'active': True, })\n ok_rows = self._summarizeBom(newidBom.bom_line_ids)\n for bom_line in list(set(newidBom.bom_line_ids) ^ set(ok_rows)):\n bom_line.unlink()\n for bom_line in ok_rows:\n 
bom_line.with_context({'internal_writing':True}).write(\n {'type': bomType, 'source_id': False, \n 'name': bom_line.product_id.name,\n 'product_qty': bom_line.product_qty, })\n return False\n\n def _summarizeBom(self, datarows):\n dic = {}\n for datarow in datarows:\n key = datarow.product_id.name\n if key in dic:\n dic[key].product_qty = float(dic[key].product_qty) + float(datarow.product_qty)\n else:\n dic[key] = datarow\n retd = dic.values()\n return retd\n\n ## Work Flow Internal Methods\n def _get_recursive_parts(self, ids, excludeStatuses, includeStatuses, release=False):\n \"\"\"\n Gets all ids related to current one as children\n \"\"\"\n stopFlag = False\n tobeReleasedIDs = getListIDs(ids)\n options=self.env['plm.config.settings'].GetOptions()\n children = []\n for oic in self.browse(ids):\n children = self.browse(self._getChildrenBom(oic, 1))\n for child in children:\n if ((not child.state in excludeStatuses) and (not child.state in includeStatuses)) \\\n and (release and not(options.get('opt_obsoletedinbom', False))):\n logging.warning(\"Part (%r - %d) is in a status '%s' not allowed.\"\n %(child.engineering_code, child.engineering_revision, child.state))\n stopFlag = True\n continue\n if child.state in includeStatuses:\n if not child.id in tobeReleasedIDs:\n tobeReleasedIDs.append(child.id)\n return (stopFlag, getCleanList(tobeReleasedIDs))\n\n def create_normalBom_WF(self, ids):\n \"\"\"\n Creates a new Normal Bom if doesn't exist (action callable from code)\n \"\"\"\n for idd in ids:\n processedIds = []\n self._create_normalBom(idd, processedIds=processedIds)\n wf_message_post(self, ids, body='Created Normal Bom.')\n return False\n\n def _action_ondocuments(self, ids, action, status):\n \"\"\"\n Moves workflow on documents having the same state of component \n \"\"\"\n docIDs = []\n# documents=[]\n documentType = self.env['plm.document']\n check=self._context.get('no_move_documents', False)\n if not check:\n for oldObject in self.browse(ids):\n for 
document in oldObject.linkeddocuments:\n if (document.id not in docIDs):\n if documentType.ischecked_in(document.id):\n docIDs.append(document.id)\n idMoves=move_workflow(documentType, docIDs, action, status)\n documentType.logging_workflow(idMoves, action, status)\n return docIDs\n\n @api.model\n def _iswritable(self):\n if self:\n checkState = ('draft')\n if not self.engineering_writable:\n logging.warning(\n \"_iswritable : Part (%r - %d) is not writable.\" % (self.engineering_code, self.engineering_revision))\n return False\n if not self.state in checkState:\n logging.warning(\"_iswritable : Part (%r - %d) is in status %r.\" % (self.engineering_code, self.engineering_revision, self.state))\n return False\n if self.engineering_code == False:\n logging.warning(\n \"_iswritable : Part (%r - %d) is without Engineering P/N.\" % (self.name, self.engineering_revision))\n return False\n return True\n\n @api.model\n def ActionUpload(self, request=[], default=None):\n \"\"\"\n Action to be executed after automatic upload\n \"\"\"\n signal='upload'\n move_workflow(self, self._ids, signal)\n return False\n\n def action_upload(self):\n \"\"\"\n Action to be executed for Uploaded state\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status = 'uploaded'\n action = 'upload'\n default = {\n 'state': status,\n 'engineering_writable': False,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Uploaded'),\n 'action': action,\n 'docaction': 'uploaddoc',\n 'excludeStatuses': ['uploaded', 'confirmed', 'transmitted','released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['draft'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids=self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def 
action_draft(self):\n \"\"\"\n Action to be executed for Draft state\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status = 'draft'\n action = 'draft'\n default = {\n 'state': status,\n 'engineering_writable': True,\n }\n doc_default = {\n 'state': status,\n 'writable': True,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Draft'),\n 'action': action,\n 'docaction': 'draft',\n 'excludeStatuses': ['draft', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed', 'uploaded', 'transmitted'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids=self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_confirm(self):\n \"\"\"\n Action to be executed for Confirmed state\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status = 'confirmed'\n action = 'confirm'\n default = {\n 'state': status,\n 'engineering_writable': False,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Confirmed'),\n 'action': action,\n 'docaction': 'confirm',\n 'excludeStatuses': ['confirmed', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['draft'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids=self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_correct(self):\n \"\"\"\n Action to be executed for Draft state (signal \"correct\")\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status='draft'\n action = 'correct'\n default = {\n 'state': status,\n 'engineering_writable': True,\n }\n doc_default = {\n 'state': 
status,\n 'writable': True,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Draft'),\n 'action': action,\n 'docaction': 'correct',\n 'excludeStatuses': ['draft', 'transmitted', 'released', 'undermodify', 'obsoleted'],\n 'includeStatuses': ['confirmed'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n ids=self._ids\n self.logging_workflow(ids, action, status)\n return self._action_to_perform(ids, operationParams, default)\n\n def action_release(self):\n options=self.env['plm.config.settings'].GetOptions()\n status='released'\n action = 'release'\n default = {\n 'state': status,\n 'engineering_writable': False,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n excludeStatuses = ['released', 'undermodify', 'obsoleted']\n includeStatuses = ['confirmed']\n operationParams = {\n 'status': status,\n 'statusName': _('Released'),\n 'action': action,\n 'docaction': 'release',\n 'excludeStatuses': excludeStatuses,\n 'includeStatuses': includeStatuses,\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_release(self._ids, excludeStatuses, includeStatuses)\n\n def action_obsolete(self):\n \"\"\"\n Action to be executed for Obsoleted state\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status = 'obsoleted'\n action = 'obsolete'\n default={\n 'engineering_writable': False,\n 'state': status,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Obsoleted'),\n 'action': action,\n 'docaction': 'obsolete',\n 'excludeStatuses': ['draft', 'confirmed', 'transmitted', 'obsoleted'],\n 'includeStatuses': ['undermodify', 'released'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if 
options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)\n\n def action_reactivate(self):\n \"\"\"\n action to be executed for Released state (signal \"reactivate\")\n \"\"\"\n options=self.env['plm.config.settings'].GetOptions()\n status = 'released'\n action = 'reactivate'\n default={\n 'engineering_writable': False,\n 'state': status,\n }\n doc_default = {\n 'state': status,\n 'writable': False,\n }\n operationParams = {\n 'status': status,\n 'statusName': _('Released'),\n 'action': action,\n 'docaction': 'reactivate',\n 'excludeStatuses': ['draft', 'confirmed', 'transmitted', 'released'],\n 'includeStatuses': ['undermodify', 'obsoleted'],\n 'default': default,\n 'doc_default': doc_default,\n }\n if options.get('opt_showWFanalysis', False):\n return self.action_check_workflow(operationParams)\n else:\n return self._action_to_perform(self._ids, operationParams, default)\n\n def logging_workflow(self, ids, action, status):\n note={\n 'type': 'workflow movement',\n 'reason': \"Applying workflow action '{action}', moving to status '{status}.\".format(action=action, status=status),\n }\n self._insertlog(ids, note=note)\n\n def _action_to_perform(self, ids, operationParams , default={}):\n \"\"\"\n Executes on cascade to children products the required workflow operations.\n \"\"\"\n full_ids=[]\n status=operationParams['status'] \n action=operationParams['action']\n docaction=operationParams['docaction']\n excludeStatuses=operationParams['excludeStatuses']\n includeStatuses=operationParams['includeStatuses']\n \n stopFlag,allIDs=self._get_recursive_parts(ids, excludeStatuses, includeStatuses)\n self._action_ondocuments(allIDs,docaction, status)\n if action:\n idMoves=move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId=self.browse(allIDs).with_context({'internal_writing':True}).write(default)\n if objId:\n 
wf_message_post(self, allIDs, body='Status moved to: {status}.'.format(status=status))\n return objId\n\n def _action_to_release(self, ids, excludeStatuses, includeStatuses):\n \"\"\"\n Action to be executed for Released state\n \"\"\"\n full_ids = []\n last_ids=[]\n status='released'\n action='release'\n default={\n 'engineering_writable': False,\n 'state': status\n }\n \n stopFlag, allIDs = self._get_recursive_parts(ids, excludeStatuses, includeStatuses, release=True)\n if len(allIDs) < 1 or stopFlag:\n raise UserError(_(\"WorkFlow Error.\\n\\nOne or more parts cannot be released.\"))\n allProdObjs = self.browse(allIDs)\n for oldObject in allProdObjs:\n objObsolete=self._getbyrevision(oldObject.engineering_code, oldObject.engineering_revision - 1)\n if objObsolete and objObsolete.id:\n last_ids.append(objObsolete.id)\n \n idMoves=move_workflow(self, last_ids, 'obsolete', 'obsoleted')\n self.logging_workflow(idMoves, 'obsolete', 'obsoleted')\n self._action_ondocuments(last_ids, 'obsolete', 'obsoleted')\n\n self._action_ondocuments(allIDs, action, status)\n for currId in allProdObjs:\n if not (currId.id in ids):\n full_ids.append(currId.id)\n\n idMoves=move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId=self.browse(idMoves).with_context({'internal_writing':True}).write(default)\n if objId and idMoves:\n wf_message_post(self, allIDs, body='Status moved to: {status}.'.format(status=status))\n return objId\n\n #######################################################################################################################################33\n\n # Overridden methods for this entity\n\n @api.model\n def create(self, vals):\n ret=False\n if vals and vals.get('name', False):\n existingIDs = self.search([('name', '=', vals['name'])],\n order='engineering_revision')\n if (vals.get('engineering_code', False)==False) or (vals['engineering_code'] == ''):\n vals['engineering_code'] = vals['name']\n major = 
vals.get('engineering_revision', None)\n major= self._default_rev if isVoid(major) else major\n vals['engineering_revision'] = major\n\n if existingIDs:\n existingID = existingIDs[len(existingIDs) - 1]\n if ('engineering_revision' in vals):\n existObj = existingID\n if existObj:\n if (vals['engineering_revision'] > existObj.engineering_revision):\n vals['name'] = existObj.name\n else:\n return existingID\n else:\n return existingID\n\n try:\n objectItem=super(plm_component, self).create(vals)\n if objectItem:\n ret=objectItem # Returns the objectItem instead the id to be coherent\n values={\n 'name': objectItem.name,\n 'revision': objectItem.engineering_revision,\n 'type': self._name,\n 'op_type': 'creation',\n 'op_note': 'Create new entity on database',\n 'op_date': datetime.now(),\n 'userid': self._uid,\n }\n self.env['plm.logging'].create(values)\n except Exception as ex:\n raise Exception(\" (%r). It has tried to create with values : (%r).\" % (ex, vals))\n elif not(self.env.context.get('create_from_tmpl') == None):\n objectItem=super(plm_component, self).create(vals)\n if objectItem:\n ret=objectItem # Returns the objectItem instead the id to be coherent\n values={\n 'name': objectItem.name,\n 'revision': objectItem.engineering_revision,\n 'type': self._name,\n 'op_type': 'creation',\n 'op_note': 'Create new entity on database',\n 'op_date': datetime.now(),\n 'userid': self._uid,\n }\n self.env['plm.logging'].create(values)\n return ret\n\n def write(self, vals):\n ret=True\n if vals:\n if not isAdministrator(self):\n check=self._context.get('internal_writing', False)\n thisprocess=self._context.get('internal_process', False) # Avoids messages during internal processes.\n if not check:\n for prodItem in self.browse(self._ids):\n if not isDraft(self,prodItem.id):\n if not thisprocess:\n logging.error(\"The entity '{name}-{rev}' is in a status that does not allow you to make save action\".format(name=prodItem.name,rev=prodItem.engineering_revision))\n 
ret=False\n break\n if not prodItem.engineering_writable:\n if not thisprocess:\n logging.error(\"The entity '{name}-{rev}' cannot be written.\".format(name=prodItem.name,rev=prodItem.engineering_revision))\n ret=False\n break\n if ret:\n self._insertlog(self._ids, changes=vals)\n ret=super(plm_component, self).write(vals)\n return ret\n \n def copy(self, default={}):\n newID=False\n override=False\n previous_name=False\n oid=self.id\n \n if not self._context.get('new_revision', False):\n previous_name = self.browse(oid).name\n new_name=default.get('name', 'Copy of %s'%previous_name)\n if 'name' in default:\n tmpIds = self.search([('name', 'like', new_name)])\n if len(tmpIds) > 0:\n new_name = '%s (%s)' % (new_name, len(tmpIds) + 1)\n default.update({\n 'name': new_name,\n 'engineering_code': new_name,\n 'engineering_revision': self._default_rev,\n })\n override=True\n \n default.update({\n 'state': 'draft',\n 'engineering_writable': True,\n 'write_date': None,\n 'linkeddocuments': []\n })\n \n note={\n 'type': 'copy object',\n 'reason': \"Previous name was '{old} new one is '{new}'.\".format(old=previous_name,new=new_name),\n }\n self._insertlog(oid, note=note)\n\n tmpID=super(plm_component, self.browse(oid).with_context({'internal_writing':True})).copy(default)\n if tmpID!=None:\n newID=tmpID\n if override:\n values={\n 'name': new_name,\n 'engineering_code': new_name,\n 'engineering_revision': self._default_rev,\n 'linkeddocuments': []\n }\n newID.write(values)\n else:\n tmpID=super(plm_component, self.browse(oid).with_context({'internal_writing':True})).copy(default)\n if tmpID:\n newID=tmpID\n default.update({\n 'linkeddocuments': []\n })\n newID.with_context({'internal_writing':True}).write(default) \n if newID and previous_name:\n wf_message_post(self, getListIDs(newID), body='Copied starting from : {value}.'.format(value=previous_name))\n return newID\n\n def unlink(self):\n ret=False\n ids=self._ids\n \n values = {'state': 'released', }\n isAdmin = 
isAdministrator(self)\n\n if not self.env['mrp.bom'].IsChild(ids):\n for checkObj in self.browse(ids):\n checkApply=False\n if isReleased(self, checkObj.id):\n if isAdmin:\n checkApply=True\n elif isDraft(self, checkObj.id):\n checkApply=True\n\n if not checkApply:\n continue # Apply unlink only if have respected rules.\n \n existingIDs = self.with_context({'no_move_documents':True}).search([\n ('engineering_code', '=', checkObj.engineering_code),\n ('engineering_revision', '=', checkObj.engineering_revision - 1)])\n if len(existingIDs) > 0:\n obsoletedIds=[]\n undermodifyIds=[]\n for existID in getListIDs(existingIDs):\n if isObsoleted(self, existID.id):\n obsoletedIds.append(existID.id)\n elif isUnderModify(self, existID.id):\n undermodifyIds.append(existID.id)\n move_workflow (self, obsoletedIds, 'reactivate', 'released')\n if undermodifyIds:\n move_workflow (self, undermodifyIds, 'reactivate', 'released')\n\n note={\n 'type': 'unlink object',\n 'reason': \"Removed entity from database.\",\n }\n self._insertlog(checkObj.id, note=note)\n item = super(plm_component, checkObj.with_context({'no_move_documents':False})).unlink()\n if item:\n ret=ret | item\n return ret\n\n# Overridden methods for this entity\n",
"step-ids": [
20,
34,
38,
44,
49
]
}
|
[
20,
34,
38,
44,
49
] |
<|reserved_special_token_0|>
class SimpleControllerHandlerAdapter(HandlerAdapter):
def supports(self, handler: object) ->bool:
return isinstance(handler, Controller)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SimpleControllerHandlerAdapter(HandlerAdapter):
def supports(self, handler: object) ->bool:
return isinstance(handler, Controller)
def handle(self, request: HttpServletRequest, response:
HttpServletResponse, handler: object) ->ModelAndView:
handler: Controller = handler
return handler.handle_request(request, response)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SimpleControllerHandlerAdapter(HandlerAdapter):
def supports(self, handler: object) ->bool:
return isinstance(handler, Controller)
def handle(self, request: HttpServletRequest, response:
HttpServletResponse, handler: object) ->ModelAndView:
handler: Controller = handler
return handler.handle_request(request, response)
def get_last_modified(self, request: HttpServletRequest, handler: object
) ->int:
if isinstance(handler, LastModified):
handler: Controller = handler
return handler.get_last_modified(request)
return -1
<|reserved_special_token_1|>
from springframework.web.servlet import ModelAndView
from springframework.web.servlet.HandlerAdapter import HandlerAdapter
from springframework.web.servlet.mvc.Controller import Controller
from springframework.web.servlet.mvc.LastModified import LastModified
from springframework.utils.mock.inst import HttpServletResponse, HttpServletRequest
class SimpleControllerHandlerAdapter(HandlerAdapter):
def supports(self, handler: object) ->bool:
return isinstance(handler, Controller)
def handle(self, request: HttpServletRequest, response:
HttpServletResponse, handler: object) ->ModelAndView:
handler: Controller = handler
return handler.handle_request(request, response)
def get_last_modified(self, request: HttpServletRequest, handler: object
) ->int:
if isinstance(handler, LastModified):
handler: Controller = handler
return handler.get_last_modified(request)
return -1
<|reserved_special_token_1|>
from springframework.web.servlet import ModelAndView
from springframework.web.servlet.HandlerAdapter import HandlerAdapter
from springframework.web.servlet.mvc.Controller import Controller
from springframework.web.servlet.mvc.LastModified import LastModified
from springframework.utils.mock.inst import (
HttpServletResponse,
HttpServletRequest,
)
class SimpleControllerHandlerAdapter(HandlerAdapter):
def supports(self, handler: object) -> bool:
return isinstance(handler, Controller)
def handle(
self,
request: HttpServletRequest,
response: HttpServletResponse,
handler: object,
) -> ModelAndView:
handler: Controller = handler
return handler.handle_request(request, response)
def get_last_modified(
self, request: HttpServletRequest, handler: object
) -> int:
if isinstance(handler, LastModified):
handler: Controller = handler
return handler.get_last_modified(request)
return -1
|
flexible
|
{
"blob_id": "71e7a209f928672dbf59054b120eed6a77522dde",
"index": 6246,
"step-1": "<mask token>\n\n\nclass SimpleControllerHandlerAdapter(HandlerAdapter):\n\n def supports(self, handler: object) ->bool:\n return isinstance(handler, Controller)\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SimpleControllerHandlerAdapter(HandlerAdapter):\n\n def supports(self, handler: object) ->bool:\n return isinstance(handler, Controller)\n\n def handle(self, request: HttpServletRequest, response:\n HttpServletResponse, handler: object) ->ModelAndView:\n handler: Controller = handler\n return handler.handle_request(request, response)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SimpleControllerHandlerAdapter(HandlerAdapter):\n\n def supports(self, handler: object) ->bool:\n return isinstance(handler, Controller)\n\n def handle(self, request: HttpServletRequest, response:\n HttpServletResponse, handler: object) ->ModelAndView:\n handler: Controller = handler\n return handler.handle_request(request, response)\n\n def get_last_modified(self, request: HttpServletRequest, handler: object\n ) ->int:\n if isinstance(handler, LastModified):\n handler: Controller = handler\n return handler.get_last_modified(request)\n return -1\n",
"step-4": "from springframework.web.servlet import ModelAndView\nfrom springframework.web.servlet.HandlerAdapter import HandlerAdapter\nfrom springframework.web.servlet.mvc.Controller import Controller\nfrom springframework.web.servlet.mvc.LastModified import LastModified\nfrom springframework.utils.mock.inst import HttpServletResponse, HttpServletRequest\n\n\nclass SimpleControllerHandlerAdapter(HandlerAdapter):\n\n def supports(self, handler: object) ->bool:\n return isinstance(handler, Controller)\n\n def handle(self, request: HttpServletRequest, response:\n HttpServletResponse, handler: object) ->ModelAndView:\n handler: Controller = handler\n return handler.handle_request(request, response)\n\n def get_last_modified(self, request: HttpServletRequest, handler: object\n ) ->int:\n if isinstance(handler, LastModified):\n handler: Controller = handler\n return handler.get_last_modified(request)\n return -1\n",
"step-5": "from springframework.web.servlet import ModelAndView\nfrom springframework.web.servlet.HandlerAdapter import HandlerAdapter\nfrom springframework.web.servlet.mvc.Controller import Controller\nfrom springframework.web.servlet.mvc.LastModified import LastModified\nfrom springframework.utils.mock.inst import (\n HttpServletResponse,\n HttpServletRequest,\n)\n\n\nclass SimpleControllerHandlerAdapter(HandlerAdapter):\n def supports(self, handler: object) -> bool:\n return isinstance(handler, Controller)\n\n def handle(\n self,\n request: HttpServletRequest,\n response: HttpServletResponse,\n handler: object,\n ) -> ModelAndView:\n handler: Controller = handler\n return handler.handle_request(request, response)\n\n def get_last_modified(\n self, request: HttpServletRequest, handler: object\n ) -> int:\n if isinstance(handler, LastModified):\n handler: Controller = handler\n return handler.get_last_modified(request)\n return -1\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
___author__ = 'acmASCIS'
'''
by ahani at {9/24/2016}
'''
import time
class Freq(object):
def __init__(self, array):
self.__array = array
self.__frequency_dict = {}
self.__array_length = len(array)
self.__running_time = round(time.time() * 1000)
def get_original_array(self):
return self.__array
def get_array_length(self):
return self.__array_length
def get_frequency_array(self):
if self.__frequency_dict is None:
raise Exception("The frequency array is empty, check your function implementation!")
return self.__frequency_dict
def get_running_time(self):
return self.__running_time
def get_frequency(self):
"""
Implement your elements frequency algorithm
:return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!
"""
#TODO
self.__running_time = round(time.time() * 1000) - self.__running_time
return self.__frequency_dict
|
normal
|
{
"blob_id": "b569f0a0dda048d6337e1028a240caabf188a174",
"index": 9420,
"step-1": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n\n def get_frequency(self):\n \"\"\"\n Implement your elements frequency algorithm\n :return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!\n \"\"\"\n self.__running_time = round(time.time() * 1000) - self.__running_time\n return self.__frequency_dict\n",
"step-5": "___author__ = 'acmASCIS'\n\n'''\n by ahani at {9/24/2016}\n'''\n\nimport time\n\n\nclass Freq(object):\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\"The frequency array is empty, check your function implementation!\")\n\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n\n def get_frequency(self):\n \"\"\"\n Implement your elements frequency algorithm\n :return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!\n \"\"\"\n\n #TODO\n\n\n self.__running_time = round(time.time() * 1000) - self.__running_time\n\n return self.__frequency_dict\n",
"step-ids": [
3,
5,
6,
7,
10
]
}
|
[
3,
5,
6,
7,
10
] |
#!/usr/bin/env python
"""\
Simple g-code streaming script for grbl
"""
import serial
import time
import csv
import json
import RPi.GPIO as GPIO
from multiprocessing import Process, Queue
class motion():
def __init__(self):
# Open grbl serial port
#self.s = serial.Serial("/dev/ttyUSB0",baudrate=115200,xonxoff=True,timeout=1)
self.s = serial.Serial("/dev/ttyUSB0",
baudrate=115200,
timeout=0.1,
rtscts=True,
xonxoff=False)
self.rsp=''
self.posx=0.0
self.posy=0.0
self.positions_file = '/home/pi/Work/Wall2.0/system/positions.csv'
self.home_position_file = '/home/pi/Work/Wall2.0/system/home.csv'
self.mode = 'delay'
self.sensor_pin = 3
self.interval = 1
GPIO.setmode(GPIO.BOARD)
# GPIO.setup(self.sensor_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(self.sensor_pin, GPIO.IN)
# Wake up grbl
self.s.write("\r\n\r\n")
time.sleep(2) # Wait for grbl to initialize
self.s.flushInput() # Flush startup text in serial input
self.feedrate = 100
self.update_feedrate(0)
with open(self.positions_file,'w') as f:
f.write('posx,posy\n')
self.homex=None
self.homey=None
with open(self.home_position_file,'r') as f:
lines = csv.DictReader(f)
for l in lines:
print 'x_home: '+l['homex']
print 'y_home: '+l['homey']
self.homex = float(l['homex'])
self.homey = float(l['homey'])
# set origin offset
#self.send("g92 x0 y0")
self.set_relative_position()
self.pos_queue = Queue()
self.serial_proc = Process(target=self.get_response,
args=(self.pos_queue,))
self.serial_proc.start()
def update_feedrate(self, feedrate):
tmp = self.feedrate + feedrate
if(tmp >= 100) and (tmp <= 800):
self.feedrate = tmp
# feedrate speed
self.send("f"+str(self.feedrate))
def update_interval(self, interval):
if(self.interval >= 1) and (self.interval <= 10):
self.interval += interval
def send(self, cmd):
print 'Sending: ' + cmd
self.s.write(cmd + '\n') # Send g-code block to grbl
def move(self,sign_x, sign_y):
x = "x"+str(sign_x*10)
y = "y"+str(sign_y*10)
#self.send("%")
self.send(" ".join(["g1",x,y]))
def move_to_position(self,x,y):
x = "x"+str(x)
y = "y"+str(y)
self.send(" ".join(["g1",x,y]))
def stop(self):
self.send("!")
self.send("%")
if (self.homex!=None) and (self.homey!=None):
time.sleep(0.5)
self.set_absolute_position()
self.update_current_position()
self.move_to_position(self.homex,self.homey)
self.set_relative_position()
def disconnect(self):
# Close file and serial port
self.s.close()
def get_response(self, q):
while(1):
tmp = self.s.readline()
tmp = tmp.strip()
if tmp is not '':
try:
tmp = json.loads(tmp)
print tmp
if 'r' in tmp.keys():
if 'sr' in tmp['r'].keys():
tmp = tmp['r']
if 'sr' in tmp.keys():
if 'posx' in tmp['sr'].keys():
self.posx=tmp['sr']['posx']
if 'posy' in tmp['sr'].keys():
self.posy=tmp['sr']['posy']
q.put((self.posx, self.posy))
print 'pos1: '+str((self.posx, self.posy))
except ValueError:
print "get_response chocked"
self.stop()
time.sleep(1)
else:
time.sleep(.2)
def record_current_position(self):
self.send('{"sr":null}')
print "Saving"
# TODO: Check if serial_proc is running?
self.update_current_position()
with open(self.positions_file,'a') as f:
f.write(str(self.posx)+','+str(self.posy)+'\n')
def record_home_position(self):
self.send('{"sr":null}')
print "Saving home"
# TODO: Check if serial_proc is running?
self.update_current_position()
self.homex = self.posx
self.homey = self.posy
with open(self.home_position_file,'w') as f:
f.write('homex,homey\n')
f.write(str(self.posx)+','+str(self.posy)+'\n')
def delete_home_position(self):
print "Deleting home"
with open(self.home_position_file,'w') as f:
f.write('homex,homey\n')
self.homex = None
self.homey = None
def update_current_position(self):
while not self.pos_queue.empty():
self.posx, self.posy = self.pos_queue.get()
def getTrigger(self):
return GPIO.input(self.sensor_pin)
def changeMode(self):
if self.mode == 'delay':
self.mode = 'sensor'
elif self.mode == 'sensor':
self.mode = 'delay'
def set_absolute_position(self):
# absolute mode
self.send("g90")
def set_relative_position(self):
# relative mode
self.send("g91")
def playback_saved_positions(self):
self.set_absolute_position()
self.update_current_position()
with open(self.positions_file) as f:
lines = csv.DictReader(f)
for l in lines:
print 'x_dst: '+l['posx']+' - '+str(self.posx)
print 'y_dst: '+l['posy']+' - '+str(self.posy)
x_dst = float(l['posx'])#-self.posx
y_dst = float(l['posy'])#-self.posy
x = ' x'+str((x_dst))
y = ' y'+str((y_dst))
print(x,y)
self.send('g1'+x+y)
while(1):
self.update_current_position()
if (self.posx != float(l['posx'])) or \
(self.posy != float(l['posy'])):
time.sleep(.1)
else:
break
if(self.mode == 'delay'):
time.sleep(self.interval)
elif(self.mode == 'sensor'):
num_strikes = 0
while num_strikes < self.interval:
while(not self.getTrigger()):
time.sleep(.01)
num_strikes += 1
# relative mode
self.send("g91")
|
normal
|
{
"blob_id": "ac2d4372f8913ea9ae1066833cca09985e521f99",
"index": 383,
"step-1": "#!/usr/bin/env python\n\"\"\"\\\nSimple g-code streaming script for grbl\n\"\"\"\n \nimport serial\nimport time\nimport csv\nimport json\nimport RPi.GPIO as GPIO\nfrom multiprocessing import Process, Queue\nclass motion():\n def __init__(self):\n # Open grbl serial port\n #self.s = serial.Serial(\"/dev/ttyUSB0\",baudrate=115200,xonxoff=True,timeout=1)\n self.s = serial.Serial(\"/dev/ttyUSB0\",\n baudrate=115200,\n timeout=0.1,\n rtscts=True,\n xonxoff=False)\n self.rsp=''\n self.posx=0.0\n self.posy=0.0\n self.positions_file = '/home/pi/Work/Wall2.0/system/positions.csv'\n self.home_position_file = '/home/pi/Work/Wall2.0/system/home.csv'\n self.mode = 'delay'\n self.sensor_pin = 3\n self.interval = 1\n GPIO.setmode(GPIO.BOARD)\n# GPIO.setup(self.sensor_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)\n GPIO.setup(self.sensor_pin, GPIO.IN)\n\n # Wake up grbl\n self.s.write(\"\\r\\n\\r\\n\")\n time.sleep(2) # Wait for grbl to initialize\n self.s.flushInput() # Flush startup text in serial input \n\n self.feedrate = 100\n self.update_feedrate(0)\n\n with open(self.positions_file,'w') as f:\n f.write('posx,posy\\n')\n\n self.homex=None\n self.homey=None\n with open(self.home_position_file,'r') as f:\n lines = csv.DictReader(f)\n for l in lines:\n print 'x_home: '+l['homex']\n print 'y_home: '+l['homey']\n self.homex = float(l['homex'])\n self.homey = float(l['homey'])\n\n # set origin offset\n #self.send(\"g92 x0 y0\")\n\n self.set_relative_position()\n\n self.pos_queue = Queue()\n self.serial_proc = Process(target=self.get_response,\n args=(self.pos_queue,))\n\n self.serial_proc.start()\n\n def update_feedrate(self, feedrate):\n tmp = self.feedrate + feedrate\n if(tmp >= 100) and (tmp <= 800):\n self.feedrate = tmp\n # feedrate speed\n self.send(\"f\"+str(self.feedrate))\n\n def update_interval(self, interval):\n if(self.interval >= 1) and (self.interval <= 10):\n self.interval += interval\n \n def send(self, cmd): \n print 'Sending: ' + cmd\n self.s.write(cmd + 
'\\n') # Send g-code block to grbl\n\n def move(self,sign_x, sign_y):\n x = \"x\"+str(sign_x*10) \n y = \"y\"+str(sign_y*10) \n #self.send(\"%\")\n self.send(\" \".join([\"g1\",x,y]))\n\n def move_to_position(self,x,y):\n x = \"x\"+str(x) \n y = \"y\"+str(y) \n self.send(\" \".join([\"g1\",x,y]))\n\n def stop(self):\n self.send(\"!\")\n self.send(\"%\")\n if (self.homex!=None) and (self.homey!=None):\n time.sleep(0.5)\n self.set_absolute_position()\n self.update_current_position()\n self.move_to_position(self.homex,self.homey)\n self.set_relative_position()\n\n def disconnect(self):\n # Close file and serial port\n self.s.close()\n\n def get_response(self, q):\n while(1):\n tmp = self.s.readline()\n tmp = tmp.strip()\n if tmp is not '':\n try:\n tmp = json.loads(tmp)\n print tmp\n if 'r' in tmp.keys():\n if 'sr' in tmp['r'].keys():\n tmp = tmp['r']\n if 'sr' in tmp.keys():\n if 'posx' in tmp['sr'].keys():\n self.posx=tmp['sr']['posx']\n if 'posy' in tmp['sr'].keys():\n self.posy=tmp['sr']['posy']\n q.put((self.posx, self.posy))\n print 'pos1: '+str((self.posx, self.posy))\n except ValueError:\n print \"get_response chocked\"\n self.stop()\n time.sleep(1)\n else:\n time.sleep(.2)\n\n def record_current_position(self):\n self.send('{\"sr\":null}')\n print \"Saving\"\n # TODO: Check if serial_proc is running?\n self.update_current_position()\n with open(self.positions_file,'a') as f:\n f.write(str(self.posx)+','+str(self.posy)+'\\n')\n\n def record_home_position(self):\n self.send('{\"sr\":null}')\n print \"Saving home\"\n # TODO: Check if serial_proc is running?\n self.update_current_position()\n self.homex = self.posx\n self.homey = self.posy\n with open(self.home_position_file,'w') as f:\n f.write('homex,homey\\n')\n f.write(str(self.posx)+','+str(self.posy)+'\\n')\n\n def delete_home_position(self):\n print \"Deleting home\"\n with open(self.home_position_file,'w') as f:\n f.write('homex,homey\\n')\n self.homex = None\n self.homey = None\n\n def 
update_current_position(self):\n while not self.pos_queue.empty():\n self.posx, self.posy = self.pos_queue.get()\n\n def getTrigger(self):\n return GPIO.input(self.sensor_pin)\n\n def changeMode(self):\n if self.mode == 'delay':\n self.mode = 'sensor'\n elif self.mode == 'sensor':\n self.mode = 'delay'\n\n def set_absolute_position(self):\n # absolute mode \n self.send(\"g90\")\n\n def set_relative_position(self):\n # relative mode \n self.send(\"g91\")\n\n def playback_saved_positions(self):\n self.set_absolute_position()\n self.update_current_position()\n with open(self.positions_file) as f:\n lines = csv.DictReader(f)\n for l in lines:\n print 'x_dst: '+l['posx']+' - '+str(self.posx)\n print 'y_dst: '+l['posy']+' - '+str(self.posy)\n x_dst = float(l['posx'])#-self.posx\n y_dst = float(l['posy'])#-self.posy\n x = ' x'+str((x_dst))\n y = ' y'+str((y_dst))\n print(x,y)\n self.send('g1'+x+y)\n while(1):\n self.update_current_position()\n if (self.posx != float(l['posx'])) or \\\n (self.posy != float(l['posy'])):\n time.sleep(.1)\n else:\n break\n\n if(self.mode == 'delay'):\n time.sleep(self.interval)\n elif(self.mode == 'sensor'):\n num_strikes = 0\n while num_strikes < self.interval:\n while(not self.getTrigger()):\n time.sleep(.01)\n num_strikes += 1\n # relative mode \n self.send(\"g91\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
from math import *
import numpy as np
import matplotlib.pyplot as plt
import Input as para
data = np.loadtxt("eff-proton.dat")
#data = np.loadtxt("eff-electron.dat")
show_time = data[0]
show_eff = data[1]
#print show_turn, show_eff
#x_lower_limit = min(show_time)
#x_upper_limit = max(show_time)
x_lower_limit = 0.0
x_upper_limit = para.T_nu*1000
y_lower_limit = min(show_eff)-abs(max(show_eff)-min(show_eff))
y_upper_limit = max(show_eff)
plt.figure()
plt.xlabel('Time (ms)', fontsize=30)
plt.ylabel('Capture rate (%)', fontsize=30)
plt.xticks(fontsize=25)
plt.yticks(fontsize=25)
plt.xlim(x_lower_limit, x_upper_limit)
plt.ylim(y_lower_limit, y_upper_limit)
plt.plot(show_time, show_eff, 'b-', markeredgecolor = 'b', linewidth=5)
plt.savefig('eff-vs-time-proton.eps', format='eps', dpi=1000, bbox_inches='tight')
#plt.savefig('eff-vs-time-electron.eps', format='eps', dpi=1000, bbox_inches='tight')
plt.show()
|
normal
|
{
"blob_id": "bee96e817dd4d9462c1e3f8eb525c22c2117140a",
"index": 9942,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.figure()\nplt.xlabel('Time (ms)', fontsize=30)\nplt.ylabel('Capture rate (%)', fontsize=30)\nplt.xticks(fontsize=25)\nplt.yticks(fontsize=25)\nplt.xlim(x_lower_limit, x_upper_limit)\nplt.ylim(y_lower_limit, y_upper_limit)\nplt.plot(show_time, show_eff, 'b-', markeredgecolor='b', linewidth=5)\nplt.savefig('eff-vs-time-proton.eps', format='eps', dpi=1000, bbox_inches=\n 'tight')\nplt.show()\n",
"step-3": "<mask token>\ndata = np.loadtxt('eff-proton.dat')\nshow_time = data[0]\nshow_eff = data[1]\nx_lower_limit = 0.0\nx_upper_limit = para.T_nu * 1000\ny_lower_limit = min(show_eff) - abs(max(show_eff) - min(show_eff))\ny_upper_limit = max(show_eff)\nplt.figure()\nplt.xlabel('Time (ms)', fontsize=30)\nplt.ylabel('Capture rate (%)', fontsize=30)\nplt.xticks(fontsize=25)\nplt.yticks(fontsize=25)\nplt.xlim(x_lower_limit, x_upper_limit)\nplt.ylim(y_lower_limit, y_upper_limit)\nplt.plot(show_time, show_eff, 'b-', markeredgecolor='b', linewidth=5)\nplt.savefig('eff-vs-time-proton.eps', format='eps', dpi=1000, bbox_inches=\n 'tight')\nplt.show()\n",
"step-4": "from math import *\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport Input as para\ndata = np.loadtxt('eff-proton.dat')\nshow_time = data[0]\nshow_eff = data[1]\nx_lower_limit = 0.0\nx_upper_limit = para.T_nu * 1000\ny_lower_limit = min(show_eff) - abs(max(show_eff) - min(show_eff))\ny_upper_limit = max(show_eff)\nplt.figure()\nplt.xlabel('Time (ms)', fontsize=30)\nplt.ylabel('Capture rate (%)', fontsize=30)\nplt.xticks(fontsize=25)\nplt.yticks(fontsize=25)\nplt.xlim(x_lower_limit, x_upper_limit)\nplt.ylim(y_lower_limit, y_upper_limit)\nplt.plot(show_time, show_eff, 'b-', markeredgecolor='b', linewidth=5)\nplt.savefig('eff-vs-time-proton.eps', format='eps', dpi=1000, bbox_inches=\n 'tight')\nplt.show()\n",
"step-5": "#!/usr/bin/env python\nfrom math import *\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport Input as para\n\ndata = np.loadtxt(\"eff-proton.dat\")\n#data = np.loadtxt(\"eff-electron.dat\")\n\nshow_time = data[0]\nshow_eff = data[1]\n#print show_turn, show_eff\n\n#x_lower_limit = min(show_time)\n#x_upper_limit = max(show_time)\nx_lower_limit = 0.0\nx_upper_limit = para.T_nu*1000\n\ny_lower_limit = min(show_eff)-abs(max(show_eff)-min(show_eff))\ny_upper_limit = max(show_eff)\n\nplt.figure()\nplt.xlabel('Time (ms)', fontsize=30)\nplt.ylabel('Capture rate (%)', fontsize=30)\nplt.xticks(fontsize=25)\nplt.yticks(fontsize=25)\nplt.xlim(x_lower_limit, x_upper_limit)\nplt.ylim(y_lower_limit, y_upper_limit)\nplt.plot(show_time, show_eff, 'b-', markeredgecolor = 'b', linewidth=5)\n\nplt.savefig('eff-vs-time-proton.eps', format='eps', dpi=1000, bbox_inches='tight')\n#plt.savefig('eff-vs-time-electron.eps', format='eps', dpi=1000, bbox_inches='tight')\n\nplt.show()\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import json
from examtool.api.database import get_exam, get_roster
from examtool.api.extract_questions import extract_questions
from examtool.api.scramble import scramble
from google.cloud import firestore
import warnings
warnings.filterwarnings("ignore", "Your application has authenticated using end user credentials")
db = firestore.Client()
exams = [x.id for x in db.collection("exams").stream()]
for exam in exams:
print("checking", exam)
exam_json = json.dumps(get_exam(exam=exam))
roster = get_roster(exam=exam)
flagged = set()
for email, _ in roster:
template_questions = extract_questions(json.loads(exam_json))
student_questions = list(
extract_questions(scramble(email, json.loads(exam_json), keep_data=True))
)
student_question_lookup = {q['id']: q for q in student_questions}
for question in template_questions:
if question["id"] not in student_question_lookup:
continue
if question["type"] not in ["multiple_choice", "select_all"]:
continue
if question["id"] in flagged:
continue
for i, option in enumerate(question["options"]):
option["index"] = i
s = lambda options: sorted(options, key=lambda q: q["text"])
for a, b in zip(s(question["options"]), s(student_question_lookup[question["id"]]["options"])):
if a["index"] != b.get("index", a["index"]):
flagged.add(question["id"])
continue
if flagged:
print(exam, flagged)
|
normal
|
{
"blob_id": "b74c759b51fb6591477757e2ff54b545f225991c",
"index": 7470,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwarnings.filterwarnings('ignore',\n 'Your application has authenticated using end user credentials')\n<mask token>\nfor exam in exams:\n print('checking', exam)\n exam_json = json.dumps(get_exam(exam=exam))\n roster = get_roster(exam=exam)\n flagged = set()\n for email, _ in roster:\n template_questions = extract_questions(json.loads(exam_json))\n student_questions = list(extract_questions(scramble(email, json.\n loads(exam_json), keep_data=True)))\n student_question_lookup = {q['id']: q for q in student_questions}\n for question in template_questions:\n if question['id'] not in student_question_lookup:\n continue\n if question['type'] not in ['multiple_choice', 'select_all']:\n continue\n if question['id'] in flagged:\n continue\n for i, option in enumerate(question['options']):\n option['index'] = i\n s = lambda options: sorted(options, key=lambda q: q['text'])\n for a, b in zip(s(question['options']), s(\n student_question_lookup[question['id']]['options'])):\n if a['index'] != b.get('index', a['index']):\n flagged.add(question['id'])\n continue\n if flagged:\n print(exam, flagged)\n",
"step-3": "<mask token>\nwarnings.filterwarnings('ignore',\n 'Your application has authenticated using end user credentials')\ndb = firestore.Client()\nexams = [x.id for x in db.collection('exams').stream()]\nfor exam in exams:\n print('checking', exam)\n exam_json = json.dumps(get_exam(exam=exam))\n roster = get_roster(exam=exam)\n flagged = set()\n for email, _ in roster:\n template_questions = extract_questions(json.loads(exam_json))\n student_questions = list(extract_questions(scramble(email, json.\n loads(exam_json), keep_data=True)))\n student_question_lookup = {q['id']: q for q in student_questions}\n for question in template_questions:\n if question['id'] not in student_question_lookup:\n continue\n if question['type'] not in ['multiple_choice', 'select_all']:\n continue\n if question['id'] in flagged:\n continue\n for i, option in enumerate(question['options']):\n option['index'] = i\n s = lambda options: sorted(options, key=lambda q: q['text'])\n for a, b in zip(s(question['options']), s(\n student_question_lookup[question['id']]['options'])):\n if a['index'] != b.get('index', a['index']):\n flagged.add(question['id'])\n continue\n if flagged:\n print(exam, flagged)\n",
"step-4": "import json\nfrom examtool.api.database import get_exam, get_roster\nfrom examtool.api.extract_questions import extract_questions\nfrom examtool.api.scramble import scramble\nfrom google.cloud import firestore\nimport warnings\nwarnings.filterwarnings('ignore',\n 'Your application has authenticated using end user credentials')\ndb = firestore.Client()\nexams = [x.id for x in db.collection('exams').stream()]\nfor exam in exams:\n print('checking', exam)\n exam_json = json.dumps(get_exam(exam=exam))\n roster = get_roster(exam=exam)\n flagged = set()\n for email, _ in roster:\n template_questions = extract_questions(json.loads(exam_json))\n student_questions = list(extract_questions(scramble(email, json.\n loads(exam_json), keep_data=True)))\n student_question_lookup = {q['id']: q for q in student_questions}\n for question in template_questions:\n if question['id'] not in student_question_lookup:\n continue\n if question['type'] not in ['multiple_choice', 'select_all']:\n continue\n if question['id'] in flagged:\n continue\n for i, option in enumerate(question['options']):\n option['index'] = i\n s = lambda options: sorted(options, key=lambda q: q['text'])\n for a, b in zip(s(question['options']), s(\n student_question_lookup[question['id']]['options'])):\n if a['index'] != b.get('index', a['index']):\n flagged.add(question['id'])\n continue\n if flagged:\n print(exam, flagged)\n",
"step-5": "import json\n\nfrom examtool.api.database import get_exam, get_roster\nfrom examtool.api.extract_questions import extract_questions\nfrom examtool.api.scramble import scramble\nfrom google.cloud import firestore\nimport warnings\nwarnings.filterwarnings(\"ignore\", \"Your application has authenticated using end user credentials\")\n\n\ndb = firestore.Client()\nexams = [x.id for x in db.collection(\"exams\").stream()]\n\nfor exam in exams:\n print(\"checking\", exam)\n exam_json = json.dumps(get_exam(exam=exam))\n roster = get_roster(exam=exam)\n\n flagged = set()\n\n for email, _ in roster:\n template_questions = extract_questions(json.loads(exam_json))\n student_questions = list(\n extract_questions(scramble(email, json.loads(exam_json), keep_data=True))\n )\n student_question_lookup = {q['id']: q for q in student_questions}\n for question in template_questions:\n if question[\"id\"] not in student_question_lookup:\n continue\n if question[\"type\"] not in [\"multiple_choice\", \"select_all\"]:\n continue\n if question[\"id\"] in flagged:\n continue\n\n for i, option in enumerate(question[\"options\"]):\n option[\"index\"] = i\n\n s = lambda options: sorted(options, key=lambda q: q[\"text\"])\n\n for a, b in zip(s(question[\"options\"]), s(student_question_lookup[question[\"id\"]][\"options\"])):\n if a[\"index\"] != b.get(\"index\", a[\"index\"]):\n flagged.add(question[\"id\"])\n continue\n\n if flagged:\n print(exam, flagged)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
y_true = [7, 3, 3, 4, 9, 9, 2, 5, 0, 0, 6, 3, 1, 6, 8, 7, 9, 7, 4, 2, 0, 1,
4, 1, 7, 7, 5, 0, 8, 0, 1, 7, 4, 2, 2, 4, 9, 3, 1, 7, 1, 2, 1, 7, 5, 9,
9, 4, 8, 5, 7, 2, 7, 5, 5, 6, 6, 1, 2, 6, 6, 5, 3, 2, 3, 8, 8, 8, 8, 5,
3, 4, 3, 2, 8, 1, 9, 0, 6, 8, 6, 1, 1, 1, 5, 4, 8, 8, 5, 5, 8, 6, 4, 4,
6, 9, 8, 1, 5, 5]
y_pred_prob = [[0.0597563199698925, 0.1344364434480667, 0.1173347756266594,
0.11292721331119537, 0.10652001202106476, 0.13155865669250488,
0.10057594627141953, 0.10029518604278564, 0.10313529521226883,
0.03346000984311104], [0.0002930850023403764, 0.23393571376800537,
0.09061524271965027, 0.21862193942070007, 0.04659481346607208,
0.04461496323347092, 0.0952368974685669, 0.2075100988149643,
0.0616493821144104, 0.0009278177167288959], [0.22330643236637115,
1.0582012919257977e-06, 0.22777651250362396, 0.20880192518234253,
9.877869615593227e-07, 0.0006437229458242655, 0.1556401550769806,
7.201562368663872e-08, 0.18382851779460907, 5.064675860921852e-07], [
1.7682419638731517e-05, 0.001197152421809733, 0.015430454164743423,
0.0037515582516789436, 0.32882484793663025, 0.0003495111595839262,
0.012810198590159416, 0.054448556154966354, 0.30387693643569946,
0.27929291129112244], [0.16070464253425598, 4.810986276027052e-09,
0.15206283330917358, 0.004463076591491699, 0.1652054488658905,
0.0038724008481949568, 0.17216043174266815, 0.13407163321971893,
0.029512932524085045, 0.17794682085514069], [0.10922636836767197,
2.2864300319724862e-07, 0.11546860635280609, 0.001813476555980742,
0.1788507103919983, 0.005888130981475115, 0.18413811922073364,
0.10866158455610275, 0.10712066292762756, 0.18883220851421356], [
0.005557563621550798, 0.0001692363148322329, 0.35343053936958313,
0.0015008420450612903, 0.00037875055568292737, 0.2150292843580246,
0.014169459231197834, 0.03244209289550781, 0.33539846539497375,
0.041923996061086655], [0.193454310297966, 3.662989183794707e-05,
0.10065275430679321, 0.00039752188604325056, 0.16119857132434845,
0.19390884041786194, 0.07022294402122498, 0.02460072562098503,
0.16083283722400665, 0.0946948304772377], [0.28058794140815735,
1.1208027217435301e-06, 0.018203848972916603, 0.16030532121658325,
0.00018859952979255468, 0.21325571835041046, 0.2328961044549942,
0.007604319602251053, 0.04473938047885895, 0.04221738502383232], [
0.1718112975358963, 7.514636672567576e-05, 0.15386143326759338,
0.008414546959102154, 0.001738831982947886, 0.15720322728157043,
0.17100712656974792, 0.15586316585540771, 0.104509636759758,
0.07551562041044235], [0.001471314812079072, 0.008587654680013657,
0.0367623046040535, 0.011750160716474056, 0.07068527489900589,
0.4173307418823242, 0.12449752539396286, 0.014547907747328281,
0.2990296185016632, 0.01533727627247572], [0.005052714608609676,
0.0073812128975987434, 0.009834956377744675, 0.33292853832244873,
0.0018518454162403941, 0.0015299966325983405, 0.002040529390797019,
0.3055168688297272, 0.32741934061050415, 0.006443792954087257], [
0.0011697597801685333, 0.20749542117118835, 0.07009387016296387,
0.08994801342487335, 0.09965154528617859, 0.060963381081819534,
0.13158728182315826, 0.1365581601858139, 0.11990636587142944,
0.08262615650892258], [0.020798824727535248, 1.469431822442857e-06,
0.016172533854842186, 0.021048342809081078, 0.009139545261859894,
0.3956705331802368, 0.3814408779144287, 7.980810551089235e-06,
0.1391601711511612, 0.016559595242142677], [0.0008747534011490643,
0.0009511907119303942, 0.055323366075754166, 0.05426914989948273,
0.03363798186182976, 0.12827005982398987, 0.03197509050369263,
0.0008451330941170454, 0.37859639525413513, 0.3152569532394409], [
0.001832291018217802, 9.253426833311096e-05, 0.27192848920822144,
0.18078717589378357, 0.004130060318857431, 0.00929891224950552,
0.1695500910282135, 0.29965919256210327, 0.020460698753595352,
0.042260222136974335], [0.15259969234466553, 0.00015921871818136424,
0.16849327087402344, 0.002068838570266962, 0.17735524475574493,
0.02342645265161991, 0.18245863914489746, 0.00010533139720791951,
0.11123484373092651, 0.1820984184741974], [0.18936939537525177,
1.7293215250901994e-06, 0.029253976419568062, 0.1424887329339981,
0.01099975686520338, 0.0074686696752905846, 0.053486552089452744,
0.2111600935459137, 0.14551354944705963, 0.21025745570659637], [
3.861714503727853e-05, 0.1669524759054184, 0.00032175786327570677,
0.15850232541561127, 0.1955566704273224, 0.012984608300030231,
0.14730143547058105, 0.066555455327034, 0.1175893247127533,
0.13419757783412933], [0.1504199206829071, 0.006808706559240818,
0.22468900680541992, 0.18946652114391327, 1.2391226846375503e-05,
0.10332755744457245, 0.15032899379730225, 2.30663204092707e-06,
0.17487214505672455, 7.243863365147263e-05], [0.23918452858924866,
5.279692683046733e-09, 0.0671931579709053, 0.2041931003332138,
9.380520350532606e-05, 0.18892300128936768, 0.16166524589061737,
1.2340686907919007e-06, 0.1280936300754547, 0.010652361437678337], [
0.0019602354150265455, 0.17319674789905548, 0.16884981095790863,
0.025876348838210106, 0.11373495310544968, 0.034116633236408234,
0.09377618134021759, 0.16857513785362244, 0.10720878094434738,
0.11270517110824585], [0.006008224096149206, 7.275425741681829e-05,
0.002679133554920554, 0.005456522107124329, 0.2852444648742676,
0.007294526789337397, 0.26774612069129944, 0.0033797386568039656,
0.15357472002506256, 0.26854372024536133], [0.0020487161818891764,
0.18302913010120392, 0.17970730364322662, 0.03157859668135643,
0.10424197465181351, 0.028137331828475, 0.049388039857149124,
0.17323219776153564, 0.13171784579753876, 0.11691895872354507], [
0.011249794624745846, 0.0003711018362082541, 0.32693105936050415,
0.0010822461917996407, 0.0076926033943891525, 0.04566335678100586,
0.005700047593563795, 0.32916736602783203, 0.09476791322231293,
0.17737449705600739], [0.0001925578253576532, 7.067231763357995e-06,
0.0001896199828479439, 0.09954455494880676, 0.23005598783493042,
0.2152310460805893, 0.09002267569303513, 0.017976609990000725,
0.0920918807387352, 0.25468799471855164], [0.0006383731961250305,
3.095208057857235e-06, 0.0005969868507236242, 0.41469672322273254,
0.0053739529103040695, 0.40698617696762085, 0.08218759298324585,
0.0003528161614667624, 0.07473969459533691, 0.014424380846321583], [
0.19537049531936646, 3.243912300235352e-13, 0.005169959273189306,
0.17694340646266937, 2.949438930954784e-05, 0.1400780826807022,
0.18864554166793823, 3.857006959151477e-06, 0.18823771178722382,
0.10552132874727249], [0.009722508490085602, 3.8531984500878025e-06,
0.07383214682340622, 0.03598225489258766, 0.07267675548791885,
0.1459459662437439, 0.07249364256858826, 0.002293274737894535,
0.48588359355926514, 0.1011660099029541], [0.21651780605316162,
9.559274261050632e-09, 0.14371894299983978, 0.13431811332702637,
2.7394575226935558e-05, 0.1838626116514206, 0.17265450954437256,
0.00012304158008191735, 0.12219242751598358, 0.0265849307179451], [
4.430914850672707e-05, 0.2043066918849945, 0.0002825123374350369,
0.16263452172279358, 0.1939067542552948, 0.1427866667509079,
0.11921370774507523, 0.0028419536538422108, 0.06556723266839981,
0.10841585695743561], [0.004471424967050552, 0.1858968585729599,
0.17653658986091614, 0.01416453905403614, 0.008144107647240162,
0.0843614935874939, 0.05890577659010887, 0.18505530059337616,
0.10232891887426376, 0.18013498187065125], [0.00041712025995366275,
1.1021310228898074e-06, 0.08412905037403107, 0.0002837374631781131,
0.2740859091281891, 0.013903344981372356, 0.08929961919784546,
0.2733091115951538, 0.2233879268169403, 0.04118315503001213], [
0.04552318528294563, 0.020853176712989807, 0.26410210132598877,
0.23437173664569855, 2.1701146124541992e-06, 0.10220374912023544,
0.07447297871112823, 7.592303154524416e-05, 0.25814488530158997,
0.00025002588517963886], [0.024719374254345894, 0.00217414740473032,
0.26734668016433716, 0.17261573672294617, 0.003498602891340852,
0.05698162689805031, 0.2737174332141876, 8.039058593567461e-05,
0.19880186021327972, 6.410985952243209e-05], [0.12234598398208618,
6.703280632791575e-06, 0.015603234991431236, 0.013786871917545795,
0.21616478264331818, 0.005412149243056774, 0.11406012624502182,
0.12291428446769714, 0.18262456357479095, 0.20708128809928894], [
0.193313866853714, 6.033819488493464e-08, 0.14491458237171173,
0.2349807769060135, 0.0006736826617270708, 0.003743150969967246,
0.12457092851400375, 0.004962997976690531, 0.23268520832061768,
0.060154590755701065], [0.006641837302595377, 0.005113706924021244,
0.060135774314403534, 0.37294134497642517, 0.0001917753543239087,
0.35536521673202515, 0.003515040036290884, 0.00014136293611954898,
0.19584619998931885, 0.00010780058073578402], [0.00022568553686141968,
0.1758676916360855, 0.08169379830360413, 0.11927571147680283,
0.14987629652023315, 0.026822827756404877, 0.09613550454378128,
0.14441852271556854, 0.11029191315174103, 0.09539227187633514], [
0.028152454644441605, 0.04798303544521332, 0.06989692151546478,
0.07051544636487961, 0.07356826215982437, 0.05468234792351723,
0.11397064477205276, 0.2294078767299652, 0.0822836384177208,
0.22953952848911285], [0.0009083361364901066, 0.16873282194137573,
0.040142301470041275, 0.13509070873260498, 0.16045929491519928,
0.09148524701595306, 0.0939648225903511, 0.13889746367931366,
0.043392572551965714, 0.12692658603191376], [7.008769898675382e-05,
0.0012455701362341642, 0.4437786936759949, 0.03154001384973526,
0.0033613061532378197, 0.0024434190709143877, 0.3866567313671112,
0.0005211094976402819, 0.13020911812782288, 0.00017409549036528915], [
0.00034864526242017746, 0.21021592617034912, 0.005514794960618019,
0.11704950034618378, 0.08421261608600616, 0.13176649808883667,
0.11882488429546356, 0.008054501377046108, 0.1467529684305191,
0.1772596538066864], [0.036879003047943115, 0.0014911789912730455,
0.2685071527957916, 0.0029583016876131296, 0.011879128403961658,
0.030892902985215187, 0.08989892154932022, 0.29645001888275146,
0.04054954648017883, 0.2204938679933548], [0.0064177061431109905,
0.0045189931988716125, 0.013788403943181038, 0.18153700232505798,
0.0003662402159534395, 0.5257023572921753, 0.06426692008972168,
9.742573638504837e-06, 0.2026320844888687, 0.000760772149078548], [
0.0017538872780278325, 0.0002046643348876387, 0.04638877511024475,
0.11219469457864761, 0.1732793003320694, 0.000888414157088846,
0.1527005136013031, 0.171849325299263, 0.16653017699718475,
0.17421048879623413], [6.957617006264627e-05, 3.015168840647675e-05,
0.05601977929472923, 0.06104991212487221, 0.14622464776039124,
0.0013683908618986607, 0.004713970702141523, 0.26153290271759033,
0.21816983819007874, 0.25082090497016907], [0.001964711584150791,
0.14094221591949463, 0.04670453444123268, 0.11537310481071472,
0.1456061750650406, 0.021807175129652023, 0.1023702397942543,
0.14592182636260986, 0.1320936679840088, 0.14721626043319702], [
0.0013557883212342858, 5.542307803807489e-07, 0.015518834814429283,
0.020929962396621704, 0.12795883417129517, 0.012969551607966423,
0.011510342359542847, 0.3424086570739746, 0.3332746922969818,
0.1340728998184204], [0.0951327458024025, 0.03636496141552925,
0.018829435110092163, 0.060135968029499054, 0.1569897085428238,
0.1514764130115509, 0.13258931040763855, 0.1450430303812027,
0.04603665694594383, 0.15740196406841278], [0.17052830755710602,
1.5615187294315547e-06, 0.0013229812029749155, 0.12005076557397842,
0.021564221009612083, 0.024421295151114464, 0.17088675498962402,
0.15222683548927307, 0.1693890392780304, 0.16960804164409637], [
0.006946968380361795, 0.3011370897293091, 0.3187958002090454,
0.06604688614606857, 0.011190904304385185, 0.05437859520316124,
0.020502492785453796, 0.010224146768450737, 0.21062366664409637,
0.00015340560639742762], [0.003341993084177375, 0.0016007163794711232,
0.0007675797096453607, 0.18986503779888153, 0.1190534457564354,
0.02811228297650814, 0.09639428555965424, 0.21583504974842072,
0.13505271077156067, 0.2099769562482834], [0.042331017553806305,
0.00029962626285851, 0.0023094473872333765, 0.18676534295082092,
0.000317152967909351, 0.48982951045036316, 0.1871659755706787,
8.205944141082e-06, 0.09039845317602158, 0.0005752819124609232], [
0.27066469192504883, 0.0001488085399614647, 0.025224560871720314,
0.03236522525548935, 0.00022321399592328817, 0.3199988305568695,
0.20726615190505981, 2.1540354282478802e-05, 0.13308577239513397,
0.011001424863934517], [0.21046556532382965, 8.32586906085453e-08,
0.050842639058828354, 0.0012313498882576823, 0.17998859286308289,
0.005802170839160681, 0.22032563388347626, 9.771327313501388e-06,
0.2085702270269394, 0.12276387959718704], [0.278763085603714,
2.956639932882865e-10, 0.2363770455121994, 0.0021949675865471363,
0.024400619789958, 0.01081052329391241, 0.2788945734500885,
0.000592902593780309, 0.09800171107053757, 0.06996453553438187], [
0.0012440741993486881, 0.0002501744020264596, 0.039189230650663376,
0.003109667217358947, 0.1353403925895691, 0.17648975551128387,
0.29823172092437744, 0.0005026640137657523, 0.1873668134212494,
0.15827545523643494], [4.636057929019444e-05, 0.004471238702535629,
0.010865537449717522, 0.03406133875250816, 0.2391168773174286,
0.0102084307000041, 0.24508318305015564, 0.10957624763250351,
0.10304577648639679, 0.24352511763572693], [0.007771539501845837,
0.003819737583398819, 0.05605701357126236, 0.0013185413554310799,
0.026425426825881004, 0.37273845076560974, 0.39364394545555115,
3.468452996457927e-05, 0.13644644618034363, 0.0017443000106140971], [
0.0042862421832978725, 4.118454022261631e-09, 0.24541069567203522,
1.311416235694196e-05, 0.002639196580275893, 0.2002275139093399,
0.35612747073173523, 8.159701246768236e-05, 0.11912810802459717,
0.07208611816167831], [0.10790199786424637, 0.00018712706514634192,
0.001723292050883174, 0.3369658291339874, 0.005216643214225769,
0.323357492685318, 0.04629630222916603, 0.0006358266109600663,
0.17700347304344177, 0.0007120332447811961], [0.01004449650645256,
0.0038342783227562904, 0.0029477709904313087, 0.39860454201698303,
0.000900272571016103, 0.32782217860221863, 0.010686549358069897,
0.0006012170924805105, 0.23407192528247833, 0.010486727580428123], [
0.0015078516444191337, 0.23596949875354767, 0.4038705825805664,
0.04463784024119377, 0.00036313795135356486, 0.005906661506742239,
0.012559221126139164, 0.010579549707472324, 0.2843676507472992,
0.0002381248341407627], [0.1887362003326416, 0.0019065006636083126,
0.2840288579463959, 0.2984219193458557, 4.9067231884691864e-05,
0.1615515947341919, 0.012938770465552807, 0.00029289082158356905,
0.052058152854442596, 1.6269357729470357e-05], [0.0006827416946180165,
2.276465056638699e-05, 0.023704057559370995, 0.16121432185173035,
0.0033186341170221567, 0.004117893520742655, 0.03627816215157509,
0.009822812862694263, 0.7281517386436462, 0.032687313854694366], [
0.0011369712883606553, 0.27387163043022156, 0.07185991108417511,
0.15628814697265625, 0.002854800783097744, 0.23154565691947937,
0.03204796463251114, 0.003870188258588314, 0.22623319923877716,
0.00029159500263631344], [0.0035695999395102262, 0.26706114411354065,
0.1508740484714508, 0.0013921442441642284, 0.019328434020280838,
0.13771453499794006, 0.029891734942793846, 0.03509771451354027,
0.24692872166633606, 0.1081417053937912], [0.000882012362126261,
2.536918327677995e-05, 0.0450599268078804, 0.412322998046875,
0.0025211411993950605, 0.002278776839375496, 0.011372447945177555,
0.1770726591348648, 0.33388030529022217, 0.014584112912416458], [
0.21903501451015472, 5.910552047794226e-09, 0.022012481465935707,
0.20099963247776031, 1.0874355211853981e-05, 0.21909210085868835,
0.21668335795402527, 4.337367798257219e-08, 0.12212178856134415,
4.4732783862855285e-05], [0.014651631936430931, 0.00830799899995327,
0.005935078486800194, 0.3953670263290405, 1.1293817806290463e-05,
0.4299878776073456, 0.017106691375374794, 0.00014334742445498705,
0.11808823049068451, 0.010400976054370403], [0.010301091708242893,
0.01435689628124237, 0.07430031895637512, 0.06989920139312744,
0.2338510900735855, 0.053795550018548965, 0.22257547080516815,
0.0029012206941843033, 0.09203658252954483, 0.22598253190517426], [
0.033016644418239594, 0.0020125852897763252, 0.06661045551300049,
0.4920836091041565, 0.00025867935619316995, 0.07482428848743439,
0.13923810422420502, 0.00012527030776254833, 0.19180776178836823,
2.269313517899718e-05], [0.1325867474079132, 0.004940022714436054,
0.22300080955028534, 0.2727201282978058, 3.310650572529994e-05,
0.12915031611919403, 0.01339033618569374, 1.0927167750196531e-05,
0.22410929203033447, 5.8520683523966e-05], [0.126132994890213,
0.0013935434399172664, 0.17098797857761383, 0.00039779843064025044,
0.07732491940259933, 0.16493096947669983, 0.014501826837658882,
0.03405503183603287, 0.20594964921474457, 0.2043251097202301], [
0.0008475463255308568, 0.19114449620246887, 0.03174148499965668,
0.1596948355436325, 0.1830475926399231, 0.11398201435804367,
0.11080365628004074, 0.10536272078752518, 0.05745834857225418,
0.04591764137148857], [0.0009525367058813572, 0.0012388192117214203,
0.0006522738258354366, 0.15977761149406433, 0.2019728273153305,
0.037797972559928894, 0.19880010187625885, 0.008799873292446136,
0.18693988025188446, 0.20306788384914398], [0.21417981386184692,
1.8215121144748991e-07, 0.11546390503644943, 0.10518436878919601,
5.3784842748427764e-05, 0.17964830994606018, 0.1753360480070114,
0.005312803667038679, 0.07569659501314163, 0.1291242241859436], [
0.03322113677859306, 1.1228289409359604e-08, 0.11529551446437836,
0.006697801407426596, 0.020004654303193092, 0.2904326617717743,
0.3397071361541748, 6.173769179440569e-06, 0.1187906265258789,
0.07584403455257416], [0.00018722846289165318, 0.00015633362636435777,
0.027305739000439644, 0.30433472990989685, 0.12216899544000626,
0.0051543135195970535, 0.07717369496822357, 5.6467473768861964e-05,
0.46220865845680237, 0.0012535307323560119], [0.2223890870809555,
1.8010264568601997e-07, 0.051188305020332336, 0.06915734708309174,
0.007792292162775993, 0.13037307560443878, 0.4795873761177063,
6.65841726004146e-05, 0.03377178683876991, 0.0056741489097476006], [
0.0011432061437517405, 0.172257199883461, 0.08959532529115677,
0.09976792335510254, 0.13487820327281952, 0.025573352351784706,
0.11224105209112167, 0.1427890509366989, 0.12529729306697845,
0.09645748883485794], [0.00039081714930944145, 0.17529502511024475,
0.07816692441701889, 0.12808731198310852, 0.13959045708179474,
0.04451143741607666, 0.07863735407590866, 0.1518080085515976,
0.09225541353225708, 0.11125729233026505], [0.0005360758514143527,
0.1871286779642105, 0.09343081712722778, 0.10187795013189316,
0.15403643250465393, 0.03745483607053757, 0.10108820348978043,
0.1381213515996933, 0.1196260005235672, 0.0666997954249382], [
0.02377643622457981, 0.002874232828617096, 0.06835681945085526,
0.08628982305526733, 0.16734763979911804, 0.1884264051914215,
0.06887176632881165, 0.1883554309606552, 0.11966855823993683,
0.0860329195857048], [0.0019290593918412924, 0.0004132240719627589,
0.08087942749261856, 0.00133050128351897, 0.2057691514492035,
0.014698517508804798, 0.10668473690748215, 0.2002524882555008,
0.19643288850784302, 0.19160999357700348], [4.1589693864807487e-05,
3.0074079404585063e-06, 0.00946643017232418, 0.0028675245121121407,
0.339987188577652, 0.006530506536364555, 0.21062259376049042,
5.006019819120411e-06, 0.4303286373615265, 0.00014742799976374954], [
0.23467645049095154, 3.957170217048535e-14, 0.016559595242142677,
0.22702592611312866, 0.0004185910802334547, 0.0031147561967372894,
0.2260916531085968, 2.4497327899553056e-07, 0.2333890199661255,
0.05872354656457901], [0.1723964959383011, 1.4810979109824984e-07,
0.001400468056090176, 0.3012116253376007, 0.00017689657397568226,
0.29611334204673767, 0.013564502820372581, 0.04992862418293953,
0.15185707807540894, 0.013350787572562695], [0.18757264316082,
1.502647393181178e-07, 0.0013043361250311136, 0.08373606950044632,
0.0005724140792153776, 0.1799388974905014, 0.14538954198360443,
0.16594813764095306, 0.06483398377895355, 0.17070381343364716], [
0.008307700976729393, 0.0005032537155784667, 0.04173918813467026,
0.055757056921720505, 0.2954571545124054, 0.046274807304143906,
0.15145555138587952, 0.00160416669677943, 0.36763912439346313,
0.031262170523405075], [0.03202534094452858, 2.929154447883775e-07,
0.03331722691655159, 0.0002443870762363076, 0.021324075758457184,
0.3864181637763977, 0.39420267939567566, 3.2187076612899546e-06,
0.08215467631816864, 0.050310224294662476], [0.03041147254407406,
3.317395247393051e-10, 0.013215649873018265, 0.009000282734632492,
0.15260590612888336, 9.569835674483329e-05, 0.22718068957328796,
0.0983223170042038, 0.23328886926174164, 0.23587895929813385], [
0.0017376767937093973, 0.01800091378390789, 0.09461784362792969,
0.008886604569852352, 0.23299837112426758, 0.03532419353723526,
0.20058980584144592, 0.1702878624200821, 0.06943482160568237,
0.1681220531463623], [0.26592451333999634, 1.378083283043452e-07,
0.26663097739219666, 0.00043869472574442625, 0.0753256231546402,
0.000345755455782637, 0.2718716561794281, 0.09590824693441391,
0.021168876439332962, 0.0023856020998209715], [0.007719929795712233,
0.000273746729362756, 0.06954099237918854, 0.11292484402656555,
0.17693056166172028, 0.0036023242864757776, 0.16335690021514893,
0.1139131560921669, 0.17289915680885315, 0.17883846163749695], [
0.0002722161589190364, 0.0014734293799847364, 0.0001780118327587843,
0.0718056932091713, 0.219150573015213, 0.02937471494078636,
0.15243956446647644, 0.07647080719470978, 0.21917390823364258,
0.22966115176677704], [0.0008591399528086185, 0.27216723561286926,
0.030793067067861557, 0.040201541036367416, 0.07587726414203644,
0.06215333193540573, 0.16188929975032806, 0.04154059290885925,
0.21999017894268036, 0.09452840685844421], [0.156771719455719,
0.0009459690772928298, 0.08676373958587646, 0.012071664445102215,
0.046294376254081726, 0.1705559939146042, 0.05631829798221588,
0.16554586589336395, 0.14995504915714264, 0.15477733314037323], [
0.0036007703747600317, 0.0036146841011941433, 0.007429149001836777,
0.10190737992525101, 0.0016259902622550726, 0.45585712790489197,
0.04189519211649895, 7.317630092984473e-07, 0.3802386522293091,
0.003830441040918231]]
|
flexible
|
{
"blob_id": "593d3221e34c0eef51228082d767d8516ec93ca2",
"index": 8002,
"step-1": "<mask token>\n",
"step-2": "y_true = [7, 3, 3, 4, 9, 9, 2, 5, 0, 0, 6, 3, 1, 6, 8, 7, 9, 7, 4, 2, 0, 1,\n 4, 1, 7, 7, 5, 0, 8, 0, 1, 7, 4, 2, 2, 4, 9, 3, 1, 7, 1, 2, 1, 7, 5, 9,\n 9, 4, 8, 5, 7, 2, 7, 5, 5, 6, 6, 1, 2, 6, 6, 5, 3, 2, 3, 8, 8, 8, 8, 5,\n 3, 4, 3, 2, 8, 1, 9, 0, 6, 8, 6, 1, 1, 1, 5, 4, 8, 8, 5, 5, 8, 6, 4, 4,\n 6, 9, 8, 1, 5, 5]\ny_pred_prob = [[0.0597563199698925, 0.1344364434480667, 0.1173347756266594,\n 0.11292721331119537, 0.10652001202106476, 0.13155865669250488, \n 0.10057594627141953, 0.10029518604278564, 0.10313529521226883, \n 0.03346000984311104], [0.0002930850023403764, 0.23393571376800537, \n 0.09061524271965027, 0.21862193942070007, 0.04659481346607208, \n 0.04461496323347092, 0.0952368974685669, 0.2075100988149643, \n 0.0616493821144104, 0.0009278177167288959], [0.22330643236637115, \n 1.0582012919257977e-06, 0.22777651250362396, 0.20880192518234253, \n 9.877869615593227e-07, 0.0006437229458242655, 0.1556401550769806, \n 7.201562368663872e-08, 0.18382851779460907, 5.064675860921852e-07], [\n 1.7682419638731517e-05, 0.001197152421809733, 0.015430454164743423, \n 0.0037515582516789436, 0.32882484793663025, 0.0003495111595839262, \n 0.012810198590159416, 0.054448556154966354, 0.30387693643569946, \n 0.27929291129112244], [0.16070464253425598, 4.810986276027052e-09, \n 0.15206283330917358, 0.004463076591491699, 0.1652054488658905, \n 0.0038724008481949568, 0.17216043174266815, 0.13407163321971893, \n 0.029512932524085045, 0.17794682085514069], [0.10922636836767197, \n 2.2864300319724862e-07, 0.11546860635280609, 0.001813476555980742, \n 0.1788507103919983, 0.005888130981475115, 0.18413811922073364, \n 0.10866158455610275, 0.10712066292762756, 0.18883220851421356], [\n 0.005557563621550798, 0.0001692363148322329, 0.35343053936958313, \n 0.0015008420450612903, 0.00037875055568292737, 0.2150292843580246, \n 0.014169459231197834, 0.03244209289550781, 0.33539846539497375, \n 0.041923996061086655], [0.193454310297966, 3.662989183794707e-05, \n 
0.10065275430679321, 0.00039752188604325056, 0.16119857132434845, \n 0.19390884041786194, 0.07022294402122498, 0.02460072562098503, \n 0.16083283722400665, 0.0946948304772377], [0.28058794140815735, \n 1.1208027217435301e-06, 0.018203848972916603, 0.16030532121658325, \n 0.00018859952979255468, 0.21325571835041046, 0.2328961044549942, \n 0.007604319602251053, 0.04473938047885895, 0.04221738502383232], [\n 0.1718112975358963, 7.514636672567576e-05, 0.15386143326759338, \n 0.008414546959102154, 0.001738831982947886, 0.15720322728157043, \n 0.17100712656974792, 0.15586316585540771, 0.104509636759758, \n 0.07551562041044235], [0.001471314812079072, 0.008587654680013657, \n 0.0367623046040535, 0.011750160716474056, 0.07068527489900589, \n 0.4173307418823242, 0.12449752539396286, 0.014547907747328281, \n 0.2990296185016632, 0.01533727627247572], [0.005052714608609676, \n 0.0073812128975987434, 0.009834956377744675, 0.33292853832244873, \n 0.0018518454162403941, 0.0015299966325983405, 0.002040529390797019, \n 0.3055168688297272, 0.32741934061050415, 0.006443792954087257], [\n 0.0011697597801685333, 0.20749542117118835, 0.07009387016296387, \n 0.08994801342487335, 0.09965154528617859, 0.060963381081819534, \n 0.13158728182315826, 0.1365581601858139, 0.11990636587142944, \n 0.08262615650892258], [0.020798824727535248, 1.469431822442857e-06, \n 0.016172533854842186, 0.021048342809081078, 0.009139545261859894, \n 0.3956705331802368, 0.3814408779144287, 7.980810551089235e-06, \n 0.1391601711511612, 0.016559595242142677], [0.0008747534011490643, \n 0.0009511907119303942, 0.055323366075754166, 0.05426914989948273, \n 0.03363798186182976, 0.12827005982398987, 0.03197509050369263, \n 0.0008451330941170454, 0.37859639525413513, 0.3152569532394409], [\n 0.001832291018217802, 9.253426833311096e-05, 0.27192848920822144, \n 0.18078717589378357, 0.004130060318857431, 0.00929891224950552, \n 0.1695500910282135, 0.29965919256210327, 0.020460698753595352, \n 0.042260222136974335], 
[0.15259969234466553, 0.00015921871818136424, \n 0.16849327087402344, 0.002068838570266962, 0.17735524475574493, \n 0.02342645265161991, 0.18245863914489746, 0.00010533139720791951, \n 0.11123484373092651, 0.1820984184741974], [0.18936939537525177, \n 1.7293215250901994e-06, 0.029253976419568062, 0.1424887329339981, \n 0.01099975686520338, 0.0074686696752905846, 0.053486552089452744, \n 0.2111600935459137, 0.14551354944705963, 0.21025745570659637], [\n 3.861714503727853e-05, 0.1669524759054184, 0.00032175786327570677, \n 0.15850232541561127, 0.1955566704273224, 0.012984608300030231, \n 0.14730143547058105, 0.066555455327034, 0.1175893247127533, \n 0.13419757783412933], [0.1504199206829071, 0.006808706559240818, \n 0.22468900680541992, 0.18946652114391327, 1.2391226846375503e-05, \n 0.10332755744457245, 0.15032899379730225, 2.30663204092707e-06, \n 0.17487214505672455, 7.243863365147263e-05], [0.23918452858924866, \n 5.279692683046733e-09, 0.0671931579709053, 0.2041931003332138, \n 9.380520350532606e-05, 0.18892300128936768, 0.16166524589061737, \n 1.2340686907919007e-06, 0.1280936300754547, 0.010652361437678337], [\n 0.0019602354150265455, 0.17319674789905548, 0.16884981095790863, \n 0.025876348838210106, 0.11373495310544968, 0.034116633236408234, \n 0.09377618134021759, 0.16857513785362244, 0.10720878094434738, \n 0.11270517110824585], [0.006008224096149206, 7.275425741681829e-05, \n 0.002679133554920554, 0.005456522107124329, 0.2852444648742676, \n 0.007294526789337397, 0.26774612069129944, 0.0033797386568039656, \n 0.15357472002506256, 0.26854372024536133], [0.0020487161818891764, \n 0.18302913010120392, 0.17970730364322662, 0.03157859668135643, \n 0.10424197465181351, 0.028137331828475, 0.049388039857149124, \n 0.17323219776153564, 0.13171784579753876, 0.11691895872354507], [\n 0.011249794624745846, 0.0003711018362082541, 0.32693105936050415, \n 0.0010822461917996407, 0.0076926033943891525, 0.04566335678100586, \n 0.005700047593563795, 0.32916736602783203, 
0.09476791322231293, \n 0.17737449705600739], [0.0001925578253576532, 7.067231763357995e-06, \n 0.0001896199828479439, 0.09954455494880676, 0.23005598783493042, \n 0.2152310460805893, 0.09002267569303513, 0.017976609990000725, \n 0.0920918807387352, 0.25468799471855164], [0.0006383731961250305, \n 3.095208057857235e-06, 0.0005969868507236242, 0.41469672322273254, \n 0.0053739529103040695, 0.40698617696762085, 0.08218759298324585, \n 0.0003528161614667624, 0.07473969459533691, 0.014424380846321583], [\n 0.19537049531936646, 3.243912300235352e-13, 0.005169959273189306, \n 0.17694340646266937, 2.949438930954784e-05, 0.1400780826807022, \n 0.18864554166793823, 3.857006959151477e-06, 0.18823771178722382, \n 0.10552132874727249], [0.009722508490085602, 3.8531984500878025e-06, \n 0.07383214682340622, 0.03598225489258766, 0.07267675548791885, \n 0.1459459662437439, 0.07249364256858826, 0.002293274737894535, \n 0.48588359355926514, 0.1011660099029541], [0.21651780605316162, \n 9.559274261050632e-09, 0.14371894299983978, 0.13431811332702637, \n 2.7394575226935558e-05, 0.1838626116514206, 0.17265450954437256, \n 0.00012304158008191735, 0.12219242751598358, 0.0265849307179451], [\n 4.430914850672707e-05, 0.2043066918849945, 0.0002825123374350369, \n 0.16263452172279358, 0.1939067542552948, 0.1427866667509079, \n 0.11921370774507523, 0.0028419536538422108, 0.06556723266839981, \n 0.10841585695743561], [0.004471424967050552, 0.1858968585729599, \n 0.17653658986091614, 0.01416453905403614, 0.008144107647240162, \n 0.0843614935874939, 0.05890577659010887, 0.18505530059337616, \n 0.10232891887426376, 0.18013498187065125], [0.00041712025995366275, \n 1.1021310228898074e-06, 0.08412905037403107, 0.0002837374631781131, \n 0.2740859091281891, 0.013903344981372356, 0.08929961919784546, \n 0.2733091115951538, 0.2233879268169403, 0.04118315503001213], [\n 0.04552318528294563, 0.020853176712989807, 0.26410210132598877, \n 0.23437173664569855, 2.1701146124541992e-06, 0.10220374912023544, \n 
0.07447297871112823, 7.592303154524416e-05, 0.25814488530158997, \n 0.00025002588517963886], [0.024719374254345894, 0.00217414740473032, \n 0.26734668016433716, 0.17261573672294617, 0.003498602891340852, \n 0.05698162689805031, 0.2737174332141876, 8.039058593567461e-05, \n 0.19880186021327972, 6.410985952243209e-05], [0.12234598398208618, \n 6.703280632791575e-06, 0.015603234991431236, 0.013786871917545795, \n 0.21616478264331818, 0.005412149243056774, 0.11406012624502182, \n 0.12291428446769714, 0.18262456357479095, 0.20708128809928894], [\n 0.193313866853714, 6.033819488493464e-08, 0.14491458237171173, \n 0.2349807769060135, 0.0006736826617270708, 0.003743150969967246, \n 0.12457092851400375, 0.004962997976690531, 0.23268520832061768, \n 0.060154590755701065], [0.006641837302595377, 0.005113706924021244, \n 0.060135774314403534, 0.37294134497642517, 0.0001917753543239087, \n 0.35536521673202515, 0.003515040036290884, 0.00014136293611954898, \n 0.19584619998931885, 0.00010780058073578402], [0.00022568553686141968, \n 0.1758676916360855, 0.08169379830360413, 0.11927571147680283, \n 0.14987629652023315, 0.026822827756404877, 0.09613550454378128, \n 0.14441852271556854, 0.11029191315174103, 0.09539227187633514], [\n 0.028152454644441605, 0.04798303544521332, 0.06989692151546478, \n 0.07051544636487961, 0.07356826215982437, 0.05468234792351723, \n 0.11397064477205276, 0.2294078767299652, 0.0822836384177208, \n 0.22953952848911285], [0.0009083361364901066, 0.16873282194137573, \n 0.040142301470041275, 0.13509070873260498, 0.16045929491519928, \n 0.09148524701595306, 0.0939648225903511, 0.13889746367931366, \n 0.043392572551965714, 0.12692658603191376], [7.008769898675382e-05, \n 0.0012455701362341642, 0.4437786936759949, 0.03154001384973526, \n 0.0033613061532378197, 0.0024434190709143877, 0.3866567313671112, \n 0.0005211094976402819, 0.13020911812782288, 0.00017409549036528915], [\n 0.00034864526242017746, 0.21021592617034912, 0.005514794960618019, \n 
0.11704950034618378, 0.08421261608600616, 0.13176649808883667, \n 0.11882488429546356, 0.008054501377046108, 0.1467529684305191, \n 0.1772596538066864], [0.036879003047943115, 0.0014911789912730455, \n 0.2685071527957916, 0.0029583016876131296, 0.011879128403961658, \n 0.030892902985215187, 0.08989892154932022, 0.29645001888275146, \n 0.04054954648017883, 0.2204938679933548], [0.0064177061431109905, \n 0.0045189931988716125, 0.013788403943181038, 0.18153700232505798, \n 0.0003662402159534395, 0.5257023572921753, 0.06426692008972168, \n 9.742573638504837e-06, 0.2026320844888687, 0.000760772149078548], [\n 0.0017538872780278325, 0.0002046643348876387, 0.04638877511024475, \n 0.11219469457864761, 0.1732793003320694, 0.000888414157088846, \n 0.1527005136013031, 0.171849325299263, 0.16653017699718475, \n 0.17421048879623413], [6.957617006264627e-05, 3.015168840647675e-05, \n 0.05601977929472923, 0.06104991212487221, 0.14622464776039124, \n 0.0013683908618986607, 0.004713970702141523, 0.26153290271759033, \n 0.21816983819007874, 0.25082090497016907], [0.001964711584150791, \n 0.14094221591949463, 0.04670453444123268, 0.11537310481071472, \n 0.1456061750650406, 0.021807175129652023, 0.1023702397942543, \n 0.14592182636260986, 0.1320936679840088, 0.14721626043319702], [\n 0.0013557883212342858, 5.542307803807489e-07, 0.015518834814429283, \n 0.020929962396621704, 0.12795883417129517, 0.012969551607966423, \n 0.011510342359542847, 0.3424086570739746, 0.3332746922969818, \n 0.1340728998184204], [0.0951327458024025, 0.03636496141552925, \n 0.018829435110092163, 0.060135968029499054, 0.1569897085428238, \n 0.1514764130115509, 0.13258931040763855, 0.1450430303812027, \n 0.04603665694594383, 0.15740196406841278], [0.17052830755710602, \n 1.5615187294315547e-06, 0.0013229812029749155, 0.12005076557397842, \n 0.021564221009612083, 0.024421295151114464, 0.17088675498962402, \n 0.15222683548927307, 0.1693890392780304, 0.16960804164409637], [\n 0.006946968380361795, 
0.3011370897293091, 0.3187958002090454, \n 0.06604688614606857, 0.011190904304385185, 0.05437859520316124, \n 0.020502492785453796, 0.010224146768450737, 0.21062366664409637, \n 0.00015340560639742762], [0.003341993084177375, 0.0016007163794711232, \n 0.0007675797096453607, 0.18986503779888153, 0.1190534457564354, \n 0.02811228297650814, 0.09639428555965424, 0.21583504974842072, \n 0.13505271077156067, 0.2099769562482834], [0.042331017553806305, \n 0.00029962626285851, 0.0023094473872333765, 0.18676534295082092, \n 0.000317152967909351, 0.48982951045036316, 0.1871659755706787, \n 8.205944141082e-06, 0.09039845317602158, 0.0005752819124609232], [\n 0.27066469192504883, 0.0001488085399614647, 0.025224560871720314, \n 0.03236522525548935, 0.00022321399592328817, 0.3199988305568695, \n 0.20726615190505981, 2.1540354282478802e-05, 0.13308577239513397, \n 0.011001424863934517], [0.21046556532382965, 8.32586906085453e-08, \n 0.050842639058828354, 0.0012313498882576823, 0.17998859286308289, \n 0.005802170839160681, 0.22032563388347626, 9.771327313501388e-06, \n 0.2085702270269394, 0.12276387959718704], [0.278763085603714, \n 2.956639932882865e-10, 0.2363770455121994, 0.0021949675865471363, \n 0.024400619789958, 0.01081052329391241, 0.2788945734500885, \n 0.000592902593780309, 0.09800171107053757, 0.06996453553438187], [\n 0.0012440741993486881, 0.0002501744020264596, 0.039189230650663376, \n 0.003109667217358947, 0.1353403925895691, 0.17648975551128387, \n 0.29823172092437744, 0.0005026640137657523, 0.1873668134212494, \n 0.15827545523643494], [4.636057929019444e-05, 0.004471238702535629, \n 0.010865537449717522, 0.03406133875250816, 0.2391168773174286, \n 0.0102084307000041, 0.24508318305015564, 0.10957624763250351, \n 0.10304577648639679, 0.24352511763572693], [0.007771539501845837, \n 0.003819737583398819, 0.05605701357126236, 0.0013185413554310799, \n 0.026425426825881004, 0.37273845076560974, 0.39364394545555115, \n 3.468452996457927e-05, 0.13644644618034363, 
0.0017443000106140971], [\n 0.0042862421832978725, 4.118454022261631e-09, 0.24541069567203522, \n 1.311416235694196e-05, 0.002639196580275893, 0.2002275139093399, \n 0.35612747073173523, 8.159701246768236e-05, 0.11912810802459717, \n 0.07208611816167831], [0.10790199786424637, 0.00018712706514634192, \n 0.001723292050883174, 0.3369658291339874, 0.005216643214225769, \n 0.323357492685318, 0.04629630222916603, 0.0006358266109600663, \n 0.17700347304344177, 0.0007120332447811961], [0.01004449650645256, \n 0.0038342783227562904, 0.0029477709904313087, 0.39860454201698303, \n 0.000900272571016103, 0.32782217860221863, 0.010686549358069897, \n 0.0006012170924805105, 0.23407192528247833, 0.010486727580428123], [\n 0.0015078516444191337, 0.23596949875354767, 0.4038705825805664, \n 0.04463784024119377, 0.00036313795135356486, 0.005906661506742239, \n 0.012559221126139164, 0.010579549707472324, 0.2843676507472992, \n 0.0002381248341407627], [0.1887362003326416, 0.0019065006636083126, \n 0.2840288579463959, 0.2984219193458557, 4.9067231884691864e-05, \n 0.1615515947341919, 0.012938770465552807, 0.00029289082158356905, \n 0.052058152854442596, 1.6269357729470357e-05], [0.0006827416946180165, \n 2.276465056638699e-05, 0.023704057559370995, 0.16121432185173035, \n 0.0033186341170221567, 0.004117893520742655, 0.03627816215157509, \n 0.009822812862694263, 0.7281517386436462, 0.032687313854694366], [\n 0.0011369712883606553, 0.27387163043022156, 0.07185991108417511, \n 0.15628814697265625, 0.002854800783097744, 0.23154565691947937, \n 0.03204796463251114, 0.003870188258588314, 0.22623319923877716, \n 0.00029159500263631344], [0.0035695999395102262, 0.26706114411354065, \n 0.1508740484714508, 0.0013921442441642284, 0.019328434020280838, \n 0.13771453499794006, 0.029891734942793846, 0.03509771451354027, \n 0.24692872166633606, 0.1081417053937912], [0.000882012362126261, \n 2.536918327677995e-05, 0.0450599268078804, 0.412322998046875, \n 0.0025211411993950605, 0.002278776839375496, 
0.011372447945177555, \n 0.1770726591348648, 0.33388030529022217, 0.014584112912416458], [\n 0.21903501451015472, 5.910552047794226e-09, 0.022012481465935707, \n 0.20099963247776031, 1.0874355211853981e-05, 0.21909210085868835, \n 0.21668335795402527, 4.337367798257219e-08, 0.12212178856134415, \n 4.4732783862855285e-05], [0.014651631936430931, 0.00830799899995327, \n 0.005935078486800194, 0.3953670263290405, 1.1293817806290463e-05, \n 0.4299878776073456, 0.017106691375374794, 0.00014334742445498705, \n 0.11808823049068451, 0.010400976054370403], [0.010301091708242893, \n 0.01435689628124237, 0.07430031895637512, 0.06989920139312744, \n 0.2338510900735855, 0.053795550018548965, 0.22257547080516815, \n 0.0029012206941843033, 0.09203658252954483, 0.22598253190517426], [\n 0.033016644418239594, 0.0020125852897763252, 0.06661045551300049, \n 0.4920836091041565, 0.00025867935619316995, 0.07482428848743439, \n 0.13923810422420502, 0.00012527030776254833, 0.19180776178836823, \n 2.269313517899718e-05], [0.1325867474079132, 0.004940022714436054, \n 0.22300080955028534, 0.2727201282978058, 3.310650572529994e-05, \n 0.12915031611919403, 0.01339033618569374, 1.0927167750196531e-05, \n 0.22410929203033447, 5.8520683523966e-05], [0.126132994890213, \n 0.0013935434399172664, 0.17098797857761383, 0.00039779843064025044, \n 0.07732491940259933, 0.16493096947669983, 0.014501826837658882, \n 0.03405503183603287, 0.20594964921474457, 0.2043251097202301], [\n 0.0008475463255308568, 0.19114449620246887, 0.03174148499965668, \n 0.1596948355436325, 0.1830475926399231, 0.11398201435804367, \n 0.11080365628004074, 0.10536272078752518, 0.05745834857225418, \n 0.04591764137148857], [0.0009525367058813572, 0.0012388192117214203, \n 0.0006522738258354366, 0.15977761149406433, 0.2019728273153305, \n 0.037797972559928894, 0.19880010187625885, 0.008799873292446136, \n 0.18693988025188446, 0.20306788384914398], [0.21417981386184692, \n 1.8215121144748991e-07, 0.11546390503644943, 
0.10518436878919601, \n 5.3784842748427764e-05, 0.17964830994606018, 0.1753360480070114, \n 0.005312803667038679, 0.07569659501314163, 0.1291242241859436], [\n 0.03322113677859306, 1.1228289409359604e-08, 0.11529551446437836, \n 0.006697801407426596, 0.020004654303193092, 0.2904326617717743, \n 0.3397071361541748, 6.173769179440569e-06, 0.1187906265258789, \n 0.07584403455257416], [0.00018722846289165318, 0.00015633362636435777, \n 0.027305739000439644, 0.30433472990989685, 0.12216899544000626, \n 0.0051543135195970535, 0.07717369496822357, 5.6467473768861964e-05, \n 0.46220865845680237, 0.0012535307323560119], [0.2223890870809555, \n 1.8010264568601997e-07, 0.051188305020332336, 0.06915734708309174, \n 0.007792292162775993, 0.13037307560443878, 0.4795873761177063, \n 6.65841726004146e-05, 0.03377178683876991, 0.0056741489097476006], [\n 0.0011432061437517405, 0.172257199883461, 0.08959532529115677, \n 0.09976792335510254, 0.13487820327281952, 0.025573352351784706, \n 0.11224105209112167, 0.1427890509366989, 0.12529729306697845, \n 0.09645748883485794], [0.00039081714930944145, 0.17529502511024475, \n 0.07816692441701889, 0.12808731198310852, 0.13959045708179474, \n 0.04451143741607666, 0.07863735407590866, 0.1518080085515976, \n 0.09225541353225708, 0.11125729233026505], [0.0005360758514143527, \n 0.1871286779642105, 0.09343081712722778, 0.10187795013189316, \n 0.15403643250465393, 0.03745483607053757, 0.10108820348978043, \n 0.1381213515996933, 0.1196260005235672, 0.0666997954249382], [\n 0.02377643622457981, 0.002874232828617096, 0.06835681945085526, \n 0.08628982305526733, 0.16734763979911804, 0.1884264051914215, \n 0.06887176632881165, 0.1883554309606552, 0.11966855823993683, \n 0.0860329195857048], [0.0019290593918412924, 0.0004132240719627589, \n 0.08087942749261856, 0.00133050128351897, 0.2057691514492035, \n 0.014698517508804798, 0.10668473690748215, 0.2002524882555008, \n 0.19643288850784302, 0.19160999357700348], [4.1589693864807487e-05, \n 
3.0074079404585063e-06, 0.00946643017232418, 0.0028675245121121407, \n 0.339987188577652, 0.006530506536364555, 0.21062259376049042, \n 5.006019819120411e-06, 0.4303286373615265, 0.00014742799976374954], [\n 0.23467645049095154, 3.957170217048535e-14, 0.016559595242142677, \n 0.22702592611312866, 0.0004185910802334547, 0.0031147561967372894, \n 0.2260916531085968, 2.4497327899553056e-07, 0.2333890199661255, \n 0.05872354656457901], [0.1723964959383011, 1.4810979109824984e-07, \n 0.001400468056090176, 0.3012116253376007, 0.00017689657397568226, \n 0.29611334204673767, 0.013564502820372581, 0.04992862418293953, \n 0.15185707807540894, 0.013350787572562695], [0.18757264316082, \n 1.502647393181178e-07, 0.0013043361250311136, 0.08373606950044632, \n 0.0005724140792153776, 0.1799388974905014, 0.14538954198360443, \n 0.16594813764095306, 0.06483398377895355, 0.17070381343364716], [\n 0.008307700976729393, 0.0005032537155784667, 0.04173918813467026, \n 0.055757056921720505, 0.2954571545124054, 0.046274807304143906, \n 0.15145555138587952, 0.00160416669677943, 0.36763912439346313, \n 0.031262170523405075], [0.03202534094452858, 2.929154447883775e-07, \n 0.03331722691655159, 0.0002443870762363076, 0.021324075758457184, \n 0.3864181637763977, 0.39420267939567566, 3.2187076612899546e-06, \n 0.08215467631816864, 0.050310224294662476], [0.03041147254407406, \n 3.317395247393051e-10, 0.013215649873018265, 0.009000282734632492, \n 0.15260590612888336, 9.569835674483329e-05, 0.22718068957328796, \n 0.0983223170042038, 0.23328886926174164, 0.23587895929813385], [\n 0.0017376767937093973, 0.01800091378390789, 0.09461784362792969, \n 0.008886604569852352, 0.23299837112426758, 0.03532419353723526, \n 0.20058980584144592, 0.1702878624200821, 0.06943482160568237, \n 0.1681220531463623], [0.26592451333999634, 1.378083283043452e-07, \n 0.26663097739219666, 0.00043869472574442625, 0.0753256231546402, \n 0.000345755455782637, 0.2718716561794281, 0.09590824693441391, \n 0.021168876439332962, 
0.0023856020998209715], [0.007719929795712233, \n 0.000273746729362756, 0.06954099237918854, 0.11292484402656555, \n 0.17693056166172028, 0.0036023242864757776, 0.16335690021514893, \n 0.1139131560921669, 0.17289915680885315, 0.17883846163749695], [\n 0.0002722161589190364, 0.0014734293799847364, 0.0001780118327587843, \n 0.0718056932091713, 0.219150573015213, 0.02937471494078636, \n 0.15243956446647644, 0.07647080719470978, 0.21917390823364258, \n 0.22966115176677704], [0.0008591399528086185, 0.27216723561286926, \n 0.030793067067861557, 0.040201541036367416, 0.07587726414203644, \n 0.06215333193540573, 0.16188929975032806, 0.04154059290885925, \n 0.21999017894268036, 0.09452840685844421], [0.156771719455719, \n 0.0009459690772928298, 0.08676373958587646, 0.012071664445102215, \n 0.046294376254081726, 0.1705559939146042, 0.05631829798221588, \n 0.16554586589336395, 0.14995504915714264, 0.15477733314037323], [\n 0.0036007703747600317, 0.0036146841011941433, 0.007429149001836777, \n 0.10190737992525101, 0.0016259902622550726, 0.45585712790489197, \n 0.04189519211649895, 7.317630092984473e-07, 0.3802386522293091, \n 0.003830441040918231]]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
class SensorReadings:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class SensorReadings:
<|reserved_special_token_0|>
def printReadings(self):
print('temperature from humidity sensor: {} C'.format(self.
temprerature_humidity_sensor))
print('temperature from pressure sensor: {} C'.format(self.
temperature_pressure_sensor))
print('humidity: {}'.format(self.humidity))
print('pressure: {}'.format(self.pressure))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class SensorReadings:
<|reserved_special_token_0|>
def printReadings(self):
print('temperature from humidity sensor: {} C'.format(self.
temprerature_humidity_sensor))
print('temperature from pressure sensor: {} C'.format(self.
temperature_pressure_sensor))
print('humidity: {}'.format(self.humidity))
print('pressure: {}'.format(self.pressure))
<|reserved_special_token_0|>
def getAsMap(self):
return {'temperature_humidity_sensor': self.
temprerature_humidity_sensor, 'temperature_pressure_sensor':
self.temperature_pressure_sensor, 'humidity': self.humidity,
'pressure': self.pressure}
<|reserved_special_token_1|>
class SensorReadings:
def __init__(self, sense_hat):
self.temprerature_humidity_sensor = (sense_hat.
get_temperature_from_humidity())
self.temperature_pressure_sensor = (sense_hat.
get_temperature_from_pressure())
self.humidity = sense_hat.get_humidity()
self.pressure = sense_hat.get_pressure()
def printReadings(self):
print('temperature from humidity sensor: {} C'.format(self.
temprerature_humidity_sensor))
print('temperature from pressure sensor: {} C'.format(self.
temperature_pressure_sensor))
print('humidity: {}'.format(self.humidity))
print('pressure: {}'.format(self.pressure))
<|reserved_special_token_0|>
def getAsMap(self):
return {'temperature_humidity_sensor': self.
temprerature_humidity_sensor, 'temperature_pressure_sensor':
self.temperature_pressure_sensor, 'humidity': self.humidity,
'pressure': self.pressure}
<|reserved_special_token_1|>
class SensorReadings:
def __init__(self, sense_hat):
self.temprerature_humidity_sensor = sense_hat.get_temperature_from_humidity()
self.temperature_pressure_sensor = sense_hat.get_temperature_from_pressure()
self.humidity = sense_hat.get_humidity()
self.pressure = sense_hat.get_pressure()
def printReadings(self):
print("temperature from humidity sensor: {} C".format(self.temprerature_humidity_sensor))
print("temperature from pressure sensor: {} C".format(self.temperature_pressure_sensor))
print("humidity: {}".format(self.humidity))
print("pressure: {}".format(self.pressure))
def display(self, sense_hat):
sense_hat.show_message("T:{:.1f} C".format(self.temprerature_humidity_sensor), text_colour=red)
sense_hat.show_message("H:{:.1f}".format(self.humidity), text_colour=blue)
sense_hat.show_message("P:{:.2f}".format(self.pressure), text_colour=green)
def getAsMap(self):
return {"temperature_humidity_sensor": self.temprerature_humidity_sensor, "temperature_pressure_sensor": self.temperature_pressure_sensor, "humidity": self.humidity, "pressure": self.pressure}
|
flexible
|
{
"blob_id": "f680503488a2780624b28e49b045aad75506d8c5",
"index": 3248,
"step-1": "class SensorReadings:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class SensorReadings:\n <mask token>\n\n def printReadings(self):\n print('temperature from humidity sensor: {} C'.format(self.\n temprerature_humidity_sensor))\n print('temperature from pressure sensor: {} C'.format(self.\n temperature_pressure_sensor))\n print('humidity: {}'.format(self.humidity))\n print('pressure: {}'.format(self.pressure))\n <mask token>\n <mask token>\n",
"step-3": "class SensorReadings:\n <mask token>\n\n def printReadings(self):\n print('temperature from humidity sensor: {} C'.format(self.\n temprerature_humidity_sensor))\n print('temperature from pressure sensor: {} C'.format(self.\n temperature_pressure_sensor))\n print('humidity: {}'.format(self.humidity))\n print('pressure: {}'.format(self.pressure))\n <mask token>\n\n def getAsMap(self):\n return {'temperature_humidity_sensor': self.\n temprerature_humidity_sensor, 'temperature_pressure_sensor':\n self.temperature_pressure_sensor, 'humidity': self.humidity,\n 'pressure': self.pressure}\n",
"step-4": "class SensorReadings:\n\n def __init__(self, sense_hat):\n self.temprerature_humidity_sensor = (sense_hat.\n get_temperature_from_humidity())\n self.temperature_pressure_sensor = (sense_hat.\n get_temperature_from_pressure())\n self.humidity = sense_hat.get_humidity()\n self.pressure = sense_hat.get_pressure()\n\n def printReadings(self):\n print('temperature from humidity sensor: {} C'.format(self.\n temprerature_humidity_sensor))\n print('temperature from pressure sensor: {} C'.format(self.\n temperature_pressure_sensor))\n print('humidity: {}'.format(self.humidity))\n print('pressure: {}'.format(self.pressure))\n <mask token>\n\n def getAsMap(self):\n return {'temperature_humidity_sensor': self.\n temprerature_humidity_sensor, 'temperature_pressure_sensor':\n self.temperature_pressure_sensor, 'humidity': self.humidity,\n 'pressure': self.pressure}\n",
"step-5": "class SensorReadings:\n\n def __init__(self, sense_hat):\n self.temprerature_humidity_sensor = sense_hat.get_temperature_from_humidity()\n self.temperature_pressure_sensor = sense_hat.get_temperature_from_pressure()\n self.humidity = sense_hat.get_humidity()\n self.pressure = sense_hat.get_pressure()\n\n def printReadings(self):\n print(\"temperature from humidity sensor: {} C\".format(self.temprerature_humidity_sensor))\n print(\"temperature from pressure sensor: {} C\".format(self.temperature_pressure_sensor))\n print(\"humidity: {}\".format(self.humidity))\n print(\"pressure: {}\".format(self.pressure))\n\n def display(self, sense_hat):\n sense_hat.show_message(\"T:{:.1f} C\".format(self.temprerature_humidity_sensor), text_colour=red)\n sense_hat.show_message(\"H:{:.1f}\".format(self.humidity), text_colour=blue)\n sense_hat.show_message(\"P:{:.2f}\".format(self.pressure), text_colour=green)\n\n def getAsMap(self):\n return {\"temperature_humidity_sensor\": self.temprerature_humidity_sensor, \"temperature_pressure_sensor\": self.temperature_pressure_sensor, \"humidity\": self.humidity, \"pressure\": self.pressure}",
"step-ids": [
1,
2,
3,
4,
6
]
}
|
[
1,
2,
3,
4,
6
] |
<|reserved_special_token_0|>
def logined(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
return render(request, 'login.html')
else:
return func(request, *args, **kwargs)
return wrapper
def api_check(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
else:
return func(request, *args, **kwargs)
return wrapper
def login(request):
if request.method == 'GET':
if request.session.get('user'):
return render(request, 'index.html')
return render(request, 'login.html')
else:
req = json.loads(request.body)
user = req.get('username')
pwd = req.get('pwd')
obj_user = models.Users.objects.filter(user_name=user).all()
if not obj_user:
res = dict(state_code=1, error_msg='用户不存在')
else:
password = obj_user.first().password
if str(pwd) != str(password):
res = dict(state_code=2, error_msg='密码错误')
else:
request.session['user'] = user
request.session.set_expiry(60 * 60 * 4)
res = dict(state_code=0, error_msg='密码错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
def logout(request):
if request.session.get('user'):
del request.session['user']
return render(request, 'login.html')
@logined
def index(request):
return render(request, 'index.html')
@api_check
def get_dir_list(request):
user = request.session.get('user')
obj_dir = models.Dirs.objects.filter(user_name=user).all()
dir_list = []
for dirs in obj_dir:
user_dir = dirs.dir
dir_list.append(user_dir)
res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
<|reserved_special_token_0|>
@api_check
def upload_file(request):
dir_name = request.POST.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if not os.path.exists(dir_path):
res = dict(state_code=1, error_msg='目录不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
File = request.FILES.get('file', None)
if File is None:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
file_name = File.name
file_path = os.path.join(dir_path, file_name)
with open(file_path, 'wb+') as f:
for chunk in File.chunks():
f.write(chunk)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res), content_type='application/json')
@api_check
def query_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
dir_path = os.path.join(files_folder, dir_name)
cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()
file_list = cmd_info.split('\n')[1:-1]
file_list_data = []
for file_info_cmd in file_list:
file_info_list = file_info_cmd.split(' ')
file_info = list(filter(None, file_info_list))
file = file_info[-1]
file_size = file_info[4]
name_type = file.rsplit('.', 1)
if len(name_type) < 2:
name_type.append('未知')
file_name, file_type = name_type
file_list_data.append({'file_name': file_name, 'file_type':
file_type, 'file_size': file_size})
res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}
)
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
else:
os.remove(file_path)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def logined(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
return render(request, 'login.html')
else:
return func(request, *args, **kwargs)
return wrapper
def api_check(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
else:
return func(request, *args, **kwargs)
return wrapper
def login(request):
if request.method == 'GET':
if request.session.get('user'):
return render(request, 'index.html')
return render(request, 'login.html')
else:
req = json.loads(request.body)
user = req.get('username')
pwd = req.get('pwd')
obj_user = models.Users.objects.filter(user_name=user).all()
if not obj_user:
res = dict(state_code=1, error_msg='用户不存在')
else:
password = obj_user.first().password
if str(pwd) != str(password):
res = dict(state_code=2, error_msg='密码错误')
else:
request.session['user'] = user
request.session.set_expiry(60 * 60 * 4)
res = dict(state_code=0, error_msg='密码错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
def logout(request):
if request.session.get('user'):
del request.session['user']
return render(request, 'login.html')
@logined
def index(request):
return render(request, 'index.html')
@api_check
def get_dir_list(request):
user = request.session.get('user')
obj_dir = models.Dirs.objects.filter(user_name=user).all()
dir_list = []
for dirs in obj_dir:
user_dir = dirs.dir
dir_list.append(user_dir)
res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
<|reserved_special_token_0|>
@api_check
def upload_file(request):
dir_name = request.POST.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if not os.path.exists(dir_path):
res = dict(state_code=1, error_msg='目录不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
File = request.FILES.get('file', None)
if File is None:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
file_name = File.name
file_path = os.path.join(dir_path, file_name)
with open(file_path, 'wb+') as f:
for chunk in File.chunks():
f.write(chunk)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res), content_type='application/json')
@api_check
def query_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
dir_path = os.path.join(files_folder, dir_name)
cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()
file_list = cmd_info.split('\n')[1:-1]
file_list_data = []
for file_info_cmd in file_list:
file_info_list = file_info_cmd.split(' ')
file_info = list(filter(None, file_info_list))
file = file_info[-1]
file_size = file_info[4]
name_type = file.rsplit('.', 1)
if len(name_type) < 2:
name_type.append('未知')
file_name, file_type = name_type
file_list_data.append({'file_name': file_name, 'file_type':
file_type, 'file_size': file_size})
res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}
)
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
else:
os.remove(file_path)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def download_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
from django.http import StreamingHttpResponse
file_size = os.path.getsize(file_path)
def file_iterator(file_name, chunk_size=512):
with open(file_name, 'rb') as f:
while True:
c = f.read(chunk_size)
if c:
yield c
else:
break
the_file_name = file_path
res = file_iterator(the_file_name)
response = StreamingHttpResponse(res)
response['Content-Type'] = 'application/octet-stream; charset=UTF-8'
response['Content-Length'] = file_size
response['Content-Disposition'] = 'attachment;filename="{}"'.format(
escape_uri_path(file))
return response
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def logined(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
return render(request, 'login.html')
else:
return func(request, *args, **kwargs)
return wrapper
def api_check(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
else:
return func(request, *args, **kwargs)
return wrapper
def login(request):
    """Log a user in.

    GET: render index when a session exists, otherwise the login page.
    POST (JSON ``username``/``pwd``): validate credentials, start a 4-hour
    session on success, and return a JSON status payload.
    """
    if request.method == 'GET':
        if request.session.get('user'):
            return render(request, 'index.html')
        return render(request, 'login.html')
    else:
        req = json.loads(request.body)
        user = req.get('username')
        pwd = req.get('pwd')
        obj_user = models.Users.objects.filter(user_name=user).all()
        if not obj_user:
            res = dict(state_code=1, error_msg='用户不存在')
        else:
            password = obj_user.first().password
            if str(pwd) != str(password):
                res = dict(state_code=2, error_msg='密码错误')
            else:
                request.session['user'] = user
                request.session.set_expiry(60 * 60 * 4)
                # BUG FIX: success previously reported "密码错误" (wrong password).
                res = dict(state_code=0, error_msg='ok')
        return HttpResponse(json.dumps(res, ensure_ascii=False),
            content_type='application/json')
def logout(request):
if request.session.get('user'):
del request.session['user']
return render(request, 'login.html')
@logined
def index(request):
return render(request, 'index.html')
@api_check
def get_dir_list(request):
user = request.session.get('user')
obj_dir = models.Dirs.objects.filter(user_name=user).all()
dir_list = []
for dirs in obj_dir:
user_dir = dirs.dir
dir_list.append(user_dir)
res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def user_mkdir(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if os.path.exists(dir_path):
res = dict(state_code=1, error_msg='该目录已被使用')
else:
user = request.session.get('user')
if user:
models.Dirs.objects.create(user_name=user, dir=dir_name)
os.mkdir(dir_path)
res = dict(state_code=0, error_msg='ok')
else:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_dir(request):
    """Delete a user directory: remove its Dirs record and the on-disk tree."""
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    if not dir_name:
        res = dict(state_code=-2, error_msg='参数错误')
        return HttpResponse(json.dumps(res, ensure_ascii=False),
            content_type='application/json')
    dir_path = os.path.join(files_folder, dir_name)
    if not os.path.exists(dir_path):
        res = dict(state_code=1, error_msg='目录不存在')
    else:
        # Keep the DB row and the on-disk tree consistent.
        with transaction.atomic():
            obj_dir = models.Dirs.objects.filter(dir=dir_name).all()
            if obj_dir:
                obj_dir.delete()
            shutil.rmtree(dir_path)
        # BUG FIX: the response key was misspelled "eror_msg".
        res = dict(state_code=0, error_msg='ok')
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
        'application/json')
@api_check
def upload_file(request):
dir_name = request.POST.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if not os.path.exists(dir_path):
res = dict(state_code=1, error_msg='目录不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
File = request.FILES.get('file', None)
if File is None:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
file_name = File.name
file_path = os.path.join(dir_path, file_name)
with open(file_path, 'wb+') as f:
for chunk in File.chunks():
f.write(chunk)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res), content_type='application/json')
@api_check
def query_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
dir_path = os.path.join(files_folder, dir_name)
cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()
file_list = cmd_info.split('\n')[1:-1]
file_list_data = []
for file_info_cmd in file_list:
file_info_list = file_info_cmd.split(' ')
file_info = list(filter(None, file_info_list))
file = file_info[-1]
file_size = file_info[4]
name_type = file.rsplit('.', 1)
if len(name_type) < 2:
name_type.append('未知')
file_name, file_type = name_type
file_list_data.append({'file_name': file_name, 'file_type':
file_type, 'file_size': file_size})
res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}
)
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
else:
os.remove(file_path)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def download_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
from django.http import StreamingHttpResponse
file_size = os.path.getsize(file_path)
def file_iterator(file_name, chunk_size=512):
with open(file_name, 'rb') as f:
while True:
c = f.read(chunk_size)
if c:
yield c
else:
break
the_file_name = file_path
res = file_iterator(the_file_name)
response = StreamingHttpResponse(res)
response['Content-Type'] = 'application/octet-stream; charset=UTF-8'
response['Content-Length'] = file_size
response['Content-Disposition'] = 'attachment;filename="{}"'.format(
escape_uri_path(file))
return response
<|reserved_special_token_1|>
import os
import shutil
import json
from django.shortcuts import render, HttpResponse
from django.utils.encoding import escape_uri_path
from django.db import transaction
from web_pan.settings import files_folder
from disk import models
def logined(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
return render(request, 'login.html')
else:
return func(request, *args, **kwargs)
return wrapper
def api_check(func):
def wrapper(request, *args, **kwargs):
session = request.session.get('user')
if not session:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
else:
return func(request, *args, **kwargs)
return wrapper
def login(request):
    """Log a user in.

    GET: render index when a session exists, otherwise the login page.
    POST (JSON ``username``/``pwd``): validate credentials, start a 4-hour
    session on success, and return a JSON status payload.
    """
    if request.method == 'GET':
        if request.session.get('user'):
            return render(request, 'index.html')
        return render(request, 'login.html')
    else:
        req = json.loads(request.body)
        user = req.get('username')
        pwd = req.get('pwd')
        obj_user = models.Users.objects.filter(user_name=user).all()
        if not obj_user:
            res = dict(state_code=1, error_msg='用户不存在')
        else:
            password = obj_user.first().password
            if str(pwd) != str(password):
                res = dict(state_code=2, error_msg='密码错误')
            else:
                request.session['user'] = user
                request.session.set_expiry(60 * 60 * 4)
                # BUG FIX: success previously reported "密码错误" (wrong password).
                res = dict(state_code=0, error_msg='ok')
        return HttpResponse(json.dumps(res, ensure_ascii=False),
            content_type='application/json')
def logout(request):
if request.session.get('user'):
del request.session['user']
return render(request, 'login.html')
@logined
def index(request):
return render(request, 'index.html')
@api_check
def get_dir_list(request):
user = request.session.get('user')
obj_dir = models.Dirs.objects.filter(user_name=user).all()
dir_list = []
for dirs in obj_dir:
user_dir = dirs.dir
dir_list.append(user_dir)
res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def user_mkdir(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if os.path.exists(dir_path):
res = dict(state_code=1, error_msg='该目录已被使用')
else:
user = request.session.get('user')
if user:
models.Dirs.objects.create(user_name=user, dir=dir_name)
os.mkdir(dir_path)
res = dict(state_code=0, error_msg='ok')
else:
res = dict(state_code=-3, error_msg='登陆过期')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_dir(request):
    """Delete a user directory: remove its Dirs record and the on-disk tree."""
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    if not dir_name:
        res = dict(state_code=-2, error_msg='参数错误')
        return HttpResponse(json.dumps(res, ensure_ascii=False),
            content_type='application/json')
    dir_path = os.path.join(files_folder, dir_name)
    if not os.path.exists(dir_path):
        res = dict(state_code=1, error_msg='目录不存在')
    else:
        # Keep the DB row and the on-disk tree consistent.
        with transaction.atomic():
            obj_dir = models.Dirs.objects.filter(dir=dir_name).all()
            if obj_dir:
                obj_dir.delete()
            shutil.rmtree(dir_path)
        # BUG FIX: the response key was misspelled "eror_msg".
        res = dict(state_code=0, error_msg='ok')
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
        'application/json')
@api_check
def upload_file(request):
dir_name = request.POST.get('dir_name')
if not dir_name:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
dir_path = os.path.join(files_folder, dir_name)
if not os.path.exists(dir_path):
res = dict(state_code=1, error_msg='目录不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
File = request.FILES.get('file', None)
if File is None:
res = dict(state_code=-2, error_msg='参数错误')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
file_name = File.name
file_path = os.path.join(dir_path, file_name)
with open(file_path, 'wb+') as f:
for chunk in File.chunks():
f.write(chunk)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res), content_type='application/json')
@api_check
def query_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
dir_path = os.path.join(files_folder, dir_name)
cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()
file_list = cmd_info.split('\n')[1:-1]
file_list_data = []
for file_info_cmd in file_list:
file_info_list = file_info_cmd.split(' ')
file_info = list(filter(None, file_info_list))
file = file_info[-1]
file_size = file_info[4]
name_type = file.rsplit('.', 1)
if len(name_type) < 2:
name_type.append('未知')
file_name, file_type = name_type
file_list_data.append({'file_name': file_name, 'file_type':
file_type, 'file_size': file_size})
res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}
)
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def del_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
else:
os.remove(file_path)
res = dict(state_code=0, error_msg='ok')
return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=
'application/json')
@api_check
def download_file(request):
req = json.loads(request.body)
dir_name = req.get('dir_name')
file_name = req.get('file_name')
file_type = req.get('file_type')
file = file_name + '.' + file_type if file_type != '未知' else file_name
file_path = os.path.join(os.path.join(files_folder, dir_name), file)
if not os.path.exists(file_path):
res = dict(state_code=1, error_msg='文件不存在')
return HttpResponse(json.dumps(res, ensure_ascii=False),
content_type='application/json')
from django.http import StreamingHttpResponse
file_size = os.path.getsize(file_path)
def file_iterator(file_name, chunk_size=512):
with open(file_name, 'rb') as f:
while True:
c = f.read(chunk_size)
if c:
yield c
else:
break
the_file_name = file_path
res = file_iterator(the_file_name)
response = StreamingHttpResponse(res)
response['Content-Type'] = 'application/octet-stream; charset=UTF-8'
response['Content-Length'] = file_size
response['Content-Disposition'] = 'attachment;filename="{}"'.format(
escape_uri_path(file))
return response
<|reserved_special_token_1|>
import os
import shutil
import json
from django.shortcuts import render, HttpResponse
from django.utils.encoding import escape_uri_path
from django.db import transaction
from web_pan.settings import files_folder
from disk import models
# Create your views here.
def logined(func):
    """Decorator for page views: show the login page unless a user session exists."""

    def wrapper(request, *args, **kwargs):
        if request.session.get('user'):
            return func(request, *args, **kwargs)
        return render(request, 'login.html')
    return wrapper
def api_check(func):
    """Decorator for API views: return a JSON 'session expired' payload when no user session exists."""

    def wrapper(request, *args, **kwargs):
        if request.session.get('user'):
            return func(request, *args, **kwargs)
        payload = dict(state_code=-3, error_msg="登陆过期")
        return HttpResponse(json.dumps(payload, ensure_ascii=False), content_type='application/json')
    return wrapper
def login(request):
    """Log a user in.

    GET: render the index page when already logged in, otherwise the login page.
    POST (JSON body with ``username``/``pwd``): validate credentials, start a
    4-hour session on success, and return a JSON status payload.
    """
    if request.method == 'GET':
        if request.session.get('user'):
            return render(request, 'index.html')
        return render(request, 'login.html')
    else:
        req = json.loads(request.body)
        user = req.get('username')
        pwd = req.get('pwd')
        obj_user = models.Users.objects.filter(user_name=user).all()
        if not obj_user:
            res = dict(
                state_code=1,
                error_msg="用户不存在"
            )
        else:
            # NOTE(review): passwords appear to be stored and compared in
            # plaintext -- confirm whether hashing is handled elsewhere.
            password = obj_user.first().password
            if str(pwd) != str(password):
                res = dict(
                    state_code=2,
                    error_msg="密码错误"
                )
            else:
                request.session['user'] = user
                request.session.set_expiry(60 * 60 * 4)  # session lives 4 hours
                # BUG FIX: success previously reported "密码错误" (wrong password).
                res = dict(
                    state_code=0,
                    error_msg="ok"
                )
        return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
def logout(request):
    """End the current session (if one exists) and return the login page."""
    session = request.session
    if session.get('user'):
        del session['user']
    return render(request, 'login.html')
@logined
def index(request):
    # Main page; @logined sends anonymous users to the login page instead.
    return render(request, 'index.html')
@api_check
def get_dir_list(request):
    """Return the directory names owned by the current session user as JSON."""
    user = request.session.get('user')
    owned = models.Dirs.objects.filter(user_name=user).all()
    dir_list = [record.dir for record in owned]
    res = dict(
        state_code=0,
        error_msg='ok',
        data={"dir_list": dir_list}
    )
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
@api_check
def user_mkdir(request):
    """Create a new top-level directory for the current user.

    Expects a JSON body with ``dir_name``; records ownership in the Dirs
    table and creates the directory on disk.  Returns a JSON status payload.
    """
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    if not dir_name:
        res = dict(state_code=-2, error_msg='参数错误')
        return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
    dir_path = os.path.join(files_folder, dir_name)
    if os.path.exists(dir_path):
        res = dict(state_code=1, error_msg="该目录已被使用")
    elif request.session.get('user'):
        user = request.session.get('user')
        # Record ownership first, then materialize the directory on disk.
        models.Dirs.objects.create(user_name=user, dir=dir_name)
        os.mkdir(dir_path)
        res = dict(state_code=0, error_msg='ok')
    else:
        res = dict(state_code=-3, error_msg="登陆过期")
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
@api_check
def del_dir(request):
    """Delete a user directory: remove its Dirs record and the on-disk tree.

    Expects a JSON body with ``dir_name``.  Returns a JSON status payload.
    """
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    if not dir_name:
        res = dict(
            state_code=-2,
            error_msg='参数错误'
        )
        return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
    # NOTE(review): dir_name comes straight from the client; a value such as
    # "../x" would escape files_folder -- confirm upstream validation.
    dir_path = os.path.join(files_folder, dir_name)
    if not os.path.exists(dir_path):
        res = dict(
            state_code=1,
            error_msg='目录不存在'
        )
    else:
        # Keep the DB row and the on-disk tree consistent.
        with transaction.atomic():
            obj_dir = models.Dirs.objects.filter(dir=dir_name).all()
            if obj_dir:
                obj_dir.delete()
            shutil.rmtree(dir_path)
        # BUG FIX: the response key was misspelled "eror_msg"; every other
        # endpoint (and API clients) use "error_msg".
        res = dict(
            state_code=0,
            error_msg='ok'
        )
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
@api_check
def upload_file(request):
    """Save an uploaded file into an existing user directory.

    Reads ``dir_name`` from POST data and the file from ``request.FILES``;
    writes the content in chunks and returns a JSON status payload.
    """
    def _fail(code, msg):
        # Error responses all share one JSON shape.
        body = json.dumps(dict(state_code=code, error_msg=msg), ensure_ascii=False)
        return HttpResponse(body, content_type='application/json')

    dir_name = request.POST.get('dir_name')
    if not dir_name:
        return _fail(-2, '参数错误')
    dir_path = os.path.join(files_folder, dir_name)
    if not os.path.exists(dir_path):
        return _fail(1, '目录不存在')
    # The uploaded file; None when the multipart field is absent.
    upload = request.FILES.get("file", None)
    if upload is None:
        return _fail(-2, '参数错误')
    target = os.path.join(dir_path, upload.name)
    # Write the upload chunk by chunk in binary mode.
    with open(target, 'wb+') as handle:
        for chunk in upload.chunks():
            handle.write(chunk)
    res = dict(state_code=0, error_msg='ok')
    return HttpResponse(json.dumps(res), content_type='application/json')
@api_check
def query_file(request):
    """List the files in a user directory.

    Expects a JSON body with ``dir_name``; shells out to ``ls -l -h`` and
    parses its output into ``{file_name, file_type, file_size}`` records.
    """
    import shlex  # local import: only this view builds a shell command

    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    dir_path = os.path.join(files_folder, dir_name)
    # SECURITY FIX: dir_name is client-controlled; quote the path so a value
    # like "x; rm -rf ~" cannot be interpreted by the shell.
    cmd_info = os.popen("ls -l -h {}".format(shlex.quote(dir_path))).read()
    # Drop the leading "total ..." line and the trailing empty split entry.
    file_list = cmd_info.split('\n')[1:-1]
    file_list_data = []
    for line in file_list:
        # Collapse repeated spaces; ls columns: mode links owner group size ... name
        fields = list(filter(None, line.split(' ')))
        file = fields[-1]
        file_size = fields[4]
        name_type = file.rsplit('.', 1)
        if len(name_type) < 2:
            name_type.append('未知')  # no extension -> "unknown" type marker
        file_name, file_type = name_type
        file_list_data.append({
            'file_name': file_name,
            'file_type': file_type,
            'file_size': file_size
        })
    res = dict(
        state_code=0,
        error_msg='ok',
        data={'file_list': file_list_data}
    )
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
@api_check
def del_file(request):
    """Remove a single file from a user directory.

    JSON body: ``dir_name``, ``file_name``, ``file_type`` (the value
    ``未知`` marks files stored without an extension).
    """
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    file_name = req.get('file_name')
    file_type = req.get('file_type')
    # Rebuild the on-disk name from its split parts.
    if file_type != '未知':
        file = file_name + '.' + file_type
    else:
        file = file_name
    file_path = os.path.join(os.path.join(files_folder, dir_name), file)
    if os.path.exists(file_path):
        os.remove(file_path)
        res = dict(state_code=0, error_msg='ok')
    else:
        res = dict(state_code=1, error_msg='文件不存在')
    return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
@api_check
def download_file(request):
    """Stream a file from a user directory to the client as an attachment.

    JSON body: ``dir_name``, ``file_name``, ``file_type`` (``未知`` marks
    files stored without an extension).
    """
    req = json.loads(request.body)
    dir_name = req.get('dir_name')
    file_name = req.get('file_name')
    file_type = req.get('file_type')
    file = file_name + '.' + file_type if file_type != '未知' else file_name
    file_path = os.path.join(os.path.join(files_folder, dir_name), file)
    if not os.path.exists(file_path):
        res = dict(state_code=1, error_msg='文件不存在')
        return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')
    from django.http import StreamingHttpResponse
    file_size = os.path.getsize(file_path)

    def file_iterator(path, chunk_size=512):
        # Yield the file in small binary chunks for streaming.
        with open(path, 'rb') as fh:
            while True:
                piece = fh.read(chunk_size)
                if not piece:
                    break
                yield piece

    response = StreamingHttpResponse(file_iterator(file_path))
    response['Content-Type'] = 'application/octet-stream; charset=UTF-8'
    response['Content-Length'] = file_size
    # escape_uri_path keeps non-ASCII download names intact in the header.
    response['Content-Disposition'] = 'attachment;filename="{}"'.format(escape_uri_path(file))
    return response
|
flexible
|
{
"blob_id": "eeb87891d1a02484a61537745ec6f13387017929",
"index": 705,
"step-1": "<mask token>\n\n\ndef logined(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n return render(request, 'login.html')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef api_check(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef login(request):\n if request.method == 'GET':\n if request.session.get('user'):\n return render(request, 'index.html')\n return render(request, 'login.html')\n else:\n req = json.loads(request.body)\n user = req.get('username')\n pwd = req.get('pwd')\n obj_user = models.Users.objects.filter(user_name=user).all()\n if not obj_user:\n res = dict(state_code=1, error_msg='用户不存在')\n else:\n password = obj_user.first().password\n if str(pwd) != str(password):\n res = dict(state_code=2, error_msg='密码错误')\n else:\n request.session['user'] = user\n request.session.set_expiry(60 * 60 * 4)\n res = dict(state_code=0, error_msg='密码错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n\n\ndef logout(request):\n if request.session.get('user'):\n del request.session['user']\n return render(request, 'login.html')\n\n\n@logined\ndef index(request):\n return render(request, 'index.html')\n\n\n@api_check\ndef get_dir_list(request):\n user = request.session.get('user')\n obj_dir = models.Dirs.objects.filter(user_name=user).all()\n dir_list = []\n for dirs in obj_dir:\n user_dir = dirs.dir\n dir_list.append(user_dir)\n res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n<mask token>\n\n\n@api_check\ndef upload_file(request):\n dir_name = 
request.POST.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n File = request.FILES.get('file', None)\n if File is None:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n file_name = File.name\n file_path = os.path.join(dir_path, file_name)\n with open(file_path, 'wb+') as f:\n for chunk in File.chunks():\n f.write(chunk)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res), content_type='application/json')\n\n\n@api_check\ndef query_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n dir_path = os.path.join(files_folder, dir_name)\n cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()\n file_list = cmd_info.split('\\n')[1:-1]\n file_list_data = []\n for file_info_cmd in file_list:\n file_info_list = file_info_cmd.split(' ')\n file_info = list(filter(None, file_info_list))\n file = file_info[-1]\n file_size = file_info[4]\n name_type = file.rsplit('.', 1)\n if len(name_type) < 2:\n name_type.append('未知')\n file_name, file_type = name_type\n file_list_data.append({'file_name': file_name, 'file_type':\n file_type, 'file_size': file_size})\n res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' 
+ file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n else:\n os.remove(file_path)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef logined(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n return render(request, 'login.html')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef api_check(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef login(request):\n if request.method == 'GET':\n if request.session.get('user'):\n return render(request, 'index.html')\n return render(request, 'login.html')\n else:\n req = json.loads(request.body)\n user = req.get('username')\n pwd = req.get('pwd')\n obj_user = models.Users.objects.filter(user_name=user).all()\n if not obj_user:\n res = dict(state_code=1, error_msg='用户不存在')\n else:\n password = obj_user.first().password\n if str(pwd) != str(password):\n res = dict(state_code=2, error_msg='密码错误')\n else:\n request.session['user'] = user\n request.session.set_expiry(60 * 60 * 4)\n res = dict(state_code=0, error_msg='密码错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n\n\ndef logout(request):\n if request.session.get('user'):\n del request.session['user']\n return render(request, 'login.html')\n\n\n@logined\ndef index(request):\n return render(request, 'index.html')\n\n\n@api_check\ndef get_dir_list(request):\n user = request.session.get('user')\n obj_dir = models.Dirs.objects.filter(user_name=user).all()\n dir_list = []\n for dirs in obj_dir:\n user_dir = dirs.dir\n dir_list.append(user_dir)\n res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n<mask token>\n\n\n@api_check\ndef upload_file(request):\n dir_name = 
request.POST.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n File = request.FILES.get('file', None)\n if File is None:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n file_name = File.name\n file_path = os.path.join(dir_path, file_name)\n with open(file_path, 'wb+') as f:\n for chunk in File.chunks():\n f.write(chunk)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res), content_type='application/json')\n\n\n@api_check\ndef query_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n dir_path = os.path.join(files_folder, dir_name)\n cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()\n file_list = cmd_info.split('\\n')[1:-1]\n file_list_data = []\n for file_info_cmd in file_list:\n file_info_list = file_info_cmd.split(' ')\n file_info = list(filter(None, file_info_list))\n file = file_info[-1]\n file_size = file_info[4]\n name_type = file.rsplit('.', 1)\n if len(name_type) < 2:\n name_type.append('未知')\n file_name, file_type = name_type\n file_list_data.append({'file_name': file_name, 'file_type':\n file_type, 'file_size': file_size})\n res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' 
+ file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n else:\n os.remove(file_path)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef download_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' + file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n from django.http import StreamingHttpResponse\n file_size = os.path.getsize(file_path)\n\n def file_iterator(file_name, chunk_size=512):\n with open(file_name, 'rb') as f:\n while True:\n c = f.read(chunk_size)\n if c:\n yield c\n else:\n break\n the_file_name = file_path\n res = file_iterator(the_file_name)\n response = StreamingHttpResponse(res)\n response['Content-Type'] = 'application/octet-stream; charset=UTF-8'\n response['Content-Length'] = file_size\n response['Content-Disposition'] = 'attachment;filename=\"{}\"'.format(\n escape_uri_path(file))\n return response\n",
"step-3": "<mask token>\n\n\ndef logined(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n return render(request, 'login.html')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef api_check(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef login(request):\n if request.method == 'GET':\n if request.session.get('user'):\n return render(request, 'index.html')\n return render(request, 'login.html')\n else:\n req = json.loads(request.body)\n user = req.get('username')\n pwd = req.get('pwd')\n obj_user = models.Users.objects.filter(user_name=user).all()\n if not obj_user:\n res = dict(state_code=1, error_msg='用户不存在')\n else:\n password = obj_user.first().password\n if str(pwd) != str(password):\n res = dict(state_code=2, error_msg='密码错误')\n else:\n request.session['user'] = user\n request.session.set_expiry(60 * 60 * 4)\n res = dict(state_code=0, error_msg='密码错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n\n\ndef logout(request):\n if request.session.get('user'):\n del request.session['user']\n return render(request, 'login.html')\n\n\n@logined\ndef index(request):\n return render(request, 'index.html')\n\n\n@api_check\ndef get_dir_list(request):\n user = request.session.get('user')\n obj_dir = models.Dirs.objects.filter(user_name=user).all()\n dir_list = []\n for dirs in obj_dir:\n user_dir = dirs.dir\n dir_list.append(user_dir)\n res = dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef user_mkdir(request):\n req = json.loads(request.body)\n 
dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='该目录已被使用')\n else:\n user = request.session.get('user')\n if user:\n models.Dirs.objects.create(user_name=user, dir=dir_name)\n os.mkdir(dir_path)\n res = dict(state_code=0, error_msg='ok')\n else:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_dir(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n else:\n with transaction.atomic():\n obj_dir = models.Dirs.objects.filter(dir=dir_name).all()\n if obj_dir:\n obj_dir.delete()\n shutil.rmtree(dir_path)\n res = dict(state_code=0, eror_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef upload_file(request):\n dir_name = request.POST.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n File = request.FILES.get('file', None)\n if File is None:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n 
content_type='application/json')\n file_name = File.name\n file_path = os.path.join(dir_path, file_name)\n with open(file_path, 'wb+') as f:\n for chunk in File.chunks():\n f.write(chunk)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res), content_type='application/json')\n\n\n@api_check\ndef query_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n dir_path = os.path.join(files_folder, dir_name)\n cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()\n file_list = cmd_info.split('\\n')[1:-1]\n file_list_data = []\n for file_info_cmd in file_list:\n file_info_list = file_info_cmd.split(' ')\n file_info = list(filter(None, file_info_list))\n file = file_info[-1]\n file_size = file_info[4]\n name_type = file.rsplit('.', 1)\n if len(name_type) < 2:\n name_type.append('未知')\n file_name, file_type = name_type\n file_list_data.append({'file_name': file_name, 'file_type':\n file_type, 'file_size': file_size})\n res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' + file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n else:\n os.remove(file_path)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef download_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' 
+ file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n from django.http import StreamingHttpResponse\n file_size = os.path.getsize(file_path)\n\n def file_iterator(file_name, chunk_size=512):\n with open(file_name, 'rb') as f:\n while True:\n c = f.read(chunk_size)\n if c:\n yield c\n else:\n break\n the_file_name = file_path\n res = file_iterator(the_file_name)\n response = StreamingHttpResponse(res)\n response['Content-Type'] = 'application/octet-stream; charset=UTF-8'\n response['Content-Length'] = file_size\n response['Content-Disposition'] = 'attachment;filename=\"{}\"'.format(\n escape_uri_path(file))\n return response\n",
"step-4": "import os\nimport shutil\nimport json\nfrom django.shortcuts import render, HttpResponse\nfrom django.utils.encoding import escape_uri_path\nfrom django.db import transaction\nfrom web_pan.settings import files_folder\nfrom disk import models\n\n\ndef logined(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n return render(request, 'login.html')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef api_check(func):\n\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n else:\n return func(request, *args, **kwargs)\n return wrapper\n\n\ndef login(request):\n if request.method == 'GET':\n if request.session.get('user'):\n return render(request, 'index.html')\n return render(request, 'login.html')\n else:\n req = json.loads(request.body)\n user = req.get('username')\n pwd = req.get('pwd')\n obj_user = models.Users.objects.filter(user_name=user).all()\n if not obj_user:\n res = dict(state_code=1, error_msg='用户不存在')\n else:\n password = obj_user.first().password\n if str(pwd) != str(password):\n res = dict(state_code=2, error_msg='密码错误')\n else:\n request.session['user'] = user\n request.session.set_expiry(60 * 60 * 4)\n res = dict(state_code=0, error_msg='密码错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n\n\ndef logout(request):\n if request.session.get('user'):\n del request.session['user']\n return render(request, 'login.html')\n\n\n@logined\ndef index(request):\n return render(request, 'index.html')\n\n\n@api_check\ndef get_dir_list(request):\n user = request.session.get('user')\n obj_dir = models.Dirs.objects.filter(user_name=user).all()\n dir_list = []\n for dirs in obj_dir:\n user_dir = dirs.dir\n dir_list.append(user_dir)\n res = 
dict(state_code=0, error_msg='ok', data={'dir_list': dir_list})\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef user_mkdir(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='该目录已被使用')\n else:\n user = request.session.get('user')\n if user:\n models.Dirs.objects.create(user_name=user, dir=dir_name)\n os.mkdir(dir_path)\n res = dict(state_code=0, error_msg='ok')\n else:\n res = dict(state_code=-3, error_msg='登陆过期')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_dir(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n else:\n with transaction.atomic():\n obj_dir = models.Dirs.objects.filter(dir=dir_name).all()\n if obj_dir:\n obj_dir.delete()\n shutil.rmtree(dir_path)\n res = dict(state_code=0, eror_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef upload_file(request):\n dir_name = request.POST.get('dir_name')\n if not dir_name:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(state_code=1, error_msg='目录不存在')\n return HttpResponse(json.dumps(res, 
ensure_ascii=False),\n content_type='application/json')\n File = request.FILES.get('file', None)\n if File is None:\n res = dict(state_code=-2, error_msg='参数错误')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n file_name = File.name\n file_path = os.path.join(dir_path, file_name)\n with open(file_path, 'wb+') as f:\n for chunk in File.chunks():\n f.write(chunk)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res), content_type='application/json')\n\n\n@api_check\ndef query_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n dir_path = os.path.join(files_folder, dir_name)\n cmd_info = os.popen('ls -l -h {}'.format(dir_path)).read()\n file_list = cmd_info.split('\\n')[1:-1]\n file_list_data = []\n for file_info_cmd in file_list:\n file_info_list = file_info_cmd.split(' ')\n file_info = list(filter(None, file_info_list))\n file = file_info[-1]\n file_size = file_info[4]\n name_type = file.rsplit('.', 1)\n if len(name_type) < 2:\n name_type.append('未知')\n file_name, file_type = name_type\n file_list_data.append({'file_name': file_name, 'file_type':\n file_type, 'file_size': file_size})\n res = dict(state_code=0, error_msg='ok', data={'file_list': file_list_data}\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef del_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' 
+ file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n else:\n os.remove(file_path)\n res = dict(state_code=0, error_msg='ok')\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type=\n 'application/json')\n\n\n@api_check\ndef download_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' + file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder, dir_name), file)\n if not os.path.exists(file_path):\n res = dict(state_code=1, error_msg='文件不存在')\n return HttpResponse(json.dumps(res, ensure_ascii=False),\n content_type='application/json')\n from django.http import StreamingHttpResponse\n file_size = os.path.getsize(file_path)\n\n def file_iterator(file_name, chunk_size=512):\n with open(file_name, 'rb') as f:\n while True:\n c = f.read(chunk_size)\n if c:\n yield c\n else:\n break\n the_file_name = file_path\n res = file_iterator(the_file_name)\n response = StreamingHttpResponse(res)\n response['Content-Type'] = 'application/octet-stream; charset=UTF-8'\n response['Content-Length'] = file_size\n response['Content-Disposition'] = 'attachment;filename=\"{}\"'.format(\n escape_uri_path(file))\n return response\n",
"step-5": "import os\nimport shutil\nimport json\nfrom django.shortcuts import render, HttpResponse\nfrom django.utils.encoding import escape_uri_path\nfrom django.db import transaction\nfrom web_pan.settings import files_folder\nfrom disk import models\n\n\n# Create your views here.\n\n\ndef logined(func):\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n return render(request, 'login.html')\n else:\n return func(request, *args, **kwargs)\n\n return wrapper\n\n\ndef api_check(func):\n def wrapper(request, *args, **kwargs):\n session = request.session.get('user')\n if not session:\n res = dict(\n state_code=-3,\n error_msg=\"登陆过期\"\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n else:\n return func(request, *args, **kwargs)\n\n return wrapper\n\n\ndef login(request):\n if request.method == 'GET':\n if request.session.get('user'):\n return render(request, 'index.html')\n return render(request, 'login.html')\n else:\n req = json.loads(request.body)\n user = req.get('username')\n pwd = req.get('pwd')\n obj_user = models.Users.objects.filter(user_name=user).all()\n if not obj_user:\n res = dict(\n state_code=1,\n error_msg=\"用户不存在\"\n )\n else:\n password = obj_user.first().password\n if str(pwd) != str(password):\n res = dict(\n state_code=2,\n error_msg=\"密码错误\"\n )\n else:\n request.session['user'] = user\n request.session.set_expiry(60*60*4)\n res = dict(\n state_code=0,\n error_msg=\"密码错误\"\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\ndef logout(request):\n if request.session.get('user'):\n del request.session['user']\n return render(request, 'login.html')\n\n\n@logined\ndef index(request):\n return render(request, 'index.html')\n\n\n@api_check\ndef get_dir_list(request):\n user = request.session.get('user')\n obj_dir = models.Dirs.objects.filter(user_name=user).all()\n dir_list = []\n for dirs in obj_dir:\n 
user_dir = dirs.dir\n dir_list.append(user_dir)\n res = dict(\n state_code=0,\n error_msg='ok',\n data={\n \"dir_list\": dir_list\n }\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\n@api_check\ndef user_mkdir(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(\n state_code=-2,\n error_msg='参数错误'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if os.path.exists(dir_path):\n res = dict(\n state_code=1,\n error_msg=\"该目录已被使用\"\n )\n else:\n user = request.session.get('user')\n if user:\n models.Dirs.objects.create(\n user_name=user,\n dir=dir_name\n )\n os.mkdir(dir_path)\n res = dict(\n state_code=0,\n error_msg='ok'\n )\n else:\n res = dict(\n state_code=-3,\n error_msg=\"登陆过期\"\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\n@api_check\ndef del_dir(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n if not dir_name:\n res = dict(\n state_code=-2,\n error_msg='参数错误'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n dir_path = os.path.join(files_folder, dir_name)\n if not os.path.exists(dir_path):\n res = dict(\n state_code=1,\n error_msg='目录不存在'\n )\n else:\n with transaction.atomic():\n obj_dir = models.Dirs.objects.filter(dir=dir_name).all()\n if obj_dir:\n obj_dir.delete()\n shutil.rmtree(dir_path)\n res = dict(\n state_code=0,\n eror_msg='ok'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\n@api_check\ndef upload_file(request):\n dir_name = request.POST.get('dir_name')\n if not dir_name:\n res = dict(\n state_code=-2,\n error_msg='参数错误'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n dir_path = os.path.join(files_folder, 
dir_name)\n if not os.path.exists(dir_path):\n res = dict(\n state_code=1,\n error_msg='目录不存在'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n # 获取上传的文件,如果没有文件,则默认为None;\n File = request.FILES.get(\"file\", None)\n if File is None:\n res = dict(\n state_code=-2,\n error_msg='参数错误'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n file_name = File.name\n file_path = os.path.join(dir_path, file_name)\n # 打开特定的文件进行二进制的写操作;\n with open(file_path, 'wb+') as f:\n # 分块写入文件;\n for chunk in File.chunks():\n f.write(chunk)\n res = dict(\n state_code=0,\n error_msg='ok',\n )\n return HttpResponse(json.dumps(res), content_type='application/json')\n\n\n@api_check\ndef query_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n dir_path = os.path.join(files_folder, dir_name)\n cmd_info = os.popen(\"ls -l -h {}\".format(dir_path)).read()\n file_list = cmd_info.split('\\n')[1:-1]\n file_list_data = []\n for file_info_cmd in file_list:\n file_info_list = file_info_cmd.split(' ')\n file_info = list(filter(None, file_info_list))\n file = file_info[-1]\n file_size = file_info[4]\n name_type = file.rsplit('.', 1)\n if len(name_type) < 2:\n name_type.append('未知')\n file_name, file_type = name_type\n file_list_data.append({\n 'file_name': file_name,\n 'file_type': file_type,\n 'file_size': file_size\n })\n res = dict(\n state_code=0,\n error_msg='ok',\n data={\n 'file_list': file_list_data\n }\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\n@api_check\ndef del_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name + '.' 
+ file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder,dir_name),file)\n if not os.path.exists(file_path):\n res = dict(\n state_code=1,\n error_msg='文件不存在'\n )\n else:\n os.remove(file_path)\n res = dict(\n state_code=0,\n error_msg='ok'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n\n\n@api_check\ndef download_file(request):\n req = json.loads(request.body)\n dir_name = req.get('dir_name')\n file_name = req.get('file_name')\n file_type = req.get('file_type')\n file = file_name+'.'+file_type if file_type != '未知' else file_name\n file_path = os.path.join(os.path.join(files_folder,dir_name),file)\n if not os.path.exists(file_path):\n res = dict(\n state_code=1,\n error_msg='文件不存在'\n )\n return HttpResponse(json.dumps(res, ensure_ascii=False), content_type='application/json')\n from django.http import StreamingHttpResponse\n file_size = os.path.getsize(file_path)\n def file_iterator(file_name, chunk_size=512): # 用于形成二进制数据\n with open(file_name, 'rb') as f:\n while True:\n c = f.read(chunk_size)\n if c:\n yield c\n else:\n break\n the_file_name = file_path # 要下载的文件路径\n res = file_iterator(the_file_name)\n response = StreamingHttpResponse(res) # 这里创建返回\n response['Content-Type'] = 'application/octet-stream; charset=UTF-8' # 注意格式\n response['Content-Length'] = file_size\n response['Content-Disposition'] = 'attachment;filename=\"{}\"'.format(escape_uri_path(file)) # 注意filename 这个是下载后的名字\n return response\n",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
import os
import logging
from datetime import datetime
import torch
from naruto_skills.training_checker import TrainingChecker
from data_for_train import is_question as my_dataset
from model_def.lstm_attention import LSTMAttention
from utils import pytorch_utils
from train.new_trainer import TrainingLoop, TrainingLogger, EvaluateLogger, Evaluator
def input2_text(first_input, *params):
return my_dataset.voc.idx2docs(first_input)
def target2_text(first_input, *params):
return first_input
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
BATCH_SIZE = 128
NUM_EPOCHS = 500
NUM_WORKERS = 0
PRINT_EVERY = 100
PREDICT_EVERY = 500
EVAL_EVERY = 500
PRE_TRAINED_MODEL = ''
my_dataset.bootstrap()
train_loader = my_dataset.get_dl_train(batch_size=BATCH_SIZE, size=None)
eval_loader = my_dataset.get_dl_eval(batch_size=BATCH_SIZE, size=None)
logging.info('There will be %s steps for training', NUM_EPOCHS * len(train_loader))
model = LSTMAttention(vocab_size=len(my_dataset.voc.index2word), no_class=2)
model.train()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
logging.info('Model architecture: \n%s', model)
logging.info('Total trainable parameters: %s', pytorch_utils.count_parameters(model))
init_step = 0
# Restore model
if PRE_TRAINED_MODEL != '':
checkpoint = torch.load(PRE_TRAINED_MODEL, map_location=device)
model.load_state_dict(checkpoint['model_state_dict'])
model.optimizer.load_state_dict(checkpoint['optimizer'])
init_step = checkpoint.get('step', 0)
logging.info('Load pre-trained model from %s successfully', PRE_TRAINED_MODEL)
root_dir = '/source/main/train/output/'
exp_id = datetime.strftime(datetime.now(), '%Y-%m-%dT%H:%M:%S')
path_checkpoints = os.path.join(root_dir, 'saved_models', model.__class__.__name__, exp_id)
training_checker = TrainingChecker(model, root_dir=path_checkpoints, init_score=-10000)
path_logging = os.path.join(root_dir, 'logging', model.__class__.__name__, exp_id)
train_logger = TrainingLogger(model, measure_interval=PRINT_EVERY, predict_interval=PREDICT_EVERY,
path_to_file=path_logging + '_train', input_transform=input2_text,
output_transform=target2_text)
eval_logger = EvaluateLogger(path_logging + '_validate')
evaluator = Evaluator(model, eval_loader, device, EVAL_EVERY, eval_logger, training_checker)
training_loop = TrainingLoop(model, train_loader, device, NUM_EPOCHS, train_logger, evaluator)
training_loop.run()
|
normal
|
{
"blob_id": "77884dd72f5efe91fccad27e6328c4ad34378be2",
"index": 6953,
"step-1": "<mask token>\n\n\ndef target2_text(first_input, *params):\n return first_input\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef input2_text(first_input, *params):\n return my_dataset.voc.idx2docs(first_input)\n\n\ndef target2_text(first_input, *params):\n return first_input\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef input2_text(first_input, *params):\n return my_dataset.voc.idx2docs(first_input)\n\n\ndef target2_text(first_input, *params):\n return first_input\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n BATCH_SIZE = 128\n NUM_EPOCHS = 500\n NUM_WORKERS = 0\n PRINT_EVERY = 100\n PREDICT_EVERY = 500\n EVAL_EVERY = 500\n PRE_TRAINED_MODEL = ''\n my_dataset.bootstrap()\n train_loader = my_dataset.get_dl_train(batch_size=BATCH_SIZE, size=None)\n eval_loader = my_dataset.get_dl_eval(batch_size=BATCH_SIZE, size=None)\n logging.info('There will be %s steps for training', NUM_EPOCHS * len(\n train_loader))\n model = LSTMAttention(vocab_size=len(my_dataset.voc.index2word), no_class=2\n )\n model.train()\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model.to(device)\n logging.info('Model architecture: \\n%s', model)\n logging.info('Total trainable parameters: %s', pytorch_utils.\n count_parameters(model))\n init_step = 0\n if PRE_TRAINED_MODEL != '':\n checkpoint = torch.load(PRE_TRAINED_MODEL, map_location=device)\n model.load_state_dict(checkpoint['model_state_dict'])\n model.optimizer.load_state_dict(checkpoint['optimizer'])\n init_step = checkpoint.get('step', 0)\n logging.info('Load pre-trained model from %s successfully',\n PRE_TRAINED_MODEL)\n root_dir = '/source/main/train/output/'\n exp_id = datetime.strftime(datetime.now(), '%Y-%m-%dT%H:%M:%S')\n path_checkpoints = os.path.join(root_dir, 'saved_models', model.\n __class__.__name__, exp_id)\n training_checker = TrainingChecker(model, root_dir=path_checkpoints,\n init_score=-10000)\n path_logging = os.path.join(root_dir, 'logging', model.__class__.\n __name__, exp_id)\n train_logger = TrainingLogger(model, measure_interval=PRINT_EVERY,\n predict_interval=PREDICT_EVERY, path_to_file=path_logging +\n '_train', input_transform=input2_text, output_transform=target2_text)\n eval_logger = EvaluateLogger(path_logging + '_validate')\n evaluator = 
Evaluator(model, eval_loader, device, EVAL_EVERY,\n eval_logger, training_checker)\n training_loop = TrainingLoop(model, train_loader, device, NUM_EPOCHS,\n train_logger, evaluator)\n training_loop.run()\n",
"step-4": "import os\nimport logging\nfrom datetime import datetime\nimport torch\nfrom naruto_skills.training_checker import TrainingChecker\nfrom data_for_train import is_question as my_dataset\nfrom model_def.lstm_attention import LSTMAttention\nfrom utils import pytorch_utils\nfrom train.new_trainer import TrainingLoop, TrainingLogger, EvaluateLogger, Evaluator\n\n\ndef input2_text(first_input, *params):\n return my_dataset.voc.idx2docs(first_input)\n\n\ndef target2_text(first_input, *params):\n return first_input\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n BATCH_SIZE = 128\n NUM_EPOCHS = 500\n NUM_WORKERS = 0\n PRINT_EVERY = 100\n PREDICT_EVERY = 500\n EVAL_EVERY = 500\n PRE_TRAINED_MODEL = ''\n my_dataset.bootstrap()\n train_loader = my_dataset.get_dl_train(batch_size=BATCH_SIZE, size=None)\n eval_loader = my_dataset.get_dl_eval(batch_size=BATCH_SIZE, size=None)\n logging.info('There will be %s steps for training', NUM_EPOCHS * len(\n train_loader))\n model = LSTMAttention(vocab_size=len(my_dataset.voc.index2word), no_class=2\n )\n model.train()\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model.to(device)\n logging.info('Model architecture: \\n%s', model)\n logging.info('Total trainable parameters: %s', pytorch_utils.\n count_parameters(model))\n init_step = 0\n if PRE_TRAINED_MODEL != '':\n checkpoint = torch.load(PRE_TRAINED_MODEL, map_location=device)\n model.load_state_dict(checkpoint['model_state_dict'])\n model.optimizer.load_state_dict(checkpoint['optimizer'])\n init_step = checkpoint.get('step', 0)\n logging.info('Load pre-trained model from %s successfully',\n PRE_TRAINED_MODEL)\n root_dir = '/source/main/train/output/'\n exp_id = datetime.strftime(datetime.now(), '%Y-%m-%dT%H:%M:%S')\n path_checkpoints = os.path.join(root_dir, 'saved_models', model.\n __class__.__name__, exp_id)\n training_checker = TrainingChecker(model, root_dir=path_checkpoints,\n init_score=-10000)\n path_logging = 
os.path.join(root_dir, 'logging', model.__class__.\n __name__, exp_id)\n train_logger = TrainingLogger(model, measure_interval=PRINT_EVERY,\n predict_interval=PREDICT_EVERY, path_to_file=path_logging +\n '_train', input_transform=input2_text, output_transform=target2_text)\n eval_logger = EvaluateLogger(path_logging + '_validate')\n evaluator = Evaluator(model, eval_loader, device, EVAL_EVERY,\n eval_logger, training_checker)\n training_loop = TrainingLoop(model, train_loader, device, NUM_EPOCHS,\n train_logger, evaluator)\n training_loop.run()\n",
"step-5": "import os\nimport logging\nfrom datetime import datetime\n\nimport torch\nfrom naruto_skills.training_checker import TrainingChecker\n\nfrom data_for_train import is_question as my_dataset\nfrom model_def.lstm_attention import LSTMAttention\nfrom utils import pytorch_utils\nfrom train.new_trainer import TrainingLoop, TrainingLogger, EvaluateLogger, Evaluator\n\n\ndef input2_text(first_input, *params):\n return my_dataset.voc.idx2docs(first_input)\n\n\ndef target2_text(first_input, *params):\n return first_input\n\n\nif __name__ == '__main__':\n logging.basicConfig(level=logging.INFO)\n BATCH_SIZE = 128\n NUM_EPOCHS = 500\n NUM_WORKERS = 0\n PRINT_EVERY = 100\n PREDICT_EVERY = 500\n EVAL_EVERY = 500\n PRE_TRAINED_MODEL = ''\n\n my_dataset.bootstrap()\n train_loader = my_dataset.get_dl_train(batch_size=BATCH_SIZE, size=None)\n eval_loader = my_dataset.get_dl_eval(batch_size=BATCH_SIZE, size=None)\n logging.info('There will be %s steps for training', NUM_EPOCHS * len(train_loader))\n model = LSTMAttention(vocab_size=len(my_dataset.voc.index2word), no_class=2)\n model.train()\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n model.to(device)\n logging.info('Model architecture: \\n%s', model)\n logging.info('Total trainable parameters: %s', pytorch_utils.count_parameters(model))\n\n init_step = 0\n # Restore model\n if PRE_TRAINED_MODEL != '':\n checkpoint = torch.load(PRE_TRAINED_MODEL, map_location=device)\n model.load_state_dict(checkpoint['model_state_dict'])\n model.optimizer.load_state_dict(checkpoint['optimizer'])\n init_step = checkpoint.get('step', 0)\n\n logging.info('Load pre-trained model from %s successfully', PRE_TRAINED_MODEL)\n\n root_dir = '/source/main/train/output/'\n exp_id = datetime.strftime(datetime.now(), '%Y-%m-%dT%H:%M:%S')\n\n path_checkpoints = os.path.join(root_dir, 'saved_models', model.__class__.__name__, exp_id)\n training_checker = TrainingChecker(model, root_dir=path_checkpoints, 
init_score=-10000)\n\n path_logging = os.path.join(root_dir, 'logging', model.__class__.__name__, exp_id)\n train_logger = TrainingLogger(model, measure_interval=PRINT_EVERY, predict_interval=PREDICT_EVERY,\n path_to_file=path_logging + '_train', input_transform=input2_text,\n output_transform=target2_text)\n\n eval_logger = EvaluateLogger(path_logging + '_validate')\n evaluator = Evaluator(model, eval_loader, device, EVAL_EVERY, eval_logger, training_checker)\n\n training_loop = TrainingLoop(model, train_loader, device, NUM_EPOCHS, train_logger, evaluator)\n training_loop.run()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def get_data(inputloc, tablename='data'):
data = spark.read.csv(inputloc, schema=schema)
data.createOrReplaceTempView(tablename)
return data
<|reserved_special_token_0|>
def resolved_max(df):
df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.
col('station'), functions.col('max(date)').alias('d_max'))
d_max = df.join(df_max, 'station').where(functions.col('d_max') ==
functions.col('date'))
fin_ret = d_max.select(functions.col('latitude'), functions.col(
'longitude'), functions.col('tmax'), functions.col('station'))
return list(map(lambda row: row.asDict(), fin_ret.collect()))
<|reserved_special_token_0|>
def make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):
"""Construct a list of TrainValidationSplit estimators for weather data
where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples
and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.
The RegressionEvaluator will use a non-default `metricName`, if specified.
"""
feature_cols = ['latitude', 'longitude', 'elevation']
column_names = dict(featuresCol='features', labelCol='tmax',
predictionCol='tmax_pred')
feature_assembler = VectorAssembler(inputCols=feature_cols, outputCol=
column_names['featuresCol'])
ev = RegressionEvaluator().setLabelCol(column_names['labelCol']
).setPredictionCol(column_names['predictionCol'])
if metricName:
ev = ev.setMetricName(metricName)
tvs_list = []
for est, pgb in estimator_gridbuilders:
est = est.setParams(**column_names)
pl = Pipeline(stages=[feature_assembler, est])
paramGrid = pgb.build()
tvs_list.append(TrainValidationSplit(estimator=pl,
estimatorParamMaps=paramGrid, evaluator=ev, trainRatio=trainRatio))
return tvs_list
def get_best_weather_model(data):
train, test = data.randomSplit([0.75, 0.25])
train = train.cache()
test = test.cache()
estimator_gridbuilders = [estimator_gridbuilder(LinearRegression(),
dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5], maxIter=[10, 20
])), estimator_gridbuilder(GBTRegressor(), dict(lossType=['squared'
], maxDepth=[5, 10], maxIter=[2, 5], stepSize=[0.1])),
estimator_gridbuilder(RandomForestRegressor(), dict(numTrees=[5, 10
], maxDepth=[5, 15], featureSubsetStrategy=['auto']))]
metricName = 'r2'
tvs_list = make_weather_trainers(0.2, estimator_gridbuilders, metricName)
ev = tvs_list[0].getEvaluator()
scorescale = 1 if ev.isLargerBetter() else -1
model_name_scores = []
for tvs in tvs_list:
model = tvs.fit(train)
test_pred = model.transform(test)
score = ev.evaluate(test_pred) * scorescale
model_name_scores.append((model, get_estimator_name(tvs.
getEstimator()), score))
best_model, best_name, best_score = max(model_name_scores, key=lambda
triplet: triplet[2])
print('\n\nBest model is %s with validation data %s score %f' % (
best_name, ev.getMetricName(), best_score * scorescale))
return best_model
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_data(inputloc, tablename='data'):
    """Read the weather CSV at `inputloc` using the module-level `schema`,
    register it as a temp view named `tablename`, and return the DataFrame.

    NOTE(review): relies on a module-level `spark` session — presumably
    provided by the `weather_tools_mv` star import; confirm.
    """
    df = spark.read.csv(inputloc, schema=schema)
    df.createOrReplaceTempView(tablename)
    return df
<|reserved_special_token_0|>
def resolved_max(df):
    """Return, for each station in `df`, the row observed on that station's
    most recent date, as a list of plain dicts with keys 'latitude',
    'longitude', 'tmax' and 'station'.
    """
    latest = df.groupBy('station').agg({'date': 'max'}).select(
        functions.col('station'),
        functions.col('max(date)').alias('d_max'))
    newest_rows = df.join(latest, 'station').where(
        functions.col('d_max') == functions.col('date'))
    picked = newest_rows.select(
        functions.col('latitude'), functions.col('longitude'),
        functions.col('tmax'), functions.col('station'))
    return [row.asDict() for row in picked.collect()]
for i in range(0, len(years) - 1):
lower = years[i]
upper = years[i + 1]
zone = data.filter(functions.col('date') < upper).filter(functions.col(
'date') >= lower)
reduced_data[lower + '_' + upper] = resolved_max(zone)
<|reserved_special_token_0|>
plt.figure(figsize=(16, 12))
<|reserved_special_token_0|>
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
<|reserved_special_token_0|>
for y in reduced_data['2000_2001']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
<|reserved_special_token_0|>
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2000')
plt.savefig('2a_2000.png')
plt.figure(figsize=(16, 12))
<|reserved_special_token_0|>
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
<|reserved_special_token_0|>
for y in reduced_data['2001_2002']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
<|reserved_special_token_0|>
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2001')
plt.savefig('2a_2001.png')
def make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):
    """Construct a list of TrainValidationSplit estimators for weather data
    where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder)
    tuples and 0 < `trainRatio` <= 1 determines the fraction of rows used
    for training.  The RegressionEvaluator will use a non-default
    `metricName`, if specified.
    """
    cols = dict(featuresCol='features', labelCol='tmax',
                predictionCol='tmax_pred')
    assembler = VectorAssembler(
        inputCols=['latitude', 'longitude', 'elevation'],
        outputCol=cols['featuresCol'])
    evaluator = RegressionEvaluator().setLabelCol(cols['labelCol'])
    evaluator = evaluator.setPredictionCol(cols['predictionCol'])
    if metricName:
        evaluator = evaluator.setMetricName(metricName)
    trainers = []
    for estimator, gridbuilder in estimator_gridbuilders:
        pipeline = Pipeline(stages=[assembler, estimator.setParams(**cols)])
        trainers.append(TrainValidationSplit(
            estimator=pipeline,
            estimatorParamMaps=gridbuilder.build(),
            evaluator=evaluator,
            trainRatio=trainRatio))
    return trainers
def get_best_weather_model(data):
    """Fit every configured (estimator, param-grid) candidate on a 75/25
    split of `data`, score each fitted pipeline on the held-out 25% with
    the r2 metric, print the winner, and return the best fitted model.
    """
    train, test = data.randomSplit([0.75, 0.25])
    train, test = train.cache(), test.cache()
    # Candidate estimators with their hyper-parameter grids.
    candidates = [
        estimator_gridbuilder(
            LinearRegression(),
            dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5],
                 maxIter=[10, 20])),
        estimator_gridbuilder(
            GBTRegressor(),
            dict(lossType=['squared'], maxDepth=[5, 10], maxIter=[2, 5],
                 stepSize=[0.1])),
        estimator_gridbuilder(
            RandomForestRegressor(),
            dict(numTrees=[5, 10], maxDepth=[5, 15],
                 featureSubsetStrategy=['auto'])),
    ]
    metricName = 'r2'
    tvs_list = make_weather_trainers(0.2, candidates, metricName)
    ev = tvs_list[0].getEvaluator()
    # Flip the sign for metrics where smaller is better so max() below
    # always selects the best model.
    scorescale = 1 if ev.isLargerBetter() else -1
    scored = []
    for tvs in tvs_list:
        fitted = tvs.fit(train)
        scored.append((fitted,
                       get_estimator_name(tvs.getEstimator()),
                       ev.evaluate(fitted.transform(test)) * scorescale))
    best_model, best_name, best_score = max(scored, key=lambda t: t[2])
    # Multiply by scorescale again so the metric is reported in its
    # natural sign.
    print('\n\nBest model is %s with validation data %s score %f' % (
        best_name, ev.getMetricName(), best_score * scorescale))
    return best_model
<|reserved_special_token_0|>
print("""
Best parameters on test data:
""", get_best_tvs_model_params(model)
)
<|reserved_special_token_0|>
for lat in lat_range:
for lon in lon_range:
elev = eg.get_elevation(lat, lon)
combo.append((lat, lon, float(elev)))
<|reserved_special_token_0|>
plt.figure(figsize=(16, 12))
<|reserved_special_token_0|>
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
<|reserved_special_token_0|>
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax_pred'])
<|reserved_special_token_0|>
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Predicted Heat Map')
plt.savefig('2b1_heat.png')
<|reserved_special_token_0|>
plt.figure(figsize=(16, 12))
<|reserved_special_token_0|>
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
<|reserved_special_token_0|>
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(abs(y['tmax_pred'] - y['tmax']))
<|reserved_special_token_0|>
cbar.set_label('Absolute Temperature Difference (in Celcius)')
plt.title('Regression Error Map')
plt.savefig('2b2_regression_error.png')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
schema = StructType([StructField('station', StringType(), False),
StructField('date', DateType(), False), StructField('latitude',
FloatType(), False), StructField('longitude', FloatType(), False),
StructField('elevation', FloatType(), False), StructField('tmax',
FloatType(), False)])
def get_data(inputloc, tablename='data'):
    """Read the weather CSV at `inputloc` using the module-level `schema`,
    register it as a temp view named `tablename`, and return the DataFrame.

    NOTE(review): relies on a module-level `spark` session — presumably
    provided by the `weather_tools_mv` star import; confirm.
    """
    df = spark.read.csv(inputloc, schema=schema)
    df.createOrReplaceTempView(tablename)
    return df
input_loc = 'tmax-2'
data = get_data(input_loc)
years = ['2000', '2001', '2002', '2003']
reduced_data = dict()
def resolved_max(df):
    """Return, for each station in `df`, the row observed on that station's
    most recent date, as a list of plain dicts with keys 'latitude',
    'longitude', 'tmax' and 'station'.
    """
    latest = df.groupBy('station').agg({'date': 'max'}).select(
        functions.col('station'),
        functions.col('max(date)').alias('d_max'))
    newest_rows = df.join(latest, 'station').where(
        functions.col('d_max') == functions.col('date'))
    picked = newest_rows.select(
        functions.col('latitude'), functions.col('longitude'),
        functions.col('tmax'), functions.col('station'))
    return [row.asDict() for row in picked.collect()]
for i in range(0, len(years) - 1):
lower = years[i]
upper = years[i + 1]
zone = data.filter(functions.col('date') < upper).filter(functions.col(
'date') >= lower)
reduced_data[lower + '_' + upper] = resolved_max(zone)
<|reserved_special_token_0|>
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
for y in reduced_data['2000_2001']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.bwr)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2000')
plt.savefig('2a_2000.png')
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
for y in reduced_data['2001_2002']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2001')
plt.savefig('2a_2001.png')
def make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):
    """Construct a list of TrainValidationSplit estimators for weather data
    where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder)
    tuples and 0 < `trainRatio` <= 1 determines the fraction of rows used
    for training.  The RegressionEvaluator will use a non-default
    `metricName`, if specified.
    """
    cols = dict(featuresCol='features', labelCol='tmax',
                predictionCol='tmax_pred')
    assembler = VectorAssembler(
        inputCols=['latitude', 'longitude', 'elevation'],
        outputCol=cols['featuresCol'])
    evaluator = RegressionEvaluator().setLabelCol(cols['labelCol'])
    evaluator = evaluator.setPredictionCol(cols['predictionCol'])
    if metricName:
        evaluator = evaluator.setMetricName(metricName)
    trainers = []
    for estimator, gridbuilder in estimator_gridbuilders:
        pipeline = Pipeline(stages=[assembler, estimator.setParams(**cols)])
        trainers.append(TrainValidationSplit(
            estimator=pipeline,
            estimatorParamMaps=gridbuilder.build(),
            evaluator=evaluator,
            trainRatio=trainRatio))
    return trainers
def get_best_weather_model(data):
    """Fit every configured (estimator, param-grid) candidate on a 75/25
    split of `data`, score each fitted pipeline on the held-out 25% with
    the r2 metric, print the winner, and return the best fitted model.
    """
    train, test = data.randomSplit([0.75, 0.25])
    train, test = train.cache(), test.cache()
    # Candidate estimators with their hyper-parameter grids.
    candidates = [
        estimator_gridbuilder(
            LinearRegression(),
            dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5],
                 maxIter=[10, 20])),
        estimator_gridbuilder(
            GBTRegressor(),
            dict(lossType=['squared'], maxDepth=[5, 10], maxIter=[2, 5],
                 stepSize=[0.1])),
        estimator_gridbuilder(
            RandomForestRegressor(),
            dict(numTrees=[5, 10], maxDepth=[5, 15],
                 featureSubsetStrategy=['auto'])),
    ]
    metricName = 'r2'
    tvs_list = make_weather_trainers(0.2, candidates, metricName)
    ev = tvs_list[0].getEvaluator()
    # Flip the sign for metrics where smaller is better so max() below
    # always selects the best model.
    scorescale = 1 if ev.isLargerBetter() else -1
    scored = []
    for tvs in tvs_list:
        fitted = tvs.fit(train)
        scored.append((fitted,
                       get_estimator_name(tvs.getEstimator()),
                       ev.evaluate(fitted.transform(test)) * scorescale))
    best_model, best_name, best_score = max(scored, key=lambda t: t[2])
    # Multiply by scorescale again so the metric is reported in its
    # natural sign.
    print('\n\nBest model is %s with validation data %s score %f' % (
        best_name, ev.getMetricName(), best_score * scorescale))
    return best_model
fortrain, holdout = data.randomSplit([0.75, 0.25])
model = get_best_weather_model(fortrain)
print("""
Best parameters on test data:
""", get_best_tvs_model_params(model)
)
<|reserved_special_token_0|>
lat_range = range(-90, 90, 1)
lon_range = range(-180, 180, 1)
combo = []
for lat in lat_range:
for lon in lon_range:
elev = eg.get_elevation(lat, lon)
combo.append((lat, lon, float(elev)))
dataset = spark.createDataFrame(combo, ['latitude', 'longitude', 'elevation'])
pred = model.transform(dataset).collect()
collected_predictions = list(map(lambda row: row.asDict(), pred))
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lon = []
lat = []
val = []
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax_pred'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Predicted Heat Map')
plt.savefig('2b1_heat.png')
pred = model.transform(holdout).collect()
collected_predictions = list(map(lambda row: row.asDict(), pred))
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lon = []
lat = []
val = []
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(abs(y['tmax_pred'] - y['tmax']))
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.Reds)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Absolute Temperature Difference (in Celcius)')
plt.title('Regression Error Map')
plt.savefig('2b2_regression_error.png')
<|reserved_special_token_1|>
from pyspark.sql import SparkSession, Row, functions, Column
from pyspark.sql.types import *
from pyspark.ml import Pipeline, Estimator
from pyspark.ml.feature import SQLTransformer, VectorAssembler
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.tuning import TrainValidationSplit, ParamGridBuilder
from pyspark.ml.regression import LinearRegression, GBTRegressor, RandomForestRegressor, DecisionTreeRegressor
import sys
from weather_tools_mv import *
schema = StructType([StructField('station', StringType(), False),
StructField('date', DateType(), False), StructField('latitude',
FloatType(), False), StructField('longitude', FloatType(), False),
StructField('elevation', FloatType(), False), StructField('tmax',
FloatType(), False)])
def get_data(inputloc, tablename='data'):
    """Read the weather CSV at `inputloc` using the module-level `schema`,
    register it as a temp view named `tablename`, and return the DataFrame.

    NOTE(review): relies on a module-level `spark` session — presumably
    provided by the `weather_tools_mv` star import; confirm.
    """
    df = spark.read.csv(inputloc, schema=schema)
    df.createOrReplaceTempView(tablename)
    return df
input_loc = 'tmax-2'
data = get_data(input_loc)
years = ['2000', '2001', '2002', '2003']
reduced_data = dict()
def resolved_max(df):
    """Return, for each station in `df`, the row observed on that station's
    most recent date, as a list of plain dicts with keys 'latitude',
    'longitude', 'tmax' and 'station'.
    """
    latest = df.groupBy('station').agg({'date': 'max'}).select(
        functions.col('station'),
        functions.col('max(date)').alias('d_max'))
    newest_rows = df.join(latest, 'station').where(
        functions.col('d_max') == functions.col('date'))
    picked = newest_rows.select(
        functions.col('latitude'), functions.col('longitude'),
        functions.col('tmax'), functions.col('station'))
    return [row.asDict() for row in picked.collect()]
for i in range(0, len(years) - 1):
lower = years[i]
upper = years[i + 1]
zone = data.filter(functions.col('date') < upper).filter(functions.col(
'date') >= lower)
reduced_data[lower + '_' + upper] = resolved_max(zone)
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.cm as cm
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
for y in reduced_data['2000_2001']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.bwr)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2000')
plt.savefig('2a_2000.png')
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
for y in reduced_data['2001_2002']:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2001')
plt.savefig('2a_2001.png')
def make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):
    """Construct a list of TrainValidationSplit estimators for weather data
    where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder)
    tuples and 0 < `trainRatio` <= 1 determines the fraction of rows used
    for training.  The RegressionEvaluator will use a non-default
    `metricName`, if specified.
    """
    cols = dict(featuresCol='features', labelCol='tmax',
                predictionCol='tmax_pred')
    assembler = VectorAssembler(
        inputCols=['latitude', 'longitude', 'elevation'],
        outputCol=cols['featuresCol'])
    evaluator = RegressionEvaluator().setLabelCol(cols['labelCol'])
    evaluator = evaluator.setPredictionCol(cols['predictionCol'])
    if metricName:
        evaluator = evaluator.setMetricName(metricName)
    trainers = []
    for estimator, gridbuilder in estimator_gridbuilders:
        pipeline = Pipeline(stages=[assembler, estimator.setParams(**cols)])
        trainers.append(TrainValidationSplit(
            estimator=pipeline,
            estimatorParamMaps=gridbuilder.build(),
            evaluator=evaluator,
            trainRatio=trainRatio))
    return trainers
def get_best_weather_model(data):
    """Fit every configured (estimator, param-grid) candidate on a 75/25
    split of `data`, score each fitted pipeline on the held-out 25% with
    the r2 metric, print the winner, and return the best fitted model.
    """
    train, test = data.randomSplit([0.75, 0.25])
    train, test = train.cache(), test.cache()
    # Candidate estimators with their hyper-parameter grids.
    candidates = [
        estimator_gridbuilder(
            LinearRegression(),
            dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5],
                 maxIter=[10, 20])),
        estimator_gridbuilder(
            GBTRegressor(),
            dict(lossType=['squared'], maxDepth=[5, 10], maxIter=[2, 5],
                 stepSize=[0.1])),
        estimator_gridbuilder(
            RandomForestRegressor(),
            dict(numTrees=[5, 10], maxDepth=[5, 15],
                 featureSubsetStrategy=['auto'])),
    ]
    metricName = 'r2'
    tvs_list = make_weather_trainers(0.2, candidates, metricName)
    ev = tvs_list[0].getEvaluator()
    # Flip the sign for metrics where smaller is better so max() below
    # always selects the best model.
    scorescale = 1 if ev.isLargerBetter() else -1
    scored = []
    for tvs in tvs_list:
        fitted = tvs.fit(train)
        scored.append((fitted,
                       get_estimator_name(tvs.getEstimator()),
                       ev.evaluate(fitted.transform(test)) * scorescale))
    best_model, best_name, best_score = max(scored, key=lambda t: t[2])
    # Multiply by scorescale again so the metric is reported in its
    # natural sign.
    print('\n\nBest model is %s with validation data %s score %f' % (
        best_name, ev.getMetricName(), best_score * scorescale))
    return best_model
fortrain, holdout = data.randomSplit([0.75, 0.25])
model = get_best_weather_model(fortrain)
print("""
Best parameters on test data:
""", get_best_tvs_model_params(model)
)
import elevation_grid as eg
from pyspark.ml.linalg import Vectors
from pyspark.ml.feature import VectorAssembler
import numpy as np
lat_range = range(-90, 90, 1)
lon_range = range(-180, 180, 1)
combo = []
for lat in lat_range:
for lon in lon_range:
elev = eg.get_elevation(lat, lon)
combo.append((lat, lon, float(elev)))
dataset = spark.createDataFrame(combo, ['latitude', 'longitude', 'elevation'])
pred = model.transform(dataset).collect()
collected_predictions = list(map(lambda row: row.asDict(), pred))
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lon = []
lat = []
val = []
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(y['tmax_pred'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Predicted Heat Map')
plt.savefig('2b1_heat.png')
pred = model.transform(holdout).collect()
collected_predictions = list(map(lambda row: row.asDict(), pred))
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lon = []
lat = []
val = []
for y in collected_predictions:
lon.append(y['longitude'])
lat.append(y['latitude'])
val.append(abs(y['tmax_pred'] - y['tmax']))
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.Reds)
cbar = eq_map.colorbar(cs, location='bottom', pad='5%')
cbar.set_label('Absolute Temperature Difference (in Celcius)')
plt.title('Regression Error Map')
plt.savefig('2b2_regression_error.png')
<|reserved_special_token_1|>
from pyspark.sql import SparkSession, Row, functions, Column
from pyspark.sql.types import *
from pyspark.ml import Pipeline, Estimator
from pyspark.ml.feature import SQLTransformer, VectorAssembler
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.tuning import TrainValidationSplit, ParamGridBuilder
from pyspark.ml.regression import (LinearRegression,
GBTRegressor,
RandomForestRegressor,
DecisionTreeRegressor)
import sys
from weather_tools_mv import *
# Schema for the tmax CSV files: one observation per row, keyed by station
# and date, with location (lat/lon/elevation) and the day's max temperature.
schema = StructType([
    StructField('station', StringType(), False),
    StructField('date', DateType(), False),
    # StructField('dayofyear', IntegerType(), False),
    StructField('latitude', FloatType(), False),
    StructField('longitude', FloatType(), False),
    StructField('elevation', FloatType(), False),
    StructField('tmax', FloatType(), False),
])
def get_data(inputloc, tablename='data'):
    """Read the weather CSV at `inputloc` using the module-level `schema`,
    register it as a temp view named `tablename`, and return the DataFrame.

    NOTE(review): relies on a module-level `spark` session — presumably
    provided by the `weather_tools_mv` star import; confirm.
    """
    df = spark.read.csv(inputloc, schema=schema)
    df.createOrReplaceTempView(tablename)
    return df
# Load the dataset and define the year boundaries used to bucket
# observations into consecutive one-year spans below.
input_loc = 'tmax-2'
data = get_data(input_loc)
#Part 2a
# years = list(map(lambda x: str(x), range(2000, 2018)))
years = ['2000', '2001', '2002', '2003']
# Maps 'YYYY_YYYY' span keys to per-station latest observations.
reduced_data = dict()
def resolved_max(df):
    """Return, for each station in `df`, the row observed on that station's
    most recent date, as a list of plain dicts with keys 'latitude',
    'longitude', 'tmax' and 'station'.
    """
    latest = df.groupBy('station').agg({'date': 'max'}).select(
        functions.col('station'),
        functions.col('max(date)').alias('d_max'))
    newest_rows = df.join(latest, 'station').where(
        functions.col('d_max') == functions.col('date'))
    picked = newest_rows.select(
        functions.col('latitude'), functions.col('longitude'),
        functions.col('tmax'), functions.col('station'))
    return [row.asDict() for row in picked.collect()]
# Bucket the data into consecutive [year_i, year_{i+1}) spans and resolve,
# per station, the most recent observation within each span.
# NOTE(review): the DateType column is compared against 'YYYY' strings —
# presumably Spark casts; confirm the intended boundary semantics.
for i in range(0, len(years) - 1):
    lower = years[i]
    upper = years[i+1]
    zone = data.filter(functions.col('date') < upper).filter(functions.col('date') >= lower)
    reduced_data[lower+"_"+upper] = resolved_max(zone)
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.cm as cm
# Part 2a plot: scatter each station's latest tmax for the 2000-2001 span
# on a world map and save it as 2a_2000.png.
plt.figure(figsize=(16,12))
eq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,
                 lat_0=0, lon_0=0)
# eq_map.drawcoastlines()
# eq_map.drawcountries()
eq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
# Unzip the per-station dicts into parallel coordinate/value lists.
for y in reduced_data['2000_2001']:
    lon.append(y['longitude'])
    lat.append(y['latitude'])
    val.append(y['tmax'])
# Project lon/lat into map coordinates (identity for 'cyl' projection).
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker="o", cmap=cm.bwr)
# add colorbar.
cbar = eq_map.colorbar(cs,location='bottom',pad="5%")
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2000')
plt.savefig('2a_2000.png')
# Same map for the 2001-2002 span, saved as 2a_2001.png.
# NOTE(review): this plot uses cm.coolwarm while the 2000 plot uses cm.bwr —
# presumably intentional; confirm.
plt.figure(figsize=(16,12))
eq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,
                 lat_0=0, lon_0=0)
# eq_map.drawcoastlines()
# eq_map.drawcountries()
eq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))
lat = []
lon = []
val = []
for y in reduced_data['2001_2002']:
    lon.append(y['longitude'])
    lat.append(y['latitude'])
    val.append(y['tmax'])
x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker="o", cmap=cm.coolwarm)
# add colorbar.
cbar = eq_map.colorbar(cs,location='bottom',pad="5%")
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Year 2001')
plt.savefig('2a_2001.png')
# Part 2b
def make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):
    """Construct a list of TrainValidationSplit estimators for weather data
    where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder)
    tuples and 0 < `trainRatio` <= 1 determines the fraction of rows used
    for training.  The RegressionEvaluator will use a non-default
    `metricName`, if specified.
    """
    cols = dict(featuresCol='features', labelCol='tmax',
                predictionCol='tmax_pred')
    assembler = VectorAssembler(
        inputCols=['latitude', 'longitude', 'elevation'],
        outputCol=cols['featuresCol'])
    evaluator = RegressionEvaluator().setLabelCol(cols['labelCol'])
    evaluator = evaluator.setPredictionCol(cols['predictionCol'])
    if metricName:
        evaluator = evaluator.setMetricName(metricName)
    trainers = []
    for estimator, gridbuilder in estimator_gridbuilders:
        pipeline = Pipeline(stages=[assembler, estimator.setParams(**cols)])
        trainers.append(TrainValidationSplit(
            estimator=pipeline,
            estimatorParamMaps=gridbuilder.build(),
            evaluator=evaluator,
            trainRatio=trainRatio))
    return trainers
def get_best_weather_model(data):
    """Fit every configured (estimator, param-grid) candidate on a 75/25
    split of `data`, score each fitted pipeline on the held-out 25% with
    the r2 metric, print the winner, and return the best fitted model.
    """
    train, test = data.randomSplit([0.75, 0.25])
    train, test = train.cache(), test.cache()
    # Candidate estimators with their hyper-parameter grids
    # (e.g., use print(LinearRegression().explainParams()) to see what
    # can be tuned).
    candidates = [
        estimator_gridbuilder(
            LinearRegression(),
            dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5],
                 maxIter=[10, 20])),
        estimator_gridbuilder(
            GBTRegressor(),
            dict(lossType=['squared'], maxDepth=[5, 10], maxIter=[2, 5],
                 stepSize=[0.1])),
        estimator_gridbuilder(
            RandomForestRegressor(),
            dict(numTrees=[5, 10], maxDepth=[5, 15],
                 featureSubsetStrategy=['auto'])),
    ]
    metricName = 'r2'
    # 0.2: fraction of training data used for each TVS internal fit.
    tvs_list = make_weather_trainers(.2, candidates, metricName)
    ev = tvs_list[0].getEvaluator()
    # Flip the sign for metrics where smaller is better so max() below
    # always selects the best model.
    scorescale = 1 if ev.isLargerBetter() else -1
    scored = []
    for tvs in tvs_list:
        fitted = tvs.fit(train)
        scored.append((fitted,
                       get_estimator_name(tvs.getEstimator()),
                       ev.evaluate(fitted.transform(test)) * scorescale))
    best_model, best_name, best_score = max(scored, key=lambda t: t[2])
    # Multiply by scorescale again so the metric is reported in its
    # natural sign.
    print("\n\nBest model is %s with validation data %s score %f" % (best_name, ev.getMetricName(), best_score*scorescale))
    return best_model
# Hold out 25% of the data for the final error map; model selection (with
# its own internal split) runs on the remaining 75%.
fortrain, holdout = data.randomSplit([0.75, 0.25])
model = get_best_weather_model(fortrain)
print("\n\n\nBest parameters on test data:\n", get_best_tvs_model_params(model))
# Part 2b1
import elevation_grid as eg
from pyspark.ml.linalg import Vectors
from pyspark.ml.feature import VectorAssembler
import numpy as np

# Score the model on a 1-degree global lat/lon grid, looking up the elevation
# of every cell so the pipeline's three input features are all present.
lat_range = range(-90, 90, 1)
lon_range = range(-180, 180, 1)
combo = [(lat, lon, float(eg.get_elevation(lat, lon)))
         for lat in lat_range
         for lon in lon_range]
dataset = spark.createDataFrame(combo, ["latitude", "longitude", "elevation"])
pred = model.transform(dataset).collect()
collected_predictions = [row.asDict() for row in pred]
# Render the grid predictions as a world heat map.
plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
                 lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))

# Pull coordinates and predicted tmax out of the collected rows.
lon = [p['longitude'] for p in collected_predictions]
lat = [p['latitude'] for p in collected_predictions]
val = [p['tmax_pred'] for p in collected_predictions]

x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker="o", cmap=cm.coolwarm)
cbar = eq_map.colorbar(cs, location='bottom', pad="5%")
cbar.set_label('Max Temperature (in Celcius)')
plt.title('Predicted Heat Map')
plt.savefig('2b1_heat.png')
# Part 2b2
# Score the held-out rows and map the absolute prediction error per station.
pred = model.transform(holdout).collect()
collected_predictions = [row.asDict() for row in pred]

plt.figure(figsize=(16, 12))
eq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,
                 lat_0=0, lon_0=0)
eq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)
eq_map.drawmapboundary(fill_color='#3b3b3b')
eq_map.drawmeridians(np.arange(0, 360, 30))
eq_map.drawparallels(np.arange(-90, 90, 30))

# Absolute difference between predicted and observed tmax at each location.
lon = [p['longitude'] for p in collected_predictions]
lat = [p['latitude'] for p in collected_predictions]
val = [abs(p['tmax_pred'] - p['tmax']) for p in collected_predictions]

x, y = eq_map(lon, lat)
cs = eq_map.scatter(x, y, c=val, marker="o", cmap=cm.Reds)
cbar = eq_map.colorbar(cs, location='bottom', pad="5%")
cbar.set_label('Absolute Temperature Difference (in Celcius)')
plt.title('Regression Error Map')
plt.savefig('2b2_regression_error.png')
|
flexible
|
{
"blob_id": "3852ff2f3f4ac889256bd5f4e36a86d483857cef",
"index": 6534,
"step-1": "<mask token>\n\n\ndef get_data(inputloc, tablename='data'):\n data = spark.read.csv(inputloc, schema=schema)\n data.createOrReplaceTempView(tablename)\n return data\n\n\n<mask token>\n\n\ndef resolved_max(df):\n df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.\n col('station'), functions.col('max(date)').alias('d_max'))\n d_max = df.join(df_max, 'station').where(functions.col('d_max') ==\n functions.col('date'))\n fin_ret = d_max.select(functions.col('latitude'), functions.col(\n 'longitude'), functions.col('tmax'), functions.col('station'))\n return list(map(lambda row: row.asDict(), fin_ret.collect()))\n\n\n<mask token>\n\n\ndef make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):\n \"\"\"Construct a list of TrainValidationSplit estimators for weather data\n where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples\n and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.\n The RegressionEvaluator will use a non-default `metricName`, if specified.\n \"\"\"\n feature_cols = ['latitude', 'longitude', 'elevation']\n column_names = dict(featuresCol='features', labelCol='tmax',\n predictionCol='tmax_pred')\n feature_assembler = VectorAssembler(inputCols=feature_cols, outputCol=\n column_names['featuresCol'])\n ev = RegressionEvaluator().setLabelCol(column_names['labelCol']\n ).setPredictionCol(column_names['predictionCol'])\n if metricName:\n ev = ev.setMetricName(metricName)\n tvs_list = []\n for est, pgb in estimator_gridbuilders:\n est = est.setParams(**column_names)\n pl = Pipeline(stages=[feature_assembler, est])\n paramGrid = pgb.build()\n tvs_list.append(TrainValidationSplit(estimator=pl,\n estimatorParamMaps=paramGrid, evaluator=ev, trainRatio=trainRatio))\n return tvs_list\n\n\ndef get_best_weather_model(data):\n train, test = data.randomSplit([0.75, 0.25])\n train = train.cache()\n test = test.cache()\n estimator_gridbuilders = 
[estimator_gridbuilder(LinearRegression(),\n dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5], maxIter=[10, 20\n ])), estimator_gridbuilder(GBTRegressor(), dict(lossType=['squared'\n ], maxDepth=[5, 10], maxIter=[2, 5], stepSize=[0.1])),\n estimator_gridbuilder(RandomForestRegressor(), dict(numTrees=[5, 10\n ], maxDepth=[5, 15], featureSubsetStrategy=['auto']))]\n metricName = 'r2'\n tvs_list = make_weather_trainers(0.2, estimator_gridbuilders, metricName)\n ev = tvs_list[0].getEvaluator()\n scorescale = 1 if ev.isLargerBetter() else -1\n model_name_scores = []\n for tvs in tvs_list:\n model = tvs.fit(train)\n test_pred = model.transform(test)\n score = ev.evaluate(test_pred) * scorescale\n model_name_scores.append((model, get_estimator_name(tvs.\n getEstimator()), score))\n best_model, best_name, best_score = max(model_name_scores, key=lambda\n triplet: triplet[2])\n print('\\n\\nBest model is %s with validation data %s score %f' % (\n best_name, ev.getMetricName(), best_score * scorescale))\n return best_model\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_data(inputloc, tablename='data'):\n data = spark.read.csv(inputloc, schema=schema)\n data.createOrReplaceTempView(tablename)\n return data\n\n\n<mask token>\n\n\ndef resolved_max(df):\n df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.\n col('station'), functions.col('max(date)').alias('d_max'))\n d_max = df.join(df_max, 'station').where(functions.col('d_max') ==\n functions.col('date'))\n fin_ret = d_max.select(functions.col('latitude'), functions.col(\n 'longitude'), functions.col('tmax'), functions.col('station'))\n return list(map(lambda row: row.asDict(), fin_ret.collect()))\n\n\nfor i in range(0, len(years) - 1):\n lower = years[i]\n upper = years[i + 1]\n zone = data.filter(functions.col('date') < upper).filter(functions.col(\n 'date') >= lower)\n reduced_data[lower + '_' + upper] = resolved_max(zone)\n<mask token>\nplt.figure(figsize=(16, 12))\n<mask token>\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n<mask token>\nfor y in reduced_data['2000_2001']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\n<mask token>\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2000')\nplt.savefig('2a_2000.png')\nplt.figure(figsize=(16, 12))\n<mask token>\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n<mask token>\nfor y in reduced_data['2001_2002']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\n<mask token>\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2001')\nplt.savefig('2a_2001.png')\n\n\ndef make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):\n \"\"\"Construct a 
list of TrainValidationSplit estimators for weather data\n where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples\n and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.\n The RegressionEvaluator will use a non-default `metricName`, if specified.\n \"\"\"\n feature_cols = ['latitude', 'longitude', 'elevation']\n column_names = dict(featuresCol='features', labelCol='tmax',\n predictionCol='tmax_pred')\n feature_assembler = VectorAssembler(inputCols=feature_cols, outputCol=\n column_names['featuresCol'])\n ev = RegressionEvaluator().setLabelCol(column_names['labelCol']\n ).setPredictionCol(column_names['predictionCol'])\n if metricName:\n ev = ev.setMetricName(metricName)\n tvs_list = []\n for est, pgb in estimator_gridbuilders:\n est = est.setParams(**column_names)\n pl = Pipeline(stages=[feature_assembler, est])\n paramGrid = pgb.build()\n tvs_list.append(TrainValidationSplit(estimator=pl,\n estimatorParamMaps=paramGrid, evaluator=ev, trainRatio=trainRatio))\n return tvs_list\n\n\ndef get_best_weather_model(data):\n train, test = data.randomSplit([0.75, 0.25])\n train = train.cache()\n test = test.cache()\n estimator_gridbuilders = [estimator_gridbuilder(LinearRegression(),\n dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5], maxIter=[10, 20\n ])), estimator_gridbuilder(GBTRegressor(), dict(lossType=['squared'\n ], maxDepth=[5, 10], maxIter=[2, 5], stepSize=[0.1])),\n estimator_gridbuilder(RandomForestRegressor(), dict(numTrees=[5, 10\n ], maxDepth=[5, 15], featureSubsetStrategy=['auto']))]\n metricName = 'r2'\n tvs_list = make_weather_trainers(0.2, estimator_gridbuilders, metricName)\n ev = tvs_list[0].getEvaluator()\n scorescale = 1 if ev.isLargerBetter() else -1\n model_name_scores = []\n for tvs in tvs_list:\n model = tvs.fit(train)\n test_pred = model.transform(test)\n score = ev.evaluate(test_pred) * scorescale\n model_name_scores.append((model, get_estimator_name(tvs.\n getEstimator()), score))\n best_model, 
best_name, best_score = max(model_name_scores, key=lambda\n triplet: triplet[2])\n print('\\n\\nBest model is %s with validation data %s score %f' % (\n best_name, ev.getMetricName(), best_score * scorescale))\n return best_model\n\n\n<mask token>\nprint(\"\"\"\n\n\nBest parameters on test data:\n\"\"\", get_best_tvs_model_params(model)\n )\n<mask token>\nfor lat in lat_range:\n for lon in lon_range:\n elev = eg.get_elevation(lat, lon)\n combo.append((lat, lon, float(elev)))\n<mask token>\nplt.figure(figsize=(16, 12))\n<mask token>\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n<mask token>\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax_pred'])\n<mask token>\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Predicted Heat Map')\nplt.savefig('2b1_heat.png')\n<mask token>\nplt.figure(figsize=(16, 12))\n<mask token>\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n<mask token>\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(abs(y['tmax_pred'] - y['tmax']))\n<mask token>\ncbar.set_label('Absolute Temperature Difference (in Celcius)')\nplt.title('Regression Error Map')\nplt.savefig('2b2_regression_error.png')\n",
"step-3": "<mask token>\nschema = StructType([StructField('station', StringType(), False),\n StructField('date', DateType(), False), StructField('latitude',\n FloatType(), False), StructField('longitude', FloatType(), False),\n StructField('elevation', FloatType(), False), StructField('tmax',\n FloatType(), False)])\n\n\ndef get_data(inputloc, tablename='data'):\n data = spark.read.csv(inputloc, schema=schema)\n data.createOrReplaceTempView(tablename)\n return data\n\n\ninput_loc = 'tmax-2'\ndata = get_data(input_loc)\nyears = ['2000', '2001', '2002', '2003']\nreduced_data = dict()\n\n\ndef resolved_max(df):\n df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.\n col('station'), functions.col('max(date)').alias('d_max'))\n d_max = df.join(df_max, 'station').where(functions.col('d_max') ==\n functions.col('date'))\n fin_ret = d_max.select(functions.col('latitude'), functions.col(\n 'longitude'), functions.col('tmax'), functions.col('station'))\n return list(map(lambda row: row.asDict(), fin_ret.collect()))\n\n\nfor i in range(0, len(years) - 1):\n lower = years[i]\n upper = years[i + 1]\n zone = data.filter(functions.col('date') < upper).filter(functions.col(\n 'date') >= lower)\n reduced_data[lower + '_' + upper] = resolved_max(zone)\n<mask token>\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlat = []\nlon = []\nval = []\nfor y in reduced_data['2000_2001']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.bwr)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 
2000')\nplt.savefig('2a_2000.png')\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlat = []\nlon = []\nval = []\nfor y in reduced_data['2001_2002']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2001')\nplt.savefig('2a_2001.png')\n\n\ndef make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):\n \"\"\"Construct a list of TrainValidationSplit estimators for weather data\n where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples\n and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.\n The RegressionEvaluator will use a non-default `metricName`, if specified.\n \"\"\"\n feature_cols = ['latitude', 'longitude', 'elevation']\n column_names = dict(featuresCol='features', labelCol='tmax',\n predictionCol='tmax_pred')\n feature_assembler = VectorAssembler(inputCols=feature_cols, outputCol=\n column_names['featuresCol'])\n ev = RegressionEvaluator().setLabelCol(column_names['labelCol']\n ).setPredictionCol(column_names['predictionCol'])\n if metricName:\n ev = ev.setMetricName(metricName)\n tvs_list = []\n for est, pgb in estimator_gridbuilders:\n est = est.setParams(**column_names)\n pl = Pipeline(stages=[feature_assembler, est])\n paramGrid = pgb.build()\n tvs_list.append(TrainValidationSplit(estimator=pl,\n estimatorParamMaps=paramGrid, evaluator=ev, trainRatio=trainRatio))\n return tvs_list\n\n\ndef get_best_weather_model(data):\n train, test = data.randomSplit([0.75, 0.25])\n 
train = train.cache()\n test = test.cache()\n estimator_gridbuilders = [estimator_gridbuilder(LinearRegression(),\n dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5], maxIter=[10, 20\n ])), estimator_gridbuilder(GBTRegressor(), dict(lossType=['squared'\n ], maxDepth=[5, 10], maxIter=[2, 5], stepSize=[0.1])),\n estimator_gridbuilder(RandomForestRegressor(), dict(numTrees=[5, 10\n ], maxDepth=[5, 15], featureSubsetStrategy=['auto']))]\n metricName = 'r2'\n tvs_list = make_weather_trainers(0.2, estimator_gridbuilders, metricName)\n ev = tvs_list[0].getEvaluator()\n scorescale = 1 if ev.isLargerBetter() else -1\n model_name_scores = []\n for tvs in tvs_list:\n model = tvs.fit(train)\n test_pred = model.transform(test)\n score = ev.evaluate(test_pred) * scorescale\n model_name_scores.append((model, get_estimator_name(tvs.\n getEstimator()), score))\n best_model, best_name, best_score = max(model_name_scores, key=lambda\n triplet: triplet[2])\n print('\\n\\nBest model is %s with validation data %s score %f' % (\n best_name, ev.getMetricName(), best_score * scorescale))\n return best_model\n\n\nfortrain, holdout = data.randomSplit([0.75, 0.25])\nmodel = get_best_weather_model(fortrain)\nprint(\"\"\"\n\n\nBest parameters on test data:\n\"\"\", get_best_tvs_model_params(model)\n )\n<mask token>\nlat_range = range(-90, 90, 1)\nlon_range = range(-180, 180, 1)\ncombo = []\nfor lat in lat_range:\n for lon in lon_range:\n elev = eg.get_elevation(lat, lon)\n combo.append((lat, lon, float(elev)))\ndataset = spark.createDataFrame(combo, ['latitude', 'longitude', 'elevation'])\npred = model.transform(dataset).collect()\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 
30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlon = []\nlat = []\nval = []\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax_pred'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Predicted Heat Map')\nplt.savefig('2b1_heat.png')\npred = model.transform(holdout).collect()\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlon = []\nlat = []\nval = []\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(abs(y['tmax_pred'] - y['tmax']))\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.Reds)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Absolute Temperature Difference (in Celcius)')\nplt.title('Regression Error Map')\nplt.savefig('2b2_regression_error.png')\n",
"step-4": "from pyspark.sql import SparkSession, Row, functions, Column\nfrom pyspark.sql.types import *\nfrom pyspark.ml import Pipeline, Estimator\nfrom pyspark.ml.feature import SQLTransformer, VectorAssembler\nfrom pyspark.ml.evaluation import RegressionEvaluator\nfrom pyspark.ml.tuning import TrainValidationSplit, ParamGridBuilder\nfrom pyspark.ml.regression import LinearRegression, GBTRegressor, RandomForestRegressor, DecisionTreeRegressor\nimport sys\nfrom weather_tools_mv import *\nschema = StructType([StructField('station', StringType(), False),\n StructField('date', DateType(), False), StructField('latitude',\n FloatType(), False), StructField('longitude', FloatType(), False),\n StructField('elevation', FloatType(), False), StructField('tmax',\n FloatType(), False)])\n\n\ndef get_data(inputloc, tablename='data'):\n data = spark.read.csv(inputloc, schema=schema)\n data.createOrReplaceTempView(tablename)\n return data\n\n\ninput_loc = 'tmax-2'\ndata = get_data(input_loc)\nyears = ['2000', '2001', '2002', '2003']\nreduced_data = dict()\n\n\ndef resolved_max(df):\n df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.\n col('station'), functions.col('max(date)').alias('d_max'))\n d_max = df.join(df_max, 'station').where(functions.col('d_max') ==\n functions.col('date'))\n fin_ret = d_max.select(functions.col('latitude'), functions.col(\n 'longitude'), functions.col('tmax'), functions.col('station'))\n return list(map(lambda row: row.asDict(), fin_ret.collect()))\n\n\nfor i in range(0, len(years) - 1):\n lower = years[i]\n upper = years[i + 1]\n zone = data.filter(functions.col('date') < upper).filter(functions.col(\n 'date') >= lower)\n reduced_data[lower + '_' + upper] = resolved_max(zone)\nfrom mpl_toolkits.basemap import Basemap\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport matplotlib.cm as cm\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, 
lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlat = []\nlon = []\nval = []\nfor y in reduced_data['2000_2001']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.bwr)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2000')\nplt.savefig('2a_2000.png')\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlat = []\nlon = []\nval = []\nfor y in reduced_data['2001_2002']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2001')\nplt.savefig('2a_2001.png')\n\n\ndef make_weather_trainers(trainRatio, estimator_gridbuilders, metricName=None):\n \"\"\"Construct a list of TrainValidationSplit estimators for weather data\n where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples\n and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.\n The RegressionEvaluator will use a non-default `metricName`, if specified.\n \"\"\"\n feature_cols = ['latitude', 'longitude', 'elevation']\n column_names = dict(featuresCol='features', labelCol='tmax',\n predictionCol='tmax_pred')\n feature_assembler = VectorAssembler(inputCols=feature_cols, outputCol=\n column_names['featuresCol'])\n 
ev = RegressionEvaluator().setLabelCol(column_names['labelCol']\n ).setPredictionCol(column_names['predictionCol'])\n if metricName:\n ev = ev.setMetricName(metricName)\n tvs_list = []\n for est, pgb in estimator_gridbuilders:\n est = est.setParams(**column_names)\n pl = Pipeline(stages=[feature_assembler, est])\n paramGrid = pgb.build()\n tvs_list.append(TrainValidationSplit(estimator=pl,\n estimatorParamMaps=paramGrid, evaluator=ev, trainRatio=trainRatio))\n return tvs_list\n\n\ndef get_best_weather_model(data):\n train, test = data.randomSplit([0.75, 0.25])\n train = train.cache()\n test = test.cache()\n estimator_gridbuilders = [estimator_gridbuilder(LinearRegression(),\n dict(regParam=[0.3, 0.6], elasticNetParam=[0, 0.5], maxIter=[10, 20\n ])), estimator_gridbuilder(GBTRegressor(), dict(lossType=['squared'\n ], maxDepth=[5, 10], maxIter=[2, 5], stepSize=[0.1])),\n estimator_gridbuilder(RandomForestRegressor(), dict(numTrees=[5, 10\n ], maxDepth=[5, 15], featureSubsetStrategy=['auto']))]\n metricName = 'r2'\n tvs_list = make_weather_trainers(0.2, estimator_gridbuilders, metricName)\n ev = tvs_list[0].getEvaluator()\n scorescale = 1 if ev.isLargerBetter() else -1\n model_name_scores = []\n for tvs in tvs_list:\n model = tvs.fit(train)\n test_pred = model.transform(test)\n score = ev.evaluate(test_pred) * scorescale\n model_name_scores.append((model, get_estimator_name(tvs.\n getEstimator()), score))\n best_model, best_name, best_score = max(model_name_scores, key=lambda\n triplet: triplet[2])\n print('\\n\\nBest model is %s with validation data %s score %f' % (\n best_name, ev.getMetricName(), best_score * scorescale))\n return best_model\n\n\nfortrain, holdout = data.randomSplit([0.75, 0.25])\nmodel = get_best_weather_model(fortrain)\nprint(\"\"\"\n\n\nBest parameters on test data:\n\"\"\", get_best_tvs_model_params(model)\n )\nimport elevation_grid as eg\nfrom pyspark.ml.linalg import Vectors\nfrom pyspark.ml.feature import VectorAssembler\nimport numpy as 
np\nlat_range = range(-90, 90, 1)\nlon_range = range(-180, 180, 1)\ncombo = []\nfor lat in lat_range:\n for lon in lon_range:\n elev = eg.get_elevation(lat, lon)\n combo.append((lat, lon, float(elev)))\ndataset = spark.createDataFrame(combo, ['latitude', 'longitude', 'elevation'])\npred = model.transform(dataset).collect()\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlon = []\nlat = []\nval = []\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax_pred'])\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.coolwarm)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Predicted Heat Map')\nplt.savefig('2b1_heat.png')\npred = model.transform(holdout).collect()\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\nplt.figure(figsize=(16, 12))\neq_map = Basemap(projection='cyl', resolution='l', area_thresh=1000.0,\n lat_0=0, lon_0=0)\neq_map.fillcontinents(color='#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\nlon = []\nlat = []\nval = []\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(abs(y['tmax_pred'] - y['tmax']))\nx, y = eq_map(lon, lat)\ncs = eq_map.scatter(x, y, c=val, marker='o', cmap=cm.Reds)\ncbar = eq_map.colorbar(cs, location='bottom', pad='5%')\ncbar.set_label('Absolute Temperature Difference (in Celcius)')\nplt.title('Regression Error 
Map')\nplt.savefig('2b2_regression_error.png')\n",
"step-5": "from pyspark.sql import SparkSession, Row, functions, Column\nfrom pyspark.sql.types import *\n\nfrom pyspark.ml import Pipeline, Estimator\nfrom pyspark.ml.feature import SQLTransformer, VectorAssembler\nfrom pyspark.ml.evaluation import RegressionEvaluator\nfrom pyspark.ml.tuning import TrainValidationSplit, ParamGridBuilder\nfrom pyspark.ml.regression import (LinearRegression,\n GBTRegressor,\n RandomForestRegressor,\n DecisionTreeRegressor)\n\nimport sys\nfrom weather_tools_mv import *\n\nschema = StructType([\n StructField('station', StringType(), False),\n StructField('date', DateType(), False),\n # StructField('dayofyear', IntegerType(), False),\n StructField('latitude', FloatType(), False),\n StructField('longitude', FloatType(), False),\n StructField('elevation', FloatType(), False),\n StructField('tmax', FloatType(), False),\n])\n\ndef get_data(inputloc, tablename='data'):\n data = spark.read.csv(inputloc, schema=schema)\n data.createOrReplaceTempView(tablename)\n return data\n\ninput_loc = 'tmax-2'\ndata = get_data(input_loc)\n\n#Part 2a\n\n# years = list(map(lambda x: str(x), range(2000, 2018)))\n\nyears = ['2000', '2001', '2002', '2003']\n\nreduced_data = dict()\n\ndef resolved_max(df):\n df_max = df.groupBy('station').agg({'date': 'max'}).select(functions.col('station'),\n functions.col('max(date)').alias('d_max'))\n \n d_max = df.join(df_max, 'station').where(functions.col('d_max') == functions.col('date'))\n \n fin_ret = d_max.select(functions.col('latitude'),\n functions.col('longitude'),\n functions.col('tmax'),\n functions.col('station'))\n \n return list(map(lambda row: row.asDict(), fin_ret.collect()))\n\nfor i in range(0, len(years) - 1):\n lower = years[i]\n upper = years[i+1]\n zone = data.filter(functions.col('date') < upper).filter(functions.col('date') >= lower)\n reduced_data[lower+\"_\"+upper] = resolved_max(zone)\n\nfrom mpl_toolkits.basemap import Basemap\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport 
matplotlib.cm as cm\n\nplt.figure(figsize=(16,12))\n\neq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,\n lat_0=0, lon_0=0)\n# eq_map.drawcoastlines()\n# eq_map.drawcountries()\neq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n\nlat = []\nlon = []\nval = []\n\nfor y in reduced_data['2000_2001']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\n\nx, y = eq_map(lon, lat)\n\ncs = eq_map.scatter(x, y, c=val, marker=\"o\", cmap=cm.bwr)\n# add colorbar.\ncbar = eq_map.colorbar(cs,location='bottom',pad=\"5%\")\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2000')\nplt.savefig('2a_2000.png')\n\n\nplt.figure(figsize=(16,12))\n\neq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,\n lat_0=0, lon_0=0)\n# eq_map.drawcoastlines()\n# eq_map.drawcountries()\neq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n\nlat = []\nlon = []\nval = []\n\nfor y in reduced_data['2001_2002']:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax'])\n\nx, y = eq_map(lon, lat)\n\ncs = eq_map.scatter(x, y, c=val, marker=\"o\", cmap=cm.coolwarm)\n# add colorbar.\ncbar = eq_map.colorbar(cs,location='bottom',pad=\"5%\")\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Year 2001')\nplt.savefig('2a_2001.png')\n\n\n\n\n\n# Part 2b\n\n\ndef make_weather_trainers(trainRatio,\n estimator_gridbuilders,\n metricName=None):\n \"\"\"Construct a list of TrainValidationSplit estimators for weather data\n where `estimator_gridbuilders` is a list of (Estimator, ParamGridBuilder) tuples\n and 0 < `trainRatio` <= 1 determines the fraction of rows used for training.\n 
The RegressionEvaluator will use a non-default `metricName`, if specified.\n \"\"\"\n feature_cols = ['latitude', 'longitude', 'elevation']\n column_names = dict(featuresCol=\"features\",\n labelCol=\"tmax\",\n predictionCol=\"tmax_pred\")\n\n feature_assembler = VectorAssembler(\n inputCols=feature_cols,\n outputCol=column_names[\"featuresCol\"])\n ev = (RegressionEvaluator()\n .setLabelCol(column_names[\"labelCol\"])\n .setPredictionCol(column_names[\"predictionCol\"])\n )\n if metricName:\n ev = ev.setMetricName(metricName)\n tvs_list = []\n for est, pgb in estimator_gridbuilders:\n est = est.setParams(**column_names)\n\n pl = Pipeline(stages=[feature_assembler, est])\n\n paramGrid = pgb.build()\n tvs_list.append(TrainValidationSplit(estimator=pl,\n estimatorParamMaps=paramGrid,\n evaluator=ev,\n trainRatio=trainRatio))\n return tvs_list\n\ndef get_best_weather_model(data):\n train, test = data.randomSplit([0.75, 0.25])\n train = train.cache()\n test = test.cache()\n\n # e.g., use print(LinearRegression().explainParams()) to see what can be tuned\n estimator_gridbuilders = [\n estimator_gridbuilder(\n LinearRegression(),\n dict(regParam=[0.3, 0.6],\n elasticNetParam=[0, 0.5],\n maxIter=[10, 20]\n )),\n\n estimator_gridbuilder(\n GBTRegressor(),\n dict(lossType=[\"squared\"],\n maxDepth=[5, 10],\n maxIter=[2, 5],\n stepSize=[0.1]\n )),\n\n estimator_gridbuilder(\n RandomForestRegressor(),\n dict(numTrees=[5, 10],\n maxDepth=[5, 15],\n featureSubsetStrategy=[\"auto\"]\n ))\n ]\n \n metricName = 'r2'\n tvs_list = make_weather_trainers(.2, # fraction of data for training\n estimator_gridbuilders,\n metricName)\n ev = tvs_list[0].getEvaluator()\n scorescale = 1 if ev.isLargerBetter() else -1\n model_name_scores = []\n for tvs in tvs_list:\n model = tvs.fit(train)\n test_pred = model.transform(test)\n score = ev.evaluate(test_pred) * scorescale\n model_name_scores.append((model, get_estimator_name(tvs.getEstimator()), score))\n \n best_model, best_name, best_score = 
max(model_name_scores, key=lambda triplet: triplet[2])\n print(\"\\n\\nBest model is %s with validation data %s score %f\" % (best_name, ev.getMetricName(), best_score*scorescale))\n return best_model\n\nfortrain, holdout = data.randomSplit([0.75, 0.25])\n\nmodel = get_best_weather_model(fortrain)\nprint(\"\\n\\n\\nBest parameters on test data:\\n\", get_best_tvs_model_params(model))\n\n\n# Part 2b1\n\nimport elevation_grid as eg\nfrom pyspark.ml.linalg import Vectors\nfrom pyspark.ml.feature import VectorAssembler\nimport numpy as np\n\nlat_range = range(-90, 90, 1)\nlon_range = range(-180, 180, 1)\n\ncombo = []\n\nfor lat in lat_range:\n for lon in lon_range:\n elev = eg.get_elevation(lat, lon)\n combo.append((lat, lon, float(elev)))\n\ndataset = spark.createDataFrame(combo,[\"latitude\", \"longitude\", \"elevation\"])\npred = model.transform(dataset).collect()\n\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\n\nplt.figure(figsize=(16,12))\n\neq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,\n lat_0=0, lon_0=0)\n# eq_map.drawcoastlines()\n# eq_map.drawcountries()\neq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n\nlon = []\nlat = []\nval = []\n\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(y['tmax_pred'])\n\nx, y = eq_map(lon, lat)\n\ncs = eq_map.scatter(x, y, c=val, marker=\"o\", cmap=cm.coolwarm)\ncbar = eq_map.colorbar(cs,location='bottom',pad=\"5%\")\ncbar.set_label('Max Temperature (in Celcius)')\nplt.title('Predicted Heat Map')\nplt.savefig('2b1_heat.png')\n\n\n# Part 2b2\n\npred = model.transform(holdout).collect()\n\ncollected_predictions = list(map(lambda row: row.asDict(), pred))\n\nplt.figure(figsize=(16,12))\n\neq_map = Basemap(projection='cyl', resolution = 'l', area_thresh = 1000.0,\n 
lat_0=0, lon_0=0)\n# eq_map.drawcoastlines()\n# eq_map.drawcountries()\neq_map.fillcontinents(color = '#202020', lake_color='#3b3b3b', zorder=0.5)\neq_map.drawmapboundary(fill_color='#3b3b3b')\neq_map.drawmeridians(np.arange(0, 360, 30))\neq_map.drawparallels(np.arange(-90, 90, 30))\n\nlon = []\nlat = []\nval = []\n\nfor y in collected_predictions:\n lon.append(y['longitude'])\n lat.append(y['latitude'])\n val.append(abs(y['tmax_pred'] - y['tmax']))\n\nx, y = eq_map(lon, lat)\n\ncs = eq_map.scatter(x, y, c=val, marker=\"o\", cmap=cm.Reds)\ncbar = eq_map.colorbar(cs,location='bottom',pad=\"5%\")\ncbar.set_label('Absolute Temperature Difference (in Celcius)')\nplt.title('Regression Error Map')\nplt.savefig('2b2_regression_error.png')",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
with open('words.txt') as words_fh:
lexicon = set(list(map(lambda x: x.strip().lower(), words_fh.readlines())))
<|reserved_special_token_0|>
print(sorted_valid_words)
<|reserved_special_token_1|>
# Spelling-bee style solver: find words built only from LETTERS that
# contain the mandatory letter and are at least four characters long.
with open('words.txt') as words_fh:
    # Normalize the lexicon: strip whitespace, lowercase, de-duplicate.
    # (Set comprehension replaces the redundant set(list(map(...))) chain.)
    lexicon = {line.strip().lower() for line in words_fh}
MANDATORY_LETTER = 'l'
LETTERS = set(['t', 'i', 'e', 'v', 'p', 'x'] + [MANDATORY_LETTER])
# A word is valid when every letter is allowed, the mandatory letter
# appears, and the word is at least four letters long.
valid_words = [word for word in lexicon if set(word).issubset(LETTERS) and
    MANDATORY_LETTER in word and len(word) >= 4]
# key=len replaces the needless lambda wrapper around len.
sorted_valid_words = sorted(valid_words, key=len)
print(sorted_valid_words)
<|reserved_special_token_1|>
# Spelling bee NYT puzzle solver
with open('words.txt') as words_fh:
    # Lowercase and strip every entry; the set drops duplicates created
    # by decasing (space separated txt file).
    lexicon = {entry.strip().lower() for entry in words_fh.readlines()}

# NOTE: Could add a CLI to allow users to input this. Manual edits are the way for now
MANDATORY_LETTER = 'l'
LETTERS = {'t', 'i', 'e', 'v', 'p', 'x', MANDATORY_LETTER}

# Keep words built solely from LETTERS that contain the mandatory letter
# and are at least four characters long.
valid_words = []
for word in lexicon:
    letters_used = set(word)
    if letters_used <= LETTERS and MANDATORY_LETTER in letters_used and len(word) >= 4:
        valid_words.append(word)

sorted_valid_words = sorted(valid_words, key=lambda w: len(w))
print(sorted_valid_words)
|
flexible
|
{
"blob_id": "aacd5d671090c3305a53d62c3c6c25d4c033f42d",
"index": 6420,
"step-1": "<mask token>\n",
"step-2": "with open('words.txt') as words_fh:\n lexicon = set(list(map(lambda x: x.strip().lower(), words_fh.readlines())))\n<mask token>\nprint(sorted_valid_words)\n",
"step-3": "with open('words.txt') as words_fh:\n lexicon = set(list(map(lambda x: x.strip().lower(), words_fh.readlines())))\nMANDATORY_LETTER = 'l'\nLETTERS = set(['t', 'i', 'e', 'v', 'p', 'x'] + [MANDATORY_LETTER])\nvalid_words = [word for word in lexicon if set(word).issubset(LETTERS) and \n MANDATORY_LETTER in set(word) and len(word) >= 4]\nsorted_valid_words = sorted(valid_words, key=lambda x: len(x))\nprint(sorted_valid_words)\n",
"step-4": "# Spelling bee NYT puzzle solver\r\n\r\nwith open('words.txt') as words_fh:\r\n # Converts strips and lowercases lexicon (space seperated txt file)\r\n # Use set to remove duplicates (decasing)\r\n\tlexicon = set(list(map(lambda x: x.strip().lower(), words_fh.readlines())))\r\n\r\n# NOTE: Could add a CLI to allow users to input this. Manual edits are the way for now\r\nMANDATORY_LETTER = 'l'\r\nLETTERS = set(['t','i','e','v','p','x'] + [MANDATORY_LETTER])\r\n\r\n# Search for valid words \r\nvalid_words = [word for word in lexicon if set(word).issubset(LETTERS) and MANDATORY_LETTER in set(word) and len(word) >= 4]\r\nsorted_valid_words = sorted(valid_words, key=lambda x: len(x))\r\nprint(sorted_valid_words)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# This program sorts the files on Desktop on the basis of file extension
# and moves them into separate per-extension folders in Documents.
desktop_directory = "/home/vineeth/Desktop/"      # LINUX
destination_folder = "/home/vineeth/Documents/"   # LINUX
# desktop_directory = "C:/Users/VINEETH/Desktop/"     # Windows
# destination_folder = "C:/Users/VINEETH/Documents/"  # Windows

# Extensions that must stay on the desktop (launchers / shortcuts).
exclude_these = ['.desktop', '.exe', '.lnk']

import os

for eachfile in os.listdir(desktop_directory):
    source_path = os.path.join(desktop_directory, eachfile)
    if os.path.isfile(source_path):
        _, file_extension = os.path.splitext(eachfile)
        # `not in` replaces the equivalent but noisier all(ext != e ...) test.
        if file_extension not in exclude_these:
            # Folder name is the extension without its leading dot;
            # extension-less files land directly in destination_folder.
            ext = file_extension[1:]
            target_dir = os.path.join(destination_folder, ext)
            if not os.path.exists(target_dir):
                os.mkdir(target_dir)
            os.rename(source_path, os.path.join(target_dir, eachfile))
|
normal
|
{
"blob_id": "805b64a7bd727a88081a6ead574fff9b1542070f",
"index": 2023,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor eachfile in os.listdir(desktop_directory):\n if os.path.isfile(desktop_directory + eachfile):\n fileName, fileExtension = os.path.splitext(eachfile)\n if all(fileExtension != e for e in exclude_these):\n ext = fileExtension[1:]\n if not os.path.exists(destination_folder + ext):\n os.mkdir(destination_folder + ext)\n os.rename(desktop_directory + eachfile, destination_folder +\n ext + '/' + eachfile)\n",
"step-3": "desktop_directory = '/home/vineeth/Desktop/'\ndestination_folder = '/home/vineeth/Documents/'\nexclude_these = ['.desktop', '.exe', '.lnk']\n<mask token>\nfor eachfile in os.listdir(desktop_directory):\n if os.path.isfile(desktop_directory + eachfile):\n fileName, fileExtension = os.path.splitext(eachfile)\n if all(fileExtension != e for e in exclude_these):\n ext = fileExtension[1:]\n if not os.path.exists(destination_folder + ext):\n os.mkdir(destination_folder + ext)\n os.rename(desktop_directory + eachfile, destination_folder +\n ext + '/' + eachfile)\n",
"step-4": "desktop_directory = '/home/vineeth/Desktop/'\ndestination_folder = '/home/vineeth/Documents/'\nexclude_these = ['.desktop', '.exe', '.lnk']\nimport os\nfor eachfile in os.listdir(desktop_directory):\n if os.path.isfile(desktop_directory + eachfile):\n fileName, fileExtension = os.path.splitext(eachfile)\n if all(fileExtension != e for e in exclude_these):\n ext = fileExtension[1:]\n if not os.path.exists(destination_folder + ext):\n os.mkdir(destination_folder + ext)\n os.rename(desktop_directory + eachfile, destination_folder +\n ext + '/' + eachfile)\n",
"step-5": "#This program sorts the files on Desktop on the basis of file extension and move them in separate folders in Documents folder.\n\ndesktop_directory=\"/home/vineeth/Desktop/\" #LINUX\ndestination_folder=\"/home/vineeth/Documents/\" #LINUX\n\n#desktop_directory=\"C:/Users/VINEETH/Desktop/\" #Windows\n#destination_folder=\"C:/Users/VINEETH/Documents/\" #Windows\n\nexclude_these = ['.desktop','.exe','.lnk']\nimport os\nfor eachfile in os.listdir(desktop_directory):\n if os.path.isfile(desktop_directory+eachfile):\n fileName, fileExtension = os.path.splitext(eachfile)\n if(all(fileExtension!=e for e in exclude_these)):\n ext=fileExtension[1:]\n if not os.path.exists(destination_folder+ext):\n os.mkdir(destination_folder+ext)\n os.rename(desktop_directory+eachfile,destination_folder+ext+\"/\"+eachfile)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: UTF-8
from PIL import ImageFont,Image,ImageDraw
def min_element(table_d, ignoring_index=None):
    """Return (i, j, distance) for the smallest active entry of the table.

    Entries whose row or column index appears in `ignoring_index` are
    skipped.  The search starts from the table's maximum value and keeps
    the first strictly smaller entry, so if nothing beats the maximum the
    fallback (0, 0, max) is returned — matching the original contract.
    """
    best_i = best_j = 0
    best = max(table_d.values())
    for pair, dist in table_d.items():
        # Skip pairs that touch an already-merged cluster index.
        if ignoring_index is not None:
            row, col = pair
            if row in ignoring_index or col in ignoring_index:
                continue
        if dist < best:
            best = dist
            best_i, best_j = pair
    return (best_i, best_j, best)
def to_dict(table):
    """Flatten a square distance matrix into a symmetric {(i, j): d} map.

    Only off-diagonal entries are stored; each distance is recorded under
    both (i, j) and (j, i).  Diagonal entries are omitted.
    """
    pairs = {}
    for row_idx, row in enumerate(table):
        for col_idx in range(row_idx):
            value = row[col_idx]
            pairs[(row_idx, col_idx)] = value
            pairs[(col_idx, row_idx)] = value
    return pairs
def next_key(d, original_length, ignoring_keys=None, attension_values=None):
    """Return the next cluster key in `d` that is ready to be processed.

    `d` maps a cluster key to its two children.  A key is ready when each
    child is either already processed (listed in `ignoring_keys`) or an
    original leaf (index < `original_length`).  Among ready keys, one whose
    child appears in `attension_values` is preferred; otherwise the first
    ready key in iteration order wins.  Returns None when nothing is ready.

    Fix: the defaults were mutable lists (`=[]`), the classic shared
    mutable-default pitfall; None sentinels behave identically for callers.
    """
    if ignoring_keys is None:
        ignoring_keys = []
    if attension_values is None:
        attension_values = []
    if len(ignoring_keys) == 0:
        return min(d.keys())
    save_key = None
    for k in d.keys():
        v_1, v_2 = d[k]
        if k in ignoring_keys:
            continue
        # Both children must be either already processed or original leaves.
        if not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in ignoring_keys or v_2 < original_length)):
            continue
        if save_key is None:
            save_key = k
        # Prefer a key whose child was just produced.
        if v_1 in attension_values or v_2 in attension_values:
            return k
    return save_key
def main() -> None:
    """Run UPGMA clustering on the distance matrix in file "sample" and
    draw the resulting phylogenetic tree onto 'base.png', saving 'out.png'.

    Expected "sample" format — a symmetric, space-separated matrix, e.g.:

        0    0.1  0.12 0.21
        0.1  0    0.04 0.13
        0.12 0.04 0    0.11
        0.21 0.13 0.11 0
    """
    with open("sample","r") as f:
        lines = f.readlines()
    table = []
    for l in lines:
        row = [float(i) for i in l.split(" ")]
        table.append(row)
    table_d = to_dict(table)
    num_of_element = len(table)
    # cluster: new index -> (child, child); cluster_num: index -> leaf count.
    cluster = dict()
    cluster_num = dict()
    ignoring_index = []
    original_length = len(table)
    while True:
        # Pick the closest pair among indices not yet merged.
        min_i,min_j,_ = min_element(table_d,ignoring_index)
        # These two indices are absorbed; ignore them from now on.
        ignoring_index.append(min_i)
        ignoring_index.append(min_j)
        new_cluster = num_of_element # the merged i & j become a new element
        cluster[new_cluster] = (min_i,min_j)
        cluster_num[new_cluster] = 0
        # Count how many original leaves the new cluster owns: inherit
        # counts from child clusters, add 1 for each child that is a leaf.
        cluster_elements = 2
        if min_i in cluster_num.keys():
            cluster_num[new_cluster] += cluster_num[min_i]
            cluster_elements -= 1
        if min_j in cluster_num.keys():
            cluster_num[new_cluster] += cluster_num[min_j]
            cluster_elements -= 1
        cluster_num[new_cluster] += cluster_elements
        print(cluster_num)
        # When one cluster owns every leaf, the tree is complete.
        if max(cluster_num.values()) == original_length:
            print(cluster)
            print(cluster_num)
            print(table_d)
            print("UPGMA is end")
            break
        # Weights = number of original leaves each child cluster owns.
        weight_i = 1
        weight_j = 1
        if min_i in cluster_num.keys():
            weight_i = cluster_num[min_i]
        if min_j in cluster_num.keys():
            weight_j = cluster_num[min_j]
        for itr in range(num_of_element):
            if itr in ignoring_index:
                continue
            # Update the distance table with the leaf-count-weighted
            # average distance (the UPGMA update rule).
            table_d[(itr,new_cluster)] = (table_d[(itr,min_i)]*weight_i + table_d[(itr,min_j)]*weight_j) / float(weight_i + weight_j)
            table_d[(new_cluster,itr)] = (table_d[(itr,min_i)]*weight_i + table_d[(itr,min_j)]*weight_j) / float(weight_i + weight_j)
        num_of_element += 1
        if len(ignoring_index) - num_of_element == 1:
            # Once the remaining elements are two, the distance is obvious.
            break
    # Image handling
    # ref: https://ailog.site/2020/03/09/0309/
    # From here on: draw the phylogenetic tree.
    # A blank canvas image 'base.png' is assumed to exist.
    img = Image.open('base.png')
    width,height = img.size
    draw = ImageDraw.Draw(img)
    # padding
    top_padding = int(height*0.01)
    bottom_padding = int(height*0.01)
    right_padding = int(width*0.01)
    left_padding = int(width*0.01)
    # Height of the area reserved for the leaf labels.
    label_height = 64
    # Height available for the tree itself.
    main_frame_height = height - top_padding - bottom_padding - label_height
    # Scale factor mapping tree height (max distance / 2) to pixel height.
    height_scaler = main_frame_height / float(max(table_d.values()) / 2 )
    # Horizontal spacing between leaf labels.
    interval = int((width - right_padding - left_padding) / (original_length+1))
    font = ImageFont.truetype("arial.ttf", 32) # font size is 32
    ignoring_keys = []
    attension_values = []
    painted_number = 0
    # Pixel positions per cluster index; cluster_stack holds each node's
    # accumulated height (distance/2) for branch-length labels.
    cluster_x = dict()
    cluster_y = dict()
    cluster_stack = dict()
    for i in range(original_length):
        cluster_y[i] = top_padding + main_frame_height
        cluster_stack[i] = 0.
    while True:
        key = next_key(cluster,original_length,ignoring_keys,attension_values)
        if key in attension_values:
            attension_values.remove(key)
        if key is None:
            break
        i,j = cluster[key]
        # Assign x positions to leaves the first time they are drawn.
        if not i in cluster_x.keys():
            cluster_x[i] = left_padding + interval * (painted_number + 1)
            painted_number += 1
        if not j in cluster_x.keys():
            cluster_x[j] = left_padding + interval * (painted_number + 1)
            painted_number += 1
        # Parent sits midway between its children, at height distance/2.
        cluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)
        edge_height = int((table_d[(i,j)] * height_scaler / 2))
        cluster_y[key] = top_padding + main_frame_height - edge_height
        if not key in cluster_stack.keys():
            cluster_stack[key] = table_d[(i,j)] / 2
        # Two vertical branches plus the horizontal connector.
        draw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]), fill=(0, 0, 0), width=10)
        draw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]), fill=(0, 0, 0), width=10)
        draw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[key]), fill=(0, 0, 0), width=10)
        round_num = 3
        # Branch-length label for child i.
        value = round(table_d[(i,j)] / 2 - cluster_stack[i], round_num)
        value_text = str(value)
        size = font.getsize(value_text)
        value_x = cluster_x[i] - int(size[0]*1.05)
        value_y = int((cluster_y[i] + cluster_y[key]) / 2)
        draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')
        # Branch-length label for child j.
        value = round(table_d[(i,j)] / 2 - cluster_stack[j], round_num)
        value_text = str(value)
        size = font.getsize(value_text)
        value_x = cluster_x[j] - int(size[0]*1.05)
        value_y = int((cluster_y[j] + cluster_y[key]) / 2)
        draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')
        ignoring_keys.append(key)
        attension_values.append(key)
    font = ImageFont.truetype("arial.ttf", 64) # font size is 64
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    for i in range(original_length):
        # Leaves are labeled A, B, C, ...; a label dict could support
        # arbitrary (non-letter) labels instead.
        text = alphabet[i]
        size = font.getsize(text)
        left_x = cluster_x[i] - (size[0] / 2)
        print(left_x)
        top_y = top_padding + main_frame_height
        # Draw the leaf label below the tree ('#000000' = black text).
        draw.text((left_x, top_y), text, font=font, fill='#000000')
    # Save the rendered tree.
    img.save('out.png', 'PNG', quality=100, optimize=True)
    input("push enter")
if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "aee009b37b99bf44e27c608470c43834a58e0cc7",
"index": 8490,
"step-1": "<mask token>\n\n\ndef to_dict(table):\n table_d = dict()\n for i in range(len(table)):\n for j in range(i):\n table_d[i, j] = table[i][j]\n table_d[j, i] = table[i][j]\n return table_d\n\n\ndef next_key(d, original_length, ignoring_keys=[], attension_values=[]):\n if len(ignoring_keys) == 0:\n return min(d.keys())\n save_key = None\n for k in d.keys():\n v_1, v_2 = d[k]\n if k in ignoring_keys:\n continue\n if not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in\n ignoring_keys or v_2 < original_length)):\n continue\n if save_key is None:\n save_key = k\n if v_1 in attension_values or v_2 in attension_values:\n return k\n return save_key\n\n\ndef main():\n with open('sample', 'r') as f:\n lines = f.readlines()\n table = []\n for l in lines:\n row = [float(i) for i in l.split(' ')]\n table.append(row)\n table_d = to_dict(table)\n num_of_element = len(table)\n cluster = dict()\n cluster_num = dict()\n ignoring_index = []\n original_length = len(table)\n while True:\n min_i, min_j, _ = min_element(table_d, ignoring_index)\n ignoring_index.append(min_i)\n ignoring_index.append(min_j)\n new_cluster = num_of_element\n cluster[new_cluster] = min_i, min_j\n cluster_num[new_cluster] = 0\n cluster_elements = 2\n if min_i in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_i]\n cluster_elements -= 1\n if min_j in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_j]\n cluster_elements -= 1\n cluster_num[new_cluster] += cluster_elements\n print(cluster_num)\n if max(cluster_num.values()) == original_length:\n print(cluster)\n print(cluster_num)\n print(table_d)\n print('UPGMA is end')\n break\n weight_i = 1\n weight_j = 1\n if min_i in cluster_num.keys():\n weight_i = cluster_num[min_i]\n if min_j in cluster_num.keys():\n weight_j = cluster_num[min_j]\n for itr in range(num_of_element):\n if itr in ignoring_index:\n continue\n table_d[itr, new_cluster] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) 
/ float(weight_i + weight_j)\n table_d[new_cluster, itr] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n num_of_element += 1\n if len(ignoring_index) - num_of_element == 1:\n break\n img = Image.open('base.png')\n width, height = img.size\n draw = ImageDraw.Draw(img)\n top_padding = int(height * 0.01)\n bottom_padding = int(height * 0.01)\n right_padding = int(width * 0.01)\n left_padding = int(width * 0.01)\n label_height = 64\n main_frame_height = height - top_padding - bottom_padding - label_height\n height_scaler = main_frame_height / float(max(table_d.values()) / 2)\n interval = int((width - right_padding - left_padding) / (\n original_length + 1))\n font = ImageFont.truetype('arial.ttf', 32)\n ignoring_keys = []\n attension_values = []\n painted_number = 0\n cluster_x = dict()\n cluster_y = dict()\n cluster_stack = dict()\n for i in range(original_length):\n cluster_y[i] = top_padding + main_frame_height\n cluster_stack[i] = 0.0\n while True:\n key = next_key(cluster, original_length, ignoring_keys,\n attension_values)\n if key in attension_values:\n attension_values.remove(key)\n if key is None:\n break\n i, j = cluster[key]\n if not i in cluster_x.keys():\n cluster_x[i] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n if not j in cluster_x.keys():\n cluster_x[j] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n cluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)\n edge_height = int(table_d[i, j] * height_scaler / 2)\n cluster_y[key] = top_padding + main_frame_height - edge_height\n if not key in cluster_stack.keys():\n cluster_stack[key] = table_d[i, j] / 2\n draw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[\n key]), fill=(0, 0, 0), 
width=10)\n round_num = 3\n value = round(table_d[i, j] / 2 - cluster_stack[i], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[i] - int(size[0] * 1.05)\n value_y = int((cluster_y[i] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n value = round(table_d[i, j] / 2 - cluster_stack[j], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[j] - int(size[0] * 1.05)\n value_y = int((cluster_y[j] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n ignoring_keys.append(key)\n attension_values.append(key)\n font = ImageFont.truetype('arial.ttf', 64)\n alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n for i in range(original_length):\n text = alphabet[i]\n size = font.getsize(text)\n left_x = cluster_x[i] - size[0] / 2\n print(left_x)\n top_y = top_padding + main_frame_height\n draw.text((left_x, top_y), text, font=font, fill='#000000')\n img.save('out.png', 'PNG', quality=100, optimize=True)\n input('push enter')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef min_element(table_d, ignoring_index=None):\n min_i, min_j, min_e = 0, 0, max(table_d.values())\n for key in table_d.keys():\n if ignoring_index is not None:\n i, j = key\n if i in ignoring_index or j in ignoring_index:\n continue\n if min_e > table_d[key]:\n min_e = table_d[key]\n min_i, min_j = key\n return min_i, min_j, min_e\n\n\ndef to_dict(table):\n table_d = dict()\n for i in range(len(table)):\n for j in range(i):\n table_d[i, j] = table[i][j]\n table_d[j, i] = table[i][j]\n return table_d\n\n\ndef next_key(d, original_length, ignoring_keys=[], attension_values=[]):\n if len(ignoring_keys) == 0:\n return min(d.keys())\n save_key = None\n for k in d.keys():\n v_1, v_2 = d[k]\n if k in ignoring_keys:\n continue\n if not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in\n ignoring_keys or v_2 < original_length)):\n continue\n if save_key is None:\n save_key = k\n if v_1 in attension_values or v_2 in attension_values:\n return k\n return save_key\n\n\ndef main():\n with open('sample', 'r') as f:\n lines = f.readlines()\n table = []\n for l in lines:\n row = [float(i) for i in l.split(' ')]\n table.append(row)\n table_d = to_dict(table)\n num_of_element = len(table)\n cluster = dict()\n cluster_num = dict()\n ignoring_index = []\n original_length = len(table)\n while True:\n min_i, min_j, _ = min_element(table_d, ignoring_index)\n ignoring_index.append(min_i)\n ignoring_index.append(min_j)\n new_cluster = num_of_element\n cluster[new_cluster] = min_i, min_j\n cluster_num[new_cluster] = 0\n cluster_elements = 2\n if min_i in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_i]\n cluster_elements -= 1\n if min_j in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_j]\n cluster_elements -= 1\n cluster_num[new_cluster] += cluster_elements\n print(cluster_num)\n if max(cluster_num.values()) == original_length:\n print(cluster)\n print(cluster_num)\n print(table_d)\n print('UPGMA is end')\n 
break\n weight_i = 1\n weight_j = 1\n if min_i in cluster_num.keys():\n weight_i = cluster_num[min_i]\n if min_j in cluster_num.keys():\n weight_j = cluster_num[min_j]\n for itr in range(num_of_element):\n if itr in ignoring_index:\n continue\n table_d[itr, new_cluster] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n table_d[new_cluster, itr] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n num_of_element += 1\n if len(ignoring_index) - num_of_element == 1:\n break\n img = Image.open('base.png')\n width, height = img.size\n draw = ImageDraw.Draw(img)\n top_padding = int(height * 0.01)\n bottom_padding = int(height * 0.01)\n right_padding = int(width * 0.01)\n left_padding = int(width * 0.01)\n label_height = 64\n main_frame_height = height - top_padding - bottom_padding - label_height\n height_scaler = main_frame_height / float(max(table_d.values()) / 2)\n interval = int((width - right_padding - left_padding) / (\n original_length + 1))\n font = ImageFont.truetype('arial.ttf', 32)\n ignoring_keys = []\n attension_values = []\n painted_number = 0\n cluster_x = dict()\n cluster_y = dict()\n cluster_stack = dict()\n for i in range(original_length):\n cluster_y[i] = top_padding + main_frame_height\n cluster_stack[i] = 0.0\n while True:\n key = next_key(cluster, original_length, ignoring_keys,\n attension_values)\n if key in attension_values:\n attension_values.remove(key)\n if key is None:\n break\n i, j = cluster[key]\n if not i in cluster_x.keys():\n cluster_x[i] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n if not j in cluster_x.keys():\n cluster_x[j] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n cluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)\n edge_height = int(table_d[i, j] * height_scaler / 2)\n cluster_y[key] = top_padding + main_frame_height - edge_height\n if not key in cluster_stack.keys():\n 
cluster_stack[key] = table_d[i, j] / 2\n draw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[\n key]), fill=(0, 0, 0), width=10)\n round_num = 3\n value = round(table_d[i, j] / 2 - cluster_stack[i], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[i] - int(size[0] * 1.05)\n value_y = int((cluster_y[i] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n value = round(table_d[i, j] / 2 - cluster_stack[j], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[j] - int(size[0] * 1.05)\n value_y = int((cluster_y[j] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n ignoring_keys.append(key)\n attension_values.append(key)\n font = ImageFont.truetype('arial.ttf', 64)\n alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n for i in range(original_length):\n text = alphabet[i]\n size = font.getsize(text)\n left_x = cluster_x[i] - size[0] / 2\n print(left_x)\n top_y = top_padding + main_frame_height\n draw.text((left_x, top_y), text, font=font, fill='#000000')\n img.save('out.png', 'PNG', quality=100, optimize=True)\n input('push enter')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef min_element(table_d, ignoring_index=None):\n min_i, min_j, min_e = 0, 0, max(table_d.values())\n for key in table_d.keys():\n if ignoring_index is not None:\n i, j = key\n if i in ignoring_index or j in ignoring_index:\n continue\n if min_e > table_d[key]:\n min_e = table_d[key]\n min_i, min_j = key\n return min_i, min_j, min_e\n\n\ndef to_dict(table):\n table_d = dict()\n for i in range(len(table)):\n for j in range(i):\n table_d[i, j] = table[i][j]\n table_d[j, i] = table[i][j]\n return table_d\n\n\ndef next_key(d, original_length, ignoring_keys=[], attension_values=[]):\n if len(ignoring_keys) == 0:\n return min(d.keys())\n save_key = None\n for k in d.keys():\n v_1, v_2 = d[k]\n if k in ignoring_keys:\n continue\n if not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in\n ignoring_keys or v_2 < original_length)):\n continue\n if save_key is None:\n save_key = k\n if v_1 in attension_values or v_2 in attension_values:\n return k\n return save_key\n\n\ndef main():\n with open('sample', 'r') as f:\n lines = f.readlines()\n table = []\n for l in lines:\n row = [float(i) for i in l.split(' ')]\n table.append(row)\n table_d = to_dict(table)\n num_of_element = len(table)\n cluster = dict()\n cluster_num = dict()\n ignoring_index = []\n original_length = len(table)\n while True:\n min_i, min_j, _ = min_element(table_d, ignoring_index)\n ignoring_index.append(min_i)\n ignoring_index.append(min_j)\n new_cluster = num_of_element\n cluster[new_cluster] = min_i, min_j\n cluster_num[new_cluster] = 0\n cluster_elements = 2\n if min_i in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_i]\n cluster_elements -= 1\n if min_j in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_j]\n cluster_elements -= 1\n cluster_num[new_cluster] += cluster_elements\n print(cluster_num)\n if max(cluster_num.values()) == original_length:\n print(cluster)\n print(cluster_num)\n print(table_d)\n print('UPGMA is end')\n 
break\n weight_i = 1\n weight_j = 1\n if min_i in cluster_num.keys():\n weight_i = cluster_num[min_i]\n if min_j in cluster_num.keys():\n weight_j = cluster_num[min_j]\n for itr in range(num_of_element):\n if itr in ignoring_index:\n continue\n table_d[itr, new_cluster] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n table_d[new_cluster, itr] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n num_of_element += 1\n if len(ignoring_index) - num_of_element == 1:\n break\n img = Image.open('base.png')\n width, height = img.size\n draw = ImageDraw.Draw(img)\n top_padding = int(height * 0.01)\n bottom_padding = int(height * 0.01)\n right_padding = int(width * 0.01)\n left_padding = int(width * 0.01)\n label_height = 64\n main_frame_height = height - top_padding - bottom_padding - label_height\n height_scaler = main_frame_height / float(max(table_d.values()) / 2)\n interval = int((width - right_padding - left_padding) / (\n original_length + 1))\n font = ImageFont.truetype('arial.ttf', 32)\n ignoring_keys = []\n attension_values = []\n painted_number = 0\n cluster_x = dict()\n cluster_y = dict()\n cluster_stack = dict()\n for i in range(original_length):\n cluster_y[i] = top_padding + main_frame_height\n cluster_stack[i] = 0.0\n while True:\n key = next_key(cluster, original_length, ignoring_keys,\n attension_values)\n if key in attension_values:\n attension_values.remove(key)\n if key is None:\n break\n i, j = cluster[key]\n if not i in cluster_x.keys():\n cluster_x[i] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n if not j in cluster_x.keys():\n cluster_x[j] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n cluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)\n edge_height = int(table_d[i, j] * height_scaler / 2)\n cluster_y[key] = top_padding + main_frame_height - edge_height\n if not key in cluster_stack.keys():\n 
cluster_stack[key] = table_d[i, j] / 2\n draw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[\n key]), fill=(0, 0, 0), width=10)\n round_num = 3\n value = round(table_d[i, j] / 2 - cluster_stack[i], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[i] - int(size[0] * 1.05)\n value_y = int((cluster_y[i] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n value = round(table_d[i, j] / 2 - cluster_stack[j], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[j] - int(size[0] * 1.05)\n value_y = int((cluster_y[j] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n ignoring_keys.append(key)\n attension_values.append(key)\n font = ImageFont.truetype('arial.ttf', 64)\n alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n for i in range(original_length):\n text = alphabet[i]\n size = font.getsize(text)\n left_x = cluster_x[i] - size[0] / 2\n print(left_x)\n top_y = top_padding + main_frame_height\n draw.text((left_x, top_y), text, font=font, fill='#000000')\n img.save('out.png', 'PNG', quality=100, optimize=True)\n input('push enter')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from PIL import ImageFont, Image, ImageDraw\n\n\ndef min_element(table_d, ignoring_index=None):\n min_i, min_j, min_e = 0, 0, max(table_d.values())\n for key in table_d.keys():\n if ignoring_index is not None:\n i, j = key\n if i in ignoring_index or j in ignoring_index:\n continue\n if min_e > table_d[key]:\n min_e = table_d[key]\n min_i, min_j = key\n return min_i, min_j, min_e\n\n\ndef to_dict(table):\n table_d = dict()\n for i in range(len(table)):\n for j in range(i):\n table_d[i, j] = table[i][j]\n table_d[j, i] = table[i][j]\n return table_d\n\n\ndef next_key(d, original_length, ignoring_keys=[], attension_values=[]):\n if len(ignoring_keys) == 0:\n return min(d.keys())\n save_key = None\n for k in d.keys():\n v_1, v_2 = d[k]\n if k in ignoring_keys:\n continue\n if not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in\n ignoring_keys or v_2 < original_length)):\n continue\n if save_key is None:\n save_key = k\n if v_1 in attension_values or v_2 in attension_values:\n return k\n return save_key\n\n\ndef main():\n with open('sample', 'r') as f:\n lines = f.readlines()\n table = []\n for l in lines:\n row = [float(i) for i in l.split(' ')]\n table.append(row)\n table_d = to_dict(table)\n num_of_element = len(table)\n cluster = dict()\n cluster_num = dict()\n ignoring_index = []\n original_length = len(table)\n while True:\n min_i, min_j, _ = min_element(table_d, ignoring_index)\n ignoring_index.append(min_i)\n ignoring_index.append(min_j)\n new_cluster = num_of_element\n cluster[new_cluster] = min_i, min_j\n cluster_num[new_cluster] = 0\n cluster_elements = 2\n if min_i in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_i]\n cluster_elements -= 1\n if min_j in cluster_num.keys():\n cluster_num[new_cluster] += cluster_num[min_j]\n cluster_elements -= 1\n cluster_num[new_cluster] += cluster_elements\n print(cluster_num)\n if max(cluster_num.values()) == original_length:\n print(cluster)\n print(cluster_num)\n 
print(table_d)\n print('UPGMA is end')\n break\n weight_i = 1\n weight_j = 1\n if min_i in cluster_num.keys():\n weight_i = cluster_num[min_i]\n if min_j in cluster_num.keys():\n weight_j = cluster_num[min_j]\n for itr in range(num_of_element):\n if itr in ignoring_index:\n continue\n table_d[itr, new_cluster] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n table_d[new_cluster, itr] = (table_d[itr, min_i] * weight_i + \n table_d[itr, min_j] * weight_j) / float(weight_i + weight_j)\n num_of_element += 1\n if len(ignoring_index) - num_of_element == 1:\n break\n img = Image.open('base.png')\n width, height = img.size\n draw = ImageDraw.Draw(img)\n top_padding = int(height * 0.01)\n bottom_padding = int(height * 0.01)\n right_padding = int(width * 0.01)\n left_padding = int(width * 0.01)\n label_height = 64\n main_frame_height = height - top_padding - bottom_padding - label_height\n height_scaler = main_frame_height / float(max(table_d.values()) / 2)\n interval = int((width - right_padding - left_padding) / (\n original_length + 1))\n font = ImageFont.truetype('arial.ttf', 32)\n ignoring_keys = []\n attension_values = []\n painted_number = 0\n cluster_x = dict()\n cluster_y = dict()\n cluster_stack = dict()\n for i in range(original_length):\n cluster_y[i] = top_padding + main_frame_height\n cluster_stack[i] = 0.0\n while True:\n key = next_key(cluster, original_length, ignoring_keys,\n attension_values)\n if key in attension_values:\n attension_values.remove(key)\n if key is None:\n break\n i, j = cluster[key]\n if not i in cluster_x.keys():\n cluster_x[i] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n if not j in cluster_x.keys():\n cluster_x[j] = left_padding + interval * (painted_number + 1)\n painted_number += 1\n cluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)\n edge_height = int(table_d[i, j] * height_scaler / 2)\n cluster_y[key] = top_padding + main_frame_height - 
edge_height\n if not key in cluster_stack.keys():\n cluster_stack[key] = table_d[i, j] / 2\n draw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]\n ), fill=(0, 0, 0), width=10)\n draw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[\n key]), fill=(0, 0, 0), width=10)\n round_num = 3\n value = round(table_d[i, j] / 2 - cluster_stack[i], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[i] - int(size[0] * 1.05)\n value_y = int((cluster_y[i] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n value = round(table_d[i, j] / 2 - cluster_stack[j], round_num)\n value_text = str(value)\n size = font.getsize(value_text)\n value_x = cluster_x[j] - int(size[0] * 1.05)\n value_y = int((cluster_y[j] + cluster_y[key]) / 2)\n draw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\n ignoring_keys.append(key)\n attension_values.append(key)\n font = ImageFont.truetype('arial.ttf', 64)\n alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n for i in range(original_length):\n text = alphabet[i]\n size = font.getsize(text)\n left_x = cluster_x[i] - size[0] / 2\n print(left_x)\n top_y = top_padding + main_frame_height\n draw.text((left_x, top_y), text, font=font, fill='#000000')\n img.save('out.png', 'PNG', quality=100, optimize=True)\n input('push enter')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# coding: UTF-8\r\n\r\nfrom PIL import ImageFont,Image,ImageDraw\r\n\r\ndef min_element(table_d,ignoring_index = None):\r\n\tmin_i,min_j,min_e = 0,0,max(table_d.values())\r\n\tfor key in table_d.keys():\r\n\r\n\t\t# ignore if i in key or j in key\r\n\t\tif ignoring_index is not None:\r\n\t\t\ti,j = key\r\n\t\t\tif i in ignoring_index or j in ignoring_index:\r\n\t\t\t\tcontinue\r\n\r\n\t\tif min_e > table_d[key]:\r\n\t\t\tmin_e = table_d[key]\r\n\t\t\tmin_i ,min_j = key\r\n\r\n\treturn (min_i,min_j,min_e)\r\n\r\ndef to_dict(table):\r\n\ttable_d = dict()\r\n\tfor i in range(len(table)):\r\n\t\tfor j in range(i):\r\n\t\t\ttable_d[(i,j)] = table[i][j]\r\n\t\t\ttable_d[(j,i)] = table[i][j]\r\n\treturn table_d\r\n\r\n\r\ndef next_key(d,original_length,ignoring_keys=[],attension_values=[]):\r\n\tif len(ignoring_keys) == 0:\r\n\t\treturn min(d.keys())\r\n\tsave_key = None\r\n\tfor k in d.keys():\r\n\t\tv_1,v_2 = d[k]\r\n\t\tif k in ignoring_keys:\r\n\t\t\tcontinue\r\n\t\tif not ((v_1 in ignoring_keys or v_1 < original_length) and (v_2 in ignoring_keys or v_2 < original_length)):\r\n\t\t\tcontinue\r\n\r\n\t\tif save_key is None:\r\n\t\t\tsave_key = k\r\n\t\tif v_1 in attension_values or v_2 in attension_values:\r\n\t\t\treturn k\r\n\treturn save_key\r\n\r\n\r\n\r\ndef main():\r\n\t# in \"sample\" file\r\n\t#\r\n\t# 0 0.1 0.12 0.21\r\n\t# 0.1 0 0.04 0.13\r\n\t# 0.12 0.04 0 0.11\r\n\t# 0.21 0.13 0.11 0\r\n\r\n\twith open(\"sample\",\"r\") as f:\r\n\t\tlines = f.readlines()\r\n\r\n\ttable = []\r\n\tfor l in lines:\r\n\t\trow = [float(i) for i in l.split(\" \")]\r\n\t\ttable.append(row)\r\n\r\n\ttable_d = to_dict(table)\r\n\r\n\tnum_of_element = len(table)\r\n\r\n\tcluster = dict()\r\n\tcluster_num = dict()\r\n\tignoring_index = []\r\n\toriginal_length = len(table)\r\n\r\n\twhile True:\r\n\r\n\r\n\t\t# ignoring_index内にないもののなかで最小のものを選ぶ\r\n\t\tmin_i,min_j,_ = min_element(table_d,ignoring_index)\r\n\r\n\t\t# 
以降無視\r\n\t\tignoring_index.append(min_i)\r\n\t\tignoring_index.append(min_j)\r\n\r\n\t\tnew_cluster = num_of_element # i&j を新しい要素とする\r\n\r\n\t\tcluster[new_cluster] = (min_i,min_j)\r\n\t\tcluster_num[new_cluster] = 0\r\n\r\n\t\tcluster_elements = 2\r\n\t\tif min_i in cluster_num.keys():\r\n\t\t\tcluster_num[new_cluster] += cluster_num[min_i]\r\n\t\t\tcluster_elements -= 1\r\n\t\tif min_j in cluster_num.keys():\r\n\t\t\tcluster_num[new_cluster] += cluster_num[min_j]\r\n\t\t\tcluster_elements -= 1\r\n\t\tcluster_num[new_cluster] += cluster_elements\r\n\r\n\t\tprint(cluster_num)\r\n\t\tif max(cluster_num.values()) == original_length:\r\n\t\t\tprint(cluster)\r\n\t\t\tprint(cluster_num)\r\n\t\t\tprint(table_d)\r\n\t\t\tprint(\"UPGMA is end\")\r\n\t\t\tbreak\r\n\r\n\r\n\r\n\t\t# clusterが所有するオリジナルの要素数\r\n\t\tweight_i = 1\r\n\t\tweight_j = 1\r\n\t\tif min_i in cluster_num.keys():\r\n\t\t\tweight_i = cluster_num[min_i]\r\n\t\tif min_j in cluster_num.keys():\r\n\t\t\tweight_j = cluster_num[min_j]\r\n\r\n\t\tfor itr in range(num_of_element):\r\n\t\t\tif itr in ignoring_index:\r\n\t\t\t\tcontinue\r\n\t\t\t# テーブルの更新\r\n\t\t\ttable_d[(itr,new_cluster)] = (table_d[(itr,min_i)]*weight_i + table_d[(itr,min_j)]*weight_j) / float(weight_i + weight_j)\r\n\t\t\ttable_d[(new_cluster,itr)] = (table_d[(itr,min_i)]*weight_i + table_d[(itr,min_j)]*weight_j) / float(weight_i + weight_j)\r\n\r\n\t\tnum_of_element += 1\r\n\t\tif len(ignoring_index) - num_of_element == 1:\r\n\t\t\t# Once the remaining elements are two, the distance is obvious.\r\n\t\t\tbreak\r\n\r\n\t# イメージの操作\r\n\t# ref: https://ailog.site/2020/03/09/0309/\r\n\r\n\r\n\t# 以降は系統樹の作成\r\n\t# 元々白紙が用意されているものとする\r\n\timg = Image.open('base.png')\r\n\r\n\twidth,height = img.size\r\n\tdraw = ImageDraw.Draw(img)\r\n\r\n\t# padding\r\n\ttop_padding = int(height*0.01)\r\n\tbottom_padding = int(height*0.01)\r\n\tright_padding = int(width*0.01)\r\n\tleft_padding = int(width*0.01)\r\n\r\n\t# ラベルに使う領域の高さ\r\n\tlabel_height = 64\r\n\t# 
系統樹に使う高さ\r\n\tmain_frame_height = height - top_padding - bottom_padding - label_height\r\n\t# 高さと系統樹の高さをそろえるための倍率\r\n\theight_scaler = main_frame_height / float(max(table_d.values()) / 2 )\r\n\t# ラベル間の幅\r\n\tinterval = int((width - right_padding - left_padding) / (original_length+1))\r\n\r\n\r\n\r\n\tfont = ImageFont.truetype(\"arial.ttf\", 32) # font size is 64\r\n\r\n\tignoring_keys = []\r\n\tattension_values = []\r\n\tpainted_number = 0\r\n\tcluster_x = dict()\r\n\tcluster_y = dict()\r\n\tcluster_stack = dict()\r\n\r\n\tfor i in range(original_length):\r\n\t\tcluster_y[i] = top_padding + main_frame_height\r\n\t\tcluster_stack[i] = 0.\r\n\r\n\twhile True:\r\n\t\tkey = next_key(cluster,original_length,ignoring_keys,attension_values)\r\n\r\n\t\tif key in attension_values:\r\n\t\t\tattension_values.remove(key)\r\n\t\tif key is None:\r\n\t\t\tbreak\r\n\r\n\r\n\t\ti,j = cluster[key]\r\n\r\n\t\tif not i in cluster_x.keys():\r\n\t\t\tcluster_x[i] = left_padding + interval * (painted_number + 1)\r\n\t\t\tpainted_number += 1\r\n\t\tif not j in cluster_x.keys():\r\n\t\t\tcluster_x[j] = left_padding + interval * (painted_number + 1)\r\n\t\t\tpainted_number += 1\r\n\t\tcluster_x[key] = int((cluster_x[i] + cluster_x[j]) / 2)\r\n\t\tedge_height = int((table_d[(i,j)] * height_scaler / 2))\r\n\t\tcluster_y[key] = top_padding + main_frame_height - edge_height\r\n\r\n\t\tif not key in cluster_stack.keys():\r\n\t\t\tcluster_stack[key] = table_d[(i,j)] / 2\r\n\r\n\t\tdraw.line((cluster_x[i], cluster_y[i], cluster_x[i], cluster_y[key]), fill=(0, 0, 0), width=10)\r\n\t\tdraw.line((cluster_x[j], cluster_y[j], cluster_x[j], cluster_y[key]), fill=(0, 0, 0), width=10)\r\n\t\tdraw.line((cluster_x[i], cluster_y[key], cluster_x[j], cluster_y[key]), fill=(0, 0, 0), width=10)\r\n\r\n\t\tround_num = 3\r\n\t\t# i について\r\n\t\tvalue = round(table_d[(i,j)] / 2 - cluster_stack[i], round_num)\r\n\t\tvalue_text = str(value)\r\n\t\tsize = font.getsize(value_text)\r\n\t\tvalue_x = cluster_x[i] - 
int(size[0]*1.05)\r\n\t\tvalue_y = int((cluster_y[i] + cluster_y[key]) / 2)\r\n\t\tdraw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\r\n\r\n\t\t# jについて\r\n\t\tvalue = round(table_d[(i,j)] / 2 - cluster_stack[j], round_num)\r\n\t\tvalue_text = str(value)\r\n\t\tsize = font.getsize(value_text)\r\n\t\tvalue_x = cluster_x[j] - int(size[0]*1.05)\r\n\t\tvalue_y = int((cluster_y[j] + cluster_y[key]) / 2)\r\n\t\tdraw.text((value_x, value_y), value_text, font=font, fill='#0000ff')\r\n\r\n\r\n\t\tignoring_keys.append(key)\r\n\t\tattension_values.append(key)\r\n\r\n\r\n\tfont = ImageFont.truetype(\"arial.ttf\", 64) # font size is 64\r\n\talphabet = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\r\n\tfor i in range(original_length):\r\n\t\t# ラベル辞書を使えば数字以外も扱える\r\n\t\ttext = alphabet[i]\r\n\t\tsize = font.getsize(text)\r\n\r\n\t\tleft_x = cluster_x[i] - (size[0] / 2)\r\n\t\tprint(left_x)\r\n\t\ttop_y = top_padding + main_frame_height\r\n\r\n\t\t# 画像右下に'Sampleと表示' #FFFは文字色(白)\r\n\t\tdraw.text((left_x, top_y), text, font=font, fill='#000000')\r\n\r\n\r\n\r\n\r\n\r\n\t# ファイルを保存\r\n\timg.save('out.png', 'PNG', quality=100, optimize=True)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\tinput(\"push enter\")\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\tmain()",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
"""
app definition
"""
from django.apps import AppConfig
class CoopHtmlEditorAppConfig(AppConfig):
    """AppConfig for the coop_html_editor Django application."""

    # Dotted path of the application package Django should register.
    name = 'coop_html_editor'
    # Human-readable display name (e.g. shown in the Django admin).
    verbose_name = "Html Editor"
|
normal
|
{
"blob_id": "641cbe2f35925d070249820a2e3a4f1cdd1cf642",
"index": 8697,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass CoopHtmlEditorAppConfig(AppConfig):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass CoopHtmlEditorAppConfig(AppConfig):\n name = 'coop_html_editor'\n verbose_name = 'Html Editor'\n",
"step-4": "<mask token>\nfrom django.apps import AppConfig\n\n\nclass CoopHtmlEditorAppConfig(AppConfig):\n name = 'coop_html_editor'\n verbose_name = 'Html Editor'\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\napp definition\n\"\"\"\n\nfrom django.apps import AppConfig\n\n\nclass CoopHtmlEditorAppConfig(AppConfig):\n name = 'coop_html_editor'\n verbose_name = \"Html Editor\"\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def twenty():
    """Placeholder that intentionally does nothing and returns None."""
    return None
|
normal
|
{
"blob_id": "3727c4413cd69305c8ee8d02f4532629da7d25de",
"index": 7135,
"step-1": "<mask token>\n",
"step-2": "def twenty():\n pass\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from . import views
# URL namespace for this app (used when reversing, e.g. 'produce:list').
app_name = 'produce'

urlpatterns = [
	# Inbound SMS view:
	url(r'^sms/$', views.sms, name='sms'),

	# List and Detail Views:
	url(r'^list/', views.SeasonalView.as_view(), name='list'),
	url(r'^(?P<pk>[0-9]+)/$', views.ProduceDetailView.as_view(), name='produce_detail'),

	# CRUD for Produce Items:
	url(r'^submit/', views.submit_new_produce, name='submit'),
	# Static thank-you page rendered directly from the template.
	url(r'^thanks/', TemplateView.as_view(template_name='produce/thanks.html')),
	url(r'^(?P<pk>[0-9]+)/edit/$', views.ProduceUpdateView.as_view(), name='produce_edit'),
	url(r'^(?P<pk>[0-9]+)/delete/$', views.ProduceDeleteView.as_view(), name='produce_delete'),
]
|
normal
|
{
"blob_id": "f7d0d7dda955acd07b6da010d21dc5f02254e1ed",
"index": 5821,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'produce'\nurlpatterns = [url('^sms/$', views.sms, name='sms'), url('^list/', views.\n SeasonalView.as_view(), name='list'), url('^(?P<pk>[0-9]+)/$', views.\n ProduceDetailView.as_view(), name='produce_detail'), url('^submit/',\n views.submit_new_produce, name='submit'), url('^thanks/', TemplateView.\n as_view(template_name='produce/thanks.html')), url(\n '^(?P<pk>[0-9]+)/edit/$', views.ProduceUpdateView.as_view(), name=\n 'produce_edit'), url('^(?P<pk>[0-9]+)/delete/$', views.\n ProduceDeleteView.as_view(), name='produce_delete')]\n",
"step-3": "from django.conf.urls import patterns, include, url\nfrom django.views.generic import TemplateView\nfrom . import views\napp_name = 'produce'\nurlpatterns = [url('^sms/$', views.sms, name='sms'), url('^list/', views.\n SeasonalView.as_view(), name='list'), url('^(?P<pk>[0-9]+)/$', views.\n ProduceDetailView.as_view(), name='produce_detail'), url('^submit/',\n views.submit_new_produce, name='submit'), url('^thanks/', TemplateView.\n as_view(template_name='produce/thanks.html')), url(\n '^(?P<pk>[0-9]+)/edit/$', views.ProduceUpdateView.as_view(), name=\n 'produce_edit'), url('^(?P<pk>[0-9]+)/delete/$', views.\n ProduceDeleteView.as_view(), name='produce_delete')]\n",
"step-4": "from django.conf.urls import patterns, include, url\nfrom django.views.generic import TemplateView\n\nfrom . import views\n\napp_name = 'produce'\n\nurlpatterns = [\n\t# Inbound SMS view:\n\turl(r'^sms/$', views.sms, name='sms'),\n\n\t# List and Detail Views:\n\turl(r'^list/', views.SeasonalView.as_view(), name='list'),\n\turl(r'^(?P<pk>[0-9]+)/$', views.ProduceDetailView.as_view(), name='produce_detail'),\n\n\t# CRUD for Produce Items:\n\turl(r'^submit/', views.submit_new_produce, name='submit'),\n\turl(r'^thanks/', TemplateView.as_view(template_name='produce/thanks.html')),\n\turl(r'^(?P<pk>[0-9]+)/edit/$', views.ProduceUpdateView.as_view(), name='produce_edit'),\n\turl(r'^(?P<pk>[0-9]+)/delete/$', views.ProduceDeleteView.as_view(), name='produce_delete'),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Google Drive file ID of the file to download.
file_id = '0BwwA4oUTeiV1UVNwOHItT0xfa2M'
# Media-download request against the Drive API.
# NOTE(review): assumes drive_service is an authorized Drive API client and
# MediaIoBaseDownload comes from googleapiclient.http — confirm imports elsewhere.
request = drive_service.files().get_media(fileId=file_id)
# In-memory buffer that accumulates the downloaded bytes.
fh = io.BytesIO()
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
    # next_chunk() fetches one chunk; done becomes True on the last chunk.
    status, done = downloader.next_chunk()
    # Python 2 print statement: report cumulative download progress.
    print "Download %d%%." % int(status.progress() * 100)
|
normal
|
{
"blob_id": "6b3f634f3f0108e678d44ef9c89150f9fd116f76",
"index": 9471,
"step-1": "file_id = '0BwwA4oUTeiV1UVNwOHItT0xfa2M'\nrequest = drive_service.files().get_media(fileId=file_id)\nfh = io.BytesIO()\ndownloader = MediaIoBaseDownload(fh, request)\ndone = False\nwhile done is False:\n status, done = downloader.next_chunk()\n print \"Download %d%%.\" % int(status.progress() * 100)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Manager orchestrates the overall process of running web tests.
This includes finding tests to run, reading the test expectations,
starting the required helper servers, deciding the order and way to
run the tests, retrying failed tests, and collecting the test results,
including crash logs and mismatches with expectations.
The Manager object has a constructor and one main method called run.
"""
import fnmatch
import json
import logging
import os
import random
import signal
import sys
import time
from blinkpy.common import exit_codes
from blinkpy.common.path_finder import PathFinder
from blinkpy.tool import grammar
from blinkpy.web_tests.controllers.test_result_sink import CreateTestResultSink
from blinkpy.web_tests.controllers.web_test_finder import WebTestFinder
from blinkpy.web_tests.controllers.web_test_runner import WebTestRunner
from blinkpy.web_tests.layout_package import json_results_generator
from blinkpy.web_tests.models import test_expectations
from blinkpy.web_tests.models import test_failures
from blinkpy.web_tests.models import test_run_results
from blinkpy.web_tests.models.typ_types import ResultType
from blinkpy.web_tests.models.test_input import TestInput
_log = logging.getLogger(__name__)
TestExpectations = test_expectations.TestExpectations
class Manager(object):
"""A class for managing running a series of web tests."""
HTTP_SUBDIR = 'http'
PERF_SUBDIR = 'perf'
WEBSOCKET_SUBDIR = 'websocket'
ARCHIVED_RESULTS_LIMIT = 25
    def __init__(self, port, options, printer):
        """Initializes test runner data structures.
        Args:
            port: An object implementing platform-specific functionality.
            options: An options argument which contains command line options.
            printer: A Printer object to record updates to.
        """
        self._port = port
        self._filesystem = port.host.filesystem
        self._options = options
        self._printer = printer
        # Parsed TestExpectations; populated in run() unless --no-expectations.
        self._expectations = None
        # Lifecycle flags so _stop_servers() only stops servers that started.
        self._http_server_started = False
        self._wptserve_started = False
        self._websockets_server_started = False
        self._results_directory = self._port.results_directory()
        self._artifacts_directory = self._port.artifacts_directory()
        self._finder = WebTestFinder(self._port, self._options)
        self._path_finder = PathFinder(port.host.filesystem)
        # Result sink may be falsy; _clean_up_run() guards on it before closing.
        self._sink = CreateTestResultSink(self._port)
        self._runner = WebTestRunner(self._options, self._port, self._printer,
                                     self._results_directory,
                                     self._test_is_slow, self._sink)
    def run(self, args):
        """Runs the tests and return a RunDetails object with the results."""
        start_time = time.time()
        self._printer.write_update('Collecting tests ...')
        running_all_tests = False
        try:
            paths, all_test_names, running_all_tests = self._collect_tests(
                args)
        except IOError:
            # This is raised if --test-list doesn't exist
            return test_run_results.RunDetails(
                exit_code=exit_codes.NO_TESTS_EXIT_STATUS)
        # Keep only this process's share of the tests (sharding).
        test_names = self._finder.split_into_chunks(all_test_names)
        if self._options.order == 'natural':
            test_names.sort(key=self._port.test_key)
        elif self._options.order == 'random':
            # Sort first so the shuffle is reproducible for a given --seed.
            test_names.sort()
            random.Random(self._options.seed).shuffle(test_names)
        elif self._options.order == 'none':
            # Restore the test order to user specified order.
            # base.tests() may change the order as it returns tests in the
            # real, external/wpt, virtual order.
            if paths:
                test_names = self._restore_order(paths, test_names)
        if not self._options.no_expectations:
            self._printer.write_update('Parsing expectations ...')
            self._expectations = test_expectations.TestExpectations(self._port)
        tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
        self._printer.print_found(
            len(all_test_names), len(test_names), len(tests_to_run),
            self._options.repeat_each, self._options.iterations)
        # Check to make sure we're not skipping every test.
        if not tests_to_run:
            msg = 'No tests to run.'
            if self._options.zero_tests_executed_ok:
                _log.info(msg)
                # Keep executing to produce valid (but empty) results.
            else:
                _log.critical(msg)
                code = exit_codes.NO_TESTS_EXIT_STATUS
                return test_run_results.RunDetails(exit_code=code)
        exit_code = self._set_up_run(tests_to_run)
        if exit_code:
            return test_run_results.RunDetails(exit_code=exit_code)
        if self._options.num_retries is None:
            # If --test-list is passed, or if no test narrowing is specified,
            # default to 3 retries. Otherwise [e.g. if tests are being passed by
            # name], default to 0 retries.
            if self._options.test_list or len(paths) < len(test_names):
                self._options.num_retries = 3
            else:
                self._options.num_retries = 0
        should_retry_failures = self._options.num_retries > 0
        try:
            self._register_termination_handler()
            self._start_servers(tests_to_run)
            if self._options.watch:
                run_results = self._run_test_loop(tests_to_run, tests_to_skip)
            else:
                run_results = self._run_test_once(tests_to_run, tests_to_skip,
                                                  should_retry_failures)
            initial_results, all_retry_results = run_results
        finally:
            # Always tear down, even on interrupt or worker failure.
            _log.info("Finally stop servers and clean up")
            self._stop_servers()
            self._clean_up_run()
        if self._options.no_expectations:
            # With no expectations there is nothing to summarize against.
            return test_run_results.RunDetails(0, [], [], initial_results,
                                               all_retry_results)
        # Some crash logs can take a long time to be written out so look
        # for new logs after the test run finishes.
        self._printer.write_update('Looking for new crash logs ...')
        self._look_for_new_crash_logs(initial_results, start_time)
        for retry_attempt_results in all_retry_results:
            self._look_for_new_crash_logs(retry_attempt_results, start_time)
        self._printer.write_update('Summarizing results ...')
        summarized_full_results = test_run_results.summarize_results(
            self._port, self._options, self._expectations, initial_results,
            all_retry_results)
        summarized_failing_results = test_run_results.summarize_results(
            self._port,
            self._options,
            self._expectations,
            initial_results,
            all_retry_results,
            only_include_failing=True)
        run_histories = test_run_results.test_run_histories(
            self._options, self._expectations, initial_results,
            all_retry_results)
        # Exit code encodes the number of regressions, capped below.
        exit_code = summarized_failing_results['num_regressions']
        if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:
            _log.warning('num regressions (%d) exceeds max exit status (%d)',
                         exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)
            exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS
        if not self._options.dry_run:
            self._write_json_files(summarized_full_results,
                                   summarized_failing_results, initial_results,
                                   running_all_tests, run_histories)
            self._copy_results_html_file(self._artifacts_directory,
                                         'results.html')
            # An external signal interruption takes precedence over other codes.
            if (initial_results.interrupt_reason is
                    test_run_results.InterruptReason.EXTERNAL_SIGNAL):
                exit_code = exit_codes.INTERRUPTED_EXIT_STATUS
            else:
                if initial_results.interrupted:
                    exit_code = exit_codes.EARLY_EXIT_STATUS
                if (self._options.show_results
                        and (exit_code or initial_results.total_failures)):
                    self._port.show_results_html_file(
                        self._filesystem.join(self._artifacts_directory,
                                              'results.html'))
                self._printer.print_results(time.time() - start_time,
                                            initial_results)
        return test_run_results.RunDetails(exit_code, summarized_full_results,
                                           summarized_failing_results,
                                           initial_results, all_retry_results)
def _register_termination_handler(self):
if self._port.host.platform.is_win():
signum = signal.SIGBREAK
else:
signum = signal.SIGTERM
signal.signal(signum, self._on_termination)
def _on_termination(self, signum, _frame):
self._printer.write_update(
'Received signal "%s" (%d) in %d' %
(signal.strsignal(signum), signum, os.getpid()))
raise KeyboardInterrupt
    def _run_test_loop(self, tests_to_run, tests_to_skip):
        """Interactive --watch mode: rerun the tests until the user quits.

        Returns:
            The (initial_results, all_retry_results) of the final run.
        """
        # Don't show results in a new browser window because we're already
        # printing the link to diffs in the loop
        self._options.show_results = False
        while True:
            initial_results, all_retry_results = self._run_test_once(
                tests_to_run, tests_to_skip, should_retry_failures=False)
            for name in initial_results.failures_by_name:
                # Only the first failure per test is inspected for a text diff.
                failure = initial_results.failures_by_name[name][0]
                if isinstance(failure, test_failures.FailureTextMismatch):
                    full_test_path = self._filesystem.join(
                        self._artifacts_directory, name)
                    filename, _ = self._filesystem.splitext(full_test_path)
                    pretty_diff_path = 'file://' + filename + '-pretty-diff.html'
                    self._printer.writeln('Link to pretty diff:')
                    self._printer.writeln(pretty_diff_path + '\n')
            self._printer.writeln('Finished running tests')
            user_input = self._port.host.user.prompt(
                'Interactive watch mode: (q)uit (r)etry\n').lower()
            # Any input other than q/quit reruns the loop.
            if user_input == 'q' or user_input == 'quit':
                return (initial_results, all_retry_results)
    def _run_test_once(self, tests_to_run, tests_to_skip,
                       should_retry_failures):
        """Run the tests once, then retry unexpected failures if allowed.

        Returns:
            A pair (initial_results, all_retry_results) where
            all_retry_results is a list with one entry per retry attempt.
        """
        num_workers = int(
            self._port.num_workers(int(self._options.child_processes)))
        initial_results = self._run_tests(
            tests_to_run, tests_to_skip, self._options.repeat_each,
            self._options.iterations, num_workers)
        # Don't retry failures when interrupted by user or failures limit exception.
        should_retry_failures = (should_retry_failures
                                 and not initial_results.interrupted)
        tests_to_retry = self._tests_to_retry(initial_results)
        all_retry_results = []
        if should_retry_failures and tests_to_retry:
            for retry_attempt in range(1, self._options.num_retries + 1):
                if not tests_to_retry:
                    break
                _log.info('')
                _log.info(
                    'Retrying %s, attempt %d of %d...',
                    grammar.pluralize('unexpected failure',
                                      len(tests_to_retry)), retry_attempt,
                    self._options.num_retries)
                # Retries run each test exactly once with no skip set.
                retry_results = self._run_tests(
                    tests_to_retry,
                    tests_to_skip=set(),
                    repeat_each=1,
                    iterations=1,
                    num_workers=num_workers,
                    retry_attempt=retry_attempt)
                all_retry_results.append(retry_results)
                # Only tests that still fail unexpectedly are retried again.
                tests_to_retry = self._tests_to_retry(retry_results)
        return (initial_results, all_retry_results)
def _restore_order(self, paths, test_names):
original_test_names = list(test_names)
test_names = []
for path in paths:
for test in original_test_names:
if test.startswith(path) or fnmatch.fnmatch(test, path):
test_names.append(test)
test_names += list(set(original_test_names) - set(test_names))
return test_names
def _collect_tests(self, args):
return self._finder.find_tests(
args,
test_lists=self._options.test_list,
filter_files=self._options.isolated_script_test_filter_file,
fastest_percentile=self._options.fastest,
filters=self._options.isolated_script_test_filter)
def _is_http_test(self, test):
return (
test.startswith(self.HTTP_SUBDIR + self._port.TEST_PATH_SEPARATOR)
or self._is_websocket_test(test) or self._port.TEST_PATH_SEPARATOR
+ self.HTTP_SUBDIR + self._port.TEST_PATH_SEPARATOR in test)
def _is_websocket_test(self, test):
if self._port.should_use_wptserve(test):
return False
return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test
def _http_tests(self, test_names):
return set(test for test in test_names if self._is_http_test(test))
def _is_perf_test(self, test):
return (self.PERF_SUBDIR == test
or (self.PERF_SUBDIR + self._port.TEST_PATH_SEPARATOR) in test)
def _prepare_lists(self, paths, test_names):
tests_to_skip = self._finder.skip_tests(paths, test_names,
self._expectations)
tests_to_run = [
test for test in test_names if test not in tests_to_skip
]
return tests_to_run, tests_to_skip
def _test_input_for_file(self, test_file, retry_attempt):
return TestInput(
test_file,
self._options.slow_timeout_ms
if self._test_is_slow(test_file) else self._options.timeout_ms,
self._test_requires_lock(test_file),
retry_attempt=retry_attempt)
def _test_requires_lock(self, test_file):
"""Returns True if the test needs to be locked when running multiple
instances of this test runner.
Perf tests are locked because heavy load caused by running other
tests in parallel might cause some of them to time out.
"""
return self._is_perf_test(test_file)
def _test_is_slow(self, test_file):
if not self._expectations:
return False
is_slow_test = self._expectations.get_expectations(
test_file).is_slow_test
return is_slow_test or self._port.is_slow_wpt_test(test_file)
def _needs_servers(self, test_names):
return any(
self._is_http_test(test_name) for test_name in test_names)
    def _set_up_run(self, test_names):
        """Check the build, prepare the results directory, and set up the port.

        Returns:
            exit_codes.OK_EXIT_STATUS on success, or a non-zero exit code
            from the failing setup step.
        """
        self._printer.write_update('Checking build ...')
        if self._options.build:
            exit_code = self._port.check_build(
                self._needs_servers(test_names), self._printer)
            if exit_code:
                _log.error('Build check failed')
                return exit_code
        if self._options.clobber_old_results:
            self._port.clobber_old_results()
        elif self._filesystem.exists(self._artifacts_directory):
            # Cap the number of archived result folders before archiving.
            self._port.limit_archived_results_count()
            # Rename the existing results folder for archiving.
            self._port.rename_results_folder()
        # Create the output directory if it doesn't already exist.
        self._port.host.filesystem.maybe_make_directory(
            self._artifacts_directory)
        exit_code = self._port.setup_test_run()
        if exit_code:
            _log.error('Build setup failed')
            return exit_code
        # Check that the system dependencies (themes, fonts, ...) are correct.
        if not self._options.nocheck_sys_deps:
            self._printer.write_update('Checking system dependencies ...')
            exit_code = self._port.check_sys_deps()
            if exit_code:
                return exit_code
        return exit_codes.OK_EXIT_STATUS
def _run_tests(self,
tests_to_run,
tests_to_skip,
repeat_each,
iterations,
num_workers,
retry_attempt=0):
test_inputs = []
for _ in range(iterations):
for test in tests_to_run:
for _ in range(repeat_each):
test_inputs.append(
self._test_input_for_file(test, retry_attempt))
return self._runner.run_tests(self._expectations, test_inputs,
tests_to_skip, num_workers,
retry_attempt)
    def _start_servers(self, tests_to_run):
        """Start only the helper servers the tests in tests_to_run need.

        Each started server sets a flag so _stop_servers() tears down only
        what actually started.
        """
        if any(self._port.is_wpt_test(test) for test in tests_to_run):
            self._printer.write_update('Starting WPTServe ...')
            self._port.start_wptserve()
            self._wptserve_started = True
        if (self._port.requires_http_server()
                or any(self._is_http_test(test) for test in tests_to_run)):
            self._printer.write_update('Starting HTTP server ...')
            self._port.start_http_server(
                additional_dirs={},
                number_of_drivers=self._options.max_locked_shards)
            self._http_server_started = True
        if any(self._is_websocket_test(test) for test in tests_to_run):
            self._printer.write_update('Starting WebSocket server ...')
            self._port.start_websocket_server()
            self._websockets_server_started = True
def _stop_servers(self):
if self._wptserve_started:
self._printer.write_update('Stopping WPTServe ...')
self._wptserve_started = False
self._port.stop_wptserve()
if self._http_server_started:
self._printer.write_update('Stopping HTTP server ...')
self._http_server_started = False
self._port.stop_http_server()
if self._websockets_server_started:
self._printer.write_update('Stopping WebSocket server ...')
self._websockets_server_started = False
self._port.stop_websocket_server()
def _clean_up_run(self):
_log.debug('Flushing stdout')
sys.stdout.flush()
_log.debug('Flushing stderr')
sys.stderr.flush()
_log.debug('Cleaning up port')
self._port.clean_up_test_run()
if self._sink:
_log.debug('Closing sink')
self._sink.close()
def _look_for_new_crash_logs(self, run_results, start_time):
"""Looks for and writes new crash logs, at the end of the test run.
Since crash logs can take a long time to be written out if the system is
under stress, do a second pass at the end of the test run.
Args:
run_results: The results of the test run.
start_time: Time the tests started at. We're looking for crash
logs after that time.
"""
crashed_processes = []
test_to_crash_failure = {}
# reset static variables for Failure type classes
test_failures.AbstractTestResultType.port = self._port
test_failures.AbstractTestResultType.result_directory = self._results_directory
test_failures.AbstractTestResultType.filesystem = self._filesystem
for test, result in run_results.unexpected_results_by_name.items():
if result.type != ResultType.Crash:
continue
for failure in result.failures:
if (not isinstance(failure, test_failures.FailureCrash)
or failure.has_log):
continue
crashed_processes.append(
[test, failure.process_name, failure.pid])
test_to_crash_failure[test] = failure
sample_files = self._port.look_for_new_samples(crashed_processes,
start_time) or {}
for test, sample_file in sample_files.items():
test_failures.AbstractTestResultType.test_name = test
test_result = run_results.unexpected_results_by_name[test]
artifact_relative_path = self._port.output_filename(
test, test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')
artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()
artifact_abspath = self._filesystem.join(self._results_directory,
artifacts_sub_dir,
artifact_relative_path)
self._filesystem.maybe_make_directory(
self._filesystem.dirname(artifact_abspath))
self._filesystem.copyfile(sample_file, artifact_abspath)
test_result.artifacts.AddArtifact(
'sample_file',
self._filesystem.join(artifacts_sub_dir,
artifact_relative_path))
new_crash_logs = self._port.look_for_new_crash_logs(
crashed_processes, start_time) or {}
for test, (crash_log, crash_site) in new_crash_logs.items():
test_failures.AbstractTestResultType.test_name = test
failure.crash_log = crash_log
failure.has_log = self._port.output_contains_sanitizer_messages(
failure.crash_log)
test_result = run_results.unexpected_results_by_name[test]
test_result.crash_site = crash_site
test_to_crash_failure[test].create_artifacts(
test_result.artifacts, force_overwrite=True)
def _tests_to_retry(self, run_results):
# TODO(ojan): This should also check that result.type != test_expectations.MISSING
# since retrying missing expectations is silly. But that's a bit tricky since we
# only consider the last retry attempt for the count of unexpected regressions.
return [
result.test_name
for result in run_results.unexpected_results_by_name.values()
if result.type != ResultType.Pass
]
def _write_json_files(self, summarized_full_results,
summarized_failing_results, initial_results,
running_all_tests, run_histories):
_log.debug("Writing JSON files in %s.", self._artifacts_directory)
# FIXME: Upload stats.json to the server and delete times_ms.
times_trie = json_results_generator.test_timings_trie(
initial_results.results_by_name.values())
times_json_path = self._filesystem.join(self._artifacts_directory,
'times_ms.json')
json_results_generator.write_json(self._filesystem, times_trie,
times_json_path)
# Save out the times data so we can use it for --fastest in the future.
if running_all_tests:
bot_test_times_path = self._port.bot_test_times_path()
self._filesystem.maybe_make_directory(
self._filesystem.dirname(bot_test_times_path))
json_results_generator.write_json(self._filesystem, times_trie,
bot_test_times_path)
stats_trie = self._stats_trie(initial_results)
stats_path = self._filesystem.join(self._artifacts_directory,
'stats.json')
self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))
full_results_path = self._filesystem.join(self._artifacts_directory,
'full_results.json')
json_results_generator.write_json(
self._filesystem, summarized_full_results, full_results_path)
full_results_jsonp_path = self._filesystem.join(
self._artifacts_directory, 'full_results_jsonp.js')
json_results_generator.write_json(
self._filesystem,
summarized_full_results,
full_results_jsonp_path,
callback='ADD_FULL_RESULTS')
failing_results_path = self._filesystem.join(self._artifacts_directory,
'failing_results.json')
# We write failing_results.json out as jsonp because we need to load it
# from a file url for results.html and Chromium doesn't allow that.
json_results_generator.write_json(
self._filesystem,
summarized_failing_results,
failing_results_path,
callback='ADD_RESULTS')
if self._options.json_test_results:
json_results_generator.write_json(self._filesystem,
summarized_full_results,
self._options.json_test_results)
if self._options.write_run_histories_to:
json_results_generator.write_json(
self._filesystem, run_histories,
self._options.write_run_histories_to)
_log.debug('Finished writing JSON files.')
def _copy_results_html_file(self, destination_dir, filename):
"""Copies a file from the template directory to the results directory."""
files_to_copy = [filename, filename + ".version"]
template_dir = self._path_finder.path_from_blink_tools(
'blinkpy', 'web_tests')
for filename in files_to_copy:
source_path = self._filesystem.join(template_dir, filename)
destination_path = self._filesystem.join(destination_dir, filename)
# Note that the results.html template file won't exist when
# we're using a MockFileSystem during unit tests, so make sure
# it exists before we try to copy it.
if self._filesystem.exists(source_path):
self._filesystem.copyfile(source_path, destination_path)
def _stats_trie(self, initial_results):
    """Builds a trie of per-test timing stats, keyed by test path.

    Each non-skipped test maps to a 'results' tuple of
    (worker number, test number, pid, test run time in ms,
    total run time in ms).
    """

    def _worker_number(worker_name):
        # Worker names look like 'worker/<n>'; -1 when no worker is set.
        return -1 if not worker_name else int(worker_name.split('/')[1])

    # Phase 1: collect one stats entry per test name (later results for
    # the same name overwrite earlier ones, as in a plain dict).
    per_test_stats = {
        result.test_name: {
            'results': (_worker_number(result.worker_name),
                        result.test_number,
                        result.pid,
                        int(result.test_run_time * 1000),
                        int(result.total_run_time * 1000))
        }
        for result in initial_results.results_by_name.values()
        if result.type != ResultType.Skip
    }
    # Phase 2: fold the flat mapping into a path trie.
    trie = {}
    for test_name, entry in per_test_stats.items():
        json_results_generator.add_path_to_trie(test_name, entry, trie)
    return trie
|
normal
|
{
"blob_id": "08b57c00beb8dfedfee1bc032b8c281d7a151931",
"index": 8033,
"step-1": "<mask token>\n\n\nclass Manager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, port, options, printer):\n \"\"\"Initializes test runner data structures.\n\n Args:\n port: An object implementing platform-specific functionality.\n options: An options argument which contains command line options.\n printer: A Printer object to record updates to.\n \"\"\"\n self._port = port\n self._filesystem = port.host.filesystem\n self._options = options\n self._printer = printer\n self._expectations = None\n self._http_server_started = False\n self._wptserve_started = False\n self._websockets_server_started = False\n self._results_directory = self._port.results_directory()\n self._artifacts_directory = self._port.artifacts_directory()\n self._finder = WebTestFinder(self._port, self._options)\n self._path_finder = PathFinder(port.host.filesystem)\n self._sink = CreateTestResultSink(self._port)\n self._runner = WebTestRunner(self._options, self._port, self.\n _printer, self._results_directory, self._test_is_slow, self._sink)\n\n def run(self, args):\n \"\"\"Runs the tests and return a RunDetails object with the results.\"\"\"\n start_time = time.time()\n self._printer.write_update('Collecting tests ...')\n running_all_tests = False\n try:\n paths, all_test_names, running_all_tests = self._collect_tests(args\n )\n except IOError:\n return test_run_results.RunDetails(exit_code=exit_codes.\n NO_TESTS_EXIT_STATUS)\n test_names = self._finder.split_into_chunks(all_test_names)\n if self._options.order == 'natural':\n test_names.sort(key=self._port.test_key)\n elif self._options.order == 'random':\n test_names.sort()\n random.Random(self._options.seed).shuffle(test_names)\n elif self._options.order == 'none':\n if paths:\n test_names = self._restore_order(paths, test_names)\n if not self._options.no_expectations:\n self._printer.write_update('Parsing expectations ...')\n self._expectations = 
test_expectations.TestExpectations(self._port)\n tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)\n self._printer.print_found(len(all_test_names), len(test_names), len\n (tests_to_run), self._options.repeat_each, self._options.iterations\n )\n if not tests_to_run:\n msg = 'No tests to run.'\n if self._options.zero_tests_executed_ok:\n _log.info(msg)\n else:\n _log.critical(msg)\n code = exit_codes.NO_TESTS_EXIT_STATUS\n return test_run_results.RunDetails(exit_code=code)\n exit_code = self._set_up_run(tests_to_run)\n if exit_code:\n return test_run_results.RunDetails(exit_code=exit_code)\n if self._options.num_retries is None:\n if self._options.test_list or len(paths) < len(test_names):\n self._options.num_retries = 3\n else:\n self._options.num_retries = 0\n should_retry_failures = self._options.num_retries > 0\n try:\n self._register_termination_handler()\n self._start_servers(tests_to_run)\n if self._options.watch:\n run_results = self._run_test_loop(tests_to_run, tests_to_skip)\n else:\n run_results = self._run_test_once(tests_to_run,\n tests_to_skip, should_retry_failures)\n initial_results, all_retry_results = run_results\n finally:\n _log.info('Finally stop servers and clean up')\n self._stop_servers()\n self._clean_up_run()\n if self._options.no_expectations:\n return test_run_results.RunDetails(0, [], [], initial_results,\n all_retry_results)\n self._printer.write_update('Looking for new crash logs ...')\n self._look_for_new_crash_logs(initial_results, start_time)\n for retry_attempt_results in all_retry_results:\n self._look_for_new_crash_logs(retry_attempt_results, start_time)\n self._printer.write_update('Summarizing results ...')\n summarized_full_results = test_run_results.summarize_results(self.\n _port, self._options, self._expectations, initial_results,\n all_retry_results)\n summarized_failing_results = test_run_results.summarize_results(self\n ._port, self._options, self._expectations, initial_results,\n all_retry_results, 
only_include_failing=True)\n run_histories = test_run_results.test_run_histories(self._options,\n self._expectations, initial_results, all_retry_results)\n exit_code = summarized_failing_results['num_regressions']\n if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:\n _log.warning('num regressions (%d) exceeds max exit status (%d)',\n exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)\n exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS\n if not self._options.dry_run:\n self._write_json_files(summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories)\n self._copy_results_html_file(self._artifacts_directory,\n 'results.html')\n if (initial_results.interrupt_reason is test_run_results.\n InterruptReason.EXTERNAL_SIGNAL):\n exit_code = exit_codes.INTERRUPTED_EXIT_STATUS\n else:\n if initial_results.interrupted:\n exit_code = exit_codes.EARLY_EXIT_STATUS\n if self._options.show_results and (exit_code or\n initial_results.total_failures):\n self._port.show_results_html_file(self._filesystem.join\n (self._artifacts_directory, 'results.html'))\n self._printer.print_results(time.time() - start_time,\n initial_results)\n return test_run_results.RunDetails(exit_code,\n summarized_full_results, summarized_failing_results,\n initial_results, all_retry_results)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _restore_order(self, paths, test_names):\n original_test_names = list(test_names)\n test_names = []\n for path in paths:\n for test in original_test_names:\n if test.startswith(path) or fnmatch.fnmatch(test, path):\n test_names.append(test)\n test_names += list(set(original_test_names) - set(test_names))\n return test_names\n <mask token>\n\n def _is_http_test(self, test):\n return (test.startswith(self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR) or self._is_websocket_test(test) or self.\n _port.TEST_PATH_SEPARATOR + self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def 
_is_websocket_test(self, test):\n if self._port.should_use_wptserve(test):\n return False\n return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test\n\n def _http_tests(self, test_names):\n return set(test for test in test_names if self._is_http_test(test))\n\n def _is_perf_test(self, test):\n return (self.PERF_SUBDIR == test or self.PERF_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _prepare_lists(self, paths, test_names):\n tests_to_skip = self._finder.skip_tests(paths, test_names, self.\n _expectations)\n tests_to_run = [test for test in test_names if test not in\n tests_to_skip]\n return tests_to_run, tests_to_skip\n\n def _test_input_for_file(self, test_file, retry_attempt):\n return TestInput(test_file, self._options.slow_timeout_ms if self.\n _test_is_slow(test_file) else self._options.timeout_ms, self.\n _test_requires_lock(test_file), retry_attempt=retry_attempt)\n <mask token>\n <mask token>\n\n def _needs_servers(self, test_names):\n return any(self._is_http_test(test_name) for test_name in test_names)\n <mask token>\n\n def _run_tests(self, tests_to_run, tests_to_skip, repeat_each,\n iterations, num_workers, retry_attempt=0):\n test_inputs = []\n for _ in range(iterations):\n for test in tests_to_run:\n for _ in range(repeat_each):\n test_inputs.append(self._test_input_for_file(test,\n retry_attempt))\n return self._runner.run_tests(self._expectations, test_inputs,\n tests_to_skip, num_workers, retry_attempt)\n <mask token>\n <mask token>\n <mask token>\n\n def _look_for_new_crash_logs(self, run_results, start_time):\n \"\"\"Looks for and writes new crash logs, at the end of the test run.\n\n Since crash logs can take a long time to be written out if the system is\n under stress, do a second pass at the end of the test run.\n\n Args:\n run_results: The results of the test run.\n start_time: Time the tests started at. 
We're looking for crash\n logs after that time.\n \"\"\"\n crashed_processes = []\n test_to_crash_failure = {}\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = (self.\n _results_directory)\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if not isinstance(failure, test_failures.FailureCrash\n ) or failure.has_log:\n continue\n crashed_processes.append([test, failure.process_name,\n failure.pid])\n test_to_crash_failure[test] = failure\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(test,\n test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self.\n _results_directory, artifacts_sub_dir, artifact_relative_path)\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact('sample_file', self.\n _filesystem.join(artifacts_sub_dir, artifact_relative_path))\n new_crash_logs = self._port.look_for_new_crash_logs(crashed_processes,\n start_time) or {}\n for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n test_to_crash_failure[test].create_artifacts(test_result.\n artifacts, 
force_overwrite=True)\n <mask token>\n\n def _write_json_files(self, summarized_full_results,\n summarized_failing_results, initial_results, running_all_tests,\n run_histories):\n _log.debug('Writing JSON files in %s.', self._artifacts_directory)\n times_trie = json_results_generator.test_timings_trie(initial_results\n .results_by_name.values())\n times_json_path = self._filesystem.join(self._artifacts_directory,\n 'times_ms.json')\n json_results_generator.write_json(self._filesystem, times_trie,\n times_json_path)\n if running_all_tests:\n bot_test_times_path = self._port.bot_test_times_path()\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n bot_test_times_path))\n json_results_generator.write_json(self._filesystem, times_trie,\n bot_test_times_path)\n stats_trie = self._stats_trie(initial_results)\n stats_path = self._filesystem.join(self._artifacts_directory,\n 'stats.json')\n self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))\n full_results_path = self._filesystem.join(self._artifacts_directory,\n 'full_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_path)\n full_results_jsonp_path = self._filesystem.join(self.\n _artifacts_directory, 'full_results_jsonp.js')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_jsonp_path, callback=\n 'ADD_FULL_RESULTS')\n failing_results_path = self._filesystem.join(self.\n _artifacts_directory, 'failing_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_failing_results, failing_results_path, callback=\n 'ADD_RESULTS')\n if self._options.json_test_results:\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, self._options.json_test_results)\n if self._options.write_run_histories_to:\n json_results_generator.write_json(self._filesystem,\n run_histories, self._options.write_run_histories_to)\n _log.debug('Finished writing JSON 
files.')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Manager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, port, options, printer):\n \"\"\"Initializes test runner data structures.\n\n Args:\n port: An object implementing platform-specific functionality.\n options: An options argument which contains command line options.\n printer: A Printer object to record updates to.\n \"\"\"\n self._port = port\n self._filesystem = port.host.filesystem\n self._options = options\n self._printer = printer\n self._expectations = None\n self._http_server_started = False\n self._wptserve_started = False\n self._websockets_server_started = False\n self._results_directory = self._port.results_directory()\n self._artifacts_directory = self._port.artifacts_directory()\n self._finder = WebTestFinder(self._port, self._options)\n self._path_finder = PathFinder(port.host.filesystem)\n self._sink = CreateTestResultSink(self._port)\n self._runner = WebTestRunner(self._options, self._port, self.\n _printer, self._results_directory, self._test_is_slow, self._sink)\n\n def run(self, args):\n \"\"\"Runs the tests and return a RunDetails object with the results.\"\"\"\n start_time = time.time()\n self._printer.write_update('Collecting tests ...')\n running_all_tests = False\n try:\n paths, all_test_names, running_all_tests = self._collect_tests(args\n )\n except IOError:\n return test_run_results.RunDetails(exit_code=exit_codes.\n NO_TESTS_EXIT_STATUS)\n test_names = self._finder.split_into_chunks(all_test_names)\n if self._options.order == 'natural':\n test_names.sort(key=self._port.test_key)\n elif self._options.order == 'random':\n test_names.sort()\n random.Random(self._options.seed).shuffle(test_names)\n elif self._options.order == 'none':\n if paths:\n test_names = self._restore_order(paths, test_names)\n if not self._options.no_expectations:\n self._printer.write_update('Parsing expectations ...')\n self._expectations = 
test_expectations.TestExpectations(self._port)\n tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)\n self._printer.print_found(len(all_test_names), len(test_names), len\n (tests_to_run), self._options.repeat_each, self._options.iterations\n )\n if not tests_to_run:\n msg = 'No tests to run.'\n if self._options.zero_tests_executed_ok:\n _log.info(msg)\n else:\n _log.critical(msg)\n code = exit_codes.NO_TESTS_EXIT_STATUS\n return test_run_results.RunDetails(exit_code=code)\n exit_code = self._set_up_run(tests_to_run)\n if exit_code:\n return test_run_results.RunDetails(exit_code=exit_code)\n if self._options.num_retries is None:\n if self._options.test_list or len(paths) < len(test_names):\n self._options.num_retries = 3\n else:\n self._options.num_retries = 0\n should_retry_failures = self._options.num_retries > 0\n try:\n self._register_termination_handler()\n self._start_servers(tests_to_run)\n if self._options.watch:\n run_results = self._run_test_loop(tests_to_run, tests_to_skip)\n else:\n run_results = self._run_test_once(tests_to_run,\n tests_to_skip, should_retry_failures)\n initial_results, all_retry_results = run_results\n finally:\n _log.info('Finally stop servers and clean up')\n self._stop_servers()\n self._clean_up_run()\n if self._options.no_expectations:\n return test_run_results.RunDetails(0, [], [], initial_results,\n all_retry_results)\n self._printer.write_update('Looking for new crash logs ...')\n self._look_for_new_crash_logs(initial_results, start_time)\n for retry_attempt_results in all_retry_results:\n self._look_for_new_crash_logs(retry_attempt_results, start_time)\n self._printer.write_update('Summarizing results ...')\n summarized_full_results = test_run_results.summarize_results(self.\n _port, self._options, self._expectations, initial_results,\n all_retry_results)\n summarized_failing_results = test_run_results.summarize_results(self\n ._port, self._options, self._expectations, initial_results,\n all_retry_results, 
only_include_failing=True)\n run_histories = test_run_results.test_run_histories(self._options,\n self._expectations, initial_results, all_retry_results)\n exit_code = summarized_failing_results['num_regressions']\n if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:\n _log.warning('num regressions (%d) exceeds max exit status (%d)',\n exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)\n exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS\n if not self._options.dry_run:\n self._write_json_files(summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories)\n self._copy_results_html_file(self._artifacts_directory,\n 'results.html')\n if (initial_results.interrupt_reason is test_run_results.\n InterruptReason.EXTERNAL_SIGNAL):\n exit_code = exit_codes.INTERRUPTED_EXIT_STATUS\n else:\n if initial_results.interrupted:\n exit_code = exit_codes.EARLY_EXIT_STATUS\n if self._options.show_results and (exit_code or\n initial_results.total_failures):\n self._port.show_results_html_file(self._filesystem.join\n (self._artifacts_directory, 'results.html'))\n self._printer.print_results(time.time() - start_time,\n initial_results)\n return test_run_results.RunDetails(exit_code,\n summarized_full_results, summarized_failing_results,\n initial_results, all_retry_results)\n <mask token>\n <mask token>\n\n def _run_test_loop(self, tests_to_run, tests_to_skip):\n self._options.show_results = False\n while True:\n initial_results, all_retry_results = self._run_test_once(\n tests_to_run, tests_to_skip, should_retry_failures=False)\n for name in initial_results.failures_by_name:\n failure = initial_results.failures_by_name[name][0]\n if isinstance(failure, test_failures.FailureTextMismatch):\n full_test_path = self._filesystem.join(self.\n _artifacts_directory, name)\n filename, _ = self._filesystem.splitext(full_test_path)\n pretty_diff_path = ('file://' + filename +\n '-pretty-diff.html')\n self._printer.writeln('Link to pretty diff:')\n 
self._printer.writeln(pretty_diff_path + '\\n')\n self._printer.writeln('Finished running tests')\n user_input = self._port.host.user.prompt(\n 'Interactive watch mode: (q)uit (r)etry\\n').lower()\n if user_input == 'q' or user_input == 'quit':\n return initial_results, all_retry_results\n\n def _run_test_once(self, tests_to_run, tests_to_skip, should_retry_failures\n ):\n num_workers = int(self._port.num_workers(int(self._options.\n child_processes)))\n initial_results = self._run_tests(tests_to_run, tests_to_skip, self\n ._options.repeat_each, self._options.iterations, num_workers)\n should_retry_failures = (should_retry_failures and not\n initial_results.interrupted)\n tests_to_retry = self._tests_to_retry(initial_results)\n all_retry_results = []\n if should_retry_failures and tests_to_retry:\n for retry_attempt in range(1, self._options.num_retries + 1):\n if not tests_to_retry:\n break\n _log.info('')\n _log.info('Retrying %s, attempt %d of %d...', grammar.\n pluralize('unexpected failure', len(tests_to_retry)),\n retry_attempt, self._options.num_retries)\n retry_results = self._run_tests(tests_to_retry,\n tests_to_skip=set(), repeat_each=1, iterations=1,\n num_workers=num_workers, retry_attempt=retry_attempt)\n all_retry_results.append(retry_results)\n tests_to_retry = self._tests_to_retry(retry_results)\n return initial_results, all_retry_results\n\n def _restore_order(self, paths, test_names):\n original_test_names = list(test_names)\n test_names = []\n for path in paths:\n for test in original_test_names:\n if test.startswith(path) or fnmatch.fnmatch(test, path):\n test_names.append(test)\n test_names += list(set(original_test_names) - set(test_names))\n return test_names\n <mask token>\n\n def _is_http_test(self, test):\n return (test.startswith(self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR) or self._is_websocket_test(test) or self.\n _port.TEST_PATH_SEPARATOR + self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def 
_is_websocket_test(self, test):\n if self._port.should_use_wptserve(test):\n return False\n return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test\n\n def _http_tests(self, test_names):\n return set(test for test in test_names if self._is_http_test(test))\n\n def _is_perf_test(self, test):\n return (self.PERF_SUBDIR == test or self.PERF_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _prepare_lists(self, paths, test_names):\n tests_to_skip = self._finder.skip_tests(paths, test_names, self.\n _expectations)\n tests_to_run = [test for test in test_names if test not in\n tests_to_skip]\n return tests_to_run, tests_to_skip\n\n def _test_input_for_file(self, test_file, retry_attempt):\n return TestInput(test_file, self._options.slow_timeout_ms if self.\n _test_is_slow(test_file) else self._options.timeout_ms, self.\n _test_requires_lock(test_file), retry_attempt=retry_attempt)\n\n def _test_requires_lock(self, test_file):\n \"\"\"Returns True if the test needs to be locked when running multiple\n instances of this test runner.\n\n Perf tests are locked because heavy load caused by running other\n tests in parallel might cause some of them to time out.\n \"\"\"\n return self._is_perf_test(test_file)\n\n def _test_is_slow(self, test_file):\n if not self._expectations:\n return False\n is_slow_test = self._expectations.get_expectations(test_file\n ).is_slow_test\n return is_slow_test or self._port.is_slow_wpt_test(test_file)\n\n def _needs_servers(self, test_names):\n return any(self._is_http_test(test_name) for test_name in test_names)\n <mask token>\n\n def _run_tests(self, tests_to_run, tests_to_skip, repeat_each,\n iterations, num_workers, retry_attempt=0):\n test_inputs = []\n for _ in range(iterations):\n for test in tests_to_run:\n for _ in range(repeat_each):\n test_inputs.append(self._test_input_for_file(test,\n retry_attempt))\n return self._runner.run_tests(self._expectations, test_inputs,\n tests_to_skip, num_workers, retry_attempt)\n 
<mask token>\n <mask token>\n <mask token>\n\n def _look_for_new_crash_logs(self, run_results, start_time):\n \"\"\"Looks for and writes new crash logs, at the end of the test run.\n\n Since crash logs can take a long time to be written out if the system is\n under stress, do a second pass at the end of the test run.\n\n Args:\n run_results: The results of the test run.\n start_time: Time the tests started at. We're looking for crash\n logs after that time.\n \"\"\"\n crashed_processes = []\n test_to_crash_failure = {}\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = (self.\n _results_directory)\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if not isinstance(failure, test_failures.FailureCrash\n ) or failure.has_log:\n continue\n crashed_processes.append([test, failure.process_name,\n failure.pid])\n test_to_crash_failure[test] = failure\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(test,\n test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self.\n _results_directory, artifacts_sub_dir, artifact_relative_path)\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact('sample_file', self.\n _filesystem.join(artifacts_sub_dir, artifact_relative_path))\n new_crash_logs = self._port.look_for_new_crash_logs(crashed_processes,\n start_time) or {}\n 
for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n test_to_crash_failure[test].create_artifacts(test_result.\n artifacts, force_overwrite=True)\n <mask token>\n\n def _write_json_files(self, summarized_full_results,\n summarized_failing_results, initial_results, running_all_tests,\n run_histories):\n _log.debug('Writing JSON files in %s.', self._artifacts_directory)\n times_trie = json_results_generator.test_timings_trie(initial_results\n .results_by_name.values())\n times_json_path = self._filesystem.join(self._artifacts_directory,\n 'times_ms.json')\n json_results_generator.write_json(self._filesystem, times_trie,\n times_json_path)\n if running_all_tests:\n bot_test_times_path = self._port.bot_test_times_path()\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n bot_test_times_path))\n json_results_generator.write_json(self._filesystem, times_trie,\n bot_test_times_path)\n stats_trie = self._stats_trie(initial_results)\n stats_path = self._filesystem.join(self._artifacts_directory,\n 'stats.json')\n self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))\n full_results_path = self._filesystem.join(self._artifacts_directory,\n 'full_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_path)\n full_results_jsonp_path = self._filesystem.join(self.\n _artifacts_directory, 'full_results_jsonp.js')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_jsonp_path, callback=\n 'ADD_FULL_RESULTS')\n failing_results_path = self._filesystem.join(self.\n _artifacts_directory, 'failing_results.json')\n json_results_generator.write_json(self._filesystem,\n 
summarized_failing_results, failing_results_path, callback=\n 'ADD_RESULTS')\n if self._options.json_test_results:\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, self._options.json_test_results)\n if self._options.write_run_histories_to:\n json_results_generator.write_json(self._filesystem,\n run_histories, self._options.write_run_histories_to)\n _log.debug('Finished writing JSON files.')\n\n def _copy_results_html_file(self, destination_dir, filename):\n \"\"\"Copies a file from the template directory to the results directory.\"\"\"\n files_to_copy = [filename, filename + '.version']\n template_dir = self._path_finder.path_from_blink_tools('blinkpy',\n 'web_tests')\n for filename in files_to_copy:\n source_path = self._filesystem.join(template_dir, filename)\n destination_path = self._filesystem.join(destination_dir, filename)\n if self._filesystem.exists(source_path):\n self._filesystem.copyfile(source_path, destination_path)\n\n def _stats_trie(self, initial_results):\n\n def _worker_number(worker_name):\n return int(worker_name.split('/')[1]) if worker_name else -1\n stats = {}\n for result in initial_results.results_by_name.values():\n if result.type != ResultType.Skip:\n stats[result.test_name] = {'results': (_worker_number(\n result.worker_name), result.test_number, result.pid,\n int(result.test_run_time * 1000), int(result.\n total_run_time * 1000))}\n stats_trie = {}\n for name, value in stats.items():\n json_results_generator.add_path_to_trie(name, value, stats_trie)\n return stats_trie\n",
"step-3": "<mask token>\n\n\nclass Manager(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, port, options, printer):\n \"\"\"Initializes test runner data structures.\n\n Args:\n port: An object implementing platform-specific functionality.\n options: An options argument which contains command line options.\n printer: A Printer object to record updates to.\n \"\"\"\n self._port = port\n self._filesystem = port.host.filesystem\n self._options = options\n self._printer = printer\n self._expectations = None\n self._http_server_started = False\n self._wptserve_started = False\n self._websockets_server_started = False\n self._results_directory = self._port.results_directory()\n self._artifacts_directory = self._port.artifacts_directory()\n self._finder = WebTestFinder(self._port, self._options)\n self._path_finder = PathFinder(port.host.filesystem)\n self._sink = CreateTestResultSink(self._port)\n self._runner = WebTestRunner(self._options, self._port, self.\n _printer, self._results_directory, self._test_is_slow, self._sink)\n\n def run(self, args):\n \"\"\"Runs the tests and return a RunDetails object with the results.\"\"\"\n start_time = time.time()\n self._printer.write_update('Collecting tests ...')\n running_all_tests = False\n try:\n paths, all_test_names, running_all_tests = self._collect_tests(args\n )\n except IOError:\n return test_run_results.RunDetails(exit_code=exit_codes.\n NO_TESTS_EXIT_STATUS)\n test_names = self._finder.split_into_chunks(all_test_names)\n if self._options.order == 'natural':\n test_names.sort(key=self._port.test_key)\n elif self._options.order == 'random':\n test_names.sort()\n random.Random(self._options.seed).shuffle(test_names)\n elif self._options.order == 'none':\n if paths:\n test_names = self._restore_order(paths, test_names)\n if not self._options.no_expectations:\n self._printer.write_update('Parsing expectations ...')\n self._expectations = 
test_expectations.TestExpectations(self._port)\n tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)\n self._printer.print_found(len(all_test_names), len(test_names), len\n (tests_to_run), self._options.repeat_each, self._options.iterations\n )\n if not tests_to_run:\n msg = 'No tests to run.'\n if self._options.zero_tests_executed_ok:\n _log.info(msg)\n else:\n _log.critical(msg)\n code = exit_codes.NO_TESTS_EXIT_STATUS\n return test_run_results.RunDetails(exit_code=code)\n exit_code = self._set_up_run(tests_to_run)\n if exit_code:\n return test_run_results.RunDetails(exit_code=exit_code)\n if self._options.num_retries is None:\n if self._options.test_list or len(paths) < len(test_names):\n self._options.num_retries = 3\n else:\n self._options.num_retries = 0\n should_retry_failures = self._options.num_retries > 0\n try:\n self._register_termination_handler()\n self._start_servers(tests_to_run)\n if self._options.watch:\n run_results = self._run_test_loop(tests_to_run, tests_to_skip)\n else:\n run_results = self._run_test_once(tests_to_run,\n tests_to_skip, should_retry_failures)\n initial_results, all_retry_results = run_results\n finally:\n _log.info('Finally stop servers and clean up')\n self._stop_servers()\n self._clean_up_run()\n if self._options.no_expectations:\n return test_run_results.RunDetails(0, [], [], initial_results,\n all_retry_results)\n self._printer.write_update('Looking for new crash logs ...')\n self._look_for_new_crash_logs(initial_results, start_time)\n for retry_attempt_results in all_retry_results:\n self._look_for_new_crash_logs(retry_attempt_results, start_time)\n self._printer.write_update('Summarizing results ...')\n summarized_full_results = test_run_results.summarize_results(self.\n _port, self._options, self._expectations, initial_results,\n all_retry_results)\n summarized_failing_results = test_run_results.summarize_results(self\n ._port, self._options, self._expectations, initial_results,\n all_retry_results, 
only_include_failing=True)\n run_histories = test_run_results.test_run_histories(self._options,\n self._expectations, initial_results, all_retry_results)\n exit_code = summarized_failing_results['num_regressions']\n if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:\n _log.warning('num regressions (%d) exceeds max exit status (%d)',\n exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)\n exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS\n if not self._options.dry_run:\n self._write_json_files(summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories)\n self._copy_results_html_file(self._artifacts_directory,\n 'results.html')\n if (initial_results.interrupt_reason is test_run_results.\n InterruptReason.EXTERNAL_SIGNAL):\n exit_code = exit_codes.INTERRUPTED_EXIT_STATUS\n else:\n if initial_results.interrupted:\n exit_code = exit_codes.EARLY_EXIT_STATUS\n if self._options.show_results and (exit_code or\n initial_results.total_failures):\n self._port.show_results_html_file(self._filesystem.join\n (self._artifacts_directory, 'results.html'))\n self._printer.print_results(time.time() - start_time,\n initial_results)\n return test_run_results.RunDetails(exit_code,\n summarized_full_results, summarized_failing_results,\n initial_results, all_retry_results)\n <mask token>\n\n def _on_termination(self, signum, _frame):\n self._printer.write_update('Received signal \"%s\" (%d) in %d' % (\n signal.strsignal(signum), signum, os.getpid()))\n raise KeyboardInterrupt\n\n def _run_test_loop(self, tests_to_run, tests_to_skip):\n self._options.show_results = False\n while True:\n initial_results, all_retry_results = self._run_test_once(\n tests_to_run, tests_to_skip, should_retry_failures=False)\n for name in initial_results.failures_by_name:\n failure = initial_results.failures_by_name[name][0]\n if isinstance(failure, test_failures.FailureTextMismatch):\n full_test_path = self._filesystem.join(self.\n _artifacts_directory, name)\n 
filename, _ = self._filesystem.splitext(full_test_path)\n pretty_diff_path = ('file://' + filename +\n '-pretty-diff.html')\n self._printer.writeln('Link to pretty diff:')\n self._printer.writeln(pretty_diff_path + '\\n')\n self._printer.writeln('Finished running tests')\n user_input = self._port.host.user.prompt(\n 'Interactive watch mode: (q)uit (r)etry\\n').lower()\n if user_input == 'q' or user_input == 'quit':\n return initial_results, all_retry_results\n\n def _run_test_once(self, tests_to_run, tests_to_skip, should_retry_failures\n ):\n num_workers = int(self._port.num_workers(int(self._options.\n child_processes)))\n initial_results = self._run_tests(tests_to_run, tests_to_skip, self\n ._options.repeat_each, self._options.iterations, num_workers)\n should_retry_failures = (should_retry_failures and not\n initial_results.interrupted)\n tests_to_retry = self._tests_to_retry(initial_results)\n all_retry_results = []\n if should_retry_failures and tests_to_retry:\n for retry_attempt in range(1, self._options.num_retries + 1):\n if not tests_to_retry:\n break\n _log.info('')\n _log.info('Retrying %s, attempt %d of %d...', grammar.\n pluralize('unexpected failure', len(tests_to_retry)),\n retry_attempt, self._options.num_retries)\n retry_results = self._run_tests(tests_to_retry,\n tests_to_skip=set(), repeat_each=1, iterations=1,\n num_workers=num_workers, retry_attempt=retry_attempt)\n all_retry_results.append(retry_results)\n tests_to_retry = self._tests_to_retry(retry_results)\n return initial_results, all_retry_results\n\n def _restore_order(self, paths, test_names):\n original_test_names = list(test_names)\n test_names = []\n for path in paths:\n for test in original_test_names:\n if test.startswith(path) or fnmatch.fnmatch(test, path):\n test_names.append(test)\n test_names += list(set(original_test_names) - set(test_names))\n return test_names\n <mask token>\n\n def _is_http_test(self, test):\n return (test.startswith(self.HTTP_SUBDIR + self._port.\n 
TEST_PATH_SEPARATOR) or self._is_websocket_test(test) or self.\n _port.TEST_PATH_SEPARATOR + self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _is_websocket_test(self, test):\n if self._port.should_use_wptserve(test):\n return False\n return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test\n\n def _http_tests(self, test_names):\n return set(test for test in test_names if self._is_http_test(test))\n\n def _is_perf_test(self, test):\n return (self.PERF_SUBDIR == test or self.PERF_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _prepare_lists(self, paths, test_names):\n tests_to_skip = self._finder.skip_tests(paths, test_names, self.\n _expectations)\n tests_to_run = [test for test in test_names if test not in\n tests_to_skip]\n return tests_to_run, tests_to_skip\n\n def _test_input_for_file(self, test_file, retry_attempt):\n return TestInput(test_file, self._options.slow_timeout_ms if self.\n _test_is_slow(test_file) else self._options.timeout_ms, self.\n _test_requires_lock(test_file), retry_attempt=retry_attempt)\n\n def _test_requires_lock(self, test_file):\n \"\"\"Returns True if the test needs to be locked when running multiple\n instances of this test runner.\n\n Perf tests are locked because heavy load caused by running other\n tests in parallel might cause some of them to time out.\n \"\"\"\n return self._is_perf_test(test_file)\n\n def _test_is_slow(self, test_file):\n if not self._expectations:\n return False\n is_slow_test = self._expectations.get_expectations(test_file\n ).is_slow_test\n return is_slow_test or self._port.is_slow_wpt_test(test_file)\n\n def _needs_servers(self, test_names):\n return any(self._is_http_test(test_name) for test_name in test_names)\n <mask token>\n\n def _run_tests(self, tests_to_run, tests_to_skip, repeat_each,\n iterations, num_workers, retry_attempt=0):\n test_inputs = []\n for _ in range(iterations):\n for test in tests_to_run:\n for _ in range(repeat_each):\n 
test_inputs.append(self._test_input_for_file(test,\n retry_attempt))\n return self._runner.run_tests(self._expectations, test_inputs,\n tests_to_skip, num_workers, retry_attempt)\n\n def _start_servers(self, tests_to_run):\n if any(self._port.is_wpt_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WPTServe ...')\n self._port.start_wptserve()\n self._wptserve_started = True\n if self._port.requires_http_server() or any(self._is_http_test(test\n ) for test in tests_to_run):\n self._printer.write_update('Starting HTTP server ...')\n self._port.start_http_server(additional_dirs={},\n number_of_drivers=self._options.max_locked_shards)\n self._http_server_started = True\n if any(self._is_websocket_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WebSocket server ...')\n self._port.start_websocket_server()\n self._websockets_server_started = True\n\n def _stop_servers(self):\n if self._wptserve_started:\n self._printer.write_update('Stopping WPTServe ...')\n self._wptserve_started = False\n self._port.stop_wptserve()\n if self._http_server_started:\n self._printer.write_update('Stopping HTTP server ...')\n self._http_server_started = False\n self._port.stop_http_server()\n if self._websockets_server_started:\n self._printer.write_update('Stopping WebSocket server ...')\n self._websockets_server_started = False\n self._port.stop_websocket_server()\n\n def _clean_up_run(self):\n _log.debug('Flushing stdout')\n sys.stdout.flush()\n _log.debug('Flushing stderr')\n sys.stderr.flush()\n _log.debug('Cleaning up port')\n self._port.clean_up_test_run()\n if self._sink:\n _log.debug('Closing sink')\n self._sink.close()\n\n def _look_for_new_crash_logs(self, run_results, start_time):\n \"\"\"Looks for and writes new crash logs, at the end of the test run.\n\n Since crash logs can take a long time to be written out if the system is\n under stress, do a second pass at the end of the test run.\n\n Args:\n run_results: The results 
of the test run.\n start_time: Time the tests started at. We're looking for crash\n logs after that time.\n \"\"\"\n crashed_processes = []\n test_to_crash_failure = {}\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = (self.\n _results_directory)\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if not isinstance(failure, test_failures.FailureCrash\n ) or failure.has_log:\n continue\n crashed_processes.append([test, failure.process_name,\n failure.pid])\n test_to_crash_failure[test] = failure\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(test,\n test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self.\n _results_directory, artifacts_sub_dir, artifact_relative_path)\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact('sample_file', self.\n _filesystem.join(artifacts_sub_dir, artifact_relative_path))\n new_crash_logs = self._port.look_for_new_crash_logs(crashed_processes,\n start_time) or {}\n for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n 
test_to_crash_failure[test].create_artifacts(test_result.\n artifacts, force_overwrite=True)\n\n def _tests_to_retry(self, run_results):\n return [result.test_name for result in run_results.\n unexpected_results_by_name.values() if result.type !=\n ResultType.Pass]\n\n def _write_json_files(self, summarized_full_results,\n summarized_failing_results, initial_results, running_all_tests,\n run_histories):\n _log.debug('Writing JSON files in %s.', self._artifacts_directory)\n times_trie = json_results_generator.test_timings_trie(initial_results\n .results_by_name.values())\n times_json_path = self._filesystem.join(self._artifacts_directory,\n 'times_ms.json')\n json_results_generator.write_json(self._filesystem, times_trie,\n times_json_path)\n if running_all_tests:\n bot_test_times_path = self._port.bot_test_times_path()\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n bot_test_times_path))\n json_results_generator.write_json(self._filesystem, times_trie,\n bot_test_times_path)\n stats_trie = self._stats_trie(initial_results)\n stats_path = self._filesystem.join(self._artifacts_directory,\n 'stats.json')\n self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))\n full_results_path = self._filesystem.join(self._artifacts_directory,\n 'full_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_path)\n full_results_jsonp_path = self._filesystem.join(self.\n _artifacts_directory, 'full_results_jsonp.js')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_jsonp_path, callback=\n 'ADD_FULL_RESULTS')\n failing_results_path = self._filesystem.join(self.\n _artifacts_directory, 'failing_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_failing_results, failing_results_path, callback=\n 'ADD_RESULTS')\n if self._options.json_test_results:\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, 
self._options.json_test_results)\n if self._options.write_run_histories_to:\n json_results_generator.write_json(self._filesystem,\n run_histories, self._options.write_run_histories_to)\n _log.debug('Finished writing JSON files.')\n\n def _copy_results_html_file(self, destination_dir, filename):\n \"\"\"Copies a file from the template directory to the results directory.\"\"\"\n files_to_copy = [filename, filename + '.version']\n template_dir = self._path_finder.path_from_blink_tools('blinkpy',\n 'web_tests')\n for filename in files_to_copy:\n source_path = self._filesystem.join(template_dir, filename)\n destination_path = self._filesystem.join(destination_dir, filename)\n if self._filesystem.exists(source_path):\n self._filesystem.copyfile(source_path, destination_path)\n\n def _stats_trie(self, initial_results):\n\n def _worker_number(worker_name):\n return int(worker_name.split('/')[1]) if worker_name else -1\n stats = {}\n for result in initial_results.results_by_name.values():\n if result.type != ResultType.Skip:\n stats[result.test_name] = {'results': (_worker_number(\n result.worker_name), result.test_number, result.pid,\n int(result.test_run_time * 1000), int(result.\n total_run_time * 1000))}\n stats_trie = {}\n for name, value in stats.items():\n json_results_generator.add_path_to_trie(name, value, stats_trie)\n return stats_trie\n",
"step-4": "<mask token>\n_log = logging.getLogger(__name__)\nTestExpectations = test_expectations.TestExpectations\n\n\nclass Manager(object):\n \"\"\"A class for managing running a series of web tests.\"\"\"\n HTTP_SUBDIR = 'http'\n PERF_SUBDIR = 'perf'\n WEBSOCKET_SUBDIR = 'websocket'\n ARCHIVED_RESULTS_LIMIT = 25\n\n def __init__(self, port, options, printer):\n \"\"\"Initializes test runner data structures.\n\n Args:\n port: An object implementing platform-specific functionality.\n options: An options argument which contains command line options.\n printer: A Printer object to record updates to.\n \"\"\"\n self._port = port\n self._filesystem = port.host.filesystem\n self._options = options\n self._printer = printer\n self._expectations = None\n self._http_server_started = False\n self._wptserve_started = False\n self._websockets_server_started = False\n self._results_directory = self._port.results_directory()\n self._artifacts_directory = self._port.artifacts_directory()\n self._finder = WebTestFinder(self._port, self._options)\n self._path_finder = PathFinder(port.host.filesystem)\n self._sink = CreateTestResultSink(self._port)\n self._runner = WebTestRunner(self._options, self._port, self.\n _printer, self._results_directory, self._test_is_slow, self._sink)\n\n def run(self, args):\n \"\"\"Runs the tests and return a RunDetails object with the results.\"\"\"\n start_time = time.time()\n self._printer.write_update('Collecting tests ...')\n running_all_tests = False\n try:\n paths, all_test_names, running_all_tests = self._collect_tests(args\n )\n except IOError:\n return test_run_results.RunDetails(exit_code=exit_codes.\n NO_TESTS_EXIT_STATUS)\n test_names = self._finder.split_into_chunks(all_test_names)\n if self._options.order == 'natural':\n test_names.sort(key=self._port.test_key)\n elif self._options.order == 'random':\n test_names.sort()\n random.Random(self._options.seed).shuffle(test_names)\n elif self._options.order == 'none':\n if paths:\n 
test_names = self._restore_order(paths, test_names)\n if not self._options.no_expectations:\n self._printer.write_update('Parsing expectations ...')\n self._expectations = test_expectations.TestExpectations(self._port)\n tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)\n self._printer.print_found(len(all_test_names), len(test_names), len\n (tests_to_run), self._options.repeat_each, self._options.iterations\n )\n if not tests_to_run:\n msg = 'No tests to run.'\n if self._options.zero_tests_executed_ok:\n _log.info(msg)\n else:\n _log.critical(msg)\n code = exit_codes.NO_TESTS_EXIT_STATUS\n return test_run_results.RunDetails(exit_code=code)\n exit_code = self._set_up_run(tests_to_run)\n if exit_code:\n return test_run_results.RunDetails(exit_code=exit_code)\n if self._options.num_retries is None:\n if self._options.test_list or len(paths) < len(test_names):\n self._options.num_retries = 3\n else:\n self._options.num_retries = 0\n should_retry_failures = self._options.num_retries > 0\n try:\n self._register_termination_handler()\n self._start_servers(tests_to_run)\n if self._options.watch:\n run_results = self._run_test_loop(tests_to_run, tests_to_skip)\n else:\n run_results = self._run_test_once(tests_to_run,\n tests_to_skip, should_retry_failures)\n initial_results, all_retry_results = run_results\n finally:\n _log.info('Finally stop servers and clean up')\n self._stop_servers()\n self._clean_up_run()\n if self._options.no_expectations:\n return test_run_results.RunDetails(0, [], [], initial_results,\n all_retry_results)\n self._printer.write_update('Looking for new crash logs ...')\n self._look_for_new_crash_logs(initial_results, start_time)\n for retry_attempt_results in all_retry_results:\n self._look_for_new_crash_logs(retry_attempt_results, start_time)\n self._printer.write_update('Summarizing results ...')\n summarized_full_results = test_run_results.summarize_results(self.\n _port, self._options, self._expectations, initial_results,\n 
all_retry_results)\n summarized_failing_results = test_run_results.summarize_results(self\n ._port, self._options, self._expectations, initial_results,\n all_retry_results, only_include_failing=True)\n run_histories = test_run_results.test_run_histories(self._options,\n self._expectations, initial_results, all_retry_results)\n exit_code = summarized_failing_results['num_regressions']\n if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:\n _log.warning('num regressions (%d) exceeds max exit status (%d)',\n exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)\n exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS\n if not self._options.dry_run:\n self._write_json_files(summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories)\n self._copy_results_html_file(self._artifacts_directory,\n 'results.html')\n if (initial_results.interrupt_reason is test_run_results.\n InterruptReason.EXTERNAL_SIGNAL):\n exit_code = exit_codes.INTERRUPTED_EXIT_STATUS\n else:\n if initial_results.interrupted:\n exit_code = exit_codes.EARLY_EXIT_STATUS\n if self._options.show_results and (exit_code or\n initial_results.total_failures):\n self._port.show_results_html_file(self._filesystem.join\n (self._artifacts_directory, 'results.html'))\n self._printer.print_results(time.time() - start_time,\n initial_results)\n return test_run_results.RunDetails(exit_code,\n summarized_full_results, summarized_failing_results,\n initial_results, all_retry_results)\n\n def _register_termination_handler(self):\n if self._port.host.platform.is_win():\n signum = signal.SIGBREAK\n else:\n signum = signal.SIGTERM\n signal.signal(signum, self._on_termination)\n\n def _on_termination(self, signum, _frame):\n self._printer.write_update('Received signal \"%s\" (%d) in %d' % (\n signal.strsignal(signum), signum, os.getpid()))\n raise KeyboardInterrupt\n\n def _run_test_loop(self, tests_to_run, tests_to_skip):\n self._options.show_results = False\n while True:\n initial_results, 
all_retry_results = self._run_test_once(\n tests_to_run, tests_to_skip, should_retry_failures=False)\n for name in initial_results.failures_by_name:\n failure = initial_results.failures_by_name[name][0]\n if isinstance(failure, test_failures.FailureTextMismatch):\n full_test_path = self._filesystem.join(self.\n _artifacts_directory, name)\n filename, _ = self._filesystem.splitext(full_test_path)\n pretty_diff_path = ('file://' + filename +\n '-pretty-diff.html')\n self._printer.writeln('Link to pretty diff:')\n self._printer.writeln(pretty_diff_path + '\\n')\n self._printer.writeln('Finished running tests')\n user_input = self._port.host.user.prompt(\n 'Interactive watch mode: (q)uit (r)etry\\n').lower()\n if user_input == 'q' or user_input == 'quit':\n return initial_results, all_retry_results\n\n def _run_test_once(self, tests_to_run, tests_to_skip, should_retry_failures\n ):\n num_workers = int(self._port.num_workers(int(self._options.\n child_processes)))\n initial_results = self._run_tests(tests_to_run, tests_to_skip, self\n ._options.repeat_each, self._options.iterations, num_workers)\n should_retry_failures = (should_retry_failures and not\n initial_results.interrupted)\n tests_to_retry = self._tests_to_retry(initial_results)\n all_retry_results = []\n if should_retry_failures and tests_to_retry:\n for retry_attempt in range(1, self._options.num_retries + 1):\n if not tests_to_retry:\n break\n _log.info('')\n _log.info('Retrying %s, attempt %d of %d...', grammar.\n pluralize('unexpected failure', len(tests_to_retry)),\n retry_attempt, self._options.num_retries)\n retry_results = self._run_tests(tests_to_retry,\n tests_to_skip=set(), repeat_each=1, iterations=1,\n num_workers=num_workers, retry_attempt=retry_attempt)\n all_retry_results.append(retry_results)\n tests_to_retry = self._tests_to_retry(retry_results)\n return initial_results, all_retry_results\n\n def _restore_order(self, paths, test_names):\n original_test_names = list(test_names)\n test_names = 
[]\n for path in paths:\n for test in original_test_names:\n if test.startswith(path) or fnmatch.fnmatch(test, path):\n test_names.append(test)\n test_names += list(set(original_test_names) - set(test_names))\n return test_names\n\n def _collect_tests(self, args):\n return self._finder.find_tests(args, test_lists=self._options.\n test_list, filter_files=self._options.\n isolated_script_test_filter_file, fastest_percentile=self.\n _options.fastest, filters=self._options.isolated_script_test_filter\n )\n\n def _is_http_test(self, test):\n return (test.startswith(self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR) or self._is_websocket_test(test) or self.\n _port.TEST_PATH_SEPARATOR + self.HTTP_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _is_websocket_test(self, test):\n if self._port.should_use_wptserve(test):\n return False\n return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test\n\n def _http_tests(self, test_names):\n return set(test for test in test_names if self._is_http_test(test))\n\n def _is_perf_test(self, test):\n return (self.PERF_SUBDIR == test or self.PERF_SUBDIR + self._port.\n TEST_PATH_SEPARATOR in test)\n\n def _prepare_lists(self, paths, test_names):\n tests_to_skip = self._finder.skip_tests(paths, test_names, self.\n _expectations)\n tests_to_run = [test for test in test_names if test not in\n tests_to_skip]\n return tests_to_run, tests_to_skip\n\n def _test_input_for_file(self, test_file, retry_attempt):\n return TestInput(test_file, self._options.slow_timeout_ms if self.\n _test_is_slow(test_file) else self._options.timeout_ms, self.\n _test_requires_lock(test_file), retry_attempt=retry_attempt)\n\n def _test_requires_lock(self, test_file):\n \"\"\"Returns True if the test needs to be locked when running multiple\n instances of this test runner.\n\n Perf tests are locked because heavy load caused by running other\n tests in parallel might cause some of them to time out.\n \"\"\"\n return 
self._is_perf_test(test_file)\n\n def _test_is_slow(self, test_file):\n if not self._expectations:\n return False\n is_slow_test = self._expectations.get_expectations(test_file\n ).is_slow_test\n return is_slow_test or self._port.is_slow_wpt_test(test_file)\n\n def _needs_servers(self, test_names):\n return any(self._is_http_test(test_name) for test_name in test_names)\n\n def _set_up_run(self, test_names):\n self._printer.write_update('Checking build ...')\n if self._options.build:\n exit_code = self._port.check_build(self._needs_servers(\n test_names), self._printer)\n if exit_code:\n _log.error('Build check failed')\n return exit_code\n if self._options.clobber_old_results:\n self._port.clobber_old_results()\n elif self._filesystem.exists(self._artifacts_directory):\n self._port.limit_archived_results_count()\n self._port.rename_results_folder()\n self._port.host.filesystem.maybe_make_directory(self.\n _artifacts_directory)\n exit_code = self._port.setup_test_run()\n if exit_code:\n _log.error('Build setup failed')\n return exit_code\n if not self._options.nocheck_sys_deps:\n self._printer.write_update('Checking system dependencies ...')\n exit_code = self._port.check_sys_deps()\n if exit_code:\n return exit_code\n return exit_codes.OK_EXIT_STATUS\n\n def _run_tests(self, tests_to_run, tests_to_skip, repeat_each,\n iterations, num_workers, retry_attempt=0):\n test_inputs = []\n for _ in range(iterations):\n for test in tests_to_run:\n for _ in range(repeat_each):\n test_inputs.append(self._test_input_for_file(test,\n retry_attempt))\n return self._runner.run_tests(self._expectations, test_inputs,\n tests_to_skip, num_workers, retry_attempt)\n\n def _start_servers(self, tests_to_run):\n if any(self._port.is_wpt_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WPTServe ...')\n self._port.start_wptserve()\n self._wptserve_started = True\n if self._port.requires_http_server() or any(self._is_http_test(test\n ) for test in tests_to_run):\n 
self._printer.write_update('Starting HTTP server ...')\n self._port.start_http_server(additional_dirs={},\n number_of_drivers=self._options.max_locked_shards)\n self._http_server_started = True\n if any(self._is_websocket_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WebSocket server ...')\n self._port.start_websocket_server()\n self._websockets_server_started = True\n\n def _stop_servers(self):\n if self._wptserve_started:\n self._printer.write_update('Stopping WPTServe ...')\n self._wptserve_started = False\n self._port.stop_wptserve()\n if self._http_server_started:\n self._printer.write_update('Stopping HTTP server ...')\n self._http_server_started = False\n self._port.stop_http_server()\n if self._websockets_server_started:\n self._printer.write_update('Stopping WebSocket server ...')\n self._websockets_server_started = False\n self._port.stop_websocket_server()\n\n def _clean_up_run(self):\n _log.debug('Flushing stdout')\n sys.stdout.flush()\n _log.debug('Flushing stderr')\n sys.stderr.flush()\n _log.debug('Cleaning up port')\n self._port.clean_up_test_run()\n if self._sink:\n _log.debug('Closing sink')\n self._sink.close()\n\n def _look_for_new_crash_logs(self, run_results, start_time):\n \"\"\"Looks for and writes new crash logs, at the end of the test run.\n\n Since crash logs can take a long time to be written out if the system is\n under stress, do a second pass at the end of the test run.\n\n Args:\n run_results: The results of the test run.\n start_time: Time the tests started at. 
We're looking for crash\n logs after that time.\n \"\"\"\n crashed_processes = []\n test_to_crash_failure = {}\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = (self.\n _results_directory)\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if not isinstance(failure, test_failures.FailureCrash\n ) or failure.has_log:\n continue\n crashed_processes.append([test, failure.process_name,\n failure.pid])\n test_to_crash_failure[test] = failure\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(test,\n test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self.\n _results_directory, artifacts_sub_dir, artifact_relative_path)\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact('sample_file', self.\n _filesystem.join(artifacts_sub_dir, artifact_relative_path))\n new_crash_logs = self._port.look_for_new_crash_logs(crashed_processes,\n start_time) or {}\n for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n test_to_crash_failure[test].create_artifacts(test_result.\n artifacts, 
force_overwrite=True)\n\n def _tests_to_retry(self, run_results):\n return [result.test_name for result in run_results.\n unexpected_results_by_name.values() if result.type !=\n ResultType.Pass]\n\n def _write_json_files(self, summarized_full_results,\n summarized_failing_results, initial_results, running_all_tests,\n run_histories):\n _log.debug('Writing JSON files in %s.', self._artifacts_directory)\n times_trie = json_results_generator.test_timings_trie(initial_results\n .results_by_name.values())\n times_json_path = self._filesystem.join(self._artifacts_directory,\n 'times_ms.json')\n json_results_generator.write_json(self._filesystem, times_trie,\n times_json_path)\n if running_all_tests:\n bot_test_times_path = self._port.bot_test_times_path()\n self._filesystem.maybe_make_directory(self._filesystem.dirname(\n bot_test_times_path))\n json_results_generator.write_json(self._filesystem, times_trie,\n bot_test_times_path)\n stats_trie = self._stats_trie(initial_results)\n stats_path = self._filesystem.join(self._artifacts_directory,\n 'stats.json')\n self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))\n full_results_path = self._filesystem.join(self._artifacts_directory,\n 'full_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_path)\n full_results_jsonp_path = self._filesystem.join(self.\n _artifacts_directory, 'full_results_jsonp.js')\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, full_results_jsonp_path, callback=\n 'ADD_FULL_RESULTS')\n failing_results_path = self._filesystem.join(self.\n _artifacts_directory, 'failing_results.json')\n json_results_generator.write_json(self._filesystem,\n summarized_failing_results, failing_results_path, callback=\n 'ADD_RESULTS')\n if self._options.json_test_results:\n json_results_generator.write_json(self._filesystem,\n summarized_full_results, self._options.json_test_results)\n if 
self._options.write_run_histories_to:\n json_results_generator.write_json(self._filesystem,\n run_histories, self._options.write_run_histories_to)\n _log.debug('Finished writing JSON files.')\n\n def _copy_results_html_file(self, destination_dir, filename):\n \"\"\"Copies a file from the template directory to the results directory.\"\"\"\n files_to_copy = [filename, filename + '.version']\n template_dir = self._path_finder.path_from_blink_tools('blinkpy',\n 'web_tests')\n for filename in files_to_copy:\n source_path = self._filesystem.join(template_dir, filename)\n destination_path = self._filesystem.join(destination_dir, filename)\n if self._filesystem.exists(source_path):\n self._filesystem.copyfile(source_path, destination_path)\n\n def _stats_trie(self, initial_results):\n\n def _worker_number(worker_name):\n return int(worker_name.split('/')[1]) if worker_name else -1\n stats = {}\n for result in initial_results.results_by_name.values():\n if result.type != ResultType.Skip:\n stats[result.test_name] = {'results': (_worker_number(\n result.worker_name), result.test_number, result.pid,\n int(result.test_run_time * 1000), int(result.\n total_run_time * 1000))}\n stats_trie = {}\n for name, value in stats.items():\n json_results_generator.add_path_to_trie(name, value, stats_trie)\n return stats_trie\n",
"step-5": "# Copyright (C) 2010 Google Inc. All rights reserved.\n# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n# * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"The Manager orchestrates the overall process of running web tests.\n\nThis includes finding tests to run, reading the test expectations,\nstarting the required helper servers, deciding the order and way to\nrun the tests, retrying failed tests, and collecting the test results,\nincluding crash logs and mismatches with expectations.\n\nThe Manager object has a constructor and one main method called run.\n\"\"\"\n\nimport fnmatch\nimport json\nimport logging\nimport os\nimport random\nimport signal\nimport sys\nimport time\n\nfrom blinkpy.common import exit_codes\nfrom blinkpy.common.path_finder import PathFinder\nfrom blinkpy.tool import grammar\nfrom blinkpy.web_tests.controllers.test_result_sink import CreateTestResultSink\nfrom blinkpy.web_tests.controllers.web_test_finder import WebTestFinder\nfrom blinkpy.web_tests.controllers.web_test_runner import WebTestRunner\nfrom blinkpy.web_tests.layout_package import json_results_generator\nfrom blinkpy.web_tests.models import test_expectations\nfrom blinkpy.web_tests.models import test_failures\nfrom blinkpy.web_tests.models import test_run_results\nfrom blinkpy.web_tests.models.typ_types import ResultType\nfrom blinkpy.web_tests.models.test_input import TestInput\n\n_log = logging.getLogger(__name__)\n\nTestExpectations = test_expectations.TestExpectations\n\n\nclass Manager(object):\n \"\"\"A class for managing running a series of web tests.\"\"\"\n\n HTTP_SUBDIR = 'http'\n PERF_SUBDIR 
= 'perf'\n WEBSOCKET_SUBDIR = 'websocket'\n ARCHIVED_RESULTS_LIMIT = 25\n\n def __init__(self, port, options, printer):\n \"\"\"Initializes test runner data structures.\n\n Args:\n port: An object implementing platform-specific functionality.\n options: An options argument which contains command line options.\n printer: A Printer object to record updates to.\n \"\"\"\n self._port = port\n self._filesystem = port.host.filesystem\n self._options = options\n self._printer = printer\n\n self._expectations = None\n self._http_server_started = False\n self._wptserve_started = False\n self._websockets_server_started = False\n\n self._results_directory = self._port.results_directory()\n self._artifacts_directory = self._port.artifacts_directory()\n self._finder = WebTestFinder(self._port, self._options)\n self._path_finder = PathFinder(port.host.filesystem)\n\n self._sink = CreateTestResultSink(self._port)\n self._runner = WebTestRunner(self._options, self._port, self._printer,\n self._results_directory,\n self._test_is_slow, self._sink)\n\n def run(self, args):\n \"\"\"Runs the tests and return a RunDetails object with the results.\"\"\"\n start_time = time.time()\n self._printer.write_update('Collecting tests ...')\n running_all_tests = False\n\n try:\n paths, all_test_names, running_all_tests = self._collect_tests(\n args)\n except IOError:\n # This is raised if --test-list doesn't exist\n return test_run_results.RunDetails(\n exit_code=exit_codes.NO_TESTS_EXIT_STATUS)\n\n test_names = self._finder.split_into_chunks(all_test_names)\n if self._options.order == 'natural':\n test_names.sort(key=self._port.test_key)\n elif self._options.order == 'random':\n test_names.sort()\n random.Random(self._options.seed).shuffle(test_names)\n elif self._options.order == 'none':\n # Restore the test order to user specified order.\n # base.tests() may change the order as it returns tests in the\n # real, external/wpt, virtual order.\n if paths:\n test_names = self._restore_order(paths, 
test_names)\n\n if not self._options.no_expectations:\n self._printer.write_update('Parsing expectations ...')\n self._expectations = test_expectations.TestExpectations(self._port)\n\n tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)\n\n self._printer.print_found(\n len(all_test_names), len(test_names), len(tests_to_run),\n self._options.repeat_each, self._options.iterations)\n\n # Check to make sure we're not skipping every test.\n if not tests_to_run:\n msg = 'No tests to run.'\n if self._options.zero_tests_executed_ok:\n _log.info(msg)\n # Keep executing to produce valid (but empty) results.\n else:\n _log.critical(msg)\n code = exit_codes.NO_TESTS_EXIT_STATUS\n return test_run_results.RunDetails(exit_code=code)\n\n exit_code = self._set_up_run(tests_to_run)\n if exit_code:\n return test_run_results.RunDetails(exit_code=exit_code)\n\n if self._options.num_retries is None:\n # If --test-list is passed, or if no test narrowing is specified,\n # default to 3 retries. Otherwise [e.g. 
if tests are being passed by\n # name], default to 0 retries.\n if self._options.test_list or len(paths) < len(test_names):\n self._options.num_retries = 3\n else:\n self._options.num_retries = 0\n\n should_retry_failures = self._options.num_retries > 0\n\n try:\n self._register_termination_handler()\n self._start_servers(tests_to_run)\n if self._options.watch:\n run_results = self._run_test_loop(tests_to_run, tests_to_skip)\n else:\n run_results = self._run_test_once(tests_to_run, tests_to_skip,\n should_retry_failures)\n initial_results, all_retry_results = run_results\n finally:\n _log.info(\"Finally stop servers and clean up\")\n self._stop_servers()\n self._clean_up_run()\n\n if self._options.no_expectations:\n return test_run_results.RunDetails(0, [], [], initial_results,\n all_retry_results)\n\n # Some crash logs can take a long time to be written out so look\n # for new logs after the test run finishes.\n self._printer.write_update('Looking for new crash logs ...')\n self._look_for_new_crash_logs(initial_results, start_time)\n for retry_attempt_results in all_retry_results:\n self._look_for_new_crash_logs(retry_attempt_results, start_time)\n\n self._printer.write_update('Summarizing results ...')\n summarized_full_results = test_run_results.summarize_results(\n self._port, self._options, self._expectations, initial_results,\n all_retry_results)\n summarized_failing_results = test_run_results.summarize_results(\n self._port,\n self._options,\n self._expectations,\n initial_results,\n all_retry_results,\n only_include_failing=True)\n run_histories = test_run_results.test_run_histories(\n self._options, self._expectations, initial_results,\n all_retry_results)\n\n exit_code = summarized_failing_results['num_regressions']\n if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:\n _log.warning('num regressions (%d) exceeds max exit status (%d)',\n exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)\n exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS\n\n if not 
self._options.dry_run:\n self._write_json_files(summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories)\n\n self._copy_results_html_file(self._artifacts_directory,\n 'results.html')\n if (initial_results.interrupt_reason is\n test_run_results.InterruptReason.EXTERNAL_SIGNAL):\n exit_code = exit_codes.INTERRUPTED_EXIT_STATUS\n else:\n if initial_results.interrupted:\n exit_code = exit_codes.EARLY_EXIT_STATUS\n if (self._options.show_results\n and (exit_code or initial_results.total_failures)):\n self._port.show_results_html_file(\n self._filesystem.join(self._artifacts_directory,\n 'results.html'))\n self._printer.print_results(time.time() - start_time,\n initial_results)\n\n return test_run_results.RunDetails(exit_code, summarized_full_results,\n summarized_failing_results,\n initial_results, all_retry_results)\n\n def _register_termination_handler(self):\n if self._port.host.platform.is_win():\n signum = signal.SIGBREAK\n else:\n signum = signal.SIGTERM\n signal.signal(signum, self._on_termination)\n\n def _on_termination(self, signum, _frame):\n self._printer.write_update(\n 'Received signal \"%s\" (%d) in %d' %\n (signal.strsignal(signum), signum, os.getpid()))\n raise KeyboardInterrupt\n\n def _run_test_loop(self, tests_to_run, tests_to_skip):\n # Don't show results in a new browser window because we're already\n # printing the link to diffs in the loop\n self._options.show_results = False\n\n while True:\n initial_results, all_retry_results = self._run_test_once(\n tests_to_run, tests_to_skip, should_retry_failures=False)\n for name in initial_results.failures_by_name:\n failure = initial_results.failures_by_name[name][0]\n if isinstance(failure, test_failures.FailureTextMismatch):\n full_test_path = self._filesystem.join(\n self._artifacts_directory, name)\n filename, _ = self._filesystem.splitext(full_test_path)\n pretty_diff_path = 'file://' + filename + '-pretty-diff.html'\n self._printer.writeln('Link to 
pretty diff:')\n self._printer.writeln(pretty_diff_path + '\\n')\n self._printer.writeln('Finished running tests')\n\n user_input = self._port.host.user.prompt(\n 'Interactive watch mode: (q)uit (r)etry\\n').lower()\n\n if user_input == 'q' or user_input == 'quit':\n return (initial_results, all_retry_results)\n\n def _run_test_once(self, tests_to_run, tests_to_skip,\n should_retry_failures):\n num_workers = int(\n self._port.num_workers(int(self._options.child_processes)))\n\n initial_results = self._run_tests(\n tests_to_run, tests_to_skip, self._options.repeat_each,\n self._options.iterations, num_workers)\n\n # Don't retry failures when interrupted by user or failures limit exception.\n should_retry_failures = (should_retry_failures\n and not initial_results.interrupted)\n\n tests_to_retry = self._tests_to_retry(initial_results)\n all_retry_results = []\n if should_retry_failures and tests_to_retry:\n for retry_attempt in range(1, self._options.num_retries + 1):\n if not tests_to_retry:\n break\n\n _log.info('')\n _log.info(\n 'Retrying %s, attempt %d of %d...',\n grammar.pluralize('unexpected failure',\n len(tests_to_retry)), retry_attempt,\n self._options.num_retries)\n\n retry_results = self._run_tests(\n tests_to_retry,\n tests_to_skip=set(),\n repeat_each=1,\n iterations=1,\n num_workers=num_workers,\n retry_attempt=retry_attempt)\n all_retry_results.append(retry_results)\n\n tests_to_retry = self._tests_to_retry(retry_results)\n return (initial_results, all_retry_results)\n\n def _restore_order(self, paths, test_names):\n original_test_names = list(test_names)\n test_names = []\n for path in paths:\n for test in original_test_names:\n if test.startswith(path) or fnmatch.fnmatch(test, path):\n test_names.append(test)\n test_names += list(set(original_test_names) - set(test_names))\n return test_names\n\n def _collect_tests(self, args):\n return self._finder.find_tests(\n args,\n test_lists=self._options.test_list,\n 
filter_files=self._options.isolated_script_test_filter_file,\n fastest_percentile=self._options.fastest,\n filters=self._options.isolated_script_test_filter)\n\n def _is_http_test(self, test):\n return (\n test.startswith(self.HTTP_SUBDIR + self._port.TEST_PATH_SEPARATOR)\n or self._is_websocket_test(test) or self._port.TEST_PATH_SEPARATOR\n + self.HTTP_SUBDIR + self._port.TEST_PATH_SEPARATOR in test)\n\n def _is_websocket_test(self, test):\n if self._port.should_use_wptserve(test):\n return False\n\n return self.WEBSOCKET_SUBDIR + self._port.TEST_PATH_SEPARATOR in test\n\n def _http_tests(self, test_names):\n return set(test for test in test_names if self._is_http_test(test))\n\n def _is_perf_test(self, test):\n return (self.PERF_SUBDIR == test\n or (self.PERF_SUBDIR + self._port.TEST_PATH_SEPARATOR) in test)\n\n def _prepare_lists(self, paths, test_names):\n tests_to_skip = self._finder.skip_tests(paths, test_names,\n self._expectations)\n tests_to_run = [\n test for test in test_names if test not in tests_to_skip\n ]\n\n return tests_to_run, tests_to_skip\n\n def _test_input_for_file(self, test_file, retry_attempt):\n return TestInput(\n test_file,\n self._options.slow_timeout_ms\n if self._test_is_slow(test_file) else self._options.timeout_ms,\n self._test_requires_lock(test_file),\n retry_attempt=retry_attempt)\n\n def _test_requires_lock(self, test_file):\n \"\"\"Returns True if the test needs to be locked when running multiple\n instances of this test runner.\n\n Perf tests are locked because heavy load caused by running other\n tests in parallel might cause some of them to time out.\n \"\"\"\n return self._is_perf_test(test_file)\n\n def _test_is_slow(self, test_file):\n if not self._expectations:\n return False\n is_slow_test = self._expectations.get_expectations(\n test_file).is_slow_test\n return is_slow_test or self._port.is_slow_wpt_test(test_file)\n\n def _needs_servers(self, test_names):\n return any(\n self._is_http_test(test_name) for test_name in 
test_names)\n\n def _set_up_run(self, test_names):\n self._printer.write_update('Checking build ...')\n if self._options.build:\n exit_code = self._port.check_build(\n self._needs_servers(test_names), self._printer)\n if exit_code:\n _log.error('Build check failed')\n return exit_code\n\n if self._options.clobber_old_results:\n self._port.clobber_old_results()\n elif self._filesystem.exists(self._artifacts_directory):\n self._port.limit_archived_results_count()\n # Rename the existing results folder for archiving.\n self._port.rename_results_folder()\n\n # Create the output directory if it doesn't already exist.\n self._port.host.filesystem.maybe_make_directory(\n self._artifacts_directory)\n\n exit_code = self._port.setup_test_run()\n if exit_code:\n _log.error('Build setup failed')\n return exit_code\n\n # Check that the system dependencies (themes, fonts, ...) are correct.\n if not self._options.nocheck_sys_deps:\n self._printer.write_update('Checking system dependencies ...')\n exit_code = self._port.check_sys_deps()\n if exit_code:\n return exit_code\n\n return exit_codes.OK_EXIT_STATUS\n\n def _run_tests(self,\n tests_to_run,\n tests_to_skip,\n repeat_each,\n iterations,\n num_workers,\n retry_attempt=0):\n\n test_inputs = []\n for _ in range(iterations):\n for test in tests_to_run:\n for _ in range(repeat_each):\n test_inputs.append(\n self._test_input_for_file(test, retry_attempt))\n return self._runner.run_tests(self._expectations, test_inputs,\n tests_to_skip, num_workers,\n retry_attempt)\n\n def _start_servers(self, tests_to_run):\n if any(self._port.is_wpt_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WPTServe ...')\n self._port.start_wptserve()\n self._wptserve_started = True\n\n if (self._port.requires_http_server()\n or any(self._is_http_test(test) for test in tests_to_run)):\n self._printer.write_update('Starting HTTP server ...')\n self._port.start_http_server(\n additional_dirs={},\n 
number_of_drivers=self._options.max_locked_shards)\n self._http_server_started = True\n\n if any(self._is_websocket_test(test) for test in tests_to_run):\n self._printer.write_update('Starting WebSocket server ...')\n self._port.start_websocket_server()\n self._websockets_server_started = True\n\n def _stop_servers(self):\n if self._wptserve_started:\n self._printer.write_update('Stopping WPTServe ...')\n self._wptserve_started = False\n self._port.stop_wptserve()\n if self._http_server_started:\n self._printer.write_update('Stopping HTTP server ...')\n self._http_server_started = False\n self._port.stop_http_server()\n if self._websockets_server_started:\n self._printer.write_update('Stopping WebSocket server ...')\n self._websockets_server_started = False\n self._port.stop_websocket_server()\n\n def _clean_up_run(self):\n _log.debug('Flushing stdout')\n sys.stdout.flush()\n _log.debug('Flushing stderr')\n sys.stderr.flush()\n _log.debug('Cleaning up port')\n self._port.clean_up_test_run()\n if self._sink:\n _log.debug('Closing sink')\n self._sink.close()\n\n def _look_for_new_crash_logs(self, run_results, start_time):\n \"\"\"Looks for and writes new crash logs, at the end of the test run.\n\n Since crash logs can take a long time to be written out if the system is\n under stress, do a second pass at the end of the test run.\n\n Args:\n run_results: The results of the test run.\n start_time: Time the tests started at. 
We're looking for crash\n logs after that time.\n \"\"\"\n crashed_processes = []\n test_to_crash_failure = {}\n\n # reset static variables for Failure type classes\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = self._results_directory\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if (not isinstance(failure, test_failures.FailureCrash)\n or failure.has_log):\n continue\n crashed_processes.append(\n [test, failure.process_name, failure.pid])\n test_to_crash_failure[test] = failure\n\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(\n test, test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self._results_directory,\n artifacts_sub_dir,\n artifact_relative_path)\n self._filesystem.maybe_make_directory(\n self._filesystem.dirname(artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact(\n 'sample_file',\n self._filesystem.join(artifacts_sub_dir,\n artifact_relative_path))\n\n new_crash_logs = self._port.look_for_new_crash_logs(\n crashed_processes, start_time) or {}\n for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n 
test_to_crash_failure[test].create_artifacts(\n test_result.artifacts, force_overwrite=True)\n\n def _tests_to_retry(self, run_results):\n # TODO(ojan): This should also check that result.type != test_expectations.MISSING\n # since retrying missing expectations is silly. But that's a bit tricky since we\n # only consider the last retry attempt for the count of unexpected regressions.\n return [\n result.test_name\n for result in run_results.unexpected_results_by_name.values()\n if result.type != ResultType.Pass\n ]\n\n def _write_json_files(self, summarized_full_results,\n summarized_failing_results, initial_results,\n running_all_tests, run_histories):\n _log.debug(\"Writing JSON files in %s.\", self._artifacts_directory)\n\n # FIXME: Upload stats.json to the server and delete times_ms.\n times_trie = json_results_generator.test_timings_trie(\n initial_results.results_by_name.values())\n times_json_path = self._filesystem.join(self._artifacts_directory,\n 'times_ms.json')\n json_results_generator.write_json(self._filesystem, times_trie,\n times_json_path)\n\n # Save out the times data so we can use it for --fastest in the future.\n if running_all_tests:\n bot_test_times_path = self._port.bot_test_times_path()\n self._filesystem.maybe_make_directory(\n self._filesystem.dirname(bot_test_times_path))\n json_results_generator.write_json(self._filesystem, times_trie,\n bot_test_times_path)\n\n stats_trie = self._stats_trie(initial_results)\n stats_path = self._filesystem.join(self._artifacts_directory,\n 'stats.json')\n self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))\n\n full_results_path = self._filesystem.join(self._artifacts_directory,\n 'full_results.json')\n json_results_generator.write_json(\n self._filesystem, summarized_full_results, full_results_path)\n\n full_results_jsonp_path = self._filesystem.join(\n self._artifacts_directory, 'full_results_jsonp.js')\n json_results_generator.write_json(\n self._filesystem,\n 
summarized_full_results,\n full_results_jsonp_path,\n callback='ADD_FULL_RESULTS')\n failing_results_path = self._filesystem.join(self._artifacts_directory,\n 'failing_results.json')\n # We write failing_results.json out as jsonp because we need to load it\n # from a file url for results.html and Chromium doesn't allow that.\n json_results_generator.write_json(\n self._filesystem,\n summarized_failing_results,\n failing_results_path,\n callback='ADD_RESULTS')\n\n if self._options.json_test_results:\n json_results_generator.write_json(self._filesystem,\n summarized_full_results,\n self._options.json_test_results)\n if self._options.write_run_histories_to:\n json_results_generator.write_json(\n self._filesystem, run_histories,\n self._options.write_run_histories_to)\n\n _log.debug('Finished writing JSON files.')\n\n def _copy_results_html_file(self, destination_dir, filename):\n \"\"\"Copies a file from the template directory to the results directory.\"\"\"\n files_to_copy = [filename, filename + \".version\"]\n template_dir = self._path_finder.path_from_blink_tools(\n 'blinkpy', 'web_tests')\n for filename in files_to_copy:\n source_path = self._filesystem.join(template_dir, filename)\n destination_path = self._filesystem.join(destination_dir, filename)\n # Note that the results.html template file won't exist when\n # we're using a MockFileSystem during unit tests, so make sure\n # it exists before we try to copy it.\n if self._filesystem.exists(source_path):\n self._filesystem.copyfile(source_path, destination_path)\n\n def _stats_trie(self, initial_results):\n def _worker_number(worker_name):\n return int(worker_name.split('/')[1]) if worker_name else -1\n\n stats = {}\n for result in initial_results.results_by_name.values():\n if result.type != ResultType.Skip:\n stats[result.test_name] = {\n 'results': (_worker_number(result.worker_name),\n result.test_number, result.pid,\n int(result.test_run_time * 1000),\n int(result.total_run_time * 1000))\n }\n stats_trie = 
{}\n for name, value in stats.items():\n json_results_generator.add_path_to_trie(name, value, stats_trie)\n return stats_trie\n",
"step-ids": [
14,
20,
25,
31,
33
]
}
|
[
14,
20,
25,
31,
33
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while a != b:
if a > b:
a -= b
else:
b -= a
print(a)
print('---')
<|reserved_special_token_0|>
while number < 100:
x = number
a = 3 * x + 23
b = 3 * x - 17
while a != b:
if a > b:
a -= b
else:
b -= a
if a == 10:
print(x)
x += 1
<|reserved_special_token_1|>
<|reserved_special_token_0|>
x = int(input())
a = 3 * x + 23
b = 3 * x - 17
while a != b:
if a > b:
a -= b
else:
b -= a
print(a)
print('---')
number = 7
while number < 100:
x = number
a = 3 * x + 23
b = 3 * x - 17
while a != b:
if a > b:
a -= b
else:
b -= a
if a == 10:
print(x)
x += 1
<|reserved_special_token_1|>
"""
Below, written in four programming languages (here: Python), is a program
that reads a natural number x, applies a transformation, and prints the
result.  Task: find the smallest x for which the program prints 10.

The brute-force search below gives the answer: 9.
"""


def transform(x):
    """Return the value the program prints for input ``x``.

    Sets a = 3x + 23 and b = 3x - 17, then repeatedly subtracts the
    smaller from the larger until they are equal -- the subtraction form
    of the Euclidean algorithm, so the result is gcd(3x + 23, 3x - 17).
    (Well-defined for x >= 6, where both values are positive.)
    """
    a = 3 * x + 23
    b = 3 * x - 17
    while a != b:
        if a > b:
            a -= b
        else:
            b -= a
    return a


if __name__ == '__main__':
    print(transform(int(input())))
    print('---')
    # Brute-force search: print every candidate in [7, 100) that maps to 10.
    number = 7
    while number < 100:
        if transform(number) == 10:
            print(number)
        # BUG FIX: the original incremented x instead of number, so this
        # loop never terminated.
        number += 1
|
flexible
|
{
"blob_id": "181e9ac4acf0e69576716f3589359736bfbd9bef",
"index": 2380,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile a != b:\n if a > b:\n a -= b\n else:\n b -= a\nprint(a)\nprint('---')\n<mask token>\nwhile number < 100:\n x = number\n a = 3 * x + 23\n b = 3 * x - 17\n while a != b:\n if a > b:\n a -= b\n else:\n b -= a\n if a == 10:\n print(x)\n x += 1\n",
"step-3": "<mask token>\nx = int(input())\na = 3 * x + 23\nb = 3 * x - 17\nwhile a != b:\n if a > b:\n a -= b\n else:\n b -= a\nprint(a)\nprint('---')\nnumber = 7\nwhile number < 100:\n x = number\n a = 3 * x + 23\n b = 3 * x - 17\n while a != b:\n if a > b:\n a -= b\n else:\n b -= a\n if a == 10:\n print(x)\n x += 1\n",
"step-4": "\"\"\"\nНиже на четырёх языках программирования записана программа, которая вводит натуральное число 𝑥,\nвыполняет преобразования, а затем выводит результат. Укажите наименьшее значение 𝑥,\nпри вводе которого программа выведет число 10.\n\nТупо вручную ввёл. Крч 9. Хз, как на экзамене делать))\n\"\"\"\nx = int(input())\na = 3 * x + 23\nb = 3 * x - 17\nwhile a != b:\n if a > b:\n a -= b\n else:\n b -= a\nprint(a)\nprint('---')\nnumber = 7\nwhile number < 100:\n x = number\n a = 3 * x + 23\n b = 3 * x - 17\n while a != b:\n if a > b:\n a -= b\n else:\n b -= a\n if a == 10:\n print(x)\n x += 1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
Created on 13 Dec 2016

@author: hpcosta
'''
# https://www.hackerrank.com/challenges/backreferences-to-failed-groups
#
# Task: match a test string S consisting of 8 digits, optionally split
# into four 2-digit parts by a "-" separator (e.g. 12-34-56-78).
# Valid:   12345678, 12-34-56-87
# Invalid: 1-234-56-78, 12-45-7810

import re

# Group 1 captures the first separator ("-" or empty); each \1 then
# forces every later separator to be identical to it.
PATTERN = r"^\d{2}(-?)\d{2}\1\d{2}\1\d{2}$"  # Do not delete 'r'.

print(str(bool(re.search(PATTERN, raw_input()))).lower())
|
normal
|
{
"blob_id": "e884ce5878de75afe93085e2310b4b8d5953963a",
"index": 337,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-3": "<mask token>\nregex = '^\\\\d{2}(-?)\\\\d{2}\\\\1\\\\d{2}\\\\1\\\\d{2}$'\n<mask token>\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-4": "<mask token>\nregex = '^\\\\d{2}(-?)\\\\d{2}\\\\1\\\\d{2}\\\\1\\\\d{2}$'\nimport re\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-5": "'''\nCreated on 13 Dec 2016\n\n@author: hpcosta\n'''\n# https://www.hackerrank.com/challenges/backreferences-to-failed-groups\n\nregex = r\"^\\d{2}(-?)\\d{2}\\1\\d{2}\\1\\d{2}$\" # Do not delete 'r'.\n\nimport re\n\nprint(str(bool(re.search(regex, raw_input()))).lower())\n\n\n\n# Task\n# \n# You have a test string S. \n# Your task is to write a regex which will match S, with following condition(s):\n# \n# S consists of 8 digits.\n# S may have \"-\" separator such that string S gets divided in 4 parts, with each part having exactly two digits. (Eg. 12-34-56-78)\n# Valid \n# \n# 12345678\n# 12-34-56-87\n# Invalid \n# \n# 1-234-56-78\n# 12-45-7810",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def rec_coin(target, coins):
"""
INPUT: Target change amount and list of coin values
OUTPUT: Minimum coins needed to make change
Note, this solution is not optimized.
"""
min_coins = target
if target in coins:
return 1
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin(target - i, coins)
if num_coins < min_coins:
min_coins = num_coins
return min_coins
def rec_coin_dynam(target, coins, known_results):
"""
INPUT: This function takes in a target amount and a list of possible coins to use.
It also takes a third parameter, known_results, indicating previously calculated results.
The known_results parameter shoud be started with [0] * (target+1)
OUTPUT: Minimum number of coins needed to make the target.
"""
min_coins = target
if target in coins:
known_results[target] = 1
return 1
elif known_results[target] > 0:
return known_results[target]
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)
if num_coins < min_coins:
min_coins = num_coins
known_results[target] = min_coins
return min_coins
<|reserved_special_token_0|>
class TestCoins(object):
def check(self, solution):
coins = [1, 5, 10, 25]
assert_equal(solution(45, coins, [0] * (45 + 1)), 3)
assert_equal(solution(23, coins, [0] * (23 + 1)), 5)
assert_equal(solution(74, coins, [0] * (74 + 1)), 8)
print('Passed all tests.')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def rec_coin(target, coins):
"""
INPUT: Target change amount and list of coin values
OUTPUT: Minimum coins needed to make change
Note, this solution is not optimized.
"""
min_coins = target
if target in coins:
return 1
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin(target - i, coins)
if num_coins < min_coins:
min_coins = num_coins
return min_coins
def rec_coin_dynam(target, coins, known_results):
"""
INPUT: This function takes in a target amount and a list of possible coins to use.
It also takes a third parameter, known_results, indicating previously calculated results.
The known_results parameter shoud be started with [0] * (target+1)
OUTPUT: Minimum number of coins needed to make the target.
"""
min_coins = target
if target in coins:
known_results[target] = 1
return 1
elif known_results[target] > 0:
return known_results[target]
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)
if num_coins < min_coins:
min_coins = num_coins
known_results[target] = min_coins
return min_coins
def bottom_up_solution(n, coins):
arr = [0] + [n] * n
for i in range(1, len(arr)):
min_coins = n
for coin in [c for c in coins if c <= i]:
min_coins = min(arr[i - coin] + 1, min_coins)
arr[i] = min_coins
return arr[n]
class TestCoins(object):
def check(self, solution):
coins = [1, 5, 10, 25]
assert_equal(solution(45, coins, [0] * (45 + 1)), 3)
assert_equal(solution(23, coins, [0] * (23 + 1)), 5)
assert_equal(solution(74, coins, [0] * (74 + 1)), 8)
print('Passed all tests.')
<|reserved_special_token_0|>
print(rec_coin_dynam(target, coins, known_results))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def rec_coin(target, coins):
"""
INPUT: Target change amount and list of coin values
OUTPUT: Minimum coins needed to make change
Note, this solution is not optimized.
"""
min_coins = target
if target in coins:
return 1
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin(target - i, coins)
if num_coins < min_coins:
min_coins = num_coins
return min_coins
def rec_coin_dynam(target, coins, known_results):
"""
INPUT: This function takes in a target amount and a list of possible coins to use.
It also takes a third parameter, known_results, indicating previously calculated results.
The known_results parameter shoud be started with [0] * (target+1)
OUTPUT: Minimum number of coins needed to make the target.
"""
min_coins = target
if target in coins:
known_results[target] = 1
return 1
elif known_results[target] > 0:
return known_results[target]
else:
for i in [c for c in coins if c <= target]:
num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)
if num_coins < min_coins:
min_coins = num_coins
known_results[target] = min_coins
return min_coins
def bottom_up_solution(n, coins):
arr = [0] + [n] * n
for i in range(1, len(arr)):
min_coins = n
for coin in [c for c in coins if c <= i]:
min_coins = min(arr[i - coin] + 1, min_coins)
arr[i] = min_coins
return arr[n]
class TestCoins(object):
def check(self, solution):
coins = [1, 5, 10, 25]
assert_equal(solution(45, coins, [0] * (45 + 1)), 3)
assert_equal(solution(23, coins, [0] * (23 + 1)), 5)
assert_equal(solution(74, coins, [0] * (74 + 1)), 8)
print('Passed all tests.')
target = 23
coins = [1, 2, 5, 10, 20]
known_results = [0] * (target + 1)
print(rec_coin_dynam(target, coins, known_results))
<|reserved_special_token_1|>
from nose.tools import assert_equal
def rec_coin(target, coins):
    """
    INPUT: Target change amount and list of coin values
    OUTPUT: Minimum coins needed to make change

    Pure recursion with no caching -- exponential time; kept only as a
    baseline for the memoized version below.
    """
    # A single coin equal to the target is trivially optimal.
    if target in coins:
        return 1
    # Upper bound: the target itself (e.g. paying everything in 1s).
    best = target
    for coin in (c for c in coins if c <= target):
        best = min(best, 1 + rec_coin(target - coin, coins))
    return best


def rec_coin_dynam(target, coins, known_results):
    """
    INPUT: This function takes in a target amount and a list of possible coins to use.
    It also takes a third parameter, known_results, indicating previously calculated results.
    The known_results parameter should be seeded with [0] * (target + 1).

    OUTPUT: Minimum number of coins needed to make the target.
    """
    if target in coins:
        # One coin covers the whole amount; record and return it.
        known_results[target] = 1
        return 1
    if known_results[target] > 0:
        # A positive cache entry means this subproblem was already solved.
        return known_results[target]
    best = target
    for coin in (c for c in coins if c <= target):
        best = min(best, 1 + rec_coin_dynam(target - coin, coins, known_results))
    known_results[target] = best
    return best


def bottom_up_solution(n, coins):
    """Iterative (bottom-up) DP: fewest coins from *coins* that sum to n.

    Entry i of the table holds the minimum coin count for amount i;
    amounts that cannot be formed keep the sentinel value n.
    """
    table = [0] + [n] * n
    for amount in range(1, n + 1):
        options = [table[amount - c] + 1 for c in coins if c <= amount]
        # Include the sentinel n so unreachable amounts stay bounded.
        table[amount] = min([n] + options)
    return table[n]


class TestCoins(object):
    """Sanity checks for a (target, coins, cache)-style solver."""

    def check(self, solution):
        coins = [1, 5, 10, 25]
        assert_equal(solution(45, coins, [0] * (45 + 1)), 3)
        assert_equal(solution(23, coins, [0] * (23 + 1)), 5)
        assert_equal(solution(74, coins, [0] * (74 + 1)), 8)
        print('Passed all tests.')


# Demo: minimum coins summing to 23 using the memoized solution.
target = 23
coins = [1, 2, 5, 10, 20]
known_results = [0] * (target + 1)
print(rec_coin_dynam(target, coins, known_results))
<|reserved_special_token_1|>
from nose.tools import assert_equal
def rec_coin(target, coins):
    """Minimum number of coins from *coins* that sum to *target*.

    Pure recursion with no caching: every subproblem is recomputed, so
    the running time is exponential.  Kept as a baseline for the
    memoized version below.
    """
    # A single coin equal to the target is trivially optimal.
    if target in coins:
        return 1
    # Upper bound: the target itself (e.g. paying everything in 1s).
    best = target
    for coin in (c for c in coins if c <= target):
        best = min(best, 1 + rec_coin(target - coin, coins))
    return best
# consider using decorators to encapsulate memoization
def rec_coin_dynam(target, coins, known_results):
    """Memoized recursive coin change: fewest coins summing to *target*.

    ``known_results`` must be pre-seeded with ``[0] * (target + 1)``;
    solved subproblems are cached there so each amount is computed once.
    """
    if target in coins:
        # A single coin matches the target exactly.
        known_results[target] = 1
        return 1
    if known_results[target] > 0:
        # Cached answer from an earlier recursive call.
        return known_results[target]
    # Take the cheapest recursive answer over every coin small enough to
    # use; ``default`` covers the no-usable-coin case (mirrors the seed).
    fewest = min(
        (1 + rec_coin_dynam(target - c, coins, known_results)
         for c in coins if c <= target),
        default=target,
    )
    known_results[target] = fewest
    return fewest
def bottom_up_solution(n, coins):
    """Tabulated (bottom-up) coin change: fewest coins needed to total *n*."""
    # table[amount] holds the fewest coins for that amount; 0 needs none.
    table = [0] + [n] * n
    for amount in range(1, n + 1):
        # The leading n is the default when no coin fits this amount.
        table[amount] = min(
            [n] + [table[amount - c] + 1 for c in coins if c <= amount]
        )
    return table[n]
class TestCoins(object):
    """Sanity checks for a coin-change ``solution(target, coins, memo)``."""

    def check(self, solution):
        coins = [1, 5, 10, 25]
        for target, want in ((45, 3), (23, 5), (74, 8)):
            memo = [0] * (target + 1)
            assert_equal(solution(target, coins, memo), want)
        print('Passed all tests.')
# Run Test
# test = TestCoins()
# test.check(rec_coin_dynam)
# print(bottom_up_solution(6, [1, 2, 5]))
# dynamic solution
target = 23  # amount of change to make
coins = [1, 2, 5, 10, 20]  # available coin denominations
known_results = [0]*(target+1)  # memo table required by rec_coin_dynam

print(rec_coin_dynam(target, coins, known_results))
|
flexible
|
{
"blob_id": "f8c30f8ccd1b901fd750a2c9e14cab78e1d12a14",
"index": 4039,
"step-1": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\n<mask token>\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\n<mask token>\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-3": "<mask token>\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0] * (target + 1)\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-4": "from nose.tools import assert_equal\n\n\ndef rec_coin(target, coins):\n \"\"\"\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n \"\"\"\n min_coins = target\n if target in coins:\n return 1\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin(target - i, coins)\n if num_coins < min_coins:\n min_coins = num_coins\n return min_coins\n\n\ndef rec_coin_dynam(target, coins, known_results):\n \"\"\"\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n \"\"\"\n min_coins = target\n if target in coins:\n known_results[target] = 1\n return 1\n elif known_results[target] > 0:\n return known_results[target]\n else:\n for i in [c for c in coins if c <= target]:\n num_coins = 1 + rec_coin_dynam(target - i, coins, known_results)\n if num_coins < min_coins:\n min_coins = num_coins\n known_results[target] = min_coins\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n arr = [0] + [n] * n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= i]:\n min_coins = min(arr[i - coin] + 1, min_coins)\n arr[i] = min_coins\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0] * (45 + 1)), 3)\n assert_equal(solution(23, coins, [0] * (23 + 1)), 5)\n assert_equal(solution(74, coins, [0] * (74 + 1)), 8)\n print('Passed all tests.')\n\n\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0] * (target + 1)\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-5": "from nose.tools import assert_equal\n\n\ndef rec_coin(target, coins):\n '''\n INPUT: Target change amount and list of coin values\n OUTPUT: Minimum coins needed to make change\n\n Note, this solution is not optimized.\n '''\n\n # Default to target value\n min_coins = target\n\n # Check to see if we have a single coin match (BASE CASE)\n if target in coins:\n return 1\n\n else:\n\n # for every coin value that is <= than target\n for i in [c for c in coins if c <= target]:\n\n # Recursive Call (add a count coin and subtract from the target)\n num_coins = 1 + rec_coin(target-i, coins)\n\n # Reset Minimum if we have a new minimum\n if num_coins < min_coins:\n\n min_coins = num_coins\n\n return min_coins\n\n\n# consider using decorators to encapsulate memoization\n\ndef rec_coin_dynam(target, coins, known_results):\n '''\n INPUT: This function takes in a target amount and a list of possible coins to use.\n It also takes a third parameter, known_results, indicating previously calculated results.\n The known_results parameter shoud be started with [0] * (target+1)\n\n OUTPUT: Minimum number of coins needed to make the target.\n '''\n\n # Default output to target\n min_coins = target\n\n # Base Case\n if target in coins:\n known_results[target] = 1\n return 1\n\n # Return a known result if it happens to be greater than 0\n elif known_results[target] > 0:\n return known_results[target]\n\n else:\n # for every coin value that is <= than target\n for i in [c for c in coins if c <= target]:\n\n # Recursive call, note how we include the known results!\n num_coins = 1 + rec_coin_dynam(target-i, coins, known_results)\n\n # Reset Minimum if we have a new minimum\n if num_coins < min_coins:\n min_coins = num_coins\n\n # Reset the known result\n known_results[target] = min_coins\n\n return min_coins\n\n\ndef bottom_up_solution(n, coins):\n\n # intialize the array\n arr = [0] + [n]*(n)\n\n for i in range(1, len(arr)):\n min_coins = n\n for coin in [c for c in coins if c <= 
i]:\n min_coins = min(arr[i-coin] + 1, min_coins)\n\n arr[i] = min_coins\n\n return arr[n]\n\n\nclass TestCoins(object):\n\n def check(self, solution):\n coins = [1, 5, 10, 25]\n assert_equal(solution(45, coins, [0]*(45+1)), 3)\n assert_equal(solution(23, coins, [0]*(23+1)), 5)\n assert_equal(solution(74, coins, [0]*(74+1)), 8)\n\n print('Passed all tests.')\n\n\n# Run Test\n# test = TestCoins()\n# test.check(rec_coin_dynam)\n\n# print(bottom_up_solution(6, [1, 2, 5]))\n\n\n# dynamic solution\ntarget = 23\ncoins = [1, 2, 5, 10, 20]\nknown_results = [0]*(target+1)\n\nprint(rec_coin_dynam(target, coins, known_results))\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
from django.test import TestCase
# Create your tests here.
import pymongo
# Connect to the local MongoDB instance (assumes mongod on the default port).
client = pymongo.MongoClient(host='127.0.0.1', port=27017)
db = client.NBA_china_spider  # database populated by the NBA-China spider
collection = db.data
# Pull every scraped document into memory at once.
# NOTE(review): raises IndexError if the collection is empty — confirm intended.
data = [title for title in collection.find()]
print(data[0]['url'])
|
normal
|
{
"blob_id": "52ebe80e2d520bf07b21dc668223348002eb6d42",
"index": 2790,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(data[0]['url'])\n",
"step-3": "<mask token>\nclient = pymongo.MongoClient(host='127.0.0.1', port=27017)\ndb = client.NBA_china_spider\ncollection = db.data\ndata = [title for title in collection.find()]\nprint(data[0]['url'])\n",
"step-4": "from django.test import TestCase\nimport pymongo\nclient = pymongo.MongoClient(host='127.0.0.1', port=27017)\ndb = client.NBA_china_spider\ncollection = db.data\ndata = [title for title in collection.find()]\nprint(data[0]['url'])\n",
"step-5": "from django.test import TestCase\n\n# Create your tests here.\nimport pymongo\n\nclient = pymongo.MongoClient(host='127.0.0.1', port=27017)\ndb = client.NBA_china_spider\ncollection = db.data\n\ndata = [title for title in collection.find()]\nprint(data[0]['url'])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from LinkedList import LinkedList
from LinkedListHelper import CreateLinkedList
class LinkedListMod(LinkedList):
    """LinkedList extended with a method that drops every node."""

    def remove_allnode(self):
        # Detach nodes one at a time from the front until the list is empty.
        while self.head:
            detached = self.head
            self.head = detached.next
            del detached
def main():
    """Build a demo list, display it, clear it, and display it again."""
    demo = LinkedListMod()
    CreateLinkedList(demo)
    demo.display()
    print("Remove the Linked List.....")
    demo.remove_allnode()
    demo.display()
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "45b20b57a3579c2527c674d0c2af88eedddadcae",
"index": 3724,
"step-1": "<mask token>\n\n\nclass LinkedListMod(LinkedList):\n\n def remove_allnode(self):\n while self.head:\n temp = self.head\n self.head = self.head.next\n del temp\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass LinkedListMod(LinkedList):\n\n def remove_allnode(self):\n while self.head:\n temp = self.head\n self.head = self.head.next\n del temp\n\n\ndef main():\n l1 = LinkedListMod()\n CreateLinkedList(l1)\n l1.display()\n print('Remove the Linked List.....')\n l1.remove_allnode()\n l1.display()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass LinkedListMod(LinkedList):\n\n def remove_allnode(self):\n while self.head:\n temp = self.head\n self.head = self.head.next\n del temp\n\n\ndef main():\n l1 = LinkedListMod()\n CreateLinkedList(l1)\n l1.display()\n print('Remove the Linked List.....')\n l1.remove_allnode()\n l1.display()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from LinkedList import LinkedList\nfrom LinkedListHelper import CreateLinkedList\n\n\nclass LinkedListMod(LinkedList):\n\n def remove_allnode(self):\n while self.head:\n temp = self.head\n self.head = self.head.next\n del temp\n\n\ndef main():\n l1 = LinkedListMod()\n CreateLinkedList(l1)\n l1.display()\n print('Remove the Linked List.....')\n l1.remove_allnode()\n l1.display()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from LinkedList import LinkedList\nfrom LinkedListHelper import CreateLinkedList\nclass LinkedListMod(LinkedList):\n def remove_allnode(self):\n while self.head:\n temp = self.head\n self.head = self.head.next\n del temp\ndef main():\n l1 = LinkedListMod()\n CreateLinkedList(l1)\n l1.display()\n print(\"Remove the Linked List.....\")\n l1.remove_allnode()\n l1.display()\nif __name__ == \"__main__\":\n main()",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
import Tkinter as tk
from urllib2 import urlopen
except ImportError:
import tkinter as tk
from urllib.request import urlopen
<|reserved_special_token_0|>
root.title(sf)
<|reserved_special_token_0|>
label.pack(padx=5, pady=5)
root.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
import Tkinter as tk
from urllib2 import urlopen
except ImportError:
import tkinter as tk
from urllib.request import urlopen
root = tk.Tk()
url = (
'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'
)
image_bytes = urlopen(url).read()
data_stream = io.BytesIO(image_bytes)
pil_image = Image.open(data_stream)
w, h = pil_image.size
fname = url.split('/')[-1]
sf = '{} ({}x{})'.format(fname, w, h)
root.title(sf)
tk_image = ImageTk.PhotoImage(pil_image)
label = tk.Label(root, image=tk_image, bg='brown')
label.pack(padx=5, pady=5)
root.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import io
from PIL import Image, ImageTk
try:
import Tkinter as tk
from urllib2 import urlopen
except ImportError:
import tkinter as tk
from urllib.request import urlopen
root = tk.Tk()
url = (
'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'
)
image_bytes = urlopen(url).read()
data_stream = io.BytesIO(image_bytes)
pil_image = Image.open(data_stream)
w, h = pil_image.size
fname = url.split('/')[-1]
sf = '{} ({}x{})'.format(fname, w, h)
root.title(sf)
tk_image = ImageTk.PhotoImage(pil_image)
label = tk.Label(root, image=tk_image, bg='brown')
label.pack(padx=5, pady=5)
root.mainloop()
<|reserved_special_token_1|>
''' tk_image_view_url_io.py
display an image from a URL using Tkinter, PIL and data_stream
tested with Python27 and Python33 by vegaseat 01mar2013
'''

import io
# io.BytesIO lets PIL open image formats other than gif from raw bytes
from PIL import Image, ImageTk
try:
    # Python2 module names
    import Tkinter as tk
    from urllib2 import urlopen
except ImportError:
    # Python3 module names
    import tkinter as tk
    from urllib.request import urlopen

root = tk.Tk()

# find yourself a picture on an internet web page you like
# (right click on the picture, under properties copy the address)
#url = "http://www.google.com/intl/en/images/logo.gif"
# or use image previously downloaded to tinypic.com
#url = "http://i48.tinypic.com/w6sjn6.jpg"
#url = "http://i50.tinypic.com/34g8vo5.jpg"
#url = "https://media.geeksforgeeks.org/wp-content/uploads/Computer-Networking-Diagram.png"
url = "https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg"
# NOTE(review): the response object is never closed; harmless for a one-shot
# script, but wrap in a context manager if this code is reused.
image_bytes = urlopen(url).read()
# wrap the downloaded bytes in an in-memory file object
data_stream = io.BytesIO(image_bytes)
# open as a PIL image object
pil_image = Image.open(data_stream)

# show image info (name and pixel size) in the window title
# get the size of the image
w, h = pil_image.size
# split off the image file name from the URL
fname = url.split('/')[-1]
sf = "{} ({}x{})".format(fname, w, h)
root.title(sf)

# convert PIL image object to Tkinter PhotoImage object
tk_image = ImageTk.PhotoImage(pil_image)

# put the image on a typical widget
label = tk.Label(root, image=tk_image, bg='brown')
label.pack(padx=5, pady=5)

root.mainloop()
|
flexible
|
{
"blob_id": "7764effac0b95ad8f62b91dd470c1d0e40704a7d",
"index": 9705,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\n<mask token>\nroot.title(sf)\n<mask token>\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"step-3": "<mask token>\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\nroot = tk.Tk()\nurl = (\n 'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'\n )\nimage_bytes = urlopen(url).read()\ndata_stream = io.BytesIO(image_bytes)\npil_image = Image.open(data_stream)\nw, h = pil_image.size\nfname = url.split('/')[-1]\nsf = '{} ({}x{})'.format(fname, w, h)\nroot.title(sf)\ntk_image = ImageTk.PhotoImage(pil_image)\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"step-4": "<mask token>\nimport io\nfrom PIL import Image, ImageTk\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\nroot = tk.Tk()\nurl = (\n 'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'\n )\nimage_bytes = urlopen(url).read()\ndata_stream = io.BytesIO(image_bytes)\npil_image = Image.open(data_stream)\nw, h = pil_image.size\nfname = url.split('/')[-1]\nsf = '{} ({}x{})'.format(fname, w, h)\nroot.title(sf)\ntk_image = ImageTk.PhotoImage(pil_image)\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"step-5": "''' tk_image_view_url_io.py\ndisplay an image from a URL using Tkinter, PIL and data_stream\ntested with Python27 and Python33 by vegaseat 01mar2013\n'''\n\nimport io\n# allows for image formats other than gif\nfrom PIL import Image, ImageTk\ntry:\n # Python2\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n # Python3\n import tkinter as tk\n from urllib.request import urlopen\n\nroot = tk.Tk()\n\n# find yourself a picture on an internet web page you like\n# (right click on the picture, under properties copy the address)\n#url = \"http://www.google.com/intl/en/images/logo.gif\"\n# or use image previously downloaded to tinypic.com\n#url = \"http://i48.tinypic.com/w6sjn6.jpg\"\n#url = \"http://i50.tinypic.com/34g8vo5.jpg\"\n#url = \"https://media.geeksforgeeks.org/wp-content/uploads/Computer-Networking-Diagram.png\"\nurl = \"https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg\"\nimage_bytes = urlopen(url).read()\n# internal data file\ndata_stream = io.BytesIO(image_bytes)\n# open as a PIL image object\npil_image = Image.open(data_stream)\n\n# optionally show image info\n# get the size of the image\nw, h = pil_image.size\n# split off image file name\nfname = url.split('/')[-1]\nsf = \"{} ({}x{})\".format(fname, w, h)\nroot.title(sf)\n\n# convert PIL image object to Tkinter PhotoImage object\ntk_image = ImageTk.PhotoImage(pil_image)\n\n# put the image on a typical widget\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\n\nroot.mainloop()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def a2b_base64(string: _Ascii) ->bytes:
...
<|reserved_special_token_0|>
def a2b_qp(string: _Ascii, header: bool=...) ->bytes:
...
def b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:
bool=...) ->bytes:
...
def a2b_hqx(string: _Ascii) ->bytes:
...
def rledecode_hqx(data: _Bytes) ->bytes:
...
def rlecode_hqx(data: _Bytes) ->bytes:
...
def b2a_hqx(data: _Bytes) ->bytes:
...
def crc_hqx(data: _Bytes, crc: int) ->int:
...
<|reserved_special_token_0|>
def b2a_hex(data: _Bytes) ->bytes:
...
def hexlify(data: _Bytes) ->bytes:
...
def a2b_hex(hexstr: _Ascii) ->bytes:
...
<|reserved_special_token_0|>
class Error(Exception):
...
class Incomplete(Exception):
...
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def a2b_base64(string: _Ascii) ->bytes:
...
<|reserved_special_token_0|>
def a2b_qp(string: _Ascii, header: bool=...) ->bytes:
...
def b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:
bool=...) ->bytes:
...
def a2b_hqx(string: _Ascii) ->bytes:
...
def rledecode_hqx(data: _Bytes) ->bytes:
...
def rlecode_hqx(data: _Bytes) ->bytes:
...
def b2a_hqx(data: _Bytes) ->bytes:
...
def crc_hqx(data: _Bytes, crc: int) ->int:
...
def crc32(data: _Bytes, crc: int=...) ->int:
...
def b2a_hex(data: _Bytes) ->bytes:
...
def hexlify(data: _Bytes) ->bytes:
...
def a2b_hex(hexstr: _Ascii) ->bytes:
...
def unhexlify(hexlify: _Ascii) ->bytes:
...
class Error(Exception):
...
class Incomplete(Exception):
...
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def a2b_uu(string: _Ascii) ->bytes:
...
<|reserved_special_token_0|>
def a2b_base64(string: _Ascii) ->bytes:
...
<|reserved_special_token_0|>
def a2b_qp(string: _Ascii, header: bool=...) ->bytes:
...
def b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:
bool=...) ->bytes:
...
def a2b_hqx(string: _Ascii) ->bytes:
...
def rledecode_hqx(data: _Bytes) ->bytes:
...
def rlecode_hqx(data: _Bytes) ->bytes:
...
def b2a_hqx(data: _Bytes) ->bytes:
...
def crc_hqx(data: _Bytes, crc: int) ->int:
...
def crc32(data: _Bytes, crc: int=...) ->int:
...
def b2a_hex(data: _Bytes) ->bytes:
...
def hexlify(data: _Bytes) ->bytes:
...
def a2b_hex(hexstr: _Ascii) ->bytes:
...
def unhexlify(hexlify: _Ascii) ->bytes:
...
class Error(Exception):
...
class Incomplete(Exception):
...
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if sys.version_info < (3,):
_Bytes = Text
_Ascii = Text
else:
_Bytes = bytes
_Ascii = Union[bytes, str]
def a2b_uu(string: _Ascii) ->bytes:
...
if sys.version_info >= (3, 7):
def b2a_uu(data: _Bytes, *, backtick: bool=...) ->bytes:
...
else:
def b2a_uu(data: _Bytes) ->bytes:
...
def a2b_base64(string: _Ascii) ->bytes:
...
if sys.version_info >= (3, 6):
def b2a_base64(data: _Bytes, *, newline: bool=...) ->bytes:
...
else:
def b2a_base64(data: _Bytes) ->bytes:
...
def a2b_qp(string: _Ascii, header: bool=...) ->bytes:
...
def b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:
bool=...) ->bytes:
...
def a2b_hqx(string: _Ascii) ->bytes:
...
def rledecode_hqx(data: _Bytes) ->bytes:
...
def rlecode_hqx(data: _Bytes) ->bytes:
...
def b2a_hqx(data: _Bytes) ->bytes:
...
def crc_hqx(data: _Bytes, crc: int) ->int:
...
def crc32(data: _Bytes, crc: int=...) ->int:
...
def b2a_hex(data: _Bytes) ->bytes:
...
def hexlify(data: _Bytes) ->bytes:
...
def a2b_hex(hexstr: _Ascii) ->bytes:
...
def unhexlify(hexlify: _Ascii) ->bytes:
...
class Error(Exception):
...
class Incomplete(Exception):
...
<|reserved_special_token_1|>
# Stubs for binascii
# Based on http://docs.python.org/3.2/library/binascii.html
import sys
from typing import Union, Text
# Input type aliases used throughout these stubs.
if sys.version_info < (3,):
    # Python 2 accepts unicode ascii pretty much everywhere.
    _Bytes = Text
    _Ascii = Text
else:
    # But since Python 3.3 ASCII-only unicode strings are accepted by the
    # a2b_* functions.
    _Bytes = bytes
    _Ascii = Union[bytes, str]
# uuencode codecs; b2a_uu grew a keyword-only *backtick* flag in 3.7.
def a2b_uu(string: _Ascii) -> bytes: ...
if sys.version_info >= (3, 7):
    def b2a_uu(data: _Bytes, *, backtick: bool = ...) -> bytes: ...
else:
    def b2a_uu(data: _Bytes) -> bytes: ...
# base64 codecs; b2a_base64 grew a keyword-only *newline* flag in 3.6.
def a2b_base64(string: _Ascii) -> bytes: ...
if sys.version_info >= (3, 6):
    def b2a_base64(data: _Bytes, *, newline: bool = ...) -> bytes: ...
else:
    def b2a_base64(data: _Bytes) -> bytes: ...
# quoted-printable codecs
def a2b_qp(string: _Ascii, header: bool = ...) -> bytes: ...
def b2a_qp(data: _Bytes, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ...
# binhex (hqx) codecs, run-length helpers, and checksums
def a2b_hqx(string: _Ascii) -> bytes: ...
def rledecode_hqx(data: _Bytes) -> bytes: ...
def rlecode_hqx(data: _Bytes) -> bytes: ...
def b2a_hqx(data: _Bytes) -> bytes: ...
def crc_hqx(data: _Bytes, crc: int) -> int: ...
def crc32(data: _Bytes, crc: int = ...) -> int: ...
# hex codecs
def b2a_hex(data: _Bytes) -> bytes: ...
def hexlify(data: _Bytes) -> bytes: ...
def a2b_hex(hexstr: _Ascii) -> bytes: ...
def unhexlify(hexlify: _Ascii) -> bytes: ...
# Exceptions raised by the binascii module.
class Error(Exception): ...
class Incomplete(Exception): ...
|
flexible
|
{
"blob_id": "9ba74c7ecbd20c59883aff4efdc7e0369ff65daf",
"index": 5267,
"step-1": "<mask token>\n\n\ndef a2b_base64(string: _Ascii) ->bytes:\n ...\n\n\n<mask token>\n\n\ndef a2b_qp(string: _Ascii, header: bool=...) ->bytes:\n ...\n\n\ndef b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:\n bool=...) ->bytes:\n ...\n\n\ndef a2b_hqx(string: _Ascii) ->bytes:\n ...\n\n\ndef rledecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef rlecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef b2a_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef crc_hqx(data: _Bytes, crc: int) ->int:\n ...\n\n\n<mask token>\n\n\ndef b2a_hex(data: _Bytes) ->bytes:\n ...\n\n\ndef hexlify(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_hex(hexstr: _Ascii) ->bytes:\n ...\n\n\n<mask token>\n\n\nclass Error(Exception):\n ...\n\n\nclass Incomplete(Exception):\n ...\n",
"step-2": "<mask token>\n\n\ndef a2b_base64(string: _Ascii) ->bytes:\n ...\n\n\n<mask token>\n\n\ndef a2b_qp(string: _Ascii, header: bool=...) ->bytes:\n ...\n\n\ndef b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:\n bool=...) ->bytes:\n ...\n\n\ndef a2b_hqx(string: _Ascii) ->bytes:\n ...\n\n\ndef rledecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef rlecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef b2a_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef crc_hqx(data: _Bytes, crc: int) ->int:\n ...\n\n\ndef crc32(data: _Bytes, crc: int=...) ->int:\n ...\n\n\ndef b2a_hex(data: _Bytes) ->bytes:\n ...\n\n\ndef hexlify(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_hex(hexstr: _Ascii) ->bytes:\n ...\n\n\ndef unhexlify(hexlify: _Ascii) ->bytes:\n ...\n\n\nclass Error(Exception):\n ...\n\n\nclass Incomplete(Exception):\n ...\n",
"step-3": "<mask token>\n\n\ndef a2b_uu(string: _Ascii) ->bytes:\n ...\n\n\n<mask token>\n\n\ndef a2b_base64(string: _Ascii) ->bytes:\n ...\n\n\n<mask token>\n\n\ndef a2b_qp(string: _Ascii, header: bool=...) ->bytes:\n ...\n\n\ndef b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:\n bool=...) ->bytes:\n ...\n\n\ndef a2b_hqx(string: _Ascii) ->bytes:\n ...\n\n\ndef rledecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef rlecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef b2a_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef crc_hqx(data: _Bytes, crc: int) ->int:\n ...\n\n\ndef crc32(data: _Bytes, crc: int=...) ->int:\n ...\n\n\ndef b2a_hex(data: _Bytes) ->bytes:\n ...\n\n\ndef hexlify(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_hex(hexstr: _Ascii) ->bytes:\n ...\n\n\ndef unhexlify(hexlify: _Ascii) ->bytes:\n ...\n\n\nclass Error(Exception):\n ...\n\n\nclass Incomplete(Exception):\n ...\n",
"step-4": "<mask token>\nif sys.version_info < (3,):\n _Bytes = Text\n _Ascii = Text\nelse:\n _Bytes = bytes\n _Ascii = Union[bytes, str]\n\n\ndef a2b_uu(string: _Ascii) ->bytes:\n ...\n\n\nif sys.version_info >= (3, 7):\n\n def b2a_uu(data: _Bytes, *, backtick: bool=...) ->bytes:\n ...\nelse:\n\n def b2a_uu(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_base64(string: _Ascii) ->bytes:\n ...\n\n\nif sys.version_info >= (3, 6):\n\n def b2a_base64(data: _Bytes, *, newline: bool=...) ->bytes:\n ...\nelse:\n\n def b2a_base64(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_qp(string: _Ascii, header: bool=...) ->bytes:\n ...\n\n\ndef b2a_qp(data: _Bytes, quotetabs: bool=..., istext: bool=..., header:\n bool=...) ->bytes:\n ...\n\n\ndef a2b_hqx(string: _Ascii) ->bytes:\n ...\n\n\ndef rledecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef rlecode_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef b2a_hqx(data: _Bytes) ->bytes:\n ...\n\n\ndef crc_hqx(data: _Bytes, crc: int) ->int:\n ...\n\n\ndef crc32(data: _Bytes, crc: int=...) ->int:\n ...\n\n\ndef b2a_hex(data: _Bytes) ->bytes:\n ...\n\n\ndef hexlify(data: _Bytes) ->bytes:\n ...\n\n\ndef a2b_hex(hexstr: _Ascii) ->bytes:\n ...\n\n\ndef unhexlify(hexlify: _Ascii) ->bytes:\n ...\n\n\nclass Error(Exception):\n ...\n\n\nclass Incomplete(Exception):\n ...\n",
"step-5": "# Stubs for binascii\n\n# Based on http://docs.python.org/3.2/library/binascii.html\n\nimport sys\nfrom typing import Union, Text\n\nif sys.version_info < (3,):\n # Python 2 accepts unicode ascii pretty much everywhere.\n _Bytes = Text\n _Ascii = Text\nelse:\n # But since Python 3.3 ASCII-only unicode strings are accepted by the\n # a2b_* functions.\n _Bytes = bytes\n _Ascii = Union[bytes, str]\n\ndef a2b_uu(string: _Ascii) -> bytes: ...\nif sys.version_info >= (3, 7):\n def b2a_uu(data: _Bytes, *, backtick: bool = ...) -> bytes: ...\nelse:\n def b2a_uu(data: _Bytes) -> bytes: ...\ndef a2b_base64(string: _Ascii) -> bytes: ...\nif sys.version_info >= (3, 6):\n def b2a_base64(data: _Bytes, *, newline: bool = ...) -> bytes: ...\nelse:\n def b2a_base64(data: _Bytes) -> bytes: ...\ndef a2b_qp(string: _Ascii, header: bool = ...) -> bytes: ...\ndef b2a_qp(data: _Bytes, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ...\ndef a2b_hqx(string: _Ascii) -> bytes: ...\ndef rledecode_hqx(data: _Bytes) -> bytes: ...\ndef rlecode_hqx(data: _Bytes) -> bytes: ...\ndef b2a_hqx(data: _Bytes) -> bytes: ...\ndef crc_hqx(data: _Bytes, crc: int) -> int: ...\ndef crc32(data: _Bytes, crc: int = ...) -> int: ...\ndef b2a_hex(data: _Bytes) -> bytes: ...\ndef hexlify(data: _Bytes) -> bytes: ...\ndef a2b_hex(hexstr: _Ascii) -> bytes: ...\ndef unhexlify(hexlify: _Ascii) -> bytes: ...\n\nclass Error(Exception): ...\nclass Incomplete(Exception): ...\n",
"step-ids": [
13,
15,
16,
17,
19
]
}
|
[
13,
15,
16,
17,
19
] |
def cheapest_route(routes, time_limit):
    """Return the minimum cost among routes whose time fits the limit.

    Args:
        routes: iterable of (cost, time) integer pairs.
        time_limit: maximum allowed time.

    Returns:
        The smallest cost with time <= time_limit, or None if no route
        qualifies.  (The original used a sentinel of 1000000, which would
        wrongly print "TLE" for a legitimate cost of exactly 1000000.)
    """
    feasible = [cost for cost, time in routes if time <= time_limit]
    return min(feasible) if feasible else None


def main():
    """Read N T, then N lines of 'c t', and print the answer or TLE."""
    n, time_limit = map(int, input().split())
    routes = [tuple(map(int, input().split())) for _ in range(n)]
    best = cheapest_route(routes, time_limit)
    print("TLE" if best is None else best)


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "8a0e781f29c426161240e33b9d2adc7537b3d352",
"index": 2513,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n c, t = map(int, input().split())\n if nm > c and T >= t:\n nm = c\nif nm == 1000000:\n print('TLE')\nelse:\n print(nm)\n",
"step-3": "N, T = map(int, input().split())\nnm = 1000000\nfor i in range(N):\n c, t = map(int, input().split())\n if nm > c and T >= t:\n nm = c\nif nm == 1000000:\n print('TLE')\nelse:\n print(nm)\n",
"step-4": "N,T=map(int,input().split())\nnm=1000000\nfor i in range(N):\n c,t=map(int,input().split())\n if nm>c and T>=t:\n nm=c\nif nm==1000000:\n print(\"TLE\")\nelse:\n print(nm)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = """\
A MiniFrame is a Frame with a small title bar. It is suitable for floating
toolbars that must not take up too much screen area. In other respects, it's the
same as a wx.Frame.
"""
__wxPyOnlineDocs__ = 'https://wxpython.org/Phoenix/docs/html/wx.MiniFrame.html'
__wxPyDemoPanel__ = 'TestPanel'
#-Imports-----------------------------------------------------------------------
#--Python Imports.
import os
import sys
#--wxPython Imports.
import wx
#-Globals-----------------------------------------------------------------------
try:
gFileDir = os.path.dirname(os.path.abspath(__file__))
except:
gFileDir = os.path.dirname(os.path.abspath(sys.argv[0]))
gBmpDir = gFileDir + os.sep + 'bitmaps'
class MyMiniFrame(wx.MiniFrame):
    """A wx.MiniFrame containing a close button and a stay-on-top toggle."""

    def __init__(self, parent, id, title, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
                 name='frame'):
        wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)

        panel = wx.Panel(self, -1)

        closeBtn = wx.Button(panel, 1003, "Close Me")
        closeBtn.SetPosition((15, 15))

        toggleBtn = wx.Button(panel, -1, "ToggleWindowStyle(wx.STAY_ON_TOP)")
        toggleBtn.SetPosition((30, 50))

        self.Bind(wx.EVT_BUTTON, self.OnCloseMe, closeBtn)
        self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, toggleBtn)
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)

    def OnToggleWindowStyle(self, event):
        # Flip the stay-on-top window bit each time the button is pressed.
        self.ToggleWindowStyle(wx.STAY_ON_TOP)

    def OnCloseMe(self, event):
        # Request a close; routed through OnCloseWindow via EVT_CLOSE.
        self.Close(True)

    def OnCloseWindow(self, event):
        self.Destroy()
#---------------------------------------------------------------------------
class TestPanel(wx.Panel):
    """Demo panel: create a MiniFrame plainly or with a chosen show effect.

    The listbox holds the names of the wx.SHOW_EFFECT_* constants and the
    spin control supplies the effect timeout in milliseconds.
    """

    def __init__(self, parent, log):
        self.log = log
        wx.Panel.__init__(self, parent, -1)

        b1 = wx.Button(self, -1, "Create and Show a MiniFrame", (50, 50))
        self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)

        b2 = wx.Button(self, -1, "Create and Show a MiniFrame With Effect", (50, 100))
        self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)

        self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
                                              'wx.SHOW_EFFECT_ROLL_TO_LEFT',
                                              'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
                                              'wx.SHOW_EFFECT_ROLL_TO_TOP',
                                              'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
                                              'wx.SHOW_EFFECT_SLIDE_TO_LEFT',
                                              'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
                                              'wx.SHOW_EFFECT_SLIDE_TO_TOP',
                                              'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
                                              'wx.SHOW_EFFECT_BLEND',
                                              'wx.SHOW_EFFECT_EXPAND'
                                              # 'wx.SHOW_EFFECT_MAX'
                                              ],
                               pos=(50, 155), size=(220, 160),
                               style=wx.LB_SINGLE)
        self.list.Select(0)

        tt = "Timeout in milliseconds\n0 is system default"
        self.spin = wx.SpinCtrl(self, -1, tt,
                                pos=(50, 130), style=wx.ALIGN_LEFT)
        self.spin.SetToolTip(wx.ToolTip(tt))
        self.spin.SetRange(0, 5000)
        self.spin.SetValue(0)

    def _selected_effect(self):
        """Return the wx.SHOW_EFFECT_* constant matching the list selection.

        The original used eval() on the listbox string; looking the name up
        on the wx module with getattr is equivalent for these choices and
        avoids evaluating UI text as code.
        """
        name = self.list.GetString(self.list.GetSelection())
        return getattr(wx, name.split('.', 1)[1])

    def OnButton1(self, evt):
        win = MyMiniFrame(self, -1, "This is a wx.MiniFrame", size=(350, 200),
                          style=wx.DEFAULT_FRAME_STYLE)
        win.Centre()
        win.Show(True)

    def OnButton2(self, evt):
        win = MyMiniFrame(self, -1, "This is a wx.MiniFrame", size=(350, 200),
                          style=wx.DEFAULT_FRAME_STYLE)
        win.Centre()
        win.ShowWithEffect(effect=self._selected_effect(),
                           timeout=self.spin.GetValue())
#- __main__ Demo ---------------------------------------------------------------
class printLog:
    """Tiny stand-in logger that echoes every message to stdout."""

    def __init__(self):
        pass

    def write(self, txt):
        # File-like .write() interface expected by the demo panels.
        print('%s' % txt)

    def WriteText(self, txt):
        # wx-style alias; forwards to write() so both spellings behave alike.
        self.write(txt)
class TestFrame(wx.Frame):
    """Top-level demo frame that hosts a TestPanel."""

    def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString,
                 pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style=wx.DEFAULT_FRAME_STYLE, name='frame'):
        wx.Frame.__init__(self, parent, id, title, pos, size, style, name)

        # Simple stdout logger passed to the panel.
        log = printLog()

        panel = TestPanel(self, log)
        self.Bind(wx.EVT_CLOSE, self.OnDestroy)

        # Use the interpreter executable's icon for the frame.
        # NOTE(review): wx.IconFromLocation is the classic-wx spelling;
        # under Phoenix this may raise — confirm against the wx version in use.
        try:
            self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
        except Exception as exc:
            # NOTE(review): re-raising here makes the try/except a no-op;
            # presumably the intent was to ignore icon failures — verify.
            raise exc

    def OnDestroy(self, event):
        self.Destroy()
class TestApp(wx.App):
    """Application object that builds and shows the demo frame."""

    def OnInit(self):
        # Create the main window, title it, and make it visible.
        frame = TestFrame(None)
        frame.SetTitle('Extended Frame Demo')
        frame.Show()
        return True
#---------------------------------------------------------------------------
if __name__ == '__main__':
    import sys

    # Report the interpreter and toolkit versions before starting the loop.
    print('Python %s.%s.%s %s' % sys.version_info[0:4])
    print('wxPython %s' % wx.version())

    gApp = TestApp(redirect=False, filename=None,
                   useBestVisual=False, clearSigInt=True)
    gApp.MainLoop()
|
normal
|
{
"blob_id": "b041e9577af72d2bcee3dda0cc78fa12800d53bd",
"index": 2286,
"step-1": "<mask token>\n\n\nclass TestPanel(wx.Panel):\n\n def __init__(self, parent, log):\n self.log = log\n wx.Panel.__init__(self, parent, -1)\n b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))\n self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)\n b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',\n (50, 100))\n self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)\n self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',\n 'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',\n 'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',\n 'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',\n 'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',\n 'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),\n size=(220, 160), style=wx.LB_SINGLE)\n self.list.Select(0)\n tt = 'Timeout in milliseconds\\n0 is system default'\n self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.\n ALIGN_LEFT)\n self.spin.SetToolTip(wx.ToolTip(tt))\n self.spin.SetRange(0, 5000)\n self.spin.SetValue(0)\n\n def OnButton1(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.Show(True)\n <mask token>\n\n\nclass printLog:\n\n def __init__(self):\n pass\n\n def write(self, txt):\n print('%s' % txt)\n\n def WriteText(self, txt):\n print('%s' % txt)\n\n\nclass TestFrame(wx.Frame):\n\n def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.\n DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,\n name='frame'):\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name)\n log = printLog()\n panel = TestPanel(self, log)\n self.Bind(wx.EVT_CLOSE, self.OnDestroy)\n try:\n self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))\n except Exception as exc:\n raise exc\n\n def OnDestroy(self, event):\n self.Destroy()\n\n\nclass TestApp(wx.App):\n\n def OnInit(self):\n gMainWin = 
TestFrame(None)\n gMainWin.SetTitle('Extended Frame Demo')\n gMainWin.Show()\n return True\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyMiniFrame(wx.MiniFrame):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestPanel(wx.Panel):\n\n def __init__(self, parent, log):\n self.log = log\n wx.Panel.__init__(self, parent, -1)\n b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))\n self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)\n b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',\n (50, 100))\n self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)\n self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',\n 'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',\n 'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',\n 'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',\n 'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',\n 'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),\n size=(220, 160), style=wx.LB_SINGLE)\n self.list.Select(0)\n tt = 'Timeout in milliseconds\\n0 is system default'\n self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.\n ALIGN_LEFT)\n self.spin.SetToolTip(wx.ToolTip(tt))\n self.spin.SetRange(0, 5000)\n self.spin.SetValue(0)\n\n def OnButton1(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.Show(True)\n\n def OnButton2(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.ShowWithEffect(effect=eval(self.list.GetString(self.list.\n GetSelection())), timeout=self.spin.GetValue())\n\n\nclass printLog:\n\n def __init__(self):\n pass\n\n def write(self, txt):\n print('%s' % txt)\n\n def WriteText(self, txt):\n print('%s' % txt)\n\n\nclass TestFrame(wx.Frame):\n\n def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.\n DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,\n name='frame'):\n wx.Frame.__init__(self, parent, id, title, pos, 
size, style, name)\n log = printLog()\n panel = TestPanel(self, log)\n self.Bind(wx.EVT_CLOSE, self.OnDestroy)\n try:\n self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))\n except Exception as exc:\n raise exc\n\n def OnDestroy(self, event):\n self.Destroy()\n\n\nclass TestApp(wx.App):\n\n def OnInit(self):\n gMainWin = TestFrame(None)\n gMainWin.SetTitle('Extended Frame Demo')\n gMainWin.Show()\n return True\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyMiniFrame(wx.MiniFrame):\n\n def __init__(self, parent, id, title, pos=wx.DefaultPosition, size=wx.\n DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name='frame'):\n wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)\n panel = wx.Panel(self, -1)\n button = wx.Button(panel, 1003, 'Close Me')\n button.SetPosition((15, 15))\n button2 = wx.Button(panel, -1, 'ToggleWindowStyle(wx.STAY_ON_TOP)')\n button2.SetPosition((30, 50))\n self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)\n self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)\n self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)\n\n def OnToggleWindowStyle(self, event):\n self.ToggleWindowStyle(wx.STAY_ON_TOP)\n <mask token>\n <mask token>\n\n\nclass TestPanel(wx.Panel):\n\n def __init__(self, parent, log):\n self.log = log\n wx.Panel.__init__(self, parent, -1)\n b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))\n self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)\n b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',\n (50, 100))\n self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)\n self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',\n 'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',\n 'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',\n 'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',\n 'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',\n 'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),\n size=(220, 160), style=wx.LB_SINGLE)\n self.list.Select(0)\n tt = 'Timeout in milliseconds\\n0 is system default'\n self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.\n ALIGN_LEFT)\n self.spin.SetToolTip(wx.ToolTip(tt))\n self.spin.SetRange(0, 5000)\n self.spin.SetValue(0)\n\n def OnButton1(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.Show(True)\n\n def OnButton2(self, 
evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.ShowWithEffect(effect=eval(self.list.GetString(self.list.\n GetSelection())), timeout=self.spin.GetValue())\n\n\nclass printLog:\n\n def __init__(self):\n pass\n\n def write(self, txt):\n print('%s' % txt)\n\n def WriteText(self, txt):\n print('%s' % txt)\n\n\nclass TestFrame(wx.Frame):\n\n def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.\n DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,\n name='frame'):\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name)\n log = printLog()\n panel = TestPanel(self, log)\n self.Bind(wx.EVT_CLOSE, self.OnDestroy)\n try:\n self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))\n except Exception as exc:\n raise exc\n\n def OnDestroy(self, event):\n self.Destroy()\n\n\nclass TestApp(wx.App):\n\n def OnInit(self):\n gMainWin = TestFrame(None)\n gMainWin.SetTitle('Extended Frame Demo')\n gMainWin.Show()\n return True\n\n\n<mask token>\n",
"step-4": "<mask token>\ntry:\n gFileDir = os.path.dirname(os.path.abspath(__file__))\nexcept:\n gFileDir = os.path.dirname(os.path.abspath(sys.argv[0]))\n<mask token>\n\n\nclass MyMiniFrame(wx.MiniFrame):\n\n def __init__(self, parent, id, title, pos=wx.DefaultPosition, size=wx.\n DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name='frame'):\n wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)\n panel = wx.Panel(self, -1)\n button = wx.Button(panel, 1003, 'Close Me')\n button.SetPosition((15, 15))\n button2 = wx.Button(panel, -1, 'ToggleWindowStyle(wx.STAY_ON_TOP)')\n button2.SetPosition((30, 50))\n self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)\n self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)\n self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)\n\n def OnToggleWindowStyle(self, event):\n self.ToggleWindowStyle(wx.STAY_ON_TOP)\n\n def OnCloseMe(self, event):\n self.Close(True)\n\n def OnCloseWindow(self, event):\n self.Destroy()\n\n\nclass TestPanel(wx.Panel):\n\n def __init__(self, parent, log):\n self.log = log\n wx.Panel.__init__(self, parent, -1)\n b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))\n self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)\n b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',\n (50, 100))\n self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)\n self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',\n 'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',\n 'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',\n 'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',\n 'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',\n 'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),\n size=(220, 160), style=wx.LB_SINGLE)\n self.list.Select(0)\n tt = 'Timeout in milliseconds\\n0 is system default'\n self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.\n ALIGN_LEFT)\n self.spin.SetToolTip(wx.ToolTip(tt))\n self.spin.SetRange(0, 5000)\n 
self.spin.SetValue(0)\n\n def OnButton1(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.Show(True)\n\n def OnButton2(self, evt):\n win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350, \n 200), style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.ShowWithEffect(effect=eval(self.list.GetString(self.list.\n GetSelection())), timeout=self.spin.GetValue())\n\n\nclass printLog:\n\n def __init__(self):\n pass\n\n def write(self, txt):\n print('%s' % txt)\n\n def WriteText(self, txt):\n print('%s' % txt)\n\n\nclass TestFrame(wx.Frame):\n\n def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.\n DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,\n name='frame'):\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name)\n log = printLog()\n panel = TestPanel(self, log)\n self.Bind(wx.EVT_CLOSE, self.OnDestroy)\n try:\n self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))\n except Exception as exc:\n raise exc\n\n def OnDestroy(self, event):\n self.Destroy()\n\n\nclass TestApp(wx.App):\n\n def OnInit(self):\n gMainWin = TestFrame(None)\n gMainWin.SetTitle('Extended Frame Demo')\n gMainWin.Show()\n return True\n\n\nif __name__ == '__main__':\n import sys\n print('Python %s.%s.%s %s' % sys.version_info[0:4])\n print('wxPython %s' % wx.version())\n gApp = TestApp(redirect=False, filename=None, useBestVisual=False,\n clearSigInt=True)\n gApp.MainLoop()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n__doc__ = \"\"\"\\\nA MiniFrame is a Frame with a small title bar. It is suitable for floating\ntoolbars that must not take up too much screen area. In other respects, it's the\nsame as a wx.Frame.\n\"\"\"\n\n__wxPyOnlineDocs__ = 'https://wxpython.org/Phoenix/docs/html/wx.MiniFrame.html'\n__wxPyDemoPanel__ = 'TestPanel'\n\n#-Imports-----------------------------------------------------------------------\n\n#--Python Imports.\nimport os\nimport sys\n\n#--wxPython Imports.\nimport wx\n\n\n#-Globals-----------------------------------------------------------------------\ntry:\n gFileDir = os.path.dirname(os.path.abspath(__file__))\nexcept:\n gFileDir = os.path.dirname(os.path.abspath(sys.argv[0]))\ngBmpDir = gFileDir + os.sep + 'bitmaps'\n\n\nclass MyMiniFrame(wx.MiniFrame):\n def __init__(self, parent, id, title, pos=wx.DefaultPosition,\n size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,\n name='frame'):\n\n wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)\n panel = wx.Panel(self, -1)\n\n button = wx.Button(panel, 1003, \"Close Me\")\n button.SetPosition((15, 15))\n\n button2 = wx.Button(panel, -1, \"ToggleWindowStyle(wx.STAY_ON_TOP)\")\n button2.SetPosition((30, 50))\n\n self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)\n self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)\n self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)\n\n\n def OnToggleWindowStyle(self, event):\n self.ToggleWindowStyle(wx.STAY_ON_TOP)\n\n def OnCloseMe(self, event):\n self.Close(True)\n\n def OnCloseWindow(self, event):\n self.Destroy()\n\n#---------------------------------------------------------------------------\n\nclass TestPanel(wx.Panel):\n def __init__(self, parent, log):\n self.log = log\n wx.Panel.__init__(self, parent, -1)\n\n b1 = wx.Button(self, -1, \"Create and Show a MiniFrame\", (50, 50))\n self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)\n\n b2 = wx.Button(self, -1, \"Create and Show a MiniFrame With 
Effect\", (50, 100))\n self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)\n\n self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',\n 'wx.SHOW_EFFECT_ROLL_TO_LEFT',\n 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',\n 'wx.SHOW_EFFECT_ROLL_TO_TOP',\n 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',\n 'wx.SHOW_EFFECT_SLIDE_TO_LEFT',\n 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',\n 'wx.SHOW_EFFECT_SLIDE_TO_TOP',\n 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',\n 'wx.SHOW_EFFECT_BLEND',\n 'wx.SHOW_EFFECT_EXPAND'\n # 'wx.SHOW_EFFECT_MAX'\n ],\n pos=(50, 155), size=(220, 160),\n style=wx.LB_SINGLE)\n self.list.Select(0)\n\n tt = \"Timeout in milliseconds\\n0 is system default\"\n self.spin = wx.SpinCtrl(self, -1, tt,\n pos=(50, 130), style=wx.ALIGN_LEFT)\n self.spin.SetToolTip(wx.ToolTip(tt))\n self.spin.SetRange(0, 5000)\n self.spin.SetValue(0)\n\n def OnButton1(self, evt):\n win = MyMiniFrame(self, -1, \"This is a wx.MiniFrame\", size=(350, 200),\n style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.Show(True)\n\n def OnButton2(self, evt):\n win = MyMiniFrame(self, -1, \"This is a wx.MiniFrame\", size=(350, 200),\n style=wx.DEFAULT_FRAME_STYLE)\n win.Centre()\n win.ShowWithEffect(effect=eval(self.list.GetString(self.list.GetSelection())),\n timeout=self.spin.GetValue())\n\n\n#- __main__ Demo ---------------------------------------------------------------\n\nclass printLog:\n def __init__(self):\n pass\n\n def write(self, txt):\n print('%s' % txt)\n\n def WriteText(self, txt):\n print('%s' % txt)\n\n\nclass TestFrame(wx.Frame):\n def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString,\n pos=wx.DefaultPosition, size=wx.DefaultSize,\n style=wx.DEFAULT_FRAME_STYLE, name='frame'):\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name)\n\n log = printLog()\n\n panel = TestPanel(self, log)\n self.Bind(wx.EVT_CLOSE, self.OnDestroy)\n\n try:\n self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))\n except Exception as exc:\n raise exc\n\n def OnDestroy(self, event):\n self.Destroy()\n\n\nclass 
TestApp(wx.App):\n def OnInit(self):\n gMainWin = TestFrame(None)\n gMainWin.SetTitle('Extended Frame Demo')\n gMainWin.Show()\n\n return True\n\n#---------------------------------------------------------------------------\n\n\nif __name__ == '__main__':\n import sys\n print('Python %s.%s.%s %s' % sys.version_info[0:4])\n print('wxPython %s' % wx.version())\n gApp = TestApp(redirect=False,\n filename=None,\n useBestVisual=False,\n clearSigInt=True)\n\n gApp.MainLoop()\n",
"step-ids": [
12,
14,
16,
19,
22
]
}
|
[
12,
14,
16,
19,
22
] |
import random
'''
通用文件头,浏览器访问时随机选择
'''
# (Translation of the note above: generic request headers — a User-Agent
# is selected at random for each browser-style request.)
# Pool of real-world User-Agent strings covering desktop browsers
# (Safari, Firefox, IE, Opera, Chrome, Chinese browser shells) and mobile
# devices (iOS, Android, BlackBerry, webOS, Symbian, Windows Phone, UCWEB).
user_agent = [
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
    "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
    "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
    "Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
    "Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
    "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
    "UCWEB7.0.2.37/28/999",
    "NOKIA5700/ UCWEB7.0.2.37/28/999",
    "Openwave/ UCWEB7.0.2.37/28/999",
    "Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999",
    # iPhone 6:
    "Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25"
]
def get_user_agent():
    """Build a request-headers dict carrying one randomly picked User-Agent."""
    chosen_ua = random.choice(user_agent)
    return {'User-Agent': chosen_ua}
|
normal
|
{
"blob_id": "5ed91b98ece3ac9525e9d2c42db9c9d9912d5ed2",
"index": 9029,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n",
"step-3": "<mask token>\nuser_agent = [\n 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko'\n , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',\n 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',\n 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'\n , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',\n 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11'\n , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n 
,\n 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10'\n ,\n 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13'\n ,\n 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+'\n ,\n 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0'\n ,\n 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124'\n ,\n 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)'\n , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ UCWEB7.0.2.37/28/999',\n 'Openwave/ UCWEB7.0.2.37/28/999',\n 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',\n 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25'\n ]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n",
"step-4": "import random\n<mask token>\nuser_agent = [\n 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0'\n ,\n 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko'\n , 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',\n 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',\n 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'\n , 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',\n 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11'\n , 'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)'\n , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',\n 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',\n 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 
Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'\n ,\n 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'\n ,\n 'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10'\n ,\n 'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13'\n ,\n 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+'\n ,\n 'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0'\n ,\n 'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124'\n ,\n 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)'\n , 'UCWEB7.0.2.37/28/999', 'NOKIA5700/ UCWEB7.0.2.37/28/999',\n 'Openwave/ UCWEB7.0.2.37/28/999',\n 'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',\n 'Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25'\n ]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n",
"step-5": "import random\n\n'''\n通用文件头,浏览器访问时随机选择\n'''\n\nuser_agent = [\n \"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50\",\n \"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50\",\n \"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0\",\n \"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko\",\n \"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)\",\n \"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)\",\n \"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1\",\n \"Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1\",\n \"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11\",\n \"Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)\",\n \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\",\n \"Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 
(KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5\",\n \"Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1\",\n \"MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1\",\n \"Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10\",\n \"Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13\",\n \"Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+\",\n \"Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0\",\n \"Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124\",\n \"Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)\",\n \"UCWEB7.0.2.37/28/999\",\n \"NOKIA5700/ UCWEB7.0.2.37/28/999\",\n \"Openwave/ UCWEB7.0.2.37/28/999\",\n \"Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999\",\n # iPhone 6:\n \"Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25\"\n]\n\n\ndef get_user_agent():\n return {'User-Agent': random.choice(user_agent)}\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from datetime import datetime
from random import seed
from pandas import date_range, DataFrame
import matplotlib.pyplot as plt
from matplotlib import style
from numpy import asarray
import strategy_learner as sl
from util import get_data
# Module-level setup: ggplot look for all charts, fixed RNG seed so the
# (random-seeded) strategy learner produces reproducible runs.
style.use('ggplot')
seed(0)
def run_algo(sym, investment, start_date, end_date, bench_sym):
    """Train a StrategyLearner over [start_date, end_date] and return its trades.

    sym: symbol to trade.
    investment: starting capital forwarded to the learner.
    start_date / end_date: window used for both training and test_policy.
    bench_sym: benchmark symbol forwarded to the learner.

    Returns whatever learner.test_policy returns (the trades for the window).

    NOTE(review): reads the module-level global `verbose`, which is only bound
    inside the ``__main__`` guard below -- calling run_algo from an importing
    module would raise NameError. Confirm and consider passing it explicitly.
    """
    # instantiate the strategy learner
    learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)
    # train the learner
    learner.add_evidence(symbol=sym, start_date=start_date, end_date=end_date, investment=investment)
    # The original built `prices_all`/`prices` here "for reference" but never
    # used them; the pure-indexing locals were removed. The get_data call is
    # kept in case it warms a cache or validates the symbol -- TODO confirm it
    # is side-effect free and delete it.
    get_data(symbols=[sym], dates=date_range(start_date, end_date), bench_sym=bench_sym)
    # test the learner
    df_trades = learner.test_policy(symbol=sym, start_date=start_date, end_date=end_date, investment=investment)
    return df_trades
def evaluate(sym, orders, start_val, fee, slippage, bench_sym):
    """Simulate executing `orders` for `sym` and return daily portfolio value.

    sym: ticker whose orders are simulated.
    orders: DataFrame indexed by date with an `Order` column; "BUY" buys,
        anything else sells. First order trades 100 shares, later ones 200.
    start_val: starting cash.
    fee: flat commission subtracted on every trade.
    slippage: fractional slippage applied to the fill price.
    bench_sym: benchmark column loaded by get_data, dropped before simulation.

    Returns a Series: total value (positions * price + cash) for each day.
    """
    # Read orders file
    orders_df = orders
    # NOTE(review): sorts the caller's DataFrame in place -- side effect.
    orders_df.sort_index(inplace=True)
    start_date = orders_df.index[0]
    end_date = orders_df.index[-1]
    # Collect price data for each ticker in order
    df_prices = get_data(symbols=[sym], dates=date_range(start_date, end_date), bench_sym=bench_sym)
    # NOTE(review): positional `axis` arg to drop() was removed in pandas 2.0;
    # prefer df_prices.drop(columns=bench_sym).
    df_prices = df_prices.drop(bench_sym, 1)
    # Constant 1 so the "cash" column survives the price multiplication below.
    df_prices["cash"] = 1
    # Track trade data
    df_trades = df_prices.copy()
    df_trades[:] = 0
    # Populate trade dataframe
    for i, date in enumerate(orders_df.index):
        # Get order information
        if orders_df.Order[i] == "BUY":
            order = 1
        else:
            order = -1
        # Start with 1/2 position at first
        if i == 0:
            shares = 100
        else:
            shares = 200
        # Calculate change in shares and cash
        # NOTE(review): chained indexing (df[col][idx] += ...) relies on the
        # column access returning a view; it breaks under pandas copy-on-write
        # (2.x+). Prefer .loc for assignment.
        df_trades[sym][date] += order * shares
        df_trades['cash'][date] -= order * (1 - slippage) * shares * df_prices[sym][date] - fee
    # Track total holdings
    df_holdings = df_prices.copy()
    df_holdings[:] = 0
    # Include starting value
    df_holdings['cash'][0] = start_val
    # Update first day of holdings
    for c in df_trades.columns:
        df_holdings[c][0] += df_trades[c][0]
    # Update every day, adding new day's trade information with previous day's holdings
    for i in range(1, len(df_trades.index)):
        for c in df_trades.columns:
            df_holdings[c][i] += df_trades[c][i] + df_holdings[c][i - 1]
    # Track monetary values (share counts * prices; cash * 1)
    df_values = df_prices.mul(df_holdings)
    # Define port_val
    port_val = df_values.sum(axis=1)
    return port_val
if __name__ == "__main__":
    # Experiment configuration.
    symbol = "NASDAQ1001440"
    bench_sym = "S&P5001440"
    verbose = False
    investment = 100000 # 100k = 100 contracts
    fee = 0
    slippage = 0.0025 # in %
    start_date_insample = datetime(2013, 5, 1)
    end_date_insample = datetime(2015, 5, 1)
    start_date_outsample = datetime(2015, 5, 2)
    end_date_outsample = datetime(2017, 12, 7)
    # Train
    # NOTE(review): run_algo above returns a single value, yet two names are
    # unpacked here -- this only works if learner.test_policy returns a
    # 2-tuple of (trades, benchmark). Confirm against StrategyLearner.
    df_trades_in, benchmark_in = run_algo(sym=symbol, investment=investment, start_date=start_date_insample, end_date=end_date_insample, bench_sym=bench_sym)
    df_trades_out, benchmark_out = run_algo(sym=symbol, investment=investment, start_date=start_date_outsample, end_date=end_date_outsample, bench_sym=bench_sym)
    # Evaluate strategy and buy-and-hold benchmark, in- and out-of-sample.
    insample = evaluate(sym=symbol, orders=df_trades_in, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)
    insample = DataFrame(insample)
    bench_insample = evaluate(sym=symbol, orders=benchmark_in, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)
    bench_insample = DataFrame(bench_insample)
    outsample = evaluate(sym=symbol, orders=df_trades_out, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)
    outsample = DataFrame(outsample)
    bench_outsample = evaluate(sym=symbol, orders=benchmark_out, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)
    bench_outsample = DataFrame(bench_outsample)
    # Cumulative returns (final portfolio value of each run)
    port_ret_in = float(asarray(insample.values)[-1])
    port_ret_out = float(asarray(outsample.values)[-1])
    bench_ret_in = float(asarray(bench_insample.values)[-1])
    bench_ret_out = float(asarray(bench_outsample.values)[-1])
    # Print results
    print()
    print("Cumulative return in-sample:\t\t${:,.2f}\t\t(+{:.2f} %)".format(port_ret_in - investment, 100 * (port_ret_in - investment) / investment))
    print("Benchmark return in-sample:\t\t\t${:,.2f}\t\t(+{:.2f} %)".format(bench_ret_in - investment, 100 * (bench_ret_in - investment) / investment))
    print("Cumulative return out-of-sample:\t${:,.2f}\t\t(+{:.2f} %)".format(port_ret_out - investment, 100 * (port_ret_out - investment) / investment))
    print("Benchmark return out-of-sample:\t\t${:,.2f}\t\t(+{:.2f} %)".format(bench_ret_out - investment, 100 * (bench_ret_out - investment) / investment))
    # Plot charts: strategy vs. buy-and-hold, one panel per sample period.
    plt.subplot(1, 2, 1)
    plt.plot(insample.index, insample, c="mediumseagreen", lw=3)
    plt.plot(bench_insample.index, bench_insample, c="skyblue")
    plt.legend(["Strategy", "Buy and Hold"])
    plt.title("In-sample")
    plt.xlabel("Date")
    plt.ylabel("Value")
    plt.subplot(1, 2, 2)
    plt.plot(outsample.index, outsample, c="mediumseagreen", lw=3)
    plt.plot(bench_outsample.index, bench_outsample, c="skyblue")
    plt.legend(["Strategy", "Buy and Hold"])
    plt.title("Out-of-sample")
    plt.xlabel("Date")
    plt.ylabel("Value")
    plt.show()
|
normal
|
{
"blob_id": "c0f9a1c39ff5d7cc99a16cf00cddcc14705937ba",
"index": 3917,
"step-1": "<mask token>\n\n\ndef run_algo(sym, investment, start_date, end_date, bench_sym):\n learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)\n learner.add_evidence(symbol=sym, start_date=start_date, end_date=\n end_date, investment=investment)\n syms = [sym]\n dates = date_range(start_date, end_date)\n prices_all = get_data(symbols=syms, dates=dates, bench_sym=bench_sym)\n prices = prices_all[syms]\n df_trades = learner.test_policy(symbol=sym, start_date=start_date,\n end_date=end_date, investment=investment)\n return df_trades\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run_algo(sym, investment, start_date, end_date, bench_sym):\n learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)\n learner.add_evidence(symbol=sym, start_date=start_date, end_date=\n end_date, investment=investment)\n syms = [sym]\n dates = date_range(start_date, end_date)\n prices_all = get_data(symbols=syms, dates=dates, bench_sym=bench_sym)\n prices = prices_all[syms]\n df_trades = learner.test_policy(symbol=sym, start_date=start_date,\n end_date=end_date, investment=investment)\n return df_trades\n\n\ndef evaluate(sym, orders, start_val, fee, slippage, bench_sym):\n orders_df = orders\n orders_df.sort_index(inplace=True)\n start_date = orders_df.index[0]\n end_date = orders_df.index[-1]\n df_prices = get_data(symbols=[sym], dates=date_range(start_date,\n end_date), bench_sym=bench_sym)\n df_prices = df_prices.drop(bench_sym, 1)\n df_prices['cash'] = 1\n df_trades = df_prices.copy()\n df_trades[:] = 0\n for i, date in enumerate(orders_df.index):\n if orders_df.Order[i] == 'BUY':\n order = 1\n else:\n order = -1\n if i == 0:\n shares = 100\n else:\n shares = 200\n df_trades[sym][date] += order * shares\n df_trades['cash'][date] -= order * (1 - slippage) * shares * df_prices[\n sym][date] - fee\n df_holdings = df_prices.copy()\n df_holdings[:] = 0\n df_holdings['cash'][0] = start_val\n for c in df_trades.columns:\n df_holdings[c][0] += df_trades[c][0]\n for i in range(1, len(df_trades.index)):\n for c in df_trades.columns:\n df_holdings[c][i] += df_trades[c][i] + df_holdings[c][i - 1]\n df_values = df_prices.mul(df_holdings)\n port_val = df_values.sum(axis=1)\n return port_val\n\n\n<mask token>\n",
"step-3": "<mask token>\nstyle.use('ggplot')\nseed(0)\n\n\ndef run_algo(sym, investment, start_date, end_date, bench_sym):\n learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)\n learner.add_evidence(symbol=sym, start_date=start_date, end_date=\n end_date, investment=investment)\n syms = [sym]\n dates = date_range(start_date, end_date)\n prices_all = get_data(symbols=syms, dates=dates, bench_sym=bench_sym)\n prices = prices_all[syms]\n df_trades = learner.test_policy(symbol=sym, start_date=start_date,\n end_date=end_date, investment=investment)\n return df_trades\n\n\ndef evaluate(sym, orders, start_val, fee, slippage, bench_sym):\n orders_df = orders\n orders_df.sort_index(inplace=True)\n start_date = orders_df.index[0]\n end_date = orders_df.index[-1]\n df_prices = get_data(symbols=[sym], dates=date_range(start_date,\n end_date), bench_sym=bench_sym)\n df_prices = df_prices.drop(bench_sym, 1)\n df_prices['cash'] = 1\n df_trades = df_prices.copy()\n df_trades[:] = 0\n for i, date in enumerate(orders_df.index):\n if orders_df.Order[i] == 'BUY':\n order = 1\n else:\n order = -1\n if i == 0:\n shares = 100\n else:\n shares = 200\n df_trades[sym][date] += order * shares\n df_trades['cash'][date] -= order * (1 - slippage) * shares * df_prices[\n sym][date] - fee\n df_holdings = df_prices.copy()\n df_holdings[:] = 0\n df_holdings['cash'][0] = start_val\n for c in df_trades.columns:\n df_holdings[c][0] += df_trades[c][0]\n for i in range(1, len(df_trades.index)):\n for c in df_trades.columns:\n df_holdings[c][i] += df_trades[c][i] + df_holdings[c][i - 1]\n df_values = df_prices.mul(df_holdings)\n port_val = df_values.sum(axis=1)\n return port_val\n\n\nif __name__ == '__main__':\n symbol = 'NASDAQ1001440'\n bench_sym = 'S&P5001440'\n verbose = False\n investment = 100000\n fee = 0\n slippage = 0.0025\n start_date_insample = datetime(2013, 5, 1)\n end_date_insample = datetime(2015, 5, 1)\n start_date_outsample = datetime(2015, 5, 2)\n end_date_outsample = 
datetime(2017, 12, 7)\n df_trades_in, benchmark_in = run_algo(sym=symbol, investment=investment,\n start_date=start_date_insample, end_date=end_date_insample,\n bench_sym=bench_sym)\n df_trades_out, benchmark_out = run_algo(sym=symbol, investment=\n investment, start_date=start_date_outsample, end_date=\n end_date_outsample, bench_sym=bench_sym)\n insample = evaluate(sym=symbol, orders=df_trades_in, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n insample = DataFrame(insample)\n bench_insample = evaluate(sym=symbol, orders=benchmark_in, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_insample = DataFrame(bench_insample)\n outsample = evaluate(sym=symbol, orders=df_trades_out, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n outsample = DataFrame(outsample)\n bench_outsample = evaluate(sym=symbol, orders=benchmark_out, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_outsample = DataFrame(bench_outsample)\n port_ret_in = float(asarray(insample.values)[-1])\n port_ret_out = float(asarray(outsample.values)[-1])\n bench_ret_in = float(asarray(bench_insample.values)[-1])\n bench_ret_out = float(asarray(bench_outsample.values)[-1])\n print()\n print('Cumulative return in-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.format(\n port_ret_in - investment, 100 * (port_ret_in - investment) /\n investment))\n print('Benchmark return in-sample:\\t\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.format\n (bench_ret_in - investment, 100 * (bench_ret_in - investment) /\n investment))\n print('Cumulative return out-of-sample:\\t${:,.2f}\\t\\t(+{:.2f} %)'.\n format(port_ret_out - investment, 100 * (port_ret_out - investment) /\n investment))\n print('Benchmark return out-of-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.\n format(bench_ret_out - investment, 100 * (bench_ret_out -\n investment) / investment))\n plt.subplot(1, 2, 1)\n plt.plot(insample.index, insample, c='mediumseagreen', 
lw=3)\n plt.plot(bench_insample.index, bench_insample, c='skyblue')\n plt.legend(['Strategy', 'Buy and Hold'])\n plt.title('In-sample')\n plt.xlabel('Date')\n plt.ylabel('Value')\n plt.subplot(1, 2, 2)\n plt.plot(outsample.index, outsample, c='mediumseagreen', lw=3)\n plt.plot(bench_outsample.index, bench_outsample, c='skyblue')\n plt.legend(['Strategy', 'Buy and Hold'])\n plt.title('Out-of-sample')\n plt.xlabel('Date')\n plt.ylabel('Value')\n plt.show()\n",
"step-4": "from datetime import datetime\nfrom random import seed\nfrom pandas import date_range, DataFrame\nimport matplotlib.pyplot as plt\nfrom matplotlib import style\nfrom numpy import asarray\nimport strategy_learner as sl\nfrom util import get_data\nstyle.use('ggplot')\nseed(0)\n\n\ndef run_algo(sym, investment, start_date, end_date, bench_sym):\n learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)\n learner.add_evidence(symbol=sym, start_date=start_date, end_date=\n end_date, investment=investment)\n syms = [sym]\n dates = date_range(start_date, end_date)\n prices_all = get_data(symbols=syms, dates=dates, bench_sym=bench_sym)\n prices = prices_all[syms]\n df_trades = learner.test_policy(symbol=sym, start_date=start_date,\n end_date=end_date, investment=investment)\n return df_trades\n\n\ndef evaluate(sym, orders, start_val, fee, slippage, bench_sym):\n orders_df = orders\n orders_df.sort_index(inplace=True)\n start_date = orders_df.index[0]\n end_date = orders_df.index[-1]\n df_prices = get_data(symbols=[sym], dates=date_range(start_date,\n end_date), bench_sym=bench_sym)\n df_prices = df_prices.drop(bench_sym, 1)\n df_prices['cash'] = 1\n df_trades = df_prices.copy()\n df_trades[:] = 0\n for i, date in enumerate(orders_df.index):\n if orders_df.Order[i] == 'BUY':\n order = 1\n else:\n order = -1\n if i == 0:\n shares = 100\n else:\n shares = 200\n df_trades[sym][date] += order * shares\n df_trades['cash'][date] -= order * (1 - slippage) * shares * df_prices[\n sym][date] - fee\n df_holdings = df_prices.copy()\n df_holdings[:] = 0\n df_holdings['cash'][0] = start_val\n for c in df_trades.columns:\n df_holdings[c][0] += df_trades[c][0]\n for i in range(1, len(df_trades.index)):\n for c in df_trades.columns:\n df_holdings[c][i] += df_trades[c][i] + df_holdings[c][i - 1]\n df_values = df_prices.mul(df_holdings)\n port_val = df_values.sum(axis=1)\n return port_val\n\n\nif __name__ == '__main__':\n symbol = 'NASDAQ1001440'\n bench_sym = 
'S&P5001440'\n verbose = False\n investment = 100000\n fee = 0\n slippage = 0.0025\n start_date_insample = datetime(2013, 5, 1)\n end_date_insample = datetime(2015, 5, 1)\n start_date_outsample = datetime(2015, 5, 2)\n end_date_outsample = datetime(2017, 12, 7)\n df_trades_in, benchmark_in = run_algo(sym=symbol, investment=investment,\n start_date=start_date_insample, end_date=end_date_insample,\n bench_sym=bench_sym)\n df_trades_out, benchmark_out = run_algo(sym=symbol, investment=\n investment, start_date=start_date_outsample, end_date=\n end_date_outsample, bench_sym=bench_sym)\n insample = evaluate(sym=symbol, orders=df_trades_in, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n insample = DataFrame(insample)\n bench_insample = evaluate(sym=symbol, orders=benchmark_in, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_insample = DataFrame(bench_insample)\n outsample = evaluate(sym=symbol, orders=df_trades_out, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n outsample = DataFrame(outsample)\n bench_outsample = evaluate(sym=symbol, orders=benchmark_out, start_val=\n investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_outsample = DataFrame(bench_outsample)\n port_ret_in = float(asarray(insample.values)[-1])\n port_ret_out = float(asarray(outsample.values)[-1])\n bench_ret_in = float(asarray(bench_insample.values)[-1])\n bench_ret_out = float(asarray(bench_outsample.values)[-1])\n print()\n print('Cumulative return in-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.format(\n port_ret_in - investment, 100 * (port_ret_in - investment) /\n investment))\n print('Benchmark return in-sample:\\t\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.format\n (bench_ret_in - investment, 100 * (bench_ret_in - investment) /\n investment))\n print('Cumulative return out-of-sample:\\t${:,.2f}\\t\\t(+{:.2f} %)'.\n format(port_ret_out - investment, 100 * (port_ret_out - investment) /\n investment))\n 
print('Benchmark return out-of-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)'.\n format(bench_ret_out - investment, 100 * (bench_ret_out -\n investment) / investment))\n plt.subplot(1, 2, 1)\n plt.plot(insample.index, insample, c='mediumseagreen', lw=3)\n plt.plot(bench_insample.index, bench_insample, c='skyblue')\n plt.legend(['Strategy', 'Buy and Hold'])\n plt.title('In-sample')\n plt.xlabel('Date')\n plt.ylabel('Value')\n plt.subplot(1, 2, 2)\n plt.plot(outsample.index, outsample, c='mediumseagreen', lw=3)\n plt.plot(bench_outsample.index, bench_outsample, c='skyblue')\n plt.legend(['Strategy', 'Buy and Hold'])\n plt.title('Out-of-sample')\n plt.xlabel('Date')\n plt.ylabel('Value')\n plt.show()\n",
"step-5": "from datetime import datetime\nfrom random import seed\n\nfrom pandas import date_range, DataFrame\nimport matplotlib.pyplot as plt\nfrom matplotlib import style\nfrom numpy import asarray\n\nimport strategy_learner as sl\nfrom util import get_data\n\nstyle.use('ggplot')\nseed(0)\n\ndef run_algo(sym, investment, start_date, end_date, bench_sym):\n # instantiate the strategy learner\n learner = sl.StrategyLearner(bench_sym=bench_sym, verbose=verbose)\n\n # train the learner\n learner.add_evidence(symbol=sym, start_date=start_date, end_date=end_date, investment=investment)\n\n # get some data for reference\n syms = [sym]\n dates = date_range(start_date, end_date)\n prices_all = get_data(symbols=syms, dates=dates, bench_sym=bench_sym)\n prices = prices_all[syms]\n\n # test the learner\n df_trades = learner.test_policy(symbol=sym, start_date=start_date, end_date=end_date, investment=investment)\n\n return df_trades\n\n\ndef evaluate(sym, orders, start_val, fee, slippage, bench_sym):\n # Read orders file\n orders_df = orders\n\n orders_df.sort_index(inplace=True)\n start_date = orders_df.index[0]\n end_date = orders_df.index[-1]\n\n # Collect price data for each ticker in order\n df_prices = get_data(symbols=[sym], dates=date_range(start_date, end_date), bench_sym=bench_sym)\n df_prices = df_prices.drop(bench_sym, 1)\n df_prices[\"cash\"] = 1\n\n # Track trade data\n df_trades = df_prices.copy()\n df_trades[:] = 0\n\n # Populate trade dataframe\n for i, date in enumerate(orders_df.index):\n # Get order information\n if orders_df.Order[i] == \"BUY\":\n order = 1\n else:\n order = -1\n\n # Start with 1/2 position at first\n if i == 0:\n shares = 100\n else:\n shares = 200\n\n # Calculate change in shares and cash\n df_trades[sym][date] += order * shares\n df_trades['cash'][date] -= order * (1 - slippage) * shares * df_prices[sym][date] - fee\n\n # Track total holdings\n df_holdings = df_prices.copy()\n df_holdings[:] = 0\n\n # Include starting value\n 
df_holdings['cash'][0] = start_val\n\n # Update first day of holdings\n for c in df_trades.columns:\n df_holdings[c][0] += df_trades[c][0]\n\n # Update every day, adding new day's trade information with previous day's holdings\n for i in range(1, len(df_trades.index)):\n for c in df_trades.columns:\n df_holdings[c][i] += df_trades[c][i] + df_holdings[c][i - 1]\n\n # Track monetary values\n df_values = df_prices.mul(df_holdings)\n\n # Define port_val\n port_val = df_values.sum(axis=1)\n\n return port_val\n\n\nif __name__ == \"__main__\":\n symbol = \"NASDAQ1001440\"\n bench_sym = \"S&P5001440\"\n verbose = False\n investment = 100000 # 100k = 100 contracts\n fee = 0\n slippage = 0.0025 # in %\n start_date_insample = datetime(2013, 5, 1)\n end_date_insample = datetime(2015, 5, 1)\n start_date_outsample = datetime(2015, 5, 2)\n end_date_outsample = datetime(2017, 12, 7)\n\n # Train\n df_trades_in, benchmark_in = run_algo(sym=symbol, investment=investment, start_date=start_date_insample, end_date=end_date_insample, bench_sym=bench_sym)\n df_trades_out, benchmark_out = run_algo(sym=symbol, investment=investment, start_date=start_date_outsample, end_date=end_date_outsample, bench_sym=bench_sym)\n\n # Evaluate\n insample = evaluate(sym=symbol, orders=df_trades_in, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n insample = DataFrame(insample)\n bench_insample = evaluate(sym=symbol, orders=benchmark_in, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_insample = DataFrame(bench_insample)\n outsample = evaluate(sym=symbol, orders=df_trades_out, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n outsample = DataFrame(outsample)\n bench_outsample = evaluate(sym=symbol, orders=benchmark_out, start_val=investment, fee=fee, slippage=slippage, bench_sym=bench_sym)\n bench_outsample = DataFrame(bench_outsample)\n\n # Cumulative returns\n port_ret_in = float(asarray(insample.values)[-1])\n port_ret_out = 
float(asarray(outsample.values)[-1])\n bench_ret_in = float(asarray(bench_insample.values)[-1])\n bench_ret_out = float(asarray(bench_outsample.values)[-1])\n\n # Print results\n print()\n print(\"Cumulative return in-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)\".format(port_ret_in - investment, 100 * (port_ret_in - investment) / investment))\n print(\"Benchmark return in-sample:\\t\\t\\t${:,.2f}\\t\\t(+{:.2f} %)\".format(bench_ret_in - investment, 100 * (bench_ret_in - investment) / investment))\n print(\"Cumulative return out-of-sample:\\t${:,.2f}\\t\\t(+{:.2f} %)\".format(port_ret_out - investment, 100 * (port_ret_out - investment) / investment))\n print(\"Benchmark return out-of-sample:\\t\\t${:,.2f}\\t\\t(+{:.2f} %)\".format(bench_ret_out - investment, 100 * (bench_ret_out - investment) / investment))\n\n # Plot charts\n plt.subplot(1, 2, 1)\n plt.plot(insample.index, insample, c=\"mediumseagreen\", lw=3)\n plt.plot(bench_insample.index, bench_insample, c=\"skyblue\")\n plt.legend([\"Strategy\", \"Buy and Hold\"])\n plt.title(\"In-sample\")\n plt.xlabel(\"Date\")\n plt.ylabel(\"Value\")\n\n plt.subplot(1, 2, 2)\n plt.plot(outsample.index, outsample, c=\"mediumseagreen\", lw=3)\n plt.plot(bench_outsample.index, bench_outsample, c=\"skyblue\")\n plt.legend([\"Strategy\", \"Buy and Hold\"])\n plt.title(\"Out-of-sample\")\n plt.xlabel(\"Date\")\n plt.ylabel(\"Value\")\n plt.show()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def rad_to_deg(rad):
    """Convert an angle in radians to degrees."""
    return 180 * rad / PI
def angle_abs_difference(a1, a2):
    """Absolute angular separation between a1 and a2, folded to at most PI."""
    raw = sims4.math.mod_2pi(a1 - a2)
    return sims4.math.TWO_PI - raw if raw > sims4.math.PI else raw
<|reserved_special_token_0|>
def vector_dot_2d(a, b):
    """Dot product of a and b projected onto the XZ plane (y is ignored)."""
    xx = a.x * b.x
    zz = a.z * b.z
    return xx + zz
def vector_cross(a, b):
    """Right-handed 3D cross product a x b, returned as a Vector3."""
    cx = a.y * b.z - a.z * b.y
    cy = a.z * b.x - a.x * b.z
    cz = a.x * b.y - a.y * b.x
    return Vector3(cx, cy, cz)
<|reserved_special_token_0|>
def vector3_rotate_axis_angle(v, angle, axis):
    """Rotate vector v by `angle` radians about `axis`."""
    rotation = Quaternion.from_axis_angle(angle, axis)
    return rotation.transform_vector(v)
<|reserved_special_token_0|>
def invert_quaternion(q):
    """Inverse of quaternion q: conjugate divided by the squared norm."""
    norm_sq = q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w
    scale = 1.0 / norm_sq
    return Quaternion(-scale * q.x, -scale * q.y, -scale * q.z, scale * q.w)
<|reserved_special_token_0|>
class Location:
    """Placement of a game object: a local transform plus either a routing
    surface (world placement) or a parent object (attached placement,
    optionally at a named joint and slot)."""
    __qualname__ = 'Location'
    # transform may be None when fully inherited from the parent/joint.
    __slots__ = ('transform', 'routing_surface', '_parent_ref',
        'joint_name_or_hash', 'slot_hash')

    def __init__(self, transform, routing_surface, parent=None,
        joint_name_or_hash=None, slot_hash=0):
        self.transform = transform
        self.routing_surface = routing_surface
        # Assigning a non-None parent clears routing_surface (see setter).
        self.parent = parent
        self.joint_name_or_hash = joint_name_or_hash
        self.slot_hash = slot_hash

    def __repr__(self):
        return standard_repr(self, self.transform, self.routing_surface,
            parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,
            slot_hash=self.slot_hash)

    def __eq__(self, other):
        # Strict type match: a subclass never compares equal to Location.
        if type(self) is not type(other):
            return False
        if self.transform != other.transform:
            return False
        if self.parent != other.parent:
            return False
        if self.routing_surface != other.routing_surface:
            return False
        # A joint name/hash, when set, takes precedence over slot_hash.
        slot_hash0 = self.joint_name_or_hash or self.slot_hash
        slot_hash1 = other.joint_name_or_hash or other.slot_hash
        if slot_hash0 != slot_hash1:
            return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def parent(self):
        # Parent is held weakly; returns None once the parent has died.
        if self._parent_ref is not None:
            return self._parent_ref()

    @parent.setter
    def parent(self, value):
        if value is not None:
            self._parent_ref = value.ref()
            # Parented locations derive their surface from the parent chain.
            self.routing_surface = None
        else:
            self._parent_ref = None

    @property
    def joint_name_hash(self):
        """32-bit hash of the joint name; 0 when no joint is set."""
        if self.joint_name_or_hash is None:
            return 0
        if isinstance(self.joint_name_or_hash, int):
            return self.joint_name_or_hash
        return sims4.hash_util.hash32(self.joint_name_or_hash)

    @property
    def world_routing_surface(self):
        # Walk up the parent chain to the root object's surface.
        if self.parent is not None:
            return self.parent.location.world_routing_surface
        return self.routing_surface

    @property
    def zone_id(self):
        # Surface type 1 carries the zone id in primary_id; otherwise use
        # the globally current zone.
        if self.world_routing_surface.type == 1:
            return self.world_routing_surface.primary_id
        return sims4.zone_utils.get_zone_id()

    @property
    def level(self):
        return self.world_routing_surface.secondary_id

    @property
    def world_transform(self):
        """Transform composed with the parent's transform (and the parent
        joint transform, when a joint is set)."""
        if self.parent is None:
            return self.transform
        transform = self.transform
        parent = self.parent
        # Parts use the transform of the object that owns the part.
        if parent.is_part:
            parent_transform = parent.part_owner.transform
        else:
            parent_transform = parent.transform
        if self.joint_name_or_hash is None:
            if transform is None:
                return parent_transform
            return sims4.math.Transform.concatenate(transform, parent_transform
                )
        joint_transform = native.animation.get_joint_transform_from_rig(self
            .parent.rig, self.joint_name_or_hash)
        if transform is None:
            return sims4.math.Transform.concatenate(joint_transform,
                parent_transform)
        local_transform = sims4.math.Transform.concatenate(transform,
            joint_transform)
        return sims4.math.Transform.concatenate(local_transform,
            parent_transform)

    def duplicate(self):
        """Copy with the same transform/surface/parent/joint/slot."""
        return type(self)(self.transform, self.routing_surface, self.parent,
            self.joint_name_or_hash, self.slot_hash)

    def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=
        DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,
        joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):
        """Copy with selected fields overridden; translation/orientation
        override the corresponding component of the transform."""
        if transform is DEFAULT:
            transform = self.transform
        if transform is not None:
            if translation is DEFAULT:
                translation = transform.translation
            if orientation is DEFAULT:
                orientation = transform.orientation
            transform = Transform(translation, orientation)
        if routing_surface is DEFAULT:
            routing_surface = self.routing_surface
        if parent is DEFAULT:
            parent = self.parent
        if joint_name_or_hash is DEFAULT:
            joint_name_or_hash = self.joint_name_or_hash
        if slot_hash is DEFAULT:
            slot_hash = self.slot_hash
        return type(self)(transform, routing_surface, parent,
            joint_name_or_hash, slot_hash)
class LinearCurve:
    """Piecewise-linear curve defined by (x, y) control points.

    Points are sorted by x on construction. get() interpolates linearly
    between the two neighboring points and clamps outside the x range.
    """
    __qualname__ = 'LinearCurve'
    __slots__ = ('points',)

    def __init__(self, points):
        # points: list of (x, y) pairs; sorted in place by x.
        self.points = points
        self.points.sort(key=lambda p: p[0])

    def get(self, val):
        """Return the interpolated curve value at val (clamped at the ends)."""
        p_max = len(self.points) - 1
        if val <= self.points[0][0]:
            return self.points[0][1]
        if val >= self.points[p_max][0]:
            return self.points[p_max][1]
        # Scan down to the left neighbor of val. The original decompiled
        # loop never entered for a two-point curve (i started at 0) and
        # silently returned None; allowing i == 0 to interpolate fixes it.
        i = p_max - 1
        while i > 0 and val < self.points[i][0]:
            i -= 1
        p1 = self.points[i]
        p2 = self.points[i + 1]
        percent = (val - p1[0]) / (p2[0] - p1[0])
        return (p2[1] - p1[1]) * percent + p1[1]
class WeightedUtilityCurve(LinearCurve):
    """LinearCurve whose y values are normalized by max_y and scaled by weight."""
    __qualname__ = 'WeightedUtilityCurve'

    def __init__(self, points, max_y=0, weight=1):
        # max_y == 0 means "infer the largest y from the points".
        if max_y == 0:
            max_y = self._find_largest_y(points)
        transformed_points = [(point[0], point[1] / max_y * weight) for
            point in points]
        super().__init__(transformed_points)

    def _find_largest_y(self, points):
        """Largest y coordinate among points (0 for an empty list)."""
        # The decompiled `while point[1] > max_y:` was an `if` in disguise
        # (the body falsifies the condition immediately); written plainly.
        max_y = 0
        for point in points:
            if point[1] > max_y:
                max_y = point[1]
        return max_y
class CircularUtilityCurve(LinearCurve):
    """LinearCurve over a cyclic domain [min_x, max_x] (e.g. an angle).

    Construction inserts synthetic endpoints at x == 0 and x == max_x with
    a shared interpolated y, so lookups wrap around rather than clamping.
    """
    __qualname__ = 'CircularUtilityCurve'

    def __init__(self, points, min_x, max_x):
        super().__init__(points)
        self._min_x = min_x
        self._max_x = max_x
        last_point = self.points[-1]
        # Gap between the last control point and the end of the domain.
        distance_to_end = max_x - last_point[0]
        # NOTE(review): this adds an x-distance to a y value
        # (self.points[0][1]); points[0][0] looks intended, but the
        # original behavior is preserved here -- confirm against callers.
        total_length = distance_to_end + self.points[0][1]
        distance_to_pivot_point = distance_to_end / total_length
        pivot_y_value = (self.points[0][1] - last_point[1]
            ) * distance_to_pivot_point + self.points[0][1]
        # Both wrap-around endpoints share the interpolated pivot value.
        self.points.insert(0, (0, pivot_y_value))
        self.points.insert(len(self.points), (self._max_x, pivot_y_value))

    def get(self, val):
        return super().get(val)
class Operator(enum.Int):
    """Comparison operators, convertible to and from the comparison
    functions in the stdlib `operator` module."""
    __qualname__ = 'Operator'
    GREATER = 1
    GREATER_OR_EQUAL = 2
    EQUAL = 3
    NOTEQUAL = 4
    LESS_OR_EQUAL = 5
    LESS = 6

    @staticmethod
    def from_function(fn):
        """Operator matching an `operator` module function; None if unknown."""
        if fn == operator.lt:
            return Operator.LESS
        if fn == operator.le:
            return Operator.LESS_OR_EQUAL
        if fn == operator.ne:
            return Operator.NOTEQUAL
        if fn == operator.eq:
            return Operator.EQUAL
        if fn == operator.ge:
            return Operator.GREATER_OR_EQUAL
        if fn == operator.gt:
            return Operator.GREATER

    @property
    def function(self):
        """The `operator` module function implementing this comparison."""
        value = self.value
        if value == Operator.LESS:
            return operator.lt
        if value == Operator.LESS_OR_EQUAL:
            return operator.le
        if value == Operator.NOTEQUAL:
            return operator.ne
        if value == Operator.EQUAL:
            return operator.eq
        if value == Operator.GREATER_OR_EQUAL:
            return operator.ge
        if value == Operator.GREATER:
            return operator.gt

    @property
    def inverse(self):
        """The logical negation of this operator (e.g. > becomes <=)."""
        if self == Operator.LESS:
            return Operator.GREATER_OR_EQUAL
        if self == Operator.LESS_OR_EQUAL:
            return Operator.GREATER
        if self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.EQUAL:
            return Operator.NOTEQUAL
        if self == Operator.GREATER_OR_EQUAL:
            return Operator.LESS
        if self == Operator.GREATER:
            return Operator.LESS_OR_EQUAL

    @property
    def symbol(self):
        """Source-style symbol for this comparison (e.g. '>=')."""
        if self == Operator.LESS:
            return '<'
        if self == Operator.LESS_OR_EQUAL:
            return '<='
        if self == Operator.NOTEQUAL:
            return '!='
        if self == Operator.EQUAL:
            return '=='
        if self == Operator.GREATER_OR_EQUAL:
            return '>='
        if self == Operator.GREATER:
            return '>'

    @property
    def category(self):
        """Collapse to three buckets: GREATER-like, EQUAL-like, LESS-like."""
        if self == Operator.LESS or self == Operator.LESS_OR_EQUAL:
            return Operator.LESS
        if self == Operator.EQUAL or self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.GREATER or self == Operator.GREATER_OR_EQUAL:
            return Operator.GREATER
class InequalityOperator(enum.Int):
    """Subset of Operator limited to the four inequality comparisons
    (equality operators are excluded)."""
    __qualname__ = 'InequalityOperator'
    GREATER = Operator.GREATER
    GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL
    LESS_OR_EQUAL = Operator.LESS_OR_EQUAL
    LESS = Operator.LESS
<|reserved_special_token_0|>
class Threshold:
    """Pairs a bound value with a comparison function.

    compare(x) evaluates comparison(x, value); a threshold missing either
    its value or its comparison never passes.
    """
    __qualname__ = 'Threshold'
    __slots__ = ('value', 'comparison')

    def __init__(self, value=None, comparison=None):
        self.value = value
        self.comparison = comparison

    def compare(self, source_value):
        """Apply the comparison to a raw value; False when unconfigured."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value, self.value)

    def compare_value(self, source_value):
        """Like compare(), but unwraps .value on both operands first."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value.value, self.value.value)

    def inverse(self):
        """Threshold matching exactly the values this one rejects."""
        flipped_comparison = Operator.from_function(self.comparison
            ).inverse.function
        return Threshold(self.value, flipped_comparison)

    def __str__(self):
        if self.comparison is None:
            return 'None'
        op = Operator.from_function(self.comparison)
        return '{} {}'.format(op.symbol, self.value)

    def __repr__(self):
        return '<Threshold {}>'.format(str(self))

    def __eq__(self, other):
        if not isinstance(other, Threshold):
            return False
        if not self.value == other.value:
            return False
        if not self.comparison == other.comparison:
            return False
        return True

    def __hash__(self):
        return hash((self.value, self.comparison))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def rad_to_deg(rad):
return rad * 180 / PI
def angle_abs_difference(a1, a2):
delta = sims4.math.mod_2pi(a1 - a2)
if delta > sims4.math.PI:
delta = sims4.math.TWO_PI - delta
return delta
<|reserved_special_token_0|>
def vector_dot_2d(a, b):
return a.x * b.x + a.z * b.z
def vector_cross(a, b):
return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -
a.y * b.x)
<|reserved_special_token_0|>
def transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=
    QUATERNION_EPSILON):
    """True when two transforms match within tolerance: translations
    within `epsilon`, orientations within `epsilon_orientation`.

    Passing DEFAULT for epsilon_orientation reuses `epsilon` for the
    orientation check as well.
    """
    if epsilon_orientation is DEFAULT:
        epsilon_orientation = epsilon
    return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon
        ) and quaternion_almost_equal(t1.orientation, t2.orientation,
        epsilon=epsilon_orientation)
<|reserved_special_token_0|>
def vector3_rotate_axis_angle(v, angle, axis):
q = Quaternion.from_axis_angle(angle, axis)
return q.transform_vector(v)
<|reserved_special_token_0|>
def invert_quaternion(q):
d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)
return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)
<|reserved_special_token_0|>
class Location:
__qualname__ = 'Location'
__slots__ = ('transform', 'routing_surface', '_parent_ref',
'joint_name_or_hash', 'slot_hash')
def __init__(self, transform, routing_surface, parent=None,
joint_name_or_hash=None, slot_hash=0):
self.transform = transform
self.routing_surface = routing_surface
self.parent = parent
self.joint_name_or_hash = joint_name_or_hash
self.slot_hash = slot_hash
def __repr__(self):
return standard_repr(self, self.transform, self.routing_surface,
parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,
slot_hash=self.slot_hash)
def __eq__(self, other):
if type(self) is not type(other):
return False
if self.transform != other.transform:
return False
if self.parent != other.parent:
return False
if self.routing_surface != other.routing_surface:
return False
slot_hash0 = self.joint_name_or_hash or self.slot_hash
slot_hash1 = other.joint_name_or_hash or other.slot_hash
if slot_hash0 != slot_hash1:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
@property
def parent(self):
if self._parent_ref is not None:
return self._parent_ref()
@parent.setter
def parent(self, value):
if value is not None:
self._parent_ref = value.ref()
self.routing_surface = None
else:
self._parent_ref = None
@property
def joint_name_hash(self):
if self.joint_name_or_hash is None:
return 0
if isinstance(self.joint_name_or_hash, int):
return self.joint_name_or_hash
return sims4.hash_util.hash32(self.joint_name_or_hash)
@property
def world_routing_surface(self):
if self.parent is not None:
return self.parent.location.world_routing_surface
return self.routing_surface
@property
def zone_id(self):
if self.world_routing_surface.type == 1:
return self.world_routing_surface.primary_id
return sims4.zone_utils.get_zone_id()
@property
def level(self):
return self.world_routing_surface.secondary_id
@property
def world_transform(self):
if self.parent is None:
return self.transform
transform = self.transform
parent = self.parent
if parent.is_part:
parent_transform = parent.part_owner.transform
else:
parent_transform = parent.transform
if self.joint_name_or_hash is None:
if transform is None:
return parent_transform
return sims4.math.Transform.concatenate(transform, parent_transform
)
joint_transform = native.animation.get_joint_transform_from_rig(self
.parent.rig, self.joint_name_or_hash)
if transform is None:
return sims4.math.Transform.concatenate(joint_transform,
parent_transform)
local_transform = sims4.math.Transform.concatenate(transform,
joint_transform)
return sims4.math.Transform.concatenate(local_transform,
parent_transform)
def duplicate(self):
return type(self)(self.transform, self.routing_surface, self.parent,
self.joint_name_or_hash, self.slot_hash)
def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=
DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,
joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):
if transform is DEFAULT:
transform = self.transform
if transform is not None:
if translation is DEFAULT:
translation = transform.translation
if orientation is DEFAULT:
orientation = transform.orientation
transform = Transform(translation, orientation)
if routing_surface is DEFAULT:
routing_surface = self.routing_surface
if parent is DEFAULT:
parent = self.parent
if joint_name_or_hash is DEFAULT:
joint_name_or_hash = self.joint_name_or_hash
if slot_hash is DEFAULT:
slot_hash = self.slot_hash
return type(self)(transform, routing_surface, parent,
joint_name_or_hash, slot_hash)
class LinearCurve:
__qualname__ = 'LinearCurve'
__slots__ = 'points',
def __init__(self, points):
self.points = points
self.points.sort(key=lambda i: i[0])
def get(self, val):
p_max = len(self.points) - 1
if val <= self.points[0][0]:
return self.points[0][1]
if val >= self.points[p_max][0]:
return self.points[p_max][1]
i = p_max - 1
while i > 0:
while val < self.points[i][0]:
i -= 1
p1 = self.points[i]
p2 = self.points[i + 1]
percent = (val - p1[0]) / (p2[0] - p1[0])
return (p2[1] - p1[1]) * percent + p1[1]
class WeightedUtilityCurve(LinearCurve):
__qualname__ = 'WeightedUtilityCurve'
def __init__(self, points, max_y=0, weight=1):
if max_y == 0:
max_y = self._find_largest_y(points)
transformed_points = [(point[0], point[1] / max_y * weight) for
point in points]
super().__init__(transformed_points)
def _find_largest_y(self, points):
max_y = 0
for point in points:
while point[1] > max_y:
max_y = point[1]
return max_y
class CircularUtilityCurve(LinearCurve):
__qualname__ = 'CircularUtilityCurve'
def __init__(self, points, min_x, max_x):
super().__init__(points)
self._min_x = min_x
self._max_x = max_x
last_point = self.points[-1]
distance_to_end = max_x - last_point[0]
total_length = distance_to_end + self.points[0][1]
distance_to_pivot_point = distance_to_end / total_length
pivot_y_value = (self.points[0][1] - last_point[1]
) * distance_to_pivot_point + self.points[0][1]
self.points.insert(0, (0, pivot_y_value))
self.points.insert(len(self.points), (self._max_x, pivot_y_value))
def get(self, val):
return super().get(val)
class Operator(enum.Int):
__qualname__ = 'Operator'
GREATER = 1
GREATER_OR_EQUAL = 2
EQUAL = 3
NOTEQUAL = 4
LESS_OR_EQUAL = 5
LESS = 6
@staticmethod
def from_function(fn):
if fn == operator.gt:
return Operator.GREATER
if fn == operator.ge:
return Operator.GREATER_OR_EQUAL
if fn == operator.eq:
return Operator.EQUAL
if fn == operator.ne:
return Operator.NOTEQUAL
if fn == operator.le:
return Operator.LESS_OR_EQUAL
if fn == operator.lt:
return Operator.LESS
@property
def function(self):
if self.value == Operator.GREATER:
return operator.gt
if self.value == Operator.GREATER_OR_EQUAL:
return operator.ge
if self.value == Operator.EQUAL:
return operator.eq
if self.value == Operator.NOTEQUAL:
return operator.ne
if self.value == Operator.LESS_OR_EQUAL:
return operator.le
if self.value == Operator.LESS:
return operator.lt
@property
def inverse(self):
if self == Operator.GREATER:
return Operator.LESS_OR_EQUAL
if self == Operator.GREATER_OR_EQUAL:
return Operator.LESS
if self == Operator.EQUAL:
return Operator.NOTEQUAL
if self == Operator.NOTEQUAL:
return Operator.EQUAL
if self == Operator.LESS_OR_EQUAL:
return Operator.GREATER
if self == Operator.LESS:
return Operator.GREATER_OR_EQUAL
@property
def symbol(self):
if self == Operator.GREATER:
return '>'
if self == Operator.GREATER_OR_EQUAL:
return '>='
if self == Operator.EQUAL:
return '=='
if self == Operator.NOTEQUAL:
return '!='
if self == Operator.LESS_OR_EQUAL:
return '<='
if self == Operator.LESS:
return '<'
@property
def category(self):
if self == Operator.GREATER:
return Operator.GREATER
if self == Operator.GREATER_OR_EQUAL:
return Operator.GREATER
if self == Operator.EQUAL:
return Operator.EQUAL
if self == Operator.NOTEQUAL:
return Operator.EQUAL
if self == Operator.LESS_OR_EQUAL:
return Operator.LESS
if self == Operator.LESS:
return Operator.LESS
class InequalityOperator(enum.Int):
__qualname__ = 'InequalityOperator'
GREATER = Operator.GREATER
GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL
LESS_OR_EQUAL = Operator.LESS_OR_EQUAL
LESS = Operator.LESS
<|reserved_special_token_0|>
class Threshold:
__qualname__ = 'Threshold'
__slots__ = 'value', 'comparison'
def __init__(self, value=None, comparison=None):
self.value = value
self.comparison = comparison
def compare(self, source_value):
if self.value is not None and self.comparison is not None:
return self.comparison(source_value, self.value)
return False
def compare_value(self, source_value):
if self.value is not None and self.comparison is not None:
return self.comparison(source_value.value, self.value.value)
return False
def inverse(self):
return Threshold(self.value, Operator.from_function(self.comparison
).inverse.function)
def __str__(self):
if self.comparison is None:
return 'None'
return '{} {}'.format(Operator.from_function(self.comparison).
symbol, self.value)
def __repr__(self):
return '<Threshold {}>'.format(str(self))
def __eq__(self, other):
if not isinstance(other, Threshold):
return False
if not self.value == other.value:
return False
if not self.comparison == other.comparison:
return False
return True
def __hash__(self):
return hash((self.value, self.comparison))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def linear_seq_gen(start, stop, step, max_count=None):
    """Yield evenly spaced values from start to stop inclusive.

    The spacing is at most `step`; max_count caps the number of values.
    When the range is smaller than one step, yields start (and stop, if
    different).
    """
    span = stop - start
    count = floor(abs(span / step))
    if max_count is not None:
        count = min(count, max_count - 1)
    if count > 0:
        for idx in range(count + 1):
            yield start + idx * span / count
    else:
        yield start
        if stop != start:
            yield stop
<|reserved_special_token_0|>
def rad_to_deg(rad):
return rad * 180 / PI
def angle_abs_difference(a1, a2):
delta = sims4.math.mod_2pi(a1 - a2)
if delta > sims4.math.PI:
delta = sims4.math.TWO_PI - delta
return delta
<|reserved_special_token_0|>
def vector_dot_2d(a, b):
return a.x * b.x + a.z * b.z
def vector_cross(a, b):
return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -
a.y * b.x)
<|reserved_special_token_0|>
def almost_equal(a, b, epsilon=EPSILON):
    """True when a and b differ by strictly less than epsilon."""
    return epsilon > abs(a - b)
<|reserved_special_token_0|>
def transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=
QUATERNION_EPSILON):
if epsilon_orientation is DEFAULT:
epsilon_orientation = epsilon
return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon
) and quaternion_almost_equal(t1.orientation, t2.orientation,
epsilon=epsilon_orientation)
<|reserved_special_token_0|>
def vector3_rotate_axis_angle(v, angle, axis):
q = Quaternion.from_axis_angle(angle, axis)
return q.transform_vector(v)
<|reserved_special_token_0|>
def invert_quaternion(q):
d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)
return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)
<|reserved_special_token_0|>
class Location:
__qualname__ = 'Location'
__slots__ = ('transform', 'routing_surface', '_parent_ref',
'joint_name_or_hash', 'slot_hash')
def __init__(self, transform, routing_surface, parent=None,
joint_name_or_hash=None, slot_hash=0):
self.transform = transform
self.routing_surface = routing_surface
self.parent = parent
self.joint_name_or_hash = joint_name_or_hash
self.slot_hash = slot_hash
def __repr__(self):
return standard_repr(self, self.transform, self.routing_surface,
parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,
slot_hash=self.slot_hash)
def __eq__(self, other):
if type(self) is not type(other):
return False
if self.transform != other.transform:
return False
if self.parent != other.parent:
return False
if self.routing_surface != other.routing_surface:
return False
slot_hash0 = self.joint_name_or_hash or self.slot_hash
slot_hash1 = other.joint_name_or_hash or other.slot_hash
if slot_hash0 != slot_hash1:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
@property
def parent(self):
if self._parent_ref is not None:
return self._parent_ref()
@parent.setter
def parent(self, value):
if value is not None:
self._parent_ref = value.ref()
self.routing_surface = None
else:
self._parent_ref = None
@property
def joint_name_hash(self):
if self.joint_name_or_hash is None:
return 0
if isinstance(self.joint_name_or_hash, int):
return self.joint_name_or_hash
return sims4.hash_util.hash32(self.joint_name_or_hash)
@property
def world_routing_surface(self):
if self.parent is not None:
return self.parent.location.world_routing_surface
return self.routing_surface
@property
def zone_id(self):
if self.world_routing_surface.type == 1:
return self.world_routing_surface.primary_id
return sims4.zone_utils.get_zone_id()
@property
def level(self):
return self.world_routing_surface.secondary_id
@property
def world_transform(self):
if self.parent is None:
return self.transform
transform = self.transform
parent = self.parent
if parent.is_part:
parent_transform = parent.part_owner.transform
else:
parent_transform = parent.transform
if self.joint_name_or_hash is None:
if transform is None:
return parent_transform
return sims4.math.Transform.concatenate(transform, parent_transform
)
joint_transform = native.animation.get_joint_transform_from_rig(self
.parent.rig, self.joint_name_or_hash)
if transform is None:
return sims4.math.Transform.concatenate(joint_transform,
parent_transform)
local_transform = sims4.math.Transform.concatenate(transform,
joint_transform)
return sims4.math.Transform.concatenate(local_transform,
parent_transform)
def duplicate(self):
return type(self)(self.transform, self.routing_surface, self.parent,
self.joint_name_or_hash, self.slot_hash)
def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=
DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,
joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):
if transform is DEFAULT:
transform = self.transform
if transform is not None:
if translation is DEFAULT:
translation = transform.translation
if orientation is DEFAULT:
orientation = transform.orientation
transform = Transform(translation, orientation)
if routing_surface is DEFAULT:
routing_surface = self.routing_surface
if parent is DEFAULT:
parent = self.parent
if joint_name_or_hash is DEFAULT:
joint_name_or_hash = self.joint_name_or_hash
if slot_hash is DEFAULT:
slot_hash = self.slot_hash
return type(self)(transform, routing_surface, parent,
joint_name_or_hash, slot_hash)
class LinearCurve:
__qualname__ = 'LinearCurve'
__slots__ = 'points',
def __init__(self, points):
self.points = points
self.points.sort(key=lambda i: i[0])
def get(self, val):
p_max = len(self.points) - 1
if val <= self.points[0][0]:
return self.points[0][1]
if val >= self.points[p_max][0]:
return self.points[p_max][1]
i = p_max - 1
while i > 0:
while val < self.points[i][0]:
i -= 1
p1 = self.points[i]
p2 = self.points[i + 1]
percent = (val - p1[0]) / (p2[0] - p1[0])
return (p2[1] - p1[1]) * percent + p1[1]
class WeightedUtilityCurve(LinearCurve):
__qualname__ = 'WeightedUtilityCurve'
def __init__(self, points, max_y=0, weight=1):
if max_y == 0:
max_y = self._find_largest_y(points)
transformed_points = [(point[0], point[1] / max_y * weight) for
point in points]
super().__init__(transformed_points)
def _find_largest_y(self, points):
max_y = 0
for point in points:
while point[1] > max_y:
max_y = point[1]
return max_y
class CircularUtilityCurve(LinearCurve):
__qualname__ = 'CircularUtilityCurve'
def __init__(self, points, min_x, max_x):
super().__init__(points)
self._min_x = min_x
self._max_x = max_x
last_point = self.points[-1]
distance_to_end = max_x - last_point[0]
total_length = distance_to_end + self.points[0][1]
distance_to_pivot_point = distance_to_end / total_length
pivot_y_value = (self.points[0][1] - last_point[1]
) * distance_to_pivot_point + self.points[0][1]
self.points.insert(0, (0, pivot_y_value))
self.points.insert(len(self.points), (self._max_x, pivot_y_value))
def get(self, val):
return super().get(val)
class Operator(enum.Int):
__qualname__ = 'Operator'
GREATER = 1
GREATER_OR_EQUAL = 2
EQUAL = 3
NOTEQUAL = 4
LESS_OR_EQUAL = 5
LESS = 6
@staticmethod
def from_function(fn):
if fn == operator.gt:
return Operator.GREATER
if fn == operator.ge:
return Operator.GREATER_OR_EQUAL
if fn == operator.eq:
return Operator.EQUAL
if fn == operator.ne:
return Operator.NOTEQUAL
if fn == operator.le:
return Operator.LESS_OR_EQUAL
if fn == operator.lt:
return Operator.LESS
@property
def function(self):
if self.value == Operator.GREATER:
return operator.gt
if self.value == Operator.GREATER_OR_EQUAL:
return operator.ge
if self.value == Operator.EQUAL:
return operator.eq
if self.value == Operator.NOTEQUAL:
return operator.ne
if self.value == Operator.LESS_OR_EQUAL:
return operator.le
if self.value == Operator.LESS:
return operator.lt
@property
def inverse(self):
if self == Operator.GREATER:
return Operator.LESS_OR_EQUAL
if self == Operator.GREATER_OR_EQUAL:
return Operator.LESS
if self == Operator.EQUAL:
return Operator.NOTEQUAL
if self == Operator.NOTEQUAL:
return Operator.EQUAL
if self == Operator.LESS_OR_EQUAL:
return Operator.GREATER
if self == Operator.LESS:
return Operator.GREATER_OR_EQUAL
@property
def symbol(self):
if self == Operator.GREATER:
return '>'
if self == Operator.GREATER_OR_EQUAL:
return '>='
if self == Operator.EQUAL:
return '=='
if self == Operator.NOTEQUAL:
return '!='
if self == Operator.LESS_OR_EQUAL:
return '<='
if self == Operator.LESS:
return '<'
@property
def category(self):
if self == Operator.GREATER:
return Operator.GREATER
if self == Operator.GREATER_OR_EQUAL:
return Operator.GREATER
if self == Operator.EQUAL:
return Operator.EQUAL
if self == Operator.NOTEQUAL:
return Operator.EQUAL
if self == Operator.LESS_OR_EQUAL:
return Operator.LESS
if self == Operator.LESS:
return Operator.LESS
class InequalityOperator(enum.Int):
__qualname__ = 'InequalityOperator'
GREATER = Operator.GREATER
GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL
LESS_OR_EQUAL = Operator.LESS_OR_EQUAL
LESS = Operator.LESS
<|reserved_special_token_0|>
class Threshold:
__qualname__ = 'Threshold'
__slots__ = 'value', 'comparison'
def __init__(self, value=None, comparison=None):
self.value = value
self.comparison = comparison
def compare(self, source_value):
if self.value is not None and self.comparison is not None:
return self.comparison(source_value, self.value)
return False
def compare_value(self, source_value):
if self.value is not None and self.comparison is not None:
return self.comparison(source_value.value, self.value.value)
return False
def inverse(self):
return Threshold(self.value, Operator.from_function(self.comparison
).inverse.function)
def __str__(self):
if self.comparison is None:
return 'None'
return '{} {}'.format(Operator.from_function(self.comparison).
symbol, self.value)
def __repr__(self):
return '<Threshold {}>'.format(str(self))
def __eq__(self, other):
if not isinstance(other, Threshold):
return False
if not self.value == other.value:
return False
if not self.comparison == other.comparison:
return False
return True
def __hash__(self):
return hash((self.value, self.comparison))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def clamp(lower_bound, x, upper_bound):
    """Limit x to the closed range [lower_bound, upper_bound]."""
    if x < lower_bound:
        return lower_bound
    return upper_bound if x > upper_bound else x
def interpolate(a, b, fraction):
    """Linear blend: returns b at fraction == 0 and a at fraction == 1."""
    return (1 - fraction) * b + a * fraction
def linear_seq_gen(start, stop, step, max_count=None):
delta = stop - start
num = floor(abs(delta / step))
if max_count is not None:
num = min(num, max_count - 1)
if num > 0:
for i in range(0, num + 1):
yield start + i * delta / num
else:
yield start
if stop != start:
yield stop
def deg_to_rad(deg):
    """Convert an angle in degrees to radians."""
    return PI * deg / 180
def rad_to_deg(rad):
return rad * 180 / PI
def angle_abs_difference(a1, a2):
delta = sims4.math.mod_2pi(a1 - a2)
if delta > sims4.math.PI:
delta = sims4.math.TWO_PI - delta
return delta
<|reserved_special_token_0|>
def vector_dot_2d(a, b):
return a.x * b.x + a.z * b.z
def vector_cross(a, b):
return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -
a.y * b.x)
def vector_cross_2d(a, b):
    """Scalar cross product of a and b in the XZ plane (y ignored)."""
    zx = a.z * b.x
    xz = a.x * b.z
    return zx - xz
def vector_normalize(v):
    """Return v scaled to unit length."""
    magnitude = v.magnitude()
    return v / magnitude
def vector_flatten(v):
    """Project v onto the ground plane by zeroing its y component."""
    flat_x, flat_z = v.x, v.z
    return Vector3(flat_x, 0, flat_z)
def almost_equal(a, b, epsilon=EPSILON):
return abs(a - b) < epsilon
<|reserved_special_token_0|>
def transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=
QUATERNION_EPSILON):
if epsilon_orientation is DEFAULT:
epsilon_orientation = epsilon
return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon
) and quaternion_almost_equal(t1.orientation, t2.orientation,
epsilon=epsilon_orientation)
def transform_almost_equal_2d(t1, t2, epsilon=EPSILON, epsilon_orientation=
    QUATERNION_EPSILON):
    """Tolerance comparison of two transforms using the 2D translation
    check (vector3_almost_equal_2d; presumably ignores the y component --
    confirm against that helper).

    Passing DEFAULT for epsilon_orientation reuses `epsilon` for the
    orientation check.
    """
    if epsilon_orientation is DEFAULT:
        epsilon_orientation = epsilon
    return vector3_almost_equal_2d(t1.translation, t2.translation, epsilon=
        epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation,
        epsilon=epsilon_orientation)
def vector3_rotate_axis_angle(v, angle, axis):
q = Quaternion.from_axis_angle(angle, axis)
return q.transform_vector(v)
<|reserved_special_token_0|>
def angle_to_yaw_quaternion(angle):
    """Quaternion rotating by `angle` radians about the world up axis."""
    yaw_axis = UP_AXIS
    return Quaternion.from_axis_angle(angle, yaw_axis)
<|reserved_special_token_0|>
def invert_quaternion(q):
d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)
return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)
def get_difference_transform(transform_a, transform_b):
    """Relative transform from transform_a to transform_b, expressed in
    transform_a's local frame."""
    offset = transform_b.translation - transform_a.translation
    inv_a = invert_quaternion(transform_a.orientation)
    relative_orientation = Quaternion.concatenate(transform_b.orientation,
        inv_a)
    local_offset = Quaternion.transform_vector(inv_a, offset)
    return Transform(local_offset, relative_orientation)
class Location:
    """A placement in the world: a transform plus the routing surface it lives
    on, optionally attached to a parent object (and a joint/slot on it).

    When a parent is assigned, the routing surface is cleared — the location
    is then resolved through the parent chain.
    """
    __qualname__ = 'Location'
    __slots__ = ('transform', 'routing_surface', '_parent_ref', 'joint_name_or_hash', 'slot_hash')

    def __init__(self, transform, routing_surface, parent=None, joint_name_or_hash=None, slot_hash=0):
        self.transform = transform
        self.routing_surface = routing_surface
        self.parent = parent
        self.joint_name_or_hash = joint_name_or_hash
        self.slot_hash = slot_hash

    def __repr__(self):
        return standard_repr(self, self.transform, self.routing_surface, parent=self.parent, joint_name_or_hash=self.joint_name_or_hash, slot_hash=self.slot_hash)

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        if self.transform != other.transform:
            return False
        if self.parent != other.parent:
            return False
        if self.routing_surface != other.routing_surface:
            return False
        # The joint name/hash stands in for the slot hash when present.
        return (self.joint_name_or_hash or self.slot_hash) == (other.joint_name_or_hash or other.slot_hash)

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def parent(self):
        ref = self._parent_ref
        if ref is not None:
            return ref()

    @parent.setter
    def parent(self, value):
        if value is None:
            self._parent_ref = None
        else:
            # value.ref() is presumably a weak reference -- TODO confirm.
            self._parent_ref = value.ref()
            self.routing_surface = None

    @property
    def joint_name_hash(self):
        """The joint as a 32-bit hash: 0 when unset, pass-through for ints, hashed for names."""
        name_or_hash = self.joint_name_or_hash
        if name_or_hash is None:
            return 0
        if isinstance(name_or_hash, int):
            return name_or_hash
        return sims4.hash_util.hash32(name_or_hash)

    @property
    def world_routing_surface(self):
        """Routing surface resolved through the parent chain."""
        parent = self.parent
        if parent is None:
            return self.routing_surface
        return parent.location.world_routing_surface

    @property
    def zone_id(self):
        surface = self.world_routing_surface
        # Surface type 1 carries the zone id as its primary id.
        if surface.type == 1:
            return surface.primary_id
        return sims4.zone_utils.get_zone_id()

    @property
    def level(self):
        return self.world_routing_surface.secondary_id

    @property
    def world_transform(self):
        """World-space transform: local transform composed with joint and parent transforms."""
        parent = self.parent
        if parent is None:
            return self.transform
        parent_transform = parent.part_owner.transform if parent.is_part else parent.transform
        local = self.transform
        if self.joint_name_or_hash is None:
            if local is None:
                return parent_transform
            return sims4.math.Transform.concatenate(local, parent_transform)
        joint_transform = native.animation.get_joint_transform_from_rig(self.parent.rig, self.joint_name_or_hash)
        if local is None:
            return sims4.math.Transform.concatenate(joint_transform, parent_transform)
        local_transform = sims4.math.Transform.concatenate(local, joint_transform)
        return sims4.math.Transform.concatenate(local_transform, parent_transform)

    def duplicate(self):
        """Shallow copy of this Location."""
        return type(self)(self.transform, self.routing_surface, self.parent, self.joint_name_or_hash, self.slot_hash)

    def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=DEFAULT, routing_surface=DEFAULT, parent=DEFAULT, joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):
        """Copy with selected fields overridden.

        translation/orientation override individual components of the (possibly
        overridden) transform; every other field defaults to this instance's value.
        """
        if transform is DEFAULT:
            transform = self.transform
        if transform is not None:
            new_translation = transform.translation if translation is DEFAULT else translation
            new_orientation = transform.orientation if orientation is DEFAULT else orientation
            transform = Transform(new_translation, new_orientation)
        if routing_surface is DEFAULT:
            routing_surface = self.routing_surface
        if parent is DEFAULT:
            parent = self.parent
        if joint_name_or_hash is DEFAULT:
            joint_name_or_hash = self.joint_name_or_hash
        if slot_hash is DEFAULT:
            slot_hash = self.slot_hash
        return type(self)(transform, routing_surface, parent, joint_name_or_hash, slot_hash)
class LinearCurve:
    """Piecewise-linear interpolation over a list of (x, y) points.

    Points are sorted by x on construction (the caller's list is sorted in
    place).  get() clamps to the first/last y outside the x range.
    """
    __qualname__ = 'LinearCurve'
    __slots__ = ('points',)

    def __init__(self, points):
        self.points = points
        self.points.sort(key=lambda p: p[0])

    def get(self, val):
        """Interpolated y for the given x, clamped at both ends.

        Fixes an off-by-one in the original scan: with exactly two points the
        old ``while i > 0`` loop never ran and the method fell through,
        returning None for any interior value.
        """
        points = self.points
        last = len(points) - 1
        if val <= points[0][0]:
            return points[0][1]
        if val >= points[last][0]:
            return points[last][1]
        # Walk back to the segment whose left endpoint is at or below val.
        i = last - 1
        while i > 0 and val < points[i][0]:
            i -= 1
        x0, y0 = points[i][0], points[i][1]
        x1, y1 = points[i + 1][0], points[i + 1][1]
        fraction = (val - x0)/(x1 - x0)
        return (y1 - y0)*fraction + y0
class WeightedUtilityCurve(LinearCurve):
    """LinearCurve whose y values are normalized by max_y and scaled by weight."""
    __qualname__ = 'WeightedUtilityCurve'

    def __init__(self, points, max_y=0, weight=1):
        # max_y == 0 means "derive the normalizer from the data".
        if max_y == 0:
            max_y = self._find_largest_y(points)
        super().__init__([(p[0], p[1] / max_y * weight) for p in points])

    def _find_largest_y(self, points):
        """Largest y among the points, never less than 0."""
        largest = 0
        for p in points:
            if p[1] > largest:
                largest = p[1]
        return largest
class CircularUtilityCurve(LinearCurve):
    """A LinearCurve over a circular domain [min_x, max_x]: construction adds
    synthetic pivot points at x=0 and x=max_x sharing one y value, so lookups
    near either end blend toward a common value."""
    __qualname__ = 'CircularUtilityCurve'
    def __init__(self, points, min_x, max_x):
        super().__init__(points)
        self._min_x = min_x
        self._max_x = max_x
        last_point = self.points[-1]
        # Gap between the last data point and the end of the domain.
        distance_to_end = max_x - last_point[0]
        # NOTE(review): this adds an x-distance to the first point's *y* value
        # (points[0][1]); possibly meant to be points[0][0] -- confirm intent.
        total_length = distance_to_end + self.points[0][1]
        distance_to_pivot_point = distance_to_end / total_length
        pivot_y_value = (self.points[0][1] - last_point[1]
            ) * distance_to_pivot_point + self.points[0][1]
        self.points.insert(0, (0, pivot_y_value))
        self.points.insert(len(self.points), (self._max_x, pivot_y_value))
    def get(self, val):
        # Delegates directly to LinearCurve.get.
        return super().get(val)
class Operator(enum.Int):
    """Comparison operators with conversions to and from the callables in the
    stdlib ``operator`` module, plus inverse/symbol/category helpers."""
    __qualname__ = 'Operator'
    GREATER = 1
    GREATER_OR_EQUAL = 2
    EQUAL = 3
    NOTEQUAL = 4
    LESS_OR_EQUAL = 5
    LESS = 6
    @staticmethod
    def from_function(fn):
        """Map a stdlib ``operator`` callable to its member; returns None for any other callable."""
        if fn == operator.gt:
            return Operator.GREATER
        if fn == operator.ge:
            return Operator.GREATER_OR_EQUAL
        if fn == operator.eq:
            return Operator.EQUAL
        if fn == operator.ne:
            return Operator.NOTEQUAL
        if fn == operator.le:
            return Operator.LESS_OR_EQUAL
        if fn == operator.lt:
            return Operator.LESS
    @property
    def function(self):
        """The stdlib ``operator`` callable for this member."""
        # NOTE(review): compares the raw value to enum members -- assumes the
        # project enum type compares equal to its int value.
        if self.value == Operator.GREATER:
            return operator.gt
        if self.value == Operator.GREATER_OR_EQUAL:
            return operator.ge
        if self.value == Operator.EQUAL:
            return operator.eq
        if self.value == Operator.NOTEQUAL:
            return operator.ne
        if self.value == Operator.LESS_OR_EQUAL:
            return operator.le
        if self.value == Operator.LESS:
            return operator.lt
    @property
    def inverse(self):
        """The logical negation of this operator (e.g. > becomes <=)."""
        if self == Operator.GREATER:
            return Operator.LESS_OR_EQUAL
        if self == Operator.GREATER_OR_EQUAL:
            return Operator.LESS
        if self == Operator.EQUAL:
            return Operator.NOTEQUAL
        if self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.LESS_OR_EQUAL:
            return Operator.GREATER
        if self == Operator.LESS:
            return Operator.GREATER_OR_EQUAL
    @property
    def symbol(self):
        """Source-style symbol for this operator (e.g. '>=')."""
        if self == Operator.GREATER:
            return '>'
        if self == Operator.GREATER_OR_EQUAL:
            return '>='
        if self == Operator.EQUAL:
            return '=='
        if self == Operator.NOTEQUAL:
            return '!='
        if self == Operator.LESS_OR_EQUAL:
            return '<='
        if self == Operator.LESS:
            return '<'
    @property
    def category(self):
        """Collapse to GREATER, EQUAL or LESS (strictness/negation dropped)."""
        if self == Operator.GREATER:
            return Operator.GREATER
        if self == Operator.GREATER_OR_EQUAL:
            return Operator.GREATER
        if self == Operator.EQUAL:
            return Operator.EQUAL
        if self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.LESS_OR_EQUAL:
            return Operator.LESS
        if self == Operator.LESS:
            return Operator.LESS
class InequalityOperator(enum.Int):
    """Subset of Operator restricted to the four inequality comparisons
    (no EQUAL/NOTEQUAL); values alias the corresponding Operator members."""
    __qualname__ = 'InequalityOperator'
    GREATER = Operator.GREATER
    GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL
    LESS_OR_EQUAL = Operator.LESS_OR_EQUAL
    LESS = Operator.LESS
<|reserved_special_token_0|>
class Threshold:
    """A comparison against a fixed value: compare(x) evaluates comparison(x, value)."""
    __qualname__ = 'Threshold'
    __slots__ = ('value', 'comparison')

    def __init__(self, value=None, comparison=None):
        self.value = value
        self.comparison = comparison  # a binary callable such as operator.gt

    def compare(self, source_value):
        """Apply the comparison to source_value; False when unconfigured."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value, self.value)

    def compare_value(self, source_value):
        """Like compare(), but compares the .value attributes of both operands."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value.value, self.value.value)

    def inverse(self):
        """A Threshold accepting exactly the values this one rejects."""
        return Threshold(self.value, Operator.from_function(self.comparison).inverse.function)

    def __str__(self):
        if self.comparison is None:
            return 'None'
        return '{} {}'.format(Operator.from_function(self.comparison).symbol, self.value)

    def __repr__(self):
        return '<Threshold {}>'.format(str(self))

    def __eq__(self, other):
        if not isinstance(other, Threshold):
            return False
        if not (self.value == other.value and self.comparison == other.comparison):
            return False
        return True

    def __hash__(self):
        return hash((self.value, self.comparison))
<|reserved_special_token_1|>
from _math import Vector2, Vector3, Quaternion, Transform, Vector3Immutable, QuaternionImmutable, minimum_distance
from _math import mod_2pi
from math import pi as PI, sqrt, fmod, floor, atan2, acos, asin, ceil, pi, e
import operator
from sims4.repr_utils import standard_repr
import enum
import native.animation
import sims4.hash_util
from singletons import DEFAULT
TWO_PI = PI*2
EPSILON = 1.192092896e-07  # FLT_EPSILON (single-precision machine epsilon)
QUATERNION_EPSILON = 0.001  # looser tolerance used for quaternion comparisons
MAX_FLOAT = 3.402823466e+38  # FLT_MAX
MAX_UINT64 = 18446744073709551615
MAX_INT64 = 9223372036854775807  # 2**63 - 1; original had 922337203685477580 (digit dropped)
MAX_UINT32 = 4294967295
MAX_INT32 = 2147483647
MAX_UINT16 = 65535
MAX_INT16 = 32767
POS_INFINITY = float('inf')
NEG_INFINITY = float('-inf')
FORWARD_AXIS = Vector3.Z_AXIS()
UP_AXIS = Vector3.Y_AXIS()
def clamp(lower_bound, x, upper_bound):
    """Return x constrained to the closed range [lower_bound, upper_bound]."""
    if x < lower_bound:
        return lower_bound
    return upper_bound if x > upper_bound else x
def interpolate(a, b, fraction):
    """Linear blend where fraction is the weight of ``a`` (1.0 -> a, 0.0 -> b).

    NOTE(review): this is the reverse of the common lerp(a, b, t) convention,
    which weights ``b`` by t.
    """
    return a*fraction + (1 - fraction)*b
def linear_seq_gen(start, stop, step, max_count=None):
    """Yield evenly spaced values from start to stop, inclusive.

    step sets the nominal spacing; max_count caps the number of values.  When
    the range is shorter than one step, yields start (and stop when distinct).
    """
    delta = stop - start
    num = floor(abs(delta/step))
    if max_count is not None:
        num = min(num, max_count - 1)
    if num <= 0:
        yield start
        if stop != start:
            yield stop
        return
    for i in range(num + 1):
        yield start + i*delta/num
def deg_to_rad(deg):
    """Convert degrees to radians."""
    return deg*PI/180
def rad_to_deg(rad):
    """Convert radians to degrees."""
    return rad*180/PI
def angle_abs_difference(a1, a2):
    """Smallest absolute difference between two angles, in [0, pi]."""
    # sims4.math presumably refers back to this very module -- TODO confirm.
    delta = sims4.math.mod_2pi(a1 - a2)
    if delta > sims4.math.PI:
        delta = sims4.math.TWO_PI - delta
    return delta
def vector_dot(a, b):
    """Dot product of two 3D vectors."""
    return a.x*b.x + a.y*b.y + a.z*b.z
def vector_dot_2d(a, b):
    """Dot product restricted to the XZ plane (y components ignored)."""
    return a.x*b.x + a.z*b.z
def vector_cross(a, b):
    """Cross product a x b of two 3D vectors."""
    return Vector3(a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x)
def vector_cross_2d(a, b):
    """Scalar cross product in the XZ plane: the y component of vector_cross(a, b)."""
    return a.z*b.x - a.x*b.z
def vector_normalize(v):
    """Return v scaled to unit length.  A zero-length v divides by zero."""
    return v/v.magnitude()
def vector_flatten(v):
    """Copy of v projected onto the XZ plane (y set to 0)."""
    return Vector3(v.x, 0, v.z)
def almost_equal(a, b, epsilon=EPSILON):
    """True when a and b differ by strictly less than epsilon."""
    return abs(a - b) < epsilon
def vector3_almost_equal(v1, v2, epsilon=EPSILON):
    """True when every component of v1 and v2 differs by less than epsilon."""
    for c1, c2 in ((v1.x, v2.x), (v1.y, v2.y), (v1.z, v2.z)):
        if not abs(c1 - c2) < epsilon:
            return False
    return True
def vector3_almost_equal_2d(v1, v2, epsilon=EPSILON):
    """True when the x and z components of v1 and v2 differ by less than epsilon (y ignored)."""
    for c1, c2 in ((v1.x, v2.x), (v1.z, v2.z)):
        if not abs(c1 - c2) < epsilon:
            return False
    return True
def quaternion_almost_equal(q1, q2, epsilon=QUATERNION_EPSILON):
    """True when q1 and q2 represent nearly the same rotation (q and -q are treated as equal)."""
    def close(a, b):
        return abs(a - b) < epsilon
    if close(q1.x, q2.x) and close(q1.y, q2.y) and close(q1.z, q2.z) and close(q1.w, q2.w):
        return True
    return close(q1.x, -q2.x) and close(q1.y, -q2.y) and close(q1.z, -q2.z) and close(q1.w, -q2.w)
def transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=QUATERNION_EPSILON):
    """True when both translation and orientation of t1/t2 are within tolerance.

    NOTE(review): the DEFAULT check below only fires when a caller passes
    DEFAULT explicitly; the declared default is QUATERNION_EPSILON.
    """
    if epsilon_orientation is DEFAULT:
        epsilon_orientation = epsilon
    return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation, epsilon=epsilon_orientation)
def transform_almost_equal_2d(t1, t2, epsilon=EPSILON, epsilon_orientation=QUATERNION_EPSILON):
    """Like transform_almost_equal, but translation is compared in the XZ plane only.

    NOTE(review): as above, the DEFAULT check is dead unless DEFAULT is passed
    explicitly -- the declared default is QUATERNION_EPSILON.
    """
    if epsilon_orientation is DEFAULT:
        epsilon_orientation = epsilon
    return vector3_almost_equal_2d(t1.translation, t2.translation, epsilon=epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation, epsilon=epsilon_orientation)
def vector3_rotate_axis_angle(v, angle, axis):
    """Rotate vector v by angle radians about the given axis."""
    return Quaternion.from_axis_angle(angle, axis).transform_vector(v)
def vector3_angle(v):
    """Heading angle of v about the Y axis: atan2(x, z), in radians."""
    return atan2(v.x, v.z)
def angle_to_yaw_quaternion(angle):
    """Quaternion for a rotation of angle radians about the up (Y) axis."""
    return Quaternion.from_axis_angle(angle, UP_AXIS)
def yaw_quaternion_to_angle(q):
    """Signed yaw angle (radians) encoded by a pure-yaw quaternion; 0 when the y component is ~0."""
    if almost_equal(q.y, 0.0):
        return 0
    angle = acos(q.w)*2.0
    return angle if q.y > 0 else -angle
def get_closest_point_2D(segment, p):
    """Closest point to p on a line segment, measured in the XZ plane.

    segment is a pair of endpoints; the result keeps p's y value.  A
    zero-length (degenerate) segment returns its endpoint instead of dividing
    by zero as the original did.
    """
    a1 = segment[0]
    a2 = segment[1]
    dx = a2.x - a1.x
    dz = a2.z - a1.z
    length_sq = dx*dx + dz*dz
    if length_sq == 0:
        # Both endpoints coincide: the projection is the endpoint itself.
        return Vector3(a1.x, p.y, a1.z)
    # Parametric position of the projection, clamped onto the segment.
    t = ((p.x - a1.x)*dx + (p.z - a1.z)*dz)/length_sq
    t = sims4.math.clamp(0, t, 1)
    return Vector3(a1.x + t*dx, p.y, a1.z + t*dz)
def invert_quaternion(q):
    """Multiplicative inverse of quaternion q: conjugate divided by squared norm."""
    d = 1.0/(q.x*q.x + q.y*q.y + q.z*q.z + q.w*q.w)
    return Quaternion(-d*q.x, -d*q.y, -d*q.z, d*q.w)
def get_difference_transform(transform_a, transform_b):
    """Express transform_b relative to transform_a (the delta transform in a's local frame)."""
    inverse_a = invert_quaternion(transform_a.orientation)
    offset = transform_b.translation - transform_a.translation
    relative_orientation = Quaternion.concatenate(transform_b.orientation, inverse_a)
    relative_translation = Quaternion.transform_vector(inverse_a, offset)
    return Transform(relative_translation, relative_orientation)
class Location:
    """A placement in the world: a transform plus the routing surface it lives
    on, optionally attached to a parent object (and a joint/slot on it).

    When a parent is assigned, the routing surface is cleared — the location
    is then resolved through the parent chain.
    """
    __qualname__ = 'Location'
    __slots__ = ('transform', 'routing_surface', '_parent_ref', 'joint_name_or_hash', 'slot_hash')

    def __init__(self, transform, routing_surface, parent=None, joint_name_or_hash=None, slot_hash=0):
        self.transform = transform
        self.routing_surface = routing_surface
        self.parent = parent
        self.joint_name_or_hash = joint_name_or_hash
        self.slot_hash = slot_hash

    def __repr__(self):
        return standard_repr(self, self.transform, self.routing_surface, parent=self.parent, joint_name_or_hash=self.joint_name_or_hash, slot_hash=self.slot_hash)

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        if self.transform != other.transform:
            return False
        if self.parent != other.parent:
            return False
        if self.routing_surface != other.routing_surface:
            return False
        # The joint name/hash stands in for the slot hash when present.
        return (self.joint_name_or_hash or self.slot_hash) == (other.joint_name_or_hash or other.slot_hash)

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def parent(self):
        ref = self._parent_ref
        if ref is not None:
            return ref()

    @parent.setter
    def parent(self, value):
        if value is None:
            self._parent_ref = None
        else:
            # value.ref() is presumably a weak reference -- TODO confirm.
            self._parent_ref = value.ref()
            self.routing_surface = None

    @property
    def joint_name_hash(self):
        """The joint as a 32-bit hash: 0 when unset, pass-through for ints, hashed for names."""
        name_or_hash = self.joint_name_or_hash
        if name_or_hash is None:
            return 0
        if isinstance(name_or_hash, int):
            return name_or_hash
        return sims4.hash_util.hash32(name_or_hash)

    @property
    def world_routing_surface(self):
        """Routing surface resolved through the parent chain."""
        parent = self.parent
        if parent is None:
            return self.routing_surface
        return parent.location.world_routing_surface

    @property
    def zone_id(self):
        surface = self.world_routing_surface
        # Surface type 1 carries the zone id as its primary id.
        if surface.type == 1:
            return surface.primary_id
        return sims4.zone_utils.get_zone_id()

    @property
    def level(self):
        return self.world_routing_surface.secondary_id

    @property
    def world_transform(self):
        """World-space transform: local transform composed with joint and parent transforms."""
        parent = self.parent
        if parent is None:
            return self.transform
        parent_transform = parent.part_owner.transform if parent.is_part else parent.transform
        local = self.transform
        if self.joint_name_or_hash is None:
            if local is None:
                return parent_transform
            return sims4.math.Transform.concatenate(local, parent_transform)
        joint_transform = native.animation.get_joint_transform_from_rig(self.parent.rig, self.joint_name_or_hash)
        if local is None:
            return sims4.math.Transform.concatenate(joint_transform, parent_transform)
        local_transform = sims4.math.Transform.concatenate(local, joint_transform)
        return sims4.math.Transform.concatenate(local_transform, parent_transform)

    def duplicate(self):
        """Shallow copy of this Location."""
        return type(self)(self.transform, self.routing_surface, self.parent, self.joint_name_or_hash, self.slot_hash)

    def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=DEFAULT, routing_surface=DEFAULT, parent=DEFAULT, joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):
        """Copy with selected fields overridden.

        translation/orientation override individual components of the (possibly
        overridden) transform; every other field defaults to this instance's value.
        """
        if transform is DEFAULT:
            transform = self.transform
        if transform is not None:
            new_translation = transform.translation if translation is DEFAULT else translation
            new_orientation = transform.orientation if orientation is DEFAULT else orientation
            transform = Transform(new_translation, new_orientation)
        if routing_surface is DEFAULT:
            routing_surface = self.routing_surface
        if parent is DEFAULT:
            parent = self.parent
        if joint_name_or_hash is DEFAULT:
            joint_name_or_hash = self.joint_name_or_hash
        if slot_hash is DEFAULT:
            slot_hash = self.slot_hash
        return type(self)(transform, routing_surface, parent, joint_name_or_hash, slot_hash)
class LinearCurve:
    """Piecewise-linear interpolation over a list of (x, y) points.

    Points are sorted by x on construction (the caller's list is sorted in
    place).  get() clamps to the first/last y outside the x range.
    """
    __qualname__ = 'LinearCurve'
    __slots__ = ('points',)

    def __init__(self, points):
        self.points = points
        self.points.sort(key=lambda p: p[0])

    def get(self, val):
        """Interpolated y for the given x, clamped at both ends.

        Fixes an off-by-one in the original scan: with exactly two points the
        old ``while i > 0`` loop never ran and the method fell through,
        returning None for any interior value.
        """
        points = self.points
        last = len(points) - 1
        if val <= points[0][0]:
            return points[0][1]
        if val >= points[last][0]:
            return points[last][1]
        # Walk back to the segment whose left endpoint is at or below val.
        i = last - 1
        while i > 0 and val < points[i][0]:
            i -= 1
        x0, y0 = points[i][0], points[i][1]
        x1, y1 = points[i + 1][0], points[i + 1][1]
        fraction = (val - x0)/(x1 - x0)
        return (y1 - y0)*fraction + y0
class WeightedUtilityCurve(LinearCurve):
    """LinearCurve whose y values are normalized by max_y and scaled by weight."""
    __qualname__ = 'WeightedUtilityCurve'

    def __init__(self, points, max_y=0, weight=1):
        # max_y == 0 means "derive the normalizer from the data".
        if max_y == 0:
            max_y = self._find_largest_y(points)
        super().__init__([(p[0], p[1] / max_y * weight) for p in points])

    def _find_largest_y(self, points):
        """Largest y among the points, never less than 0."""
        largest = 0
        for p in points:
            if p[1] > largest:
                largest = p[1]
        return largest
class CircularUtilityCurve(LinearCurve):
    """A LinearCurve over a circular domain [min_x, max_x]: construction adds
    synthetic pivot points at x=0 and x=max_x sharing one y value, so lookups
    near either end blend toward a common value."""
    __qualname__ = 'CircularUtilityCurve'
    def __init__(self, points, min_x, max_x):
        super().__init__(points)
        self._min_x = min_x
        self._max_x = max_x
        last_point = self.points[-1]
        # Gap between the last data point and the end of the domain.
        distance_to_end = max_x - last_point[0]
        # NOTE(review): this adds an x-distance to the first point's *y* value
        # (points[0][1]); possibly meant to be points[0][0] -- confirm intent.
        total_length = distance_to_end + self.points[0][1]
        distance_to_pivot_point = distance_to_end/total_length
        pivot_y_value = (self.points[0][1] - last_point[1])*distance_to_pivot_point + self.points[0][1]
        self.points.insert(0, (0, pivot_y_value))
        self.points.insert(len(self.points), (self._max_x, pivot_y_value))
    def get(self, val):
        # Delegates directly to LinearCurve.get.
        return super().get(val)
class Operator(enum.Int):
    """Comparison operators with conversions to and from the callables in the
    stdlib ``operator`` module, plus inverse/symbol/category helpers."""
    __qualname__ = 'Operator'
    GREATER = 1
    GREATER_OR_EQUAL = 2
    EQUAL = 3
    NOTEQUAL = 4
    LESS_OR_EQUAL = 5
    LESS = 6
    @staticmethod
    def from_function(fn):
        """Map a stdlib ``operator`` callable to its member; returns None for any other callable."""
        if fn == operator.gt:
            return Operator.GREATER
        if fn == operator.ge:
            return Operator.GREATER_OR_EQUAL
        if fn == operator.eq:
            return Operator.EQUAL
        if fn == operator.ne:
            return Operator.NOTEQUAL
        if fn == operator.le:
            return Operator.LESS_OR_EQUAL
        if fn == operator.lt:
            return Operator.LESS
    @property
    def function(self):
        """The stdlib ``operator`` callable for this member."""
        # NOTE(review): compares the raw value to enum members -- assumes the
        # project enum type compares equal to its int value.
        if self.value == Operator.GREATER:
            return operator.gt
        if self.value == Operator.GREATER_OR_EQUAL:
            return operator.ge
        if self.value == Operator.EQUAL:
            return operator.eq
        if self.value == Operator.NOTEQUAL:
            return operator.ne
        if self.value == Operator.LESS_OR_EQUAL:
            return operator.le
        if self.value == Operator.LESS:
            return operator.lt
    @property
    def inverse(self):
        """The logical negation of this operator (e.g. > becomes <=)."""
        if self == Operator.GREATER:
            return Operator.LESS_OR_EQUAL
        if self == Operator.GREATER_OR_EQUAL:
            return Operator.LESS
        if self == Operator.EQUAL:
            return Operator.NOTEQUAL
        if self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.LESS_OR_EQUAL:
            return Operator.GREATER
        if self == Operator.LESS:
            return Operator.GREATER_OR_EQUAL
    @property
    def symbol(self):
        """Source-style symbol for this operator (e.g. '>=')."""
        if self == Operator.GREATER:
            return '>'
        if self == Operator.GREATER_OR_EQUAL:
            return '>='
        if self == Operator.EQUAL:
            return '=='
        if self == Operator.NOTEQUAL:
            return '!='
        if self == Operator.LESS_OR_EQUAL:
            return '<='
        if self == Operator.LESS:
            return '<'
    @property
    def category(self):
        """Collapse to GREATER, EQUAL or LESS (strictness/negation dropped)."""
        if self == Operator.GREATER:
            return Operator.GREATER
        if self == Operator.GREATER_OR_EQUAL:
            return Operator.GREATER
        if self == Operator.EQUAL:
            return Operator.EQUAL
        if self == Operator.NOTEQUAL:
            return Operator.EQUAL
        if self == Operator.LESS_OR_EQUAL:
            return Operator.LESS
        if self == Operator.LESS:
            return Operator.LESS
class InequalityOperator(enum.Int):
    """Subset of Operator restricted to the four inequality comparisons
    (no EQUAL/NOTEQUAL); values alias the corresponding Operator members."""
    __qualname__ = 'InequalityOperator'
    GREATER = Operator.GREATER
    GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL
    LESS_OR_EQUAL = Operator.LESS_OR_EQUAL
    LESS = Operator.LESS
# Re-attach Operator's helpers onto InequalityOperator so the restricted enum
# exposes the same API.  __reload_context__ is project enum machinery --
# presumably guarding live class reloads; TODO confirm its semantics.
with InequalityOperator.__reload_context__(InequalityOperator, InequalityOperator):
    InequalityOperator.from_function = Operator.from_function
    InequalityOperator.function = Operator.function
    InequalityOperator.inverse = Operator.inverse
    InequalityOperator.symbol = Operator.symbol
    InequalityOperator.category = Operator.category
class Threshold:
    """A comparison against a fixed value: compare(x) evaluates comparison(x, value)."""
    __qualname__ = 'Threshold'
    __slots__ = ('value', 'comparison')

    def __init__(self, value=None, comparison=None):
        self.value = value
        self.comparison = comparison  # a binary callable such as operator.gt

    def compare(self, source_value):
        """Apply the comparison to source_value; False when unconfigured."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value, self.value)

    def compare_value(self, source_value):
        """Like compare(), but compares the .value attributes of both operands."""
        if self.value is None or self.comparison is None:
            return False
        return self.comparison(source_value.value, self.value.value)

    def inverse(self):
        """A Threshold accepting exactly the values this one rejects."""
        return Threshold(self.value, Operator.from_function(self.comparison).inverse.function)

    def __str__(self):
        if self.comparison is None:
            return 'None'
        return '{} {}'.format(Operator.from_function(self.comparison).symbol, self.value)

    def __repr__(self):
        return '<Threshold {}>'.format(str(self))

    def __eq__(self, other):
        if not isinstance(other, Threshold):
            return False
        if not (self.value == other.value and self.comparison == other.comparison):
            return False
        return True

    def __hash__(self):
        return hash((self.value, self.comparison))
|
flexible
|
{
"blob_id": "a0310b1bab339064c36ff0fe92d275db7a6c5ba9",
"index": 8734,
"step-1": "<mask token>\n\n\ndef rad_to_deg(rad):\n return rad * 180 / PI\n\n\ndef angle_abs_difference(a1, a2):\n delta = sims4.math.mod_2pi(a1 - a2)\n if delta > sims4.math.PI:\n delta = sims4.math.TWO_PI - delta\n return delta\n\n\n<mask token>\n\n\ndef vector_dot_2d(a, b):\n return a.x * b.x + a.z * b.z\n\n\ndef vector_cross(a, b):\n return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -\n a.y * b.x)\n\n\n<mask token>\n\n\ndef vector3_rotate_axis_angle(v, angle, axis):\n q = Quaternion.from_axis_angle(angle, axis)\n return q.transform_vector(v)\n\n\n<mask token>\n\n\ndef invert_quaternion(q):\n d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)\n return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)\n\n\n<mask token>\n\n\nclass Location:\n __qualname__ = 'Location'\n __slots__ = ('transform', 'routing_surface', '_parent_ref',\n 'joint_name_or_hash', 'slot_hash')\n\n def __init__(self, transform, routing_surface, parent=None,\n joint_name_or_hash=None, slot_hash=0):\n self.transform = transform\n self.routing_surface = routing_surface\n self.parent = parent\n self.joint_name_or_hash = joint_name_or_hash\n self.slot_hash = slot_hash\n\n def __repr__(self):\n return standard_repr(self, self.transform, self.routing_surface,\n parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,\n slot_hash=self.slot_hash)\n\n def __eq__(self, other):\n if type(self) is not type(other):\n return False\n if self.transform != other.transform:\n return False\n if self.parent != other.parent:\n return False\n if self.routing_surface != other.routing_surface:\n return False\n slot_hash0 = self.joint_name_or_hash or self.slot_hash\n slot_hash1 = other.joint_name_or_hash or other.slot_hash\n if slot_hash0 != slot_hash1:\n return False\n return True\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n @property\n def parent(self):\n if self._parent_ref is not None:\n return self._parent_ref()\n\n @parent.setter\n def parent(self, value):\n 
if value is not None:\n self._parent_ref = value.ref()\n self.routing_surface = None\n else:\n self._parent_ref = None\n\n @property\n def joint_name_hash(self):\n if self.joint_name_or_hash is None:\n return 0\n if isinstance(self.joint_name_or_hash, int):\n return self.joint_name_or_hash\n return sims4.hash_util.hash32(self.joint_name_or_hash)\n\n @property\n def world_routing_surface(self):\n if self.parent is not None:\n return self.parent.location.world_routing_surface\n return self.routing_surface\n\n @property\n def zone_id(self):\n if self.world_routing_surface.type == 1:\n return self.world_routing_surface.primary_id\n return sims4.zone_utils.get_zone_id()\n\n @property\n def level(self):\n return self.world_routing_surface.secondary_id\n\n @property\n def world_transform(self):\n if self.parent is None:\n return self.transform\n transform = self.transform\n parent = self.parent\n if parent.is_part:\n parent_transform = parent.part_owner.transform\n else:\n parent_transform = parent.transform\n if self.joint_name_or_hash is None:\n if transform is None:\n return parent_transform\n return sims4.math.Transform.concatenate(transform, parent_transform\n )\n joint_transform = native.animation.get_joint_transform_from_rig(self\n .parent.rig, self.joint_name_or_hash)\n if transform is None:\n return sims4.math.Transform.concatenate(joint_transform,\n parent_transform)\n local_transform = sims4.math.Transform.concatenate(transform,\n joint_transform)\n return sims4.math.Transform.concatenate(local_transform,\n parent_transform)\n\n def duplicate(self):\n return type(self)(self.transform, self.routing_surface, self.parent,\n self.joint_name_or_hash, self.slot_hash)\n\n def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=\n DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,\n joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):\n if transform is DEFAULT:\n transform = self.transform\n if transform is not None:\n if translation is DEFAULT:\n translation 
= transform.translation\n if orientation is DEFAULT:\n orientation = transform.orientation\n transform = Transform(translation, orientation)\n if routing_surface is DEFAULT:\n routing_surface = self.routing_surface\n if parent is DEFAULT:\n parent = self.parent\n if joint_name_or_hash is DEFAULT:\n joint_name_or_hash = self.joint_name_or_hash\n if slot_hash is DEFAULT:\n slot_hash = self.slot_hash\n return type(self)(transform, routing_surface, parent,\n joint_name_or_hash, slot_hash)\n\n\nclass LinearCurve:\n __qualname__ = 'LinearCurve'\n __slots__ = 'points',\n\n def __init__(self, points):\n self.points = points\n self.points.sort(key=lambda i: i[0])\n\n def get(self, val):\n p_max = len(self.points) - 1\n if val <= self.points[0][0]:\n return self.points[0][1]\n if val >= self.points[p_max][0]:\n return self.points[p_max][1]\n i = p_max - 1\n while i > 0:\n while val < self.points[i][0]:\n i -= 1\n p1 = self.points[i]\n p2 = self.points[i + 1]\n percent = (val - p1[0]) / (p2[0] - p1[0])\n return (p2[1] - p1[1]) * percent + p1[1]\n\n\nclass WeightedUtilityCurve(LinearCurve):\n __qualname__ = 'WeightedUtilityCurve'\n\n def __init__(self, points, max_y=0, weight=1):\n if max_y == 0:\n max_y = self._find_largest_y(points)\n transformed_points = [(point[0], point[1] / max_y * weight) for\n point in points]\n super().__init__(transformed_points)\n\n def _find_largest_y(self, points):\n max_y = 0\n for point in points:\n while point[1] > max_y:\n max_y = point[1]\n return max_y\n\n\nclass CircularUtilityCurve(LinearCurve):\n __qualname__ = 'CircularUtilityCurve'\n\n def __init__(self, points, min_x, max_x):\n super().__init__(points)\n self._min_x = min_x\n self._max_x = max_x\n last_point = self.points[-1]\n distance_to_end = max_x - last_point[0]\n total_length = distance_to_end + self.points[0][1]\n distance_to_pivot_point = distance_to_end / total_length\n pivot_y_value = (self.points[0][1] - last_point[1]\n ) * distance_to_pivot_point + self.points[0][1]\n 
self.points.insert(0, (0, pivot_y_value))\n self.points.insert(len(self.points), (self._max_x, pivot_y_value))\n\n def get(self, val):\n return super().get(val)\n\n\nclass Operator(enum.Int):\n __qualname__ = 'Operator'\n GREATER = 1\n GREATER_OR_EQUAL = 2\n EQUAL = 3\n NOTEQUAL = 4\n LESS_OR_EQUAL = 5\n LESS = 6\n\n @staticmethod\n def from_function(fn):\n if fn == operator.gt:\n return Operator.GREATER\n if fn == operator.ge:\n return Operator.GREATER_OR_EQUAL\n if fn == operator.eq:\n return Operator.EQUAL\n if fn == operator.ne:\n return Operator.NOTEQUAL\n if fn == operator.le:\n return Operator.LESS_OR_EQUAL\n if fn == operator.lt:\n return Operator.LESS\n\n @property\n def function(self):\n if self.value == Operator.GREATER:\n return operator.gt\n if self.value == Operator.GREATER_OR_EQUAL:\n return operator.ge\n if self.value == Operator.EQUAL:\n return operator.eq\n if self.value == Operator.NOTEQUAL:\n return operator.ne\n if self.value == Operator.LESS_OR_EQUAL:\n return operator.le\n if self.value == Operator.LESS:\n return operator.lt\n\n @property\n def inverse(self):\n if self == Operator.GREATER:\n return Operator.LESS_OR_EQUAL\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.LESS\n if self == Operator.EQUAL:\n return Operator.NOTEQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.LESS:\n return Operator.GREATER_OR_EQUAL\n\n @property\n def symbol(self):\n if self == Operator.GREATER:\n return '>'\n if self == Operator.GREATER_OR_EQUAL:\n return '>='\n if self == Operator.EQUAL:\n return '=='\n if self == Operator.NOTEQUAL:\n return '!='\n if self == Operator.LESS_OR_EQUAL:\n return '<='\n if self == Operator.LESS:\n return '<'\n\n @property\n def category(self):\n if self == Operator.GREATER:\n return Operator.GREATER\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.EQUAL:\n return Operator.EQUAL\n if self 
== Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.LESS\n if self == Operator.LESS:\n return Operator.LESS\n\n\nclass InequalityOperator(enum.Int):\n __qualname__ = 'InequalityOperator'\n GREATER = Operator.GREATER\n GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL\n LESS_OR_EQUAL = Operator.LESS_OR_EQUAL\n LESS = Operator.LESS\n\n\n<mask token>\n\n\nclass Threshold:\n __qualname__ = 'Threshold'\n __slots__ = 'value', 'comparison'\n\n def __init__(self, value=None, comparison=None):\n self.value = value\n self.comparison = comparison\n\n def compare(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value, self.value)\n return False\n\n def compare_value(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value.value, self.value.value)\n return False\n\n def inverse(self):\n return Threshold(self.value, Operator.from_function(self.comparison\n ).inverse.function)\n\n def __str__(self):\n if self.comparison is None:\n return 'None'\n return '{} {}'.format(Operator.from_function(self.comparison).\n symbol, self.value)\n\n def __repr__(self):\n return '<Threshold {}>'.format(str(self))\n\n def __eq__(self, other):\n if not isinstance(other, Threshold):\n return False\n if not self.value == other.value:\n return False\n if not self.comparison == other.comparison:\n return False\n return True\n\n def __hash__(self):\n return hash((self.value, self.comparison))\n",
"step-2": "<mask token>\n\n\ndef rad_to_deg(rad):\n return rad * 180 / PI\n\n\ndef angle_abs_difference(a1, a2):\n delta = sims4.math.mod_2pi(a1 - a2)\n if delta > sims4.math.PI:\n delta = sims4.math.TWO_PI - delta\n return delta\n\n\n<mask token>\n\n\ndef vector_dot_2d(a, b):\n return a.x * b.x + a.z * b.z\n\n\ndef vector_cross(a, b):\n return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -\n a.y * b.x)\n\n\n<mask token>\n\n\ndef transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=\n QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon\n ) and quaternion_almost_equal(t1.orientation, t2.orientation,\n epsilon=epsilon_orientation)\n\n\n<mask token>\n\n\ndef vector3_rotate_axis_angle(v, angle, axis):\n q = Quaternion.from_axis_angle(angle, axis)\n return q.transform_vector(v)\n\n\n<mask token>\n\n\ndef invert_quaternion(q):\n d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)\n return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)\n\n\n<mask token>\n\n\nclass Location:\n __qualname__ = 'Location'\n __slots__ = ('transform', 'routing_surface', '_parent_ref',\n 'joint_name_or_hash', 'slot_hash')\n\n def __init__(self, transform, routing_surface, parent=None,\n joint_name_or_hash=None, slot_hash=0):\n self.transform = transform\n self.routing_surface = routing_surface\n self.parent = parent\n self.joint_name_or_hash = joint_name_or_hash\n self.slot_hash = slot_hash\n\n def __repr__(self):\n return standard_repr(self, self.transform, self.routing_surface,\n parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,\n slot_hash=self.slot_hash)\n\n def __eq__(self, other):\n if type(self) is not type(other):\n return False\n if self.transform != other.transform:\n return False\n if self.parent != other.parent:\n return False\n if self.routing_surface != other.routing_surface:\n return False\n slot_hash0 = 
self.joint_name_or_hash or self.slot_hash\n slot_hash1 = other.joint_name_or_hash or other.slot_hash\n if slot_hash0 != slot_hash1:\n return False\n return True\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n @property\n def parent(self):\n if self._parent_ref is not None:\n return self._parent_ref()\n\n @parent.setter\n def parent(self, value):\n if value is not None:\n self._parent_ref = value.ref()\n self.routing_surface = None\n else:\n self._parent_ref = None\n\n @property\n def joint_name_hash(self):\n if self.joint_name_or_hash is None:\n return 0\n if isinstance(self.joint_name_or_hash, int):\n return self.joint_name_or_hash\n return sims4.hash_util.hash32(self.joint_name_or_hash)\n\n @property\n def world_routing_surface(self):\n if self.parent is not None:\n return self.parent.location.world_routing_surface\n return self.routing_surface\n\n @property\n def zone_id(self):\n if self.world_routing_surface.type == 1:\n return self.world_routing_surface.primary_id\n return sims4.zone_utils.get_zone_id()\n\n @property\n def level(self):\n return self.world_routing_surface.secondary_id\n\n @property\n def world_transform(self):\n if self.parent is None:\n return self.transform\n transform = self.transform\n parent = self.parent\n if parent.is_part:\n parent_transform = parent.part_owner.transform\n else:\n parent_transform = parent.transform\n if self.joint_name_or_hash is None:\n if transform is None:\n return parent_transform\n return sims4.math.Transform.concatenate(transform, parent_transform\n )\n joint_transform = native.animation.get_joint_transform_from_rig(self\n .parent.rig, self.joint_name_or_hash)\n if transform is None:\n return sims4.math.Transform.concatenate(joint_transform,\n parent_transform)\n local_transform = sims4.math.Transform.concatenate(transform,\n joint_transform)\n return sims4.math.Transform.concatenate(local_transform,\n parent_transform)\n\n def duplicate(self):\n return type(self)(self.transform, 
self.routing_surface, self.parent,\n self.joint_name_or_hash, self.slot_hash)\n\n def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=\n DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,\n joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):\n if transform is DEFAULT:\n transform = self.transform\n if transform is not None:\n if translation is DEFAULT:\n translation = transform.translation\n if orientation is DEFAULT:\n orientation = transform.orientation\n transform = Transform(translation, orientation)\n if routing_surface is DEFAULT:\n routing_surface = self.routing_surface\n if parent is DEFAULT:\n parent = self.parent\n if joint_name_or_hash is DEFAULT:\n joint_name_or_hash = self.joint_name_or_hash\n if slot_hash is DEFAULT:\n slot_hash = self.slot_hash\n return type(self)(transform, routing_surface, parent,\n joint_name_or_hash, slot_hash)\n\n\nclass LinearCurve:\n __qualname__ = 'LinearCurve'\n __slots__ = 'points',\n\n def __init__(self, points):\n self.points = points\n self.points.sort(key=lambda i: i[0])\n\n def get(self, val):\n p_max = len(self.points) - 1\n if val <= self.points[0][0]:\n return self.points[0][1]\n if val >= self.points[p_max][0]:\n return self.points[p_max][1]\n i = p_max - 1\n while i > 0:\n while val < self.points[i][0]:\n i -= 1\n p1 = self.points[i]\n p2 = self.points[i + 1]\n percent = (val - p1[0]) / (p2[0] - p1[0])\n return (p2[1] - p1[1]) * percent + p1[1]\n\n\nclass WeightedUtilityCurve(LinearCurve):\n __qualname__ = 'WeightedUtilityCurve'\n\n def __init__(self, points, max_y=0, weight=1):\n if max_y == 0:\n max_y = self._find_largest_y(points)\n transformed_points = [(point[0], point[1] / max_y * weight) for\n point in points]\n super().__init__(transformed_points)\n\n def _find_largest_y(self, points):\n max_y = 0\n for point in points:\n while point[1] > max_y:\n max_y = point[1]\n return max_y\n\n\nclass CircularUtilityCurve(LinearCurve):\n __qualname__ = 'CircularUtilityCurve'\n\n def __init__(self, 
points, min_x, max_x):\n super().__init__(points)\n self._min_x = min_x\n self._max_x = max_x\n last_point = self.points[-1]\n distance_to_end = max_x - last_point[0]\n total_length = distance_to_end + self.points[0][1]\n distance_to_pivot_point = distance_to_end / total_length\n pivot_y_value = (self.points[0][1] - last_point[1]\n ) * distance_to_pivot_point + self.points[0][1]\n self.points.insert(0, (0, pivot_y_value))\n self.points.insert(len(self.points), (self._max_x, pivot_y_value))\n\n def get(self, val):\n return super().get(val)\n\n\nclass Operator(enum.Int):\n __qualname__ = 'Operator'\n GREATER = 1\n GREATER_OR_EQUAL = 2\n EQUAL = 3\n NOTEQUAL = 4\n LESS_OR_EQUAL = 5\n LESS = 6\n\n @staticmethod\n def from_function(fn):\n if fn == operator.gt:\n return Operator.GREATER\n if fn == operator.ge:\n return Operator.GREATER_OR_EQUAL\n if fn == operator.eq:\n return Operator.EQUAL\n if fn == operator.ne:\n return Operator.NOTEQUAL\n if fn == operator.le:\n return Operator.LESS_OR_EQUAL\n if fn == operator.lt:\n return Operator.LESS\n\n @property\n def function(self):\n if self.value == Operator.GREATER:\n return operator.gt\n if self.value == Operator.GREATER_OR_EQUAL:\n return operator.ge\n if self.value == Operator.EQUAL:\n return operator.eq\n if self.value == Operator.NOTEQUAL:\n return operator.ne\n if self.value == Operator.LESS_OR_EQUAL:\n return operator.le\n if self.value == Operator.LESS:\n return operator.lt\n\n @property\n def inverse(self):\n if self == Operator.GREATER:\n return Operator.LESS_OR_EQUAL\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.LESS\n if self == Operator.EQUAL:\n return Operator.NOTEQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.LESS:\n return Operator.GREATER_OR_EQUAL\n\n @property\n def symbol(self):\n if self == Operator.GREATER:\n return '>'\n if self == Operator.GREATER_OR_EQUAL:\n return '>='\n if self == 
Operator.EQUAL:\n return '=='\n if self == Operator.NOTEQUAL:\n return '!='\n if self == Operator.LESS_OR_EQUAL:\n return '<='\n if self == Operator.LESS:\n return '<'\n\n @property\n def category(self):\n if self == Operator.GREATER:\n return Operator.GREATER\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.EQUAL:\n return Operator.EQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.LESS\n if self == Operator.LESS:\n return Operator.LESS\n\n\nclass InequalityOperator(enum.Int):\n __qualname__ = 'InequalityOperator'\n GREATER = Operator.GREATER\n GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL\n LESS_OR_EQUAL = Operator.LESS_OR_EQUAL\n LESS = Operator.LESS\n\n\n<mask token>\n\n\nclass Threshold:\n __qualname__ = 'Threshold'\n __slots__ = 'value', 'comparison'\n\n def __init__(self, value=None, comparison=None):\n self.value = value\n self.comparison = comparison\n\n def compare(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value, self.value)\n return False\n\n def compare_value(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value.value, self.value.value)\n return False\n\n def inverse(self):\n return Threshold(self.value, Operator.from_function(self.comparison\n ).inverse.function)\n\n def __str__(self):\n if self.comparison is None:\n return 'None'\n return '{} {}'.format(Operator.from_function(self.comparison).\n symbol, self.value)\n\n def __repr__(self):\n return '<Threshold {}>'.format(str(self))\n\n def __eq__(self, other):\n if not isinstance(other, Threshold):\n return False\n if not self.value == other.value:\n return False\n if not self.comparison == other.comparison:\n return False\n return True\n\n def __hash__(self):\n return hash((self.value, self.comparison))\n",
"step-3": "<mask token>\n\n\ndef linear_seq_gen(start, stop, step, max_count=None):\n delta = stop - start\n num = floor(abs(delta / step))\n if max_count is not None:\n num = min(num, max_count - 1)\n if num > 0:\n for i in range(0, num + 1):\n yield start + i * delta / num\n else:\n yield start\n if stop != start:\n yield stop\n\n\n<mask token>\n\n\ndef rad_to_deg(rad):\n return rad * 180 / PI\n\n\ndef angle_abs_difference(a1, a2):\n delta = sims4.math.mod_2pi(a1 - a2)\n if delta > sims4.math.PI:\n delta = sims4.math.TWO_PI - delta\n return delta\n\n\n<mask token>\n\n\ndef vector_dot_2d(a, b):\n return a.x * b.x + a.z * b.z\n\n\ndef vector_cross(a, b):\n return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -\n a.y * b.x)\n\n\n<mask token>\n\n\ndef almost_equal(a, b, epsilon=EPSILON):\n return abs(a - b) < epsilon\n\n\n<mask token>\n\n\ndef transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=\n QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon\n ) and quaternion_almost_equal(t1.orientation, t2.orientation,\n epsilon=epsilon_orientation)\n\n\n<mask token>\n\n\ndef vector3_rotate_axis_angle(v, angle, axis):\n q = Quaternion.from_axis_angle(angle, axis)\n return q.transform_vector(v)\n\n\n<mask token>\n\n\ndef invert_quaternion(q):\n d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)\n return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)\n\n\n<mask token>\n\n\nclass Location:\n __qualname__ = 'Location'\n __slots__ = ('transform', 'routing_surface', '_parent_ref',\n 'joint_name_or_hash', 'slot_hash')\n\n def __init__(self, transform, routing_surface, parent=None,\n joint_name_or_hash=None, slot_hash=0):\n self.transform = transform\n self.routing_surface = routing_surface\n self.parent = parent\n self.joint_name_or_hash = joint_name_or_hash\n self.slot_hash = slot_hash\n\n def __repr__(self):\n return 
standard_repr(self, self.transform, self.routing_surface,\n parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,\n slot_hash=self.slot_hash)\n\n def __eq__(self, other):\n if type(self) is not type(other):\n return False\n if self.transform != other.transform:\n return False\n if self.parent != other.parent:\n return False\n if self.routing_surface != other.routing_surface:\n return False\n slot_hash0 = self.joint_name_or_hash or self.slot_hash\n slot_hash1 = other.joint_name_or_hash or other.slot_hash\n if slot_hash0 != slot_hash1:\n return False\n return True\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n @property\n def parent(self):\n if self._parent_ref is not None:\n return self._parent_ref()\n\n @parent.setter\n def parent(self, value):\n if value is not None:\n self._parent_ref = value.ref()\n self.routing_surface = None\n else:\n self._parent_ref = None\n\n @property\n def joint_name_hash(self):\n if self.joint_name_or_hash is None:\n return 0\n if isinstance(self.joint_name_or_hash, int):\n return self.joint_name_or_hash\n return sims4.hash_util.hash32(self.joint_name_or_hash)\n\n @property\n def world_routing_surface(self):\n if self.parent is not None:\n return self.parent.location.world_routing_surface\n return self.routing_surface\n\n @property\n def zone_id(self):\n if self.world_routing_surface.type == 1:\n return self.world_routing_surface.primary_id\n return sims4.zone_utils.get_zone_id()\n\n @property\n def level(self):\n return self.world_routing_surface.secondary_id\n\n @property\n def world_transform(self):\n if self.parent is None:\n return self.transform\n transform = self.transform\n parent = self.parent\n if parent.is_part:\n parent_transform = parent.part_owner.transform\n else:\n parent_transform = parent.transform\n if self.joint_name_or_hash is None:\n if transform is None:\n return parent_transform\n return sims4.math.Transform.concatenate(transform, parent_transform\n )\n joint_transform = 
native.animation.get_joint_transform_from_rig(self\n .parent.rig, self.joint_name_or_hash)\n if transform is None:\n return sims4.math.Transform.concatenate(joint_transform,\n parent_transform)\n local_transform = sims4.math.Transform.concatenate(transform,\n joint_transform)\n return sims4.math.Transform.concatenate(local_transform,\n parent_transform)\n\n def duplicate(self):\n return type(self)(self.transform, self.routing_surface, self.parent,\n self.joint_name_or_hash, self.slot_hash)\n\n def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=\n DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,\n joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):\n if transform is DEFAULT:\n transform = self.transform\n if transform is not None:\n if translation is DEFAULT:\n translation = transform.translation\n if orientation is DEFAULT:\n orientation = transform.orientation\n transform = Transform(translation, orientation)\n if routing_surface is DEFAULT:\n routing_surface = self.routing_surface\n if parent is DEFAULT:\n parent = self.parent\n if joint_name_or_hash is DEFAULT:\n joint_name_or_hash = self.joint_name_or_hash\n if slot_hash is DEFAULT:\n slot_hash = self.slot_hash\n return type(self)(transform, routing_surface, parent,\n joint_name_or_hash, slot_hash)\n\n\nclass LinearCurve:\n __qualname__ = 'LinearCurve'\n __slots__ = 'points',\n\n def __init__(self, points):\n self.points = points\n self.points.sort(key=lambda i: i[0])\n\n def get(self, val):\n p_max = len(self.points) - 1\n if val <= self.points[0][0]:\n return self.points[0][1]\n if val >= self.points[p_max][0]:\n return self.points[p_max][1]\n i = p_max - 1\n while i > 0:\n while val < self.points[i][0]:\n i -= 1\n p1 = self.points[i]\n p2 = self.points[i + 1]\n percent = (val - p1[0]) / (p2[0] - p1[0])\n return (p2[1] - p1[1]) * percent + p1[1]\n\n\nclass WeightedUtilityCurve(LinearCurve):\n __qualname__ = 'WeightedUtilityCurve'\n\n def __init__(self, points, max_y=0, weight=1):\n if max_y 
== 0:\n max_y = self._find_largest_y(points)\n transformed_points = [(point[0], point[1] / max_y * weight) for\n point in points]\n super().__init__(transformed_points)\n\n def _find_largest_y(self, points):\n max_y = 0\n for point in points:\n while point[1] > max_y:\n max_y = point[1]\n return max_y\n\n\nclass CircularUtilityCurve(LinearCurve):\n __qualname__ = 'CircularUtilityCurve'\n\n def __init__(self, points, min_x, max_x):\n super().__init__(points)\n self._min_x = min_x\n self._max_x = max_x\n last_point = self.points[-1]\n distance_to_end = max_x - last_point[0]\n total_length = distance_to_end + self.points[0][1]\n distance_to_pivot_point = distance_to_end / total_length\n pivot_y_value = (self.points[0][1] - last_point[1]\n ) * distance_to_pivot_point + self.points[0][1]\n self.points.insert(0, (0, pivot_y_value))\n self.points.insert(len(self.points), (self._max_x, pivot_y_value))\n\n def get(self, val):\n return super().get(val)\n\n\nclass Operator(enum.Int):\n __qualname__ = 'Operator'\n GREATER = 1\n GREATER_OR_EQUAL = 2\n EQUAL = 3\n NOTEQUAL = 4\n LESS_OR_EQUAL = 5\n LESS = 6\n\n @staticmethod\n def from_function(fn):\n if fn == operator.gt:\n return Operator.GREATER\n if fn == operator.ge:\n return Operator.GREATER_OR_EQUAL\n if fn == operator.eq:\n return Operator.EQUAL\n if fn == operator.ne:\n return Operator.NOTEQUAL\n if fn == operator.le:\n return Operator.LESS_OR_EQUAL\n if fn == operator.lt:\n return Operator.LESS\n\n @property\n def function(self):\n if self.value == Operator.GREATER:\n return operator.gt\n if self.value == Operator.GREATER_OR_EQUAL:\n return operator.ge\n if self.value == Operator.EQUAL:\n return operator.eq\n if self.value == Operator.NOTEQUAL:\n return operator.ne\n if self.value == Operator.LESS_OR_EQUAL:\n return operator.le\n if self.value == Operator.LESS:\n return operator.lt\n\n @property\n def inverse(self):\n if self == Operator.GREATER:\n return Operator.LESS_OR_EQUAL\n if self == Operator.GREATER_OR_EQUAL:\n 
return Operator.LESS\n if self == Operator.EQUAL:\n return Operator.NOTEQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.LESS:\n return Operator.GREATER_OR_EQUAL\n\n @property\n def symbol(self):\n if self == Operator.GREATER:\n return '>'\n if self == Operator.GREATER_OR_EQUAL:\n return '>='\n if self == Operator.EQUAL:\n return '=='\n if self == Operator.NOTEQUAL:\n return '!='\n if self == Operator.LESS_OR_EQUAL:\n return '<='\n if self == Operator.LESS:\n return '<'\n\n @property\n def category(self):\n if self == Operator.GREATER:\n return Operator.GREATER\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.EQUAL:\n return Operator.EQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.LESS\n if self == Operator.LESS:\n return Operator.LESS\n\n\nclass InequalityOperator(enum.Int):\n __qualname__ = 'InequalityOperator'\n GREATER = Operator.GREATER\n GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL\n LESS_OR_EQUAL = Operator.LESS_OR_EQUAL\n LESS = Operator.LESS\n\n\n<mask token>\n\n\nclass Threshold:\n __qualname__ = 'Threshold'\n __slots__ = 'value', 'comparison'\n\n def __init__(self, value=None, comparison=None):\n self.value = value\n self.comparison = comparison\n\n def compare(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value, self.value)\n return False\n\n def compare_value(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value.value, self.value.value)\n return False\n\n def inverse(self):\n return Threshold(self.value, Operator.from_function(self.comparison\n ).inverse.function)\n\n def __str__(self):\n if self.comparison is None:\n return 'None'\n return '{} {}'.format(Operator.from_function(self.comparison).\n symbol, 
self.value)\n\n def __repr__(self):\n return '<Threshold {}>'.format(str(self))\n\n def __eq__(self, other):\n if not isinstance(other, Threshold):\n return False\n if not self.value == other.value:\n return False\n if not self.comparison == other.comparison:\n return False\n return True\n\n def __hash__(self):\n return hash((self.value, self.comparison))\n",
"step-4": "<mask token>\n\n\ndef clamp(lower_bound, x, upper_bound):\n if x < lower_bound:\n return lower_bound\n if x > upper_bound:\n return upper_bound\n return x\n\n\ndef interpolate(a, b, fraction):\n return a * fraction + (1 - fraction) * b\n\n\ndef linear_seq_gen(start, stop, step, max_count=None):\n delta = stop - start\n num = floor(abs(delta / step))\n if max_count is not None:\n num = min(num, max_count - 1)\n if num > 0:\n for i in range(0, num + 1):\n yield start + i * delta / num\n else:\n yield start\n if stop != start:\n yield stop\n\n\ndef deg_to_rad(deg):\n return deg * PI / 180\n\n\ndef rad_to_deg(rad):\n return rad * 180 / PI\n\n\ndef angle_abs_difference(a1, a2):\n delta = sims4.math.mod_2pi(a1 - a2)\n if delta > sims4.math.PI:\n delta = sims4.math.TWO_PI - delta\n return delta\n\n\n<mask token>\n\n\ndef vector_dot_2d(a, b):\n return a.x * b.x + a.z * b.z\n\n\ndef vector_cross(a, b):\n return Vector3(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y -\n a.y * b.x)\n\n\ndef vector_cross_2d(a, b):\n return a.z * b.x - a.x * b.z\n\n\ndef vector_normalize(v):\n return v / v.magnitude()\n\n\ndef vector_flatten(v):\n return Vector3(v.x, 0, v.z)\n\n\ndef almost_equal(a, b, epsilon=EPSILON):\n return abs(a - b) < epsilon\n\n\n<mask token>\n\n\ndef transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=\n QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon\n ) and quaternion_almost_equal(t1.orientation, t2.orientation,\n epsilon=epsilon_orientation)\n\n\ndef transform_almost_equal_2d(t1, t2, epsilon=EPSILON, epsilon_orientation=\n QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal_2d(t1.translation, t2.translation, epsilon=\n epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation,\n epsilon=epsilon_orientation)\n\n\ndef 
vector3_rotate_axis_angle(v, angle, axis):\n q = Quaternion.from_axis_angle(angle, axis)\n return q.transform_vector(v)\n\n\n<mask token>\n\n\ndef angle_to_yaw_quaternion(angle):\n return Quaternion.from_axis_angle(angle, UP_AXIS)\n\n\n<mask token>\n\n\ndef invert_quaternion(q):\n d = 1.0 / (q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w)\n return Quaternion(-d * q.x, -d * q.y, -d * q.z, d * q.w)\n\n\ndef get_difference_transform(transform_a, transform_b):\n v = transform_b.translation - transform_a.translation\n a_q_i = invert_quaternion(transform_a.orientation)\n q = Quaternion.concatenate(transform_b.orientation, a_q_i)\n v_prime = Quaternion.transform_vector(a_q_i, v)\n return Transform(v_prime, q)\n\n\nclass Location:\n __qualname__ = 'Location'\n __slots__ = ('transform', 'routing_surface', '_parent_ref',\n 'joint_name_or_hash', 'slot_hash')\n\n def __init__(self, transform, routing_surface, parent=None,\n joint_name_or_hash=None, slot_hash=0):\n self.transform = transform\n self.routing_surface = routing_surface\n self.parent = parent\n self.joint_name_or_hash = joint_name_or_hash\n self.slot_hash = slot_hash\n\n def __repr__(self):\n return standard_repr(self, self.transform, self.routing_surface,\n parent=self.parent, joint_name_or_hash=self.joint_name_or_hash,\n slot_hash=self.slot_hash)\n\n def __eq__(self, other):\n if type(self) is not type(other):\n return False\n if self.transform != other.transform:\n return False\n if self.parent != other.parent:\n return False\n if self.routing_surface != other.routing_surface:\n return False\n slot_hash0 = self.joint_name_or_hash or self.slot_hash\n slot_hash1 = other.joint_name_or_hash or other.slot_hash\n if slot_hash0 != slot_hash1:\n return False\n return True\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n @property\n def parent(self):\n if self._parent_ref is not None:\n return self._parent_ref()\n\n @parent.setter\n def parent(self, value):\n if value is not None:\n self._parent_ref = 
value.ref()\n self.routing_surface = None\n else:\n self._parent_ref = None\n\n @property\n def joint_name_hash(self):\n if self.joint_name_or_hash is None:\n return 0\n if isinstance(self.joint_name_or_hash, int):\n return self.joint_name_or_hash\n return sims4.hash_util.hash32(self.joint_name_or_hash)\n\n @property\n def world_routing_surface(self):\n if self.parent is not None:\n return self.parent.location.world_routing_surface\n return self.routing_surface\n\n @property\n def zone_id(self):\n if self.world_routing_surface.type == 1:\n return self.world_routing_surface.primary_id\n return sims4.zone_utils.get_zone_id()\n\n @property\n def level(self):\n return self.world_routing_surface.secondary_id\n\n @property\n def world_transform(self):\n if self.parent is None:\n return self.transform\n transform = self.transform\n parent = self.parent\n if parent.is_part:\n parent_transform = parent.part_owner.transform\n else:\n parent_transform = parent.transform\n if self.joint_name_or_hash is None:\n if transform is None:\n return parent_transform\n return sims4.math.Transform.concatenate(transform, parent_transform\n )\n joint_transform = native.animation.get_joint_transform_from_rig(self\n .parent.rig, self.joint_name_or_hash)\n if transform is None:\n return sims4.math.Transform.concatenate(joint_transform,\n parent_transform)\n local_transform = sims4.math.Transform.concatenate(transform,\n joint_transform)\n return sims4.math.Transform.concatenate(local_transform,\n parent_transform)\n\n def duplicate(self):\n return type(self)(self.transform, self.routing_surface, self.parent,\n self.joint_name_or_hash, self.slot_hash)\n\n def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=\n DEFAULT, routing_surface=DEFAULT, parent=DEFAULT,\n joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):\n if transform is DEFAULT:\n transform = self.transform\n if transform is not None:\n if translation is DEFAULT:\n translation = transform.translation\n if orientation is 
DEFAULT:\n orientation = transform.orientation\n transform = Transform(translation, orientation)\n if routing_surface is DEFAULT:\n routing_surface = self.routing_surface\n if parent is DEFAULT:\n parent = self.parent\n if joint_name_or_hash is DEFAULT:\n joint_name_or_hash = self.joint_name_or_hash\n if slot_hash is DEFAULT:\n slot_hash = self.slot_hash\n return type(self)(transform, routing_surface, parent,\n joint_name_or_hash, slot_hash)\n\n\nclass LinearCurve:\n __qualname__ = 'LinearCurve'\n __slots__ = 'points',\n\n def __init__(self, points):\n self.points = points\n self.points.sort(key=lambda i: i[0])\n\n def get(self, val):\n p_max = len(self.points) - 1\n if val <= self.points[0][0]:\n return self.points[0][1]\n if val >= self.points[p_max][0]:\n return self.points[p_max][1]\n i = p_max - 1\n while i > 0:\n while val < self.points[i][0]:\n i -= 1\n p1 = self.points[i]\n p2 = self.points[i + 1]\n percent = (val - p1[0]) / (p2[0] - p1[0])\n return (p2[1] - p1[1]) * percent + p1[1]\n\n\nclass WeightedUtilityCurve(LinearCurve):\n __qualname__ = 'WeightedUtilityCurve'\n\n def __init__(self, points, max_y=0, weight=1):\n if max_y == 0:\n max_y = self._find_largest_y(points)\n transformed_points = [(point[0], point[1] / max_y * weight) for\n point in points]\n super().__init__(transformed_points)\n\n def _find_largest_y(self, points):\n max_y = 0\n for point in points:\n while point[1] > max_y:\n max_y = point[1]\n return max_y\n\n\nclass CircularUtilityCurve(LinearCurve):\n __qualname__ = 'CircularUtilityCurve'\n\n def __init__(self, points, min_x, max_x):\n super().__init__(points)\n self._min_x = min_x\n self._max_x = max_x\n last_point = self.points[-1]\n distance_to_end = max_x - last_point[0]\n total_length = distance_to_end + self.points[0][1]\n distance_to_pivot_point = distance_to_end / total_length\n pivot_y_value = (self.points[0][1] - last_point[1]\n ) * distance_to_pivot_point + self.points[0][1]\n self.points.insert(0, (0, pivot_y_value))\n 
self.points.insert(len(self.points), (self._max_x, pivot_y_value))\n\n def get(self, val):\n return super().get(val)\n\n\nclass Operator(enum.Int):\n __qualname__ = 'Operator'\n GREATER = 1\n GREATER_OR_EQUAL = 2\n EQUAL = 3\n NOTEQUAL = 4\n LESS_OR_EQUAL = 5\n LESS = 6\n\n @staticmethod\n def from_function(fn):\n if fn == operator.gt:\n return Operator.GREATER\n if fn == operator.ge:\n return Operator.GREATER_OR_EQUAL\n if fn == operator.eq:\n return Operator.EQUAL\n if fn == operator.ne:\n return Operator.NOTEQUAL\n if fn == operator.le:\n return Operator.LESS_OR_EQUAL\n if fn == operator.lt:\n return Operator.LESS\n\n @property\n def function(self):\n if self.value == Operator.GREATER:\n return operator.gt\n if self.value == Operator.GREATER_OR_EQUAL:\n return operator.ge\n if self.value == Operator.EQUAL:\n return operator.eq\n if self.value == Operator.NOTEQUAL:\n return operator.ne\n if self.value == Operator.LESS_OR_EQUAL:\n return operator.le\n if self.value == Operator.LESS:\n return operator.lt\n\n @property\n def inverse(self):\n if self == Operator.GREATER:\n return Operator.LESS_OR_EQUAL\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.LESS\n if self == Operator.EQUAL:\n return Operator.NOTEQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.LESS:\n return Operator.GREATER_OR_EQUAL\n\n @property\n def symbol(self):\n if self == Operator.GREATER:\n return '>'\n if self == Operator.GREATER_OR_EQUAL:\n return '>='\n if self == Operator.EQUAL:\n return '=='\n if self == Operator.NOTEQUAL:\n return '!='\n if self == Operator.LESS_OR_EQUAL:\n return '<='\n if self == Operator.LESS:\n return '<'\n\n @property\n def category(self):\n if self == Operator.GREATER:\n return Operator.GREATER\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.EQUAL:\n return Operator.EQUAL\n if self == Operator.NOTEQUAL:\n return 
Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.LESS\n if self == Operator.LESS:\n return Operator.LESS\n\n\nclass InequalityOperator(enum.Int):\n __qualname__ = 'InequalityOperator'\n GREATER = Operator.GREATER\n GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL\n LESS_OR_EQUAL = Operator.LESS_OR_EQUAL\n LESS = Operator.LESS\n\n\n<mask token>\n\n\nclass Threshold:\n __qualname__ = 'Threshold'\n __slots__ = 'value', 'comparison'\n\n def __init__(self, value=None, comparison=None):\n self.value = value\n self.comparison = comparison\n\n def compare(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value, self.value)\n return False\n\n def compare_value(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value.value, self.value.value)\n return False\n\n def inverse(self):\n return Threshold(self.value, Operator.from_function(self.comparison\n ).inverse.function)\n\n def __str__(self):\n if self.comparison is None:\n return 'None'\n return '{} {}'.format(Operator.from_function(self.comparison).\n symbol, self.value)\n\n def __repr__(self):\n return '<Threshold {}>'.format(str(self))\n\n def __eq__(self, other):\n if not isinstance(other, Threshold):\n return False\n if not self.value == other.value:\n return False\n if not self.comparison == other.comparison:\n return False\n return True\n\n def __hash__(self):\n return hash((self.value, self.comparison))\n",
"step-5": "from _math import Vector2, Vector3, Quaternion, Transform, Vector3Immutable, QuaternionImmutable, minimum_distance\nfrom _math import mod_2pi\nfrom math import pi as PI, sqrt, fmod, floor, atan2, acos, asin, ceil, pi, e\nimport operator\nfrom sims4.repr_utils import standard_repr\nimport enum\nimport native.animation\nimport sims4.hash_util\nfrom singletons import DEFAULT\nTWO_PI = PI*2\nEPSILON = 1.192092896e-07\nQUATERNION_EPSILON = 0.001\nMAX_FLOAT = 3.402823466e+38\nMAX_UINT64 = 18446744073709551615\nMAX_INT64 = 922337203685477580\nMAX_UINT32 = 4294967295\nMAX_INT32 = 2147483647\nMAX_UINT16 = 65535\nMAX_INT16 = 32767\nPOS_INFINITY = float('inf')\nNEG_INFINITY = float('-inf')\nFORWARD_AXIS = Vector3.Z_AXIS()\nUP_AXIS = Vector3.Y_AXIS()\n\ndef clamp(lower_bound, x, upper_bound):\n if x < lower_bound:\n return lower_bound\n if x > upper_bound:\n return upper_bound\n return x\n\ndef interpolate(a, b, fraction):\n return a*fraction + (1 - fraction)*b\n\ndef linear_seq_gen(start, stop, step, max_count=None):\n delta = stop - start\n num = floor(abs(delta/step))\n if max_count is not None:\n num = min(num, max_count - 1)\n if num > 0:\n for i in range(0, num + 1):\n yield start + i*delta/num\n else:\n yield start\n if stop != start:\n yield stop\n\ndef deg_to_rad(deg):\n return deg*PI/180\n\ndef rad_to_deg(rad):\n return rad*180/PI\n\ndef angle_abs_difference(a1, a2):\n delta = sims4.math.mod_2pi(a1 - a2)\n if delta > sims4.math.PI:\n delta = sims4.math.TWO_PI - delta\n return delta\n\ndef vector_dot(a, b):\n return a.x*b.x + a.y*b.y + a.z*b.z\n\ndef vector_dot_2d(a, b):\n return a.x*b.x + a.z*b.z\n\ndef vector_cross(a, b):\n return Vector3(a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x)\n\ndef vector_cross_2d(a, b):\n return a.z*b.x - a.x*b.z\n\ndef vector_normalize(v):\n return v/v.magnitude()\n\ndef vector_flatten(v):\n return Vector3(v.x, 0, v.z)\n\ndef almost_equal(a, b, epsilon=EPSILON):\n return abs(a - b) < epsilon\n\ndef 
vector3_almost_equal(v1, v2, epsilon=EPSILON):\n return abs(v1.x - v2.x) < epsilon and (abs(v1.y - v2.y) < epsilon and abs(v1.z - v2.z) < epsilon)\n\ndef vector3_almost_equal_2d(v1, v2, epsilon=EPSILON):\n return abs(v1.x - v2.x) < epsilon and abs(v1.z - v2.z) < epsilon\n\ndef quaternion_almost_equal(q1, q2, epsilon=QUATERNION_EPSILON):\n if abs(q1.x - q2.x) < epsilon and (abs(q1.y - q2.y) < epsilon and abs(q1.z - q2.z) < epsilon) and abs(q1.w - q2.w) < epsilon:\n return True\n if abs(q1.x + q2.x) < epsilon and (abs(q1.y + q2.y) < epsilon and abs(q1.z + q2.z) < epsilon) and abs(q1.w + q2.w) < epsilon:\n return True\n return False\n\ndef transform_almost_equal(t1, t2, epsilon=EPSILON, epsilon_orientation=QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal(t1.translation, t2.translation, epsilon=epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation, epsilon=epsilon_orientation)\n\ndef transform_almost_equal_2d(t1, t2, epsilon=EPSILON, epsilon_orientation=QUATERNION_EPSILON):\n if epsilon_orientation is DEFAULT:\n epsilon_orientation = epsilon\n return vector3_almost_equal_2d(t1.translation, t2.translation, epsilon=epsilon) and quaternion_almost_equal(t1.orientation, t2.orientation, epsilon=epsilon_orientation)\n\ndef vector3_rotate_axis_angle(v, angle, axis):\n q = Quaternion.from_axis_angle(angle, axis)\n return q.transform_vector(v)\n\ndef vector3_angle(v):\n return atan2(v.x, v.z)\n\ndef angle_to_yaw_quaternion(angle):\n return Quaternion.from_axis_angle(angle, UP_AXIS)\n\ndef yaw_quaternion_to_angle(q):\n if almost_equal(q.y, 0.0):\n return 0\n angle = acos(q.w)*2.0\n if q.y > 0:\n return angle\n return -angle\n\ndef get_closest_point_2D(segment, p):\n a1 = segment[0]\n a2 = segment[1]\n (x1, x2) = (a1.x, a2.x)\n x3 = p.x\n (z1, z2) = (a1.z, a2.z)\n z3 = p.z\n dx = x2 - x1\n dz = z2 - z1\n t = ((x3 - x1)*dx + (z3 - z1)*dz)/(dx*dx + dz*dz)\n t = sims4.math.clamp(0, t, 1)\n x0 = x1 + 
t*dx\n z0 = z1 + t*dz\n return Vector3(x0, p.y, z0)\n\ndef invert_quaternion(q):\n d = 1.0/(q.x*q.x + q.y*q.y + q.z*q.z + q.w*q.w)\n return Quaternion(-d*q.x, -d*q.y, -d*q.z, d*q.w)\n\ndef get_difference_transform(transform_a, transform_b):\n v = transform_b.translation - transform_a.translation\n a_q_i = invert_quaternion(transform_a.orientation)\n q = Quaternion.concatenate(transform_b.orientation, a_q_i)\n v_prime = Quaternion.transform_vector(a_q_i, v)\n return Transform(v_prime, q)\n\nclass Location:\n __qualname__ = 'Location'\n __slots__ = ('transform', 'routing_surface', '_parent_ref', 'joint_name_or_hash', 'slot_hash')\n\n def __init__(self, transform, routing_surface, parent=None, joint_name_or_hash=None, slot_hash=0):\n self.transform = transform\n self.routing_surface = routing_surface\n self.parent = parent\n self.joint_name_or_hash = joint_name_or_hash\n self.slot_hash = slot_hash\n\n def __repr__(self):\n return standard_repr(self, self.transform, self.routing_surface, parent=self.parent, joint_name_or_hash=self.joint_name_or_hash, slot_hash=self.slot_hash)\n\n def __eq__(self, other):\n if type(self) is not type(other):\n return False\n if self.transform != other.transform:\n return False\n if self.parent != other.parent:\n return False\n if self.routing_surface != other.routing_surface:\n return False\n slot_hash0 = self.joint_name_or_hash or self.slot_hash\n slot_hash1 = other.joint_name_or_hash or other.slot_hash\n if slot_hash0 != slot_hash1:\n return False\n return True\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n @property\n def parent(self):\n if self._parent_ref is not None:\n return self._parent_ref()\n\n @parent.setter\n def parent(self, value):\n if value is not None:\n self._parent_ref = value.ref()\n self.routing_surface = None\n else:\n self._parent_ref = None\n\n @property\n def joint_name_hash(self):\n if self.joint_name_or_hash is None:\n return 0\n if isinstance(self.joint_name_or_hash, int):\n return 
self.joint_name_or_hash\n return sims4.hash_util.hash32(self.joint_name_or_hash)\n\n @property\n def world_routing_surface(self):\n if self.parent is not None:\n return self.parent.location.world_routing_surface\n return self.routing_surface\n\n @property\n def zone_id(self):\n if self.world_routing_surface.type == 1:\n return self.world_routing_surface.primary_id\n return sims4.zone_utils.get_zone_id()\n\n @property\n def level(self):\n return self.world_routing_surface.secondary_id\n\n @property\n def world_transform(self):\n if self.parent is None:\n return self.transform\n transform = self.transform\n parent = self.parent\n if parent.is_part:\n parent_transform = parent.part_owner.transform\n else:\n parent_transform = parent.transform\n if self.joint_name_or_hash is None:\n if transform is None:\n return parent_transform\n return sims4.math.Transform.concatenate(transform, parent_transform)\n joint_transform = native.animation.get_joint_transform_from_rig(self.parent.rig, self.joint_name_or_hash)\n if transform is None:\n return sims4.math.Transform.concatenate(joint_transform, parent_transform)\n local_transform = sims4.math.Transform.concatenate(transform, joint_transform)\n return sims4.math.Transform.concatenate(local_transform, parent_transform)\n\n def duplicate(self):\n return type(self)(self.transform, self.routing_surface, self.parent, self.joint_name_or_hash, self.slot_hash)\n\n def clone(self, *, transform=DEFAULT, translation=DEFAULT, orientation=DEFAULT, routing_surface=DEFAULT, parent=DEFAULT, joint_name_or_hash=DEFAULT, slot_hash=DEFAULT):\n if transform is DEFAULT:\n transform = self.transform\n if transform is not None:\n if translation is DEFAULT:\n translation = transform.translation\n if orientation is DEFAULT:\n orientation = transform.orientation\n transform = Transform(translation, orientation)\n if routing_surface is DEFAULT:\n routing_surface = self.routing_surface\n if parent is DEFAULT:\n parent = self.parent\n if joint_name_or_hash 
is DEFAULT:\n joint_name_or_hash = self.joint_name_or_hash\n if slot_hash is DEFAULT:\n slot_hash = self.slot_hash\n return type(self)(transform, routing_surface, parent, joint_name_or_hash, slot_hash)\n\nclass LinearCurve:\n __qualname__ = 'LinearCurve'\n __slots__ = ('points',)\n\n def __init__(self, points):\n self.points = points\n self.points.sort(key=lambda i: i[0])\n\n def get(self, val):\n p_max = len(self.points) - 1\n if val <= self.points[0][0]:\n return self.points[0][1]\n if val >= self.points[p_max][0]:\n return self.points[p_max][1]\n i = p_max - 1\n while i > 0:\n while val < self.points[i][0]:\n i -= 1\n p1 = self.points[i]\n p2 = self.points[i + 1]\n percent = (val - p1[0])/(p2[0] - p1[0])\n return (p2[1] - p1[1])*percent + p1[1]\n\nclass WeightedUtilityCurve(LinearCurve):\n __qualname__ = 'WeightedUtilityCurve'\n\n def __init__(self, points, max_y=0, weight=1):\n if max_y == 0:\n max_y = self._find_largest_y(points)\n transformed_points = [(point[0], point[1]/max_y*weight) for point in points]\n super().__init__(transformed_points)\n\n def _find_largest_y(self, points):\n max_y = 0\n for point in points:\n while point[1] > max_y:\n max_y = point[1]\n return max_y\n\nclass CircularUtilityCurve(LinearCurve):\n __qualname__ = 'CircularUtilityCurve'\n\n def __init__(self, points, min_x, max_x):\n super().__init__(points)\n self._min_x = min_x\n self._max_x = max_x\n last_point = self.points[-1]\n distance_to_end = max_x - last_point[0]\n total_length = distance_to_end + self.points[0][1]\n distance_to_pivot_point = distance_to_end/total_length\n pivot_y_value = (self.points[0][1] - last_point[1])*distance_to_pivot_point + self.points[0][1]\n self.points.insert(0, (0, pivot_y_value))\n self.points.insert(len(self.points), (self._max_x, pivot_y_value))\n\n def get(self, val):\n return super().get(val)\n\nclass Operator(enum.Int):\n __qualname__ = 'Operator'\n GREATER = 1\n GREATER_OR_EQUAL = 2\n EQUAL = 3\n NOTEQUAL = 4\n LESS_OR_EQUAL = 5\n LESS = 
6\n\n @staticmethod\n def from_function(fn):\n if fn == operator.gt:\n return Operator.GREATER\n if fn == operator.ge:\n return Operator.GREATER_OR_EQUAL\n if fn == operator.eq:\n return Operator.EQUAL\n if fn == operator.ne:\n return Operator.NOTEQUAL\n if fn == operator.le:\n return Operator.LESS_OR_EQUAL\n if fn == operator.lt:\n return Operator.LESS\n\n @property\n def function(self):\n if self.value == Operator.GREATER:\n return operator.gt\n if self.value == Operator.GREATER_OR_EQUAL:\n return operator.ge\n if self.value == Operator.EQUAL:\n return operator.eq\n if self.value == Operator.NOTEQUAL:\n return operator.ne\n if self.value == Operator.LESS_OR_EQUAL:\n return operator.le\n if self.value == Operator.LESS:\n return operator.lt\n\n @property\n def inverse(self):\n if self == Operator.GREATER:\n return Operator.LESS_OR_EQUAL\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.LESS\n if self == Operator.EQUAL:\n return Operator.NOTEQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.LESS:\n return Operator.GREATER_OR_EQUAL\n\n @property\n def symbol(self):\n if self == Operator.GREATER:\n return '>'\n if self == Operator.GREATER_OR_EQUAL:\n return '>='\n if self == Operator.EQUAL:\n return '=='\n if self == Operator.NOTEQUAL:\n return '!='\n if self == Operator.LESS_OR_EQUAL:\n return '<='\n if self == Operator.LESS:\n return '<'\n\n @property\n def category(self):\n if self == Operator.GREATER:\n return Operator.GREATER\n if self == Operator.GREATER_OR_EQUAL:\n return Operator.GREATER\n if self == Operator.EQUAL:\n return Operator.EQUAL\n if self == Operator.NOTEQUAL:\n return Operator.EQUAL\n if self == Operator.LESS_OR_EQUAL:\n return Operator.LESS\n if self == Operator.LESS:\n return Operator.LESS\n\nclass InequalityOperator(enum.Int):\n __qualname__ = 'InequalityOperator'\n GREATER = Operator.GREATER\n GREATER_OR_EQUAL = Operator.GREATER_OR_EQUAL\n 
LESS_OR_EQUAL = Operator.LESS_OR_EQUAL\n LESS = Operator.LESS\n\nwith InequalityOperator.__reload_context__(InequalityOperator, InequalityOperator):\n InequalityOperator.from_function = Operator.from_function\n InequalityOperator.function = Operator.function\n InequalityOperator.inverse = Operator.inverse\n InequalityOperator.symbol = Operator.symbol\n InequalityOperator.category = Operator.category\n\nclass Threshold:\n __qualname__ = 'Threshold'\n __slots__ = ('value', 'comparison')\n\n def __init__(self, value=None, comparison=None):\n self.value = value\n self.comparison = comparison\n\n def compare(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value, self.value)\n return False\n\n def compare_value(self, source_value):\n if self.value is not None and self.comparison is not None:\n return self.comparison(source_value.value, self.value.value)\n return False\n\n def inverse(self):\n return Threshold(self.value, Operator.from_function(self.comparison).inverse.function)\n\n def __str__(self):\n if self.comparison is None:\n return 'None'\n return '{} {}'.format(Operator.from_function(self.comparison).symbol, self.value)\n\n def __repr__(self):\n return '<Threshold {}>'.format(str(self))\n\n def __eq__(self, other):\n if not isinstance(other, Threshold):\n return False\n if not self.value == other.value:\n return False\n if not self.comparison == other.comparison:\n return False\n return True\n\n def __hash__(self):\n return hash((self.value, self.comparison))\n\n",
"step-ids": [
52,
53,
55,
64,
75
]
}
|
[
52,
53,
55,
64,
75
] |
<|reserved_special_token_0|>
class ventaDetalle:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Articulo:
<|reserved_special_token_0|>
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Articulo:
def __init__(self, cod, des, pre, stoc):
self.codigo = cod
self.descripcion = des
self.precio = pre
self.stock = stoc
class ventaDetalle:
def __init__(self, pro, pre, cant):
self.producto = pro
self.precio = pre
self.cantidad = cant
<|reserved_special_token_1|>
""""
articulo
cliente
venta
ventadet
"""
class Articulo:
def __init__(self,cod,des,pre,stoc):
self.codigo=cod
self.descripcion = des
self.precio=pre
self.stock=stoc
class ventaDetalle:
def __init__(self,pro,pre,cant):
self.producto=pro
self.precio=pre
self.cantidad=cant
|
flexible
|
{
"blob_id": "f70f66926b9e2bf8b387d481263493d7f4c65397",
"index": 516,
"step-1": "<mask token>\n\n\nclass ventaDetalle:\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ventaDetalle:\n\n def __init__(self, pro, pre, cant):\n self.producto = pro\n self.precio = pre\n self.cantidad = cant\n",
"step-3": "<mask token>\n\n\nclass Articulo:\n <mask token>\n\n\nclass ventaDetalle:\n\n def __init__(self, pro, pre, cant):\n self.producto = pro\n self.precio = pre\n self.cantidad = cant\n",
"step-4": "<mask token>\n\n\nclass Articulo:\n\n def __init__(self, cod, des, pre, stoc):\n self.codigo = cod\n self.descripcion = des\n self.precio = pre\n self.stock = stoc\n\n\nclass ventaDetalle:\n\n def __init__(self, pro, pre, cant):\n self.producto = pro\n self.precio = pre\n self.cantidad = cant\n",
"step-5": "\"\"\"\"\r\narticulo\r\ncliente\r\nventa\r\nventadet\r\n\"\"\"\r\nclass Articulo:\r\n def __init__(self,cod,des,pre,stoc):\r\n self.codigo=cod\r\n self.descripcion = des\r\n self.precio=pre\r\n self.stock=stoc\r\n\r\n\r\n\r\n\r\n\r\nclass ventaDetalle:\r\n def __init__(self,pro,pre,cant):\r\n self.producto=pro\r\n self.precio=pre\r\n self.cantidad=cant\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from typing import List
from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy.orm import Session
from myfirstpython.fastapi import models, crud, schemas
from myfirstpython.fastapi.dbconnection import engine, SessionLocal
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
# Dependency
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post("/jobs/", response_model=schemas.JobCreate)
def create_job(job: schemas.JobCreate, db: Session = Depends(get_db)):
db_job = crud.get_job(db, job.title)
if db_job:
raise HTTPException(status_code=400, detail="Job already Posted")
return crud.create_job(db=db, job=job)
@app.get("/jobs/", response_model=List[schemas.Job])
def read_jobs(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
jobs = crud.get_jobs(db, skip=skip, limit=limit)
return jobs
@app.get("/jobs/{job_id}", response_model=schemas.Job)
def read_job(job_id: int, db: Session = Depends(get_db)):
db_job = crud.get_job(db, job_id=job_id)
if db_job is None:
raise HTTPException(status_code=404, detail="Job not found")
return db_job
@app.post("/cands/", response_model=schemas.CanCreate)
def create_can(can: schemas.CanCreate, db: Session = Depends(get_db)):
db_can = crud.get_candidate(db, can.email)
if db_can:
raise HTTPException(status_code=400, detail="Candidate already Present")
return crud.create_candidate(db=db, can=can)
@app.get("/cands/", response_model=List[schemas.Can])
def read_cans(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
cans = crud.get_candidates(db, skip=skip, limit=limit)
return cans
@app.get("/cands/{email}", response_model=schemas.Can)
def read_can(email: str, db: Session = Depends(get_db)):
db_can = crud.get_candidate(db, email)
if db_can is None:
raise HTTPException(status_code=404, detail="Candidate not found")
return db_can
@app.get("/jobapps/", response_model=List[schemas.AppBase])
def read_jobapps(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
jobapps = crud.get_jobapps(db, skip=skip, limit=limit)
return jobapps
@app.get("/jobapps/{appid}", response_model=schemas.AppBase)
def read_jobapp(appid: int, db: Session = Depends(get_db)):
db_jobapp = crud.get_jobapp(db, appid)
if db_jobapp is None:
raise HTTPException(status_code=404, detail="Job Application not found")
return db_jobapp
|
normal
|
{
"blob_id": "ad474f5120ca2a8c81b18071ab364e6d6cf9e653",
"index": 7031,
"step-1": "<mask token>\n\n\n@app.get('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\n@app.get('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\n@app.post('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\n@app.get('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\n@app.get('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\n@app.get('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\n<mask token>\n\n\n@app.get('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\n@app.get('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\n@app.post('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\n@app.get('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\n@app.get('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\n@app.get('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n@app.get('/jobapps/{appid}', response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n",
"step-3": "<mask token>\nmodels.Base.metadata.create_all(bind=engine)\n<mask token>\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\n@app.post('/jobs/', response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail='Job already Posted')\n return crud.create_job(db=db, job=job)\n\n\n@app.get('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\n@app.get('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\n@app.post('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\n@app.get('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\n@app.get('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\n@app.get('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n@app.get('/jobapps/{appid}', response_model=schemas.AppBase)\ndef read_jobapp(appid: 
int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n",
"step-4": "<mask token>\nmodels.Base.metadata.create_all(bind=engine)\napp = FastAPI()\n\n\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\n@app.post('/jobs/', response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail='Job already Posted')\n return crud.create_job(db=db, job=job)\n\n\n@app.get('/jobs/', response_model=List[schemas.Job])\ndef read_jobs(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\n@app.get('/jobs/{job_id}', response_model=schemas.Job)\ndef read_job(job_id: int, db: Session=Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail='Job not found')\n return db_job\n\n\n@app.post('/cands/', response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail='Candidate already Present'\n )\n return crud.create_candidate(db=db, can=can)\n\n\n@app.get('/cands/', response_model=List[schemas.Can])\ndef read_cans(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\n@app.get('/cands/{email}', response_model=schemas.Can)\ndef read_can(email: str, db: Session=Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail='Candidate not found')\n return db_can\n\n\n@app.get('/jobapps/', response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int=0, limit: int=100, db: Session=Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n@app.get('/jobapps/{appid}', response_model=schemas.AppBase)\ndef 
read_jobapp(appid: int, db: Session=Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail='Job Application not found'\n )\n return db_jobapp\n",
"step-5": "from typing import List\n\nfrom fastapi import Depends, FastAPI, HTTPException\nfrom sqlalchemy.orm import Session\n\nfrom myfirstpython.fastapi import models, crud, schemas\nfrom myfirstpython.fastapi.dbconnection import engine, SessionLocal\n\nmodels.Base.metadata.create_all(bind=engine)\n\napp = FastAPI()\n\n\n# Dependency\ndef get_db():\n db = SessionLocal()\n try:\n yield db\n finally:\n db.close()\n\n\n@app.post(\"/jobs/\", response_model=schemas.JobCreate)\ndef create_job(job: schemas.JobCreate, db: Session = Depends(get_db)):\n db_job = crud.get_job(db, job.title)\n if db_job:\n raise HTTPException(status_code=400, detail=\"Job already Posted\")\n return crud.create_job(db=db, job=job)\n\n\n@app.get(\"/jobs/\", response_model=List[schemas.Job])\ndef read_jobs(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n jobs = crud.get_jobs(db, skip=skip, limit=limit)\n return jobs\n\n\n@app.get(\"/jobs/{job_id}\", response_model=schemas.Job)\ndef read_job(job_id: int, db: Session = Depends(get_db)):\n db_job = crud.get_job(db, job_id=job_id)\n if db_job is None:\n raise HTTPException(status_code=404, detail=\"Job not found\")\n return db_job\n\n\n@app.post(\"/cands/\", response_model=schemas.CanCreate)\ndef create_can(can: schemas.CanCreate, db: Session = Depends(get_db)):\n db_can = crud.get_candidate(db, can.email)\n if db_can:\n raise HTTPException(status_code=400, detail=\"Candidate already Present\")\n return crud.create_candidate(db=db, can=can)\n\n\n@app.get(\"/cands/\", response_model=List[schemas.Can])\ndef read_cans(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n cans = crud.get_candidates(db, skip=skip, limit=limit)\n return cans\n\n\n@app.get(\"/cands/{email}\", response_model=schemas.Can)\ndef read_can(email: str, db: Session = Depends(get_db)):\n db_can = crud.get_candidate(db, email)\n if db_can is None:\n raise HTTPException(status_code=404, detail=\"Candidate not found\")\n return 
db_can\n\n\n@app.get(\"/jobapps/\", response_model=List[schemas.AppBase])\ndef read_jobapps(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):\n jobapps = crud.get_jobapps(db, skip=skip, limit=limit)\n return jobapps\n\n\n@app.get(\"/jobapps/{appid}\", response_model=schemas.AppBase)\ndef read_jobapp(appid: int, db: Session = Depends(get_db)):\n db_jobapp = crud.get_jobapp(db, appid)\n if db_jobapp is None:\n raise HTTPException(status_code=404, detail=\"Job Application not found\")\n return db_jobapp\n",
"step-ids": [
6,
8,
10,
11,
13
]
}
|
[
6,
8,
10,
11,
13
] |
import random
from .action import Action
from ..transition.step import Step
from ..value.estimators import ValueEstimator
def greedy(steps: "list[Step]", actions: "list[Action]", value_estimator: "ValueEstimator") -> "Action":
    """Return the action with the highest estimated value.

    Fixes: the original annotations used the invalid ``[Step]`` literal
    form (evaluated eagerly at def-time) and declared ``-> int`` even
    though the function returns an *action*, not an index.  String
    annotations keep them lazy and correct.
    """
    # Estimate every action's value, then pick the action at the argmax.
    estimations = [value_estimator(steps, action) for action in actions]
    return actions[estimations.index(max(estimations))]
def e_greedy(
    steps: "list[Step]", actions: "list[Action]", value_estimator: "ValueEstimator", e: float,
) -> "Action":
    """Epsilon-greedy action selection.

    With probability ``e`` explore by picking a uniformly random action;
    otherwise exploit via ``greedy``.

    Bug fix: ``random.sample(actions, 1)`` returned a one-element *list*,
    inconsistent with the single action returned by the greedy branch and
    with the declared return type; ``random.choice`` returns the action
    itself.
    """
    if random.uniform(0, 1) < e:
        return random.choice(actions)
    return greedy(steps, actions, value_estimator)
|
normal
|
{
"blob_id": "eab45dafd0366af8ab904eb33719b86777ba3d65",
"index": 2925,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef e_greedy(steps: [Step], actions: [Action], value_estimator:\n ValueEstimator, e: float) ->int:\n return random.sample(actions, 1) if random.uniform(0, 1) < e else greedy(\n steps, actions, value_estimator)\n",
"step-3": "<mask token>\n\n\ndef greedy(steps: [Step], actions: [Action], value_estimator: ValueEstimator\n ) ->int:\n estimations = [value_estimator(steps, action) for action in actions]\n return actions[estimations.index(max(estimations))]\n\n\ndef e_greedy(steps: [Step], actions: [Action], value_estimator:\n ValueEstimator, e: float) ->int:\n return random.sample(actions, 1) if random.uniform(0, 1) < e else greedy(\n steps, actions, value_estimator)\n",
"step-4": "import random\nfrom .action import Action\nfrom ..transition.step import Step\nfrom ..value.estimators import ValueEstimator\n\n\ndef greedy(steps: [Step], actions: [Action], value_estimator: ValueEstimator\n ) ->int:\n estimations = [value_estimator(steps, action) for action in actions]\n return actions[estimations.index(max(estimations))]\n\n\ndef e_greedy(steps: [Step], actions: [Action], value_estimator:\n ValueEstimator, e: float) ->int:\n return random.sample(actions, 1) if random.uniform(0, 1) < e else greedy(\n steps, actions, value_estimator)\n",
"step-5": "import random\n\nfrom .action import Action\nfrom ..transition.step import Step\nfrom ..value.estimators import ValueEstimator\n\n\ndef greedy(steps: [Step], actions: [Action], value_estimator: ValueEstimator) -> int:\n estimations = [value_estimator(steps, action) for action in actions]\n return actions[estimations.index(max(estimations))]\n\n\ndef e_greedy(\n steps: [Step], actions: [Action], value_estimator: ValueEstimator, e: float,\n) -> int:\n return (\n random.sample(actions, 1)\n if random.uniform(0, 1) < e\n else greedy(steps, actions, value_estimator)\n )\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from sys import exit
def hard():
    """Ask the harder arithmetic question; fall back to easy() on a miss."""
    print("Nice! Let's try something harder")
    print("Could you calculate this for me?")
    print("4 * 35 + 18 / 2 = ")

    aws = input(">")

    # Bug fix: the printed expression evaluates to 149 (4*35 + 18/2 = 140 + 9),
    # not the previously accepted "176" (which would be 4*35 + 18*2).
    # Also removed the pointless `while True`: both branches left the loop on
    # the first pass (exit() or easy(), which itself exits), so it never looped.
    if aws in ("149", "149.0"):
        print("Nice, you correctly answer all the questions")
        exit(0)
    else:
        print("Ummm not quite right, let's try something easier")
        easy()
def easy():
    """Ask the simple apples question, then terminate the program either way."""
    print("Ok, seems like you are not good at math.")
    print("What about this.")
    print("Say you have 10 apples, your Mom gave you another 2.")
    print("How many apples you have now?")

    answer = input("> ")

    if answer == "12":
        print("You did a good job!")
    else:
        print("Oh well, it's not end of the world if you did badly in math")
    # Both outcomes end the program.
    exit(0)
def start():
    """Ask the user's age, then route to hard() or easy() based on a y/n answer.

    Bug fix: the original loop never re-read the user's answer, so any input
    containing neither 'y' nor 'n' printed "I don't know what that mean"
    forever.  The loop now re-prompts on unrecognized input.
    """
    print("Let's do some math")
    print("How old are you?")

    choice = input("> ")
    # NOTE(review): int() raises ValueError on non-numeric input — unguarded,
    # as in the original.
    age = int(choice) + 20

    print(f"So after 20 years, you'll be {age}, right? (y/n)")

    choice = input("> ")

    while True:
        if "y" in choice:
            hard()
        elif "n" in choice:
            easy()
        else:
            print("I don't know what that mean")
            choice = input("> ")  # re-prompt instead of looping forever
# Entry point: begin the quiz when the module is run.
start()
|
normal
|
{
"blob_id": "5d05351cd6cd6c0d216e8bc09308532605bfd26e",
"index": 3007,
"step-1": "<mask token>\n\n\ndef easy():\n print('Ok, seems like you are not good at math.')\n print('What about this.')\n print('Say you have 10 apples, your Mom gave you another 2.')\n print('How many apples you have now?')\n choice = input('> ')\n if choice == '12':\n print('You did a good job!')\n exit(0)\n else:\n print(\"Oh well, it's not end of the world if you did badly in math\")\n exit(0)\n\n\ndef start():\n print(\"Let's do some math\")\n print('How old are you?')\n choice = input('> ')\n age = int(choice) + 20\n print(f\"So after 20 years, you'll be {age}, right? (y/n)\")\n choice = input('> ')\n while True:\n if 'y' in choice:\n hard()\n elif 'n' in choice:\n easy()\n else:\n print(\"I don't know what that mean\")\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hard():\n print(\"Nice! Let's try something harder\")\n print('Could you calculate this for me?')\n print('4 * 35 + 18 / 2 = ')\n aws = input('>')\n while True:\n if aws == '176':\n print('Nice, you correctly answer all the questions')\n exit(0)\n else:\n print(\"Ummm not quite right, let's try something easier\")\n easy()\n\n\ndef easy():\n print('Ok, seems like you are not good at math.')\n print('What about this.')\n print('Say you have 10 apples, your Mom gave you another 2.')\n print('How many apples you have now?')\n choice = input('> ')\n if choice == '12':\n print('You did a good job!')\n exit(0)\n else:\n print(\"Oh well, it's not end of the world if you did badly in math\")\n exit(0)\n\n\ndef start():\n print(\"Let's do some math\")\n print('How old are you?')\n choice = input('> ')\n age = int(choice) + 20\n print(f\"So after 20 years, you'll be {age}, right? (y/n)\")\n choice = input('> ')\n while True:\n if 'y' in choice:\n hard()\n elif 'n' in choice:\n easy()\n else:\n print(\"I don't know what that mean\")\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hard():\n print(\"Nice! Let's try something harder\")\n print('Could you calculate this for me?')\n print('4 * 35 + 18 / 2 = ')\n aws = input('>')\n while True:\n if aws == '176':\n print('Nice, you correctly answer all the questions')\n exit(0)\n else:\n print(\"Ummm not quite right, let's try something easier\")\n easy()\n\n\ndef easy():\n print('Ok, seems like you are not good at math.')\n print('What about this.')\n print('Say you have 10 apples, your Mom gave you another 2.')\n print('How many apples you have now?')\n choice = input('> ')\n if choice == '12':\n print('You did a good job!')\n exit(0)\n else:\n print(\"Oh well, it's not end of the world if you did badly in math\")\n exit(0)\n\n\ndef start():\n print(\"Let's do some math\")\n print('How old are you?')\n choice = input('> ')\n age = int(choice) + 20\n print(f\"So after 20 years, you'll be {age}, right? (y/n)\")\n choice = input('> ')\n while True:\n if 'y' in choice:\n hard()\n elif 'n' in choice:\n easy()\n else:\n print(\"I don't know what that mean\")\n\n\nstart()\n",
"step-4": "from sys import exit\n\n\ndef hard():\n print(\"Nice! Let's try something harder\")\n print('Could you calculate this for me?')\n print('4 * 35 + 18 / 2 = ')\n aws = input('>')\n while True:\n if aws == '176':\n print('Nice, you correctly answer all the questions')\n exit(0)\n else:\n print(\"Ummm not quite right, let's try something easier\")\n easy()\n\n\ndef easy():\n print('Ok, seems like you are not good at math.')\n print('What about this.')\n print('Say you have 10 apples, your Mom gave you another 2.')\n print('How many apples you have now?')\n choice = input('> ')\n if choice == '12':\n print('You did a good job!')\n exit(0)\n else:\n print(\"Oh well, it's not end of the world if you did badly in math\")\n exit(0)\n\n\ndef start():\n print(\"Let's do some math\")\n print('How old are you?')\n choice = input('> ')\n age = int(choice) + 20\n print(f\"So after 20 years, you'll be {age}, right? (y/n)\")\n choice = input('> ')\n while True:\n if 'y' in choice:\n hard()\n elif 'n' in choice:\n easy()\n else:\n print(\"I don't know what that mean\")\n\n\nstart()\n",
"step-5": "from sys import exit\n\n\ndef hard():\n print(\"Nice! Let's try something harder\")\n print(\"Could you calculate this for me?\")\n print(\"4 * 35 + 18 / 2 = \")\n\n aws = input(\">\")\n\n while True:\n if aws == \"176\":\n print(\"Nice, you correctly answer all the questions\")\n exit(0)\n else:\n print(\"Ummm not quite right, let's try something easier\")\n easy()\n\n\ndef easy():\n print(\"Ok, seems like you are not good at math.\")\n print(\"What about this.\")\n print(\"Say you have 10 apples, your Mom gave you another 2.\")\n print(\"How many apples you have now?\")\n\n choice = input(\"> \")\n\n if choice == \"12\":\n print(\"You did a good job!\")\n exit(0)\n else:\n print(\"Oh well, it's not end of the world if you did badly in math\")\n exit(0)\n\n\ndef start():\n print(\"Let's do some math\")\n print(\"How old are you?\")\n\n choice = input(\"> \")\n age = int(choice) + 20\n\n print(f\"So after 20 years, you'll be {age}, right? (y/n)\")\n\n choice = input(\"> \")\n\n while True:\n if \"y\" in choice:\n hard()\n elif \"n\" in choice:\n easy()\n else:\n print(\"I don't know what that mean\")\n\n\nstart()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def word_freq_improved_summarize(text):
    """Build a short (<240 char) extractive summary of *text*.

    Sentences are scored by the sum of their words' normalized frequencies
    (stopwords removed), with up to a 10% boost the further a sentence sits
    from the middle of the passage; top-scoring sentences are concatenated
    until the character budget is spent.

    Relies on the module-level ``token`` (an nltk RegexpTokenizer) and the
    nltk ``stopwords`` corpus.
    """
    sentences = text.split('.')
    lowered = [s.lower() for s in sentences]
    # Strip punctuation by tokenizing on word characters.
    tokens = []
    for part in lowered:
        tokens.extend(token.tokenize(part))
    # Drop English stopwords.
    stop_words = set(stopwords.words('english'))
    words = [w for w in tokens if w not in stop_words]
    if not words:
        # Robustness fix: the original crashed on max() of an empty dict
        # when the text contained no scoreable words.
        return ''
    # Frequency of each word, normalized to (0, 1].
    weight = {}
    for w in words:
        weight[w] = words.count(w)
    max_freq = max(weight.values())
    for w in weight:
        weight[w] /= max_freq
    # Score each sentence; `total` avoids shadowing the builtin `sum`.
    scores = {}
    mid = len(sentences) / 2
    for i, sentence in enumerate(sentences):
        total = 0
        for raw in sentence.split():
            current = str(token.tokenize(raw))[2:-2].lower()
            if current in weight:
                total += weight[current]
        # Positional boost: sentences far from the middle score higher.
        scores[sentence] = total * (1 + 0.1 * abs(mid - i) / mid)
    ranked = dict(sorted(scores.items(), key=lambda kv: kv[1], reverse=True))
    final_summary = ''
    while ranked:  # simplified from `while True and len(...) > 0`
        best = max(ranked, key=lambda s: ranked[s])
        if len(final_summary) + len(best) < 240:
            final_summary += best
            del ranked[best]
        elif len(final_summary) < 1:
            # The single best sentence alone busts the budget; skip it.
            del ranked[best]
        else:
            break
    return final_summary
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def word_freq_improved_summarize(text):
    """Build a short (<240 char) extractive summary of *text*.

    Sentences are scored by the sum of their words' normalized frequencies
    (stopwords removed), with up to a 10% boost the further a sentence sits
    from the middle of the passage; top-scoring sentences are concatenated
    until the character budget is spent.

    Relies on the module-level ``token`` (an nltk RegexpTokenizer) and the
    nltk ``stopwords`` corpus.
    """
    sentences = text.split('.')
    lowered = [s.lower() for s in sentences]
    # Strip punctuation by tokenizing on word characters.
    tokens = []
    for part in lowered:
        tokens.extend(token.tokenize(part))
    # Drop English stopwords.
    stop_words = set(stopwords.words('english'))
    words = [w for w in tokens if w not in stop_words]
    if not words:
        # Robustness fix: the original crashed on max() of an empty dict
        # when the text contained no scoreable words.
        return ''
    # Frequency of each word, normalized to (0, 1].
    weight = {}
    for w in words:
        weight[w] = words.count(w)
    max_freq = max(weight.values())
    for w in weight:
        weight[w] /= max_freq
    # Score each sentence; `total` avoids shadowing the builtin `sum`.
    scores = {}
    mid = len(sentences) / 2
    for i, sentence in enumerate(sentences):
        total = 0
        for raw in sentence.split():
            current = str(token.tokenize(raw))[2:-2].lower()
            if current in weight:
                total += weight[current]
        # Positional boost: sentences far from the middle score higher.
        scores[sentence] = total * (1 + 0.1 * abs(mid - i) / mid)
    ranked = dict(sorted(scores.items(), key=lambda kv: kv[1], reverse=True))
    final_summary = ''
    while ranked:  # simplified from `while True and len(...) > 0`
        best = max(ranked, key=lambda s: ranked[s])
        if len(final_summary) + len(best) < 240:
            final_summary += best
            del ranked[best]
        elif len(final_summary) < 1:
            # The single best sentence alone busts the budget; skip it.
            del ranked[best]
        else:
            break
    return final_summary
if __name__ == '__main__':
    # Demo run: summarize the bundled HarmonyOS passage (result is discarded).
    with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:
        text = file.read()
    word_freq_improved_summarize(text)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Tokenizer that keeps only word characters (i.e. strips punctuation).
token = RegexpTokenizer('\\w+')
<|reserved_special_token_0|>
def word_freq_improved_summarize(text):
    """Build a short (<240 char) extractive summary of *text*.

    Sentences are scored by the sum of their words' normalized frequencies
    (stopwords removed), with up to a 10% boost the further a sentence sits
    from the middle of the passage; top-scoring sentences are concatenated
    until the character budget is spent.

    Relies on the module-level ``token`` (an nltk RegexpTokenizer) and the
    nltk ``stopwords`` corpus.
    """
    sentences = text.split('.')
    lowered = [s.lower() for s in sentences]
    # Strip punctuation by tokenizing on word characters.
    tokens = []
    for part in lowered:
        tokens.extend(token.tokenize(part))
    # Drop English stopwords.
    stop_words = set(stopwords.words('english'))
    words = [w for w in tokens if w not in stop_words]
    if not words:
        # Robustness fix: the original crashed on max() of an empty dict
        # when the text contained no scoreable words.
        return ''
    # Frequency of each word, normalized to (0, 1].
    weight = {}
    for w in words:
        weight[w] = words.count(w)
    max_freq = max(weight.values())
    for w in weight:
        weight[w] /= max_freq
    # Score each sentence; `total` avoids shadowing the builtin `sum`.
    scores = {}
    mid = len(sentences) / 2
    for i, sentence in enumerate(sentences):
        total = 0
        for raw in sentence.split():
            current = str(token.tokenize(raw))[2:-2].lower()
            if current in weight:
                total += weight[current]
        # Positional boost: sentences far from the middle score higher.
        scores[sentence] = total * (1 + 0.1 * abs(mid - i) / mid)
    ranked = dict(sorted(scores.items(), key=lambda kv: kv[1], reverse=True))
    final_summary = ''
    while ranked:  # simplified from `while True and len(...) > 0`
        best = max(ranked, key=lambda s: ranked[s])
        if len(final_summary) + len(best) < 240:
            final_summary += best
            del ranked[best]
        elif len(final_summary) < 1:
            # The single best sentence alone busts the budget; skip it.
            del ranked[best]
        else:
            break
    return final_summary
if __name__ == '__main__':
    # Demo run: summarize the bundled HarmonyOS passage (result is discarded).
    with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:
        text = file.read()
    word_freq_improved_summarize(text)
<|reserved_special_token_1|>
from nltk.tokenize import RegexpTokenizer
# Tokenizer that keeps only word characters (i.e. strips punctuation).
token = RegexpTokenizer('\\w+')
from nltk.corpus import stopwords
def word_freq_improved_summarize(text):
    """Build a short (<240 char) extractive summary of *text*.

    Sentences are scored by the sum of their words' normalized frequencies
    (stopwords removed), with up to a 10% boost the further a sentence sits
    from the middle of the passage; top-scoring sentences are concatenated
    until the character budget is spent.

    Relies on the module-level ``token`` (an nltk RegexpTokenizer) and the
    nltk ``stopwords`` corpus.
    """
    sentences = text.split('.')
    lowered = [s.lower() for s in sentences]
    # Strip punctuation by tokenizing on word characters.
    tokens = []
    for part in lowered:
        tokens.extend(token.tokenize(part))
    # Drop English stopwords.
    stop_words = set(stopwords.words('english'))
    words = [w for w in tokens if w not in stop_words]
    if not words:
        # Robustness fix: the original crashed on max() of an empty dict
        # when the text contained no scoreable words.
        return ''
    # Frequency of each word, normalized to (0, 1].
    weight = {}
    for w in words:
        weight[w] = words.count(w)
    max_freq = max(weight.values())
    for w in weight:
        weight[w] /= max_freq
    # Score each sentence; `total` avoids shadowing the builtin `sum`.
    scores = {}
    mid = len(sentences) / 2
    for i, sentence in enumerate(sentences):
        total = 0
        for raw in sentence.split():
            current = str(token.tokenize(raw))[2:-2].lower()
            if current in weight:
                total += weight[current]
        # Positional boost: sentences far from the middle score higher.
        scores[sentence] = total * (1 + 0.1 * abs(mid - i) / mid)
    ranked = dict(sorted(scores.items(), key=lambda kv: kv[1], reverse=True))
    final_summary = ''
    while ranked:  # simplified from `while True and len(...) > 0`
        best = max(ranked, key=lambda s: ranked[s])
        if len(final_summary) + len(best) < 240:
            final_summary += best
            del ranked[best]
        elif len(final_summary) < 1:
            # The single best sentence alone busts the budget; skip it.
            del ranked[best]
        else:
            break
    return final_summary
if __name__ == '__main__':
    # Demo run: summarize the bundled HarmonyOS passage (result is discarded).
    with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:
        text = file.read()
    word_freq_improved_summarize(text)
<|reserved_special_token_1|>
from nltk.tokenize import RegexpTokenizer
# Tokenizer that keeps only word characters (i.e. strips punctuation).
token = RegexpTokenizer(r'\w+')
from nltk.corpus import stopwords
# with open('microsoft.txt','r+',encoding="utf-8") as file:
# text = file.read()
# text = '''
# Huawei Technologies founder and CEO Ren Zhengfei said on Thursday the Chinese company is willing to license its Ren told reporters he was not afraid of creating a rival by making Huawei's technology available to competitors, and the offer could also include chip design know-how.Huawei, the world's largest telecoms gear maker, has been on a US trade blacklist since May over concerns that its equipment could be used by Beijing to spy. Huawei has repeatedly denied such allegations.The sanctions cut off Huawei's access to essential US technologies. The latest version of its Mate 30 flagship phone, unveiled last week in Europe, will not come with Google Mobile Services.Ren's remarks come after he said this month that he is open to selling the firm's 5G technology - including patents, code, blueprints and production know-how - to Western firms for a one-off fee.The offer to license out 5G technology marks the latest attempt by Huawei, also the world's No.2 smartphone vendor, to minimise the impact of the US trade ban. It expects a drop of some $10bn in revenue from its phone business this year.
# '''
def word_freq_improved_summarize(text):
    """Build a short (<240 char) extractive summary of *text*.

    Sentences are scored by the sum of their words' normalized frequencies
    (stopwords removed), with up to a 10% boost the further a sentence sits
    from the middle of the passage; top-scoring sentences are concatenated
    until the character budget is spent.

    Relies on the module-level ``token`` (an nltk RegexpTokenizer) and the
    nltk ``stopwords`` corpus.
    """
    sentences = text.split('.')
    lowered = [s.lower() for s in sentences]
    # Strip punctuation by tokenizing on word characters.
    tokens = []
    for part in lowered:
        tokens.extend(token.tokenize(part))
    # Drop English stopwords.
    stop_words = set(stopwords.words('english'))
    words = [w for w in tokens if w not in stop_words]
    if not words:
        # Robustness fix: the original crashed on max() of an empty dict
        # when the text contained no scoreable words.
        return ''
    # Frequency of each word, normalized to (0, 1].
    weight = {}
    for w in words:
        weight[w] = words.count(w)
    max_freq = max(weight.values())
    for w in weight:
        weight[w] /= max_freq
    # Score each sentence; `total` avoids shadowing the builtin `sum`.
    scores = {}
    mid = len(sentences) / 2
    for i, sentence in enumerate(sentences):
        total = 0
        for raw in sentence.split():
            current = str(token.tokenize(raw))[2:-2].lower()
            if current in weight:
                total += weight[current]
        # Positional boost: sentences far from the middle score higher.
        scores[sentence] = total * (1 + 0.1 * abs(mid - i) / mid)
    ranked = dict(sorted(scores.items(), key=lambda kv: kv[1], reverse=True))
    final_summary = ''
    while ranked:  # simplified from `while True and len(...) > 0`
        best = max(ranked, key=lambda s: ranked[s])
        if len(final_summary) + len(best) < 240:
            final_summary += best
            del ranked[best]
        elif len(final_summary) < 1:
            # The single best sentence alone busts the budget; skip it.
            del ranked[best]
        else:
            break
    return final_summary
if __name__ == "__main__":
with open('./passages/harmonyos.txt','r+',encoding="utf-8") as file:
text = file.read()
word_freq_improved_summarize(text)
|
flexible
|
{
"blob_id": "aed6e1966d9e4ce7250ae3cacaf8854cab4b590c",
"index": 3513,
"step-1": "<mask token>\n\n\ndef word_freq_improved_summarize(text):\n sen = text.split('.')\n small = [s.lower() for s in sen]\n punc_free = []\n for p in small:\n punc_free.extend(token.tokenize(p))\n stop_words = set(stopwords.words('english'))\n words = []\n for x in punc_free:\n if x not in stop_words:\n words.append(x)\n wgt = {}\n for x in words:\n wgt[x] = words.count(x)\n max_freq = max(wgt.values())\n for x in wgt.keys():\n wgt[x] = wgt[x] / max_freq\n order = {}\n avg = len(sen) / 2\n for i in range(len(sen)):\n sum = 0\n wrd = sen[i].split()\n for w in wrd:\n current = str(token.tokenize(w))[2:-2].lower()\n if current in wgt:\n sum += wgt[current]\n order[sen[i]] = sum * (1 + 0.1 * abs(avg - i) / avg)\n sorted_sen = dict(sorted(order.items(), key=lambda x: x[1], reverse=True))\n final_summary = ''\n while True and len(sorted_sen) > 0:\n summ = max(sorted_sen, key=lambda x: sorted_sen[x])\n if len(final_summary) + len(summ) < 240:\n final_summary += summ\n del sorted_sen[summ]\n elif len(final_summary) < 1:\n del sorted_sen[summ]\n continue\n else:\n break\n return final_summary\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef word_freq_improved_summarize(text):\n sen = text.split('.')\n small = [s.lower() for s in sen]\n punc_free = []\n for p in small:\n punc_free.extend(token.tokenize(p))\n stop_words = set(stopwords.words('english'))\n words = []\n for x in punc_free:\n if x not in stop_words:\n words.append(x)\n wgt = {}\n for x in words:\n wgt[x] = words.count(x)\n max_freq = max(wgt.values())\n for x in wgt.keys():\n wgt[x] = wgt[x] / max_freq\n order = {}\n avg = len(sen) / 2\n for i in range(len(sen)):\n sum = 0\n wrd = sen[i].split()\n for w in wrd:\n current = str(token.tokenize(w))[2:-2].lower()\n if current in wgt:\n sum += wgt[current]\n order[sen[i]] = sum * (1 + 0.1 * abs(avg - i) / avg)\n sorted_sen = dict(sorted(order.items(), key=lambda x: x[1], reverse=True))\n final_summary = ''\n while True and len(sorted_sen) > 0:\n summ = max(sorted_sen, key=lambda x: sorted_sen[x])\n if len(final_summary) + len(summ) < 240:\n final_summary += summ\n del sorted_sen[summ]\n elif len(final_summary) < 1:\n del sorted_sen[summ]\n continue\n else:\n break\n return final_summary\n\n\nif __name__ == '__main__':\n with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:\n text = file.read()\n word_freq_improved_summarize(text)\n",
"step-3": "<mask token>\ntoken = RegexpTokenizer('\\\\w+')\n<mask token>\n\n\ndef word_freq_improved_summarize(text):\n sen = text.split('.')\n small = [s.lower() for s in sen]\n punc_free = []\n for p in small:\n punc_free.extend(token.tokenize(p))\n stop_words = set(stopwords.words('english'))\n words = []\n for x in punc_free:\n if x not in stop_words:\n words.append(x)\n wgt = {}\n for x in words:\n wgt[x] = words.count(x)\n max_freq = max(wgt.values())\n for x in wgt.keys():\n wgt[x] = wgt[x] / max_freq\n order = {}\n avg = len(sen) / 2\n for i in range(len(sen)):\n sum = 0\n wrd = sen[i].split()\n for w in wrd:\n current = str(token.tokenize(w))[2:-2].lower()\n if current in wgt:\n sum += wgt[current]\n order[sen[i]] = sum * (1 + 0.1 * abs(avg - i) / avg)\n sorted_sen = dict(sorted(order.items(), key=lambda x: x[1], reverse=True))\n final_summary = ''\n while True and len(sorted_sen) > 0:\n summ = max(sorted_sen, key=lambda x: sorted_sen[x])\n if len(final_summary) + len(summ) < 240:\n final_summary += summ\n del sorted_sen[summ]\n elif len(final_summary) < 1:\n del sorted_sen[summ]\n continue\n else:\n break\n return final_summary\n\n\nif __name__ == '__main__':\n with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:\n text = file.read()\n word_freq_improved_summarize(text)\n",
"step-4": "from nltk.tokenize import RegexpTokenizer\ntoken = RegexpTokenizer('\\\\w+')\nfrom nltk.corpus import stopwords\n\n\ndef word_freq_improved_summarize(text):\n sen = text.split('.')\n small = [s.lower() for s in sen]\n punc_free = []\n for p in small:\n punc_free.extend(token.tokenize(p))\n stop_words = set(stopwords.words('english'))\n words = []\n for x in punc_free:\n if x not in stop_words:\n words.append(x)\n wgt = {}\n for x in words:\n wgt[x] = words.count(x)\n max_freq = max(wgt.values())\n for x in wgt.keys():\n wgt[x] = wgt[x] / max_freq\n order = {}\n avg = len(sen) / 2\n for i in range(len(sen)):\n sum = 0\n wrd = sen[i].split()\n for w in wrd:\n current = str(token.tokenize(w))[2:-2].lower()\n if current in wgt:\n sum += wgt[current]\n order[sen[i]] = sum * (1 + 0.1 * abs(avg - i) / avg)\n sorted_sen = dict(sorted(order.items(), key=lambda x: x[1], reverse=True))\n final_summary = ''\n while True and len(sorted_sen) > 0:\n summ = max(sorted_sen, key=lambda x: sorted_sen[x])\n if len(final_summary) + len(summ) < 240:\n final_summary += summ\n del sorted_sen[summ]\n elif len(final_summary) < 1:\n del sorted_sen[summ]\n continue\n else:\n break\n return final_summary\n\n\nif __name__ == '__main__':\n with open('./passages/harmonyos.txt', 'r+', encoding='utf-8') as file:\n text = file.read()\n word_freq_improved_summarize(text)\n",
"step-5": "from nltk.tokenize import RegexpTokenizer\ntoken = RegexpTokenizer(r'\\w+')\nfrom nltk.corpus import stopwords\n\n# with open('microsoft.txt','r+',encoding=\"utf-8\") as file:\n# text = file.read()\n# text = '''\n# Huawei Technologies founder and CEO Ren Zhengfei said on Thursday the Chinese company is willing to license its Ren told reporters he was not afraid of creating a rival by making Huawei's technology available to competitors, and the offer could also include chip design know-how.Huawei, the world's largest telecoms gear maker, has been on a US trade blacklist since May over concerns that its equipment could be used by Beijing to spy. Huawei has repeatedly denied such allegations.The sanctions cut off Huawei's access to essential US technologies. The latest version of its Mate 30 flagship phone, unveiled last week in Europe, will not come with Google Mobile Services.Ren's remarks come after he said this month that he is open to selling the firm's 5G technology - including patents, code, blueprints and production know-how - to Western firms for a one-off fee.The offer to license out 5G technology marks the latest attempt by Huawei, also the world's No.2 smartphone vendor, to minimise the impact of the US trade ban. 
It expects a drop of some $10bn in revenue from its phone business this year.\n# '''\n\ndef word_freq_improved_summarize(text):\n sen = text.split('.')\n #normalise\n small = [s.lower() for s in sen]\n #remove punctuation\n punc_free = []\n for p in small: punc_free.extend(token.tokenize(p))\n #remove stopwords\n stop_words = set(stopwords.words('english'))\n words = []\n for x in punc_free:\n if x not in stop_words: words.append(x)\n #weighted frequency\n wgt = {}\n for x in words: wgt[x] = words.count(x)\n max_freq = max(wgt.values())\n for x in wgt.keys(): wgt[x] = wgt[x]/max_freq\n #replace with weighted_frequency\n order = {}\n avg = len(sen)/2\n for i in range(len(sen)):\n sum = 0\n wrd = sen[i].split()\n for w in wrd:\n current = (str(token.tokenize(w))[2:-2]).lower()\n if current in wgt:\n sum += wgt[current]\n order[sen[i]] = sum*(1+0.1*abs(avg-i)/avg)\n sorted_sen = dict(sorted(order.items(), key = lambda x:x[1], reverse=True))\n # print('\\n1.Text\\n',text)\n # print('\\n2.List of Sentences\\n',sen)\n # print('\\n3.List of sentences in small case\\n',small)\n # print('\\n4.Removing punctuation\\n',punc_free)\n # print('\\n5.Removing stop words\\n',words)\n # print('\\n6.Word frequency\\n',wgt)\n # print('\\n7.Sentences with sum of frequency of their words\\n',order)\n # print('\\n8.Sorted sentences\\n',sorted_sen)\n # print('\\n9.Final Summary:')\n final_summary = \"\"\n while True and len(sorted_sen)>0:\n summ = max(sorted_sen, key=lambda x:sorted_sen[x])\n if (len(final_summary)+len(summ))<240:\n final_summary += summ\n del sorted_sen[summ]\n else:\n if len(final_summary)<1:\n del sorted_sen[summ]\n continue\n else:\n break\n\n return final_summary\n\nif __name__ == \"__main__\":\n with open('./passages/harmonyos.txt','r+',encoding=\"utf-8\") as file:\n text = file.read()\n word_freq_improved_summarize(text)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with *font_u* in *color* and blit it onto *surface*.

    The rendered text is anchored by the top-left corner of its bounding
    rect at (x, y).  Expects a pygame.freetype-style font whose render()
    returns a (surface, rect) pair.
    """
    rendered_surface, rendered_rect = font_u.render(text, color)
    rendered_rect.topleft = (x, y)
    surface.blit(rendered_surface, rendered_rect)
<|reserved_special_token_0|>
def draw_controls():
    """Render the static 'controls' help panel at the bottom of the menu."""
    # Outer white frame with a near-black interior.
    pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, (255, 255, 255), screen, 20, 430)
    # Movement keys: WASD illustration plus a SPACE key label.
    keys_image = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_1.png'), (243, 100))
    screen.blit(keys_image, (20, 470))
    pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)
    # Mouse illustration for shooting.
    mouse_image = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_2.png'), (90, 100))
    screen.blit(mouse_image, (153, 590))
    draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)
<|reserved_special_token_0|>
def main_menu():
    """Title-screen loop with play/options/quit buttons.

    Runs forever (leaves only via pygame.quit()/sys.exit() or by calling
    into game_screen()/options_menu()); redraws the controls panel and
    leaderboard every frame at 10 FPS.
    """
    click = False
    # Silence anything still playing from a previous screen.
    pygame.mixer.stop()
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # Title bar.
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)
        # NOTE(review): button images are re-loaded from disk every frame;
        # tolerable at 10 FPS, but could be hoisted out of the loop.
        button_play = pygame.image.load('resources/sprites/button.png')
        button_play = pygame.transform.scale(button_play, (222, 105))
        b_play_mask = button_play.get_rect()
        b_play_mask.x = 50
        b_play_mask.y = 70
        screen.blit(button_play, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, (255, 255, 255), screen, 113, 100)
        button_options = pygame.image.load('resources/sprites/button.png')
        button_options = pygame.transform.scale(button_options, (222, 105))
        b_options_mask = button_options.get_rect()
        b_options_mask.x = 50
        b_options_mask.y = 185
        screen.blit(button_options, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, (255, 255, 255), screen, 78, 215)
        button_exit = pygame.image.load('resources/sprites/button.png')
        button_exit = pygame.transform.scale(button_exit, (222, 105))
        b_exit_mask = button_exit.get_rect()
        b_exit_mask.x = 50
        b_exit_mask.y = 300
        screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_leaderboard()
        # Hover highlight + click dispatch for each button; `click` was set
        # by the previous frame's MOUSEBUTTONDOWN event.
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen loop: nickname entry (up to 4 chars) and audio toggle.

    Mutates the module globals player_name, line_counter and is_sound.
    Returns to the caller when 'back' is clicked or ESC is pressed.
    """
    global player_name, line_counter, is_sound
    running = True
    click = False
    numlock = False
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # Title bar.
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        # Button 1 displays the current nickname.
        button_1 = pygame.image.load('resources/sprites/button.png')
        button_1 = pygame.transform.scale(button_1, (222, 105))
        b_1_mask = button_1.get_rect()
        b_1_mask.x = 50
        b_1_mask.y = 70
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        # Button 2 toggles audio on/off.
        button_2 = pygame.image.load('resources/sprites/button.png')
        button_2 = pygame.transform.scale(button_2, (222, 105))
        b_2_mask = button_2.get_rect()
        b_2_mask.x = 50
        b_2_mask.y = 185
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        button_back = pygame.image.load('resources/sprites/button.png')
        button_back = pygame.transform.scale(button_back, (222, 105))
        b_back_mask = button_back.get_rect()
        b_back_mask.x = 50
        b_back_mask.y = 300
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        # Prompt until a nickname has been typed.
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        # Warn the player to switch Num Lock off (it interferes with entry).
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                # Flip the audio flag and pause/resume all mixer channels.
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                # Nickname editing: backspace deletes; navigation/space keys
                # are ignored; any other unmodified key appends upper-cased.
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                elif player_name == 'NON':
                    pass
                elif event.key == pygame.K_SPACE:
                    pass
                elif event.key == pygame.K_UP:
                    pass
                elif event.key == pygame.K_DOWN:
                    pass
                elif event.key == pygame.K_LEFT:
                    pass
                elif event.key == pygame.K_RIGHT:
                    pass
                elif event.key == pygame.K_RETURN:
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        pygame.display.update()
        clock.tick(10)
def game_screen():
    """Main gameplay loop: scrolling level, enemy waves, boss fight, scoring.

    Mutates the listed module globals and uses the shared pygame objects
    (screen, clock) plus the sqlite handles cur/con for the highscore table.
    Returns when the run ends (death, reaching level end, or quitting via
    the pause screen), after persisting the final score.
    """
    global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
    game_score = 0
    enemy_killed = 0
    bullets_shot = 0
    boss_done = False
    if player_name == '':
        player_name = 'NON'
    # Background music playlist; '=' key cycles tracks, keypad +/- volume.
    track_count = 0
    battle_tracks = ['resources/sounds/music/battle_music_1.mp3',
        'resources/sounds/music/battle_music_2.mp3',
        'resources/sounds/music/battle_music_3.mp3',
        'resources/sounds/music/battle_music_4.mp3',
        'resources/sounds/music/battle_music_5.mp3',
        'resources/sounds/music/battle_music_6.mp3']
    ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
    ingame_music.stop()
    ingame_music_sound = 0.1
    if not is_sound:
        ingame_music_sound = 0
    ingame_music.set_volume(ingame_music_sound)
    ingame_music.play()
    # bs: boss spawned flag.  USEREVENT fires every second and drives
    # enemy/boss spawning and the post-death delay.
    bs = False
    running_game = True
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    enemies = pygame.sprite.Group()
    death = False
    p = Player()
    window_holes = pygame.sprite.Group()
    bullets_count = pygame.sprite.Group()
    boss_bullets_count = pygame.sprite.Group()
    booms = pygame.sprite.Group()
    small_booms = pygame.sprite.Group()
    mini_booms = pygame.sprite.Group()
    # One-shot flags so each boss phase awards its bonus only once.
    phase1_score = True
    phase2_score = True
    phase3_score = True
    battle_music = True
    phase4_score = True
    col_check = 1
    boss_death = False
    # Background scrolls up from this offset; positions >= -8500 trigger
    # the boss, > -805 ends the level.
    level_bckgd_pos = -23800
    current_player_sprite = 'stay'
    current_level_background = pygame.image.load(
        'resources/level_pictures/first_level_bckgd.jpg')
    screen.blit(current_level_background, (0, 0))
    wait = 0
    # Shot cooldowns in milliseconds (player and boss).
    last = pygame.time.get_ticks()
    last_2 = pygame.time.get_ticks()
    boss_cooldown = 1000
    cooldown = 100
    while running_game:
        for event in pygame.event.get():
            # '=' cycles the background-music playlist.
            if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
                ingame_music.stop()
                track_count += 1
                if track_count > 5:
                    track_count = 0
                ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
                ingame_music.set_volume(ingame_music_sound)
                ingame_music.play()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
                ingame_music_sound += 0.05
                if ingame_music_sound >= 1.5:
                    ingame_music_sound = 1.4
                ingame_music.set_volume(ingame_music_sound)
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
                ingame_music_sound -= 0.05
                if ingame_music_sound < 0:
                    ingame_music_sound = 0
                ingame_music.set_volume(ingame_music_sound)
            # Movement: WASD or arrow keys; opposite directions exclude
            # each other while the opposing key is held.
            if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or
                event.key == pygame.K_LEFT) and not p.moving_right:
                current_player_sprite = 'left'
                p.moving_right = False
                p.moving_left = True
            elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or
                event.key == pygame.K_RIGHT) and not p.moving_left:
                current_player_sprite = 'right'
                p.moving_left = False
                p.moving_right = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_a or
                event.key == pygame.K_LEFT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_d or
                event.key == pygame.K_RIGHT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or
                event.key == pygame.K_UP) and not p.moving_down:
                p.moving_down = False
                p.moving_up = True
            elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or
                event.key == pygame.K_DOWN) and not p.moving_up:
                p.moving_up = False
                p.moving_down = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_w or
                event.key == pygame.K_UP):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_s or
                event.key == pygame.K_DOWN):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            # Firing (mouse or SPACE): two bullets per shot, rate-limited.
            if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and
                p.health_count > 0):
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # Spawning: boss once past -8500, regular enemies before that.
            if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and
                not bs):
                bs = True
                b = Boss()
            if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
                Enemy(enemies)
            # End of run: 2 s after death, or when the level scrolled out.
            if (event.type == pygame.USEREVENT and death and pygame.time.
                get_ticks() - wait > 2000 or level_bckgd_pos > -801):
                ingame_music.stop()
                death_screen()
                # Left-pad the score to six characters (becomes a str).
                while True:
                    if len(str(game_score)) < 6:
                        game_score = '0' + str(game_score)
                    else:
                        break
                # NOTE(review): player_name is interpolated directly into the
                # SQL string — injection/quoting hazard; prefer a
                # parameterized query: cur.execute(sql, (name, score)).
                var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(
                    player_name, game_score)
                cur.execute(var)
                con.commit()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pause_screen()
            # pause_screen() may clear running_game; persist score on exit.
            if not running_game:
                ingame_music.stop()
                while True:
                    if len(str(game_score)) < 6:
                        game_score = '0' + str(game_score)
                    else:
                        break
                # NOTE(review): same SQL-injection hazard as above.
                var = ("INSERT INTO highest_score VALUES ('{}', '{}')".
                    format(player_name, game_score))
                cur.execute(var)
                con.commit()
        # Scroll the level background upward.
        level_bckgd_pos += speed_bckgd
        if level_bckgd_pos >= 0:
            screen.fill((0, 0, 0))
        screen.blit(current_level_background, (0, level_bckgd_pos))
        if level_bckgd_pos > -805:
            death = True
        if p.health_count > 0:
            # --- player alive: collisions with enemies and their bullets ---
            for i in enemies:
                collision = pygame.sprite.collide_rect(p, i)
                if collision:
                    Explosion(booms).boom((i.rect.x, i.rect.y))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    # Ramming deals 2 damage to the enemy, 1 to the player.
                    if i.health_count - 2 <= 0:
                        game_score += 10
                        i.kill()
                        Explosion(booms).boom((i.rect.x, i.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                0.1)
                        enemy_killed += 1
                    else:
                        i.health_count -= 2
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                    p.health_count -= 1
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05
                            )
                    if p.health_count > 0:
                        # Cosmetic cracked-glass overlay at a random spot.
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 1
                                )
                            play_sound('resources/sounds/explosion_stun.mp3',
                                0.02)
                # Player bullets vs this enemy (1 damage each).
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(j, i)
                    if collision:
                        if i.health_count - 1 <= 0:
                            game_score += 5
                            i.kill()
                            Explosion(booms).boom((i.rect.x, i.rect.y))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/explosion_sound.mp3', 0.1
                                    )
                            enemy_killed += 1
                        else:
                            i.health_count -= 1
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)
                            )
                        if is_sound:
                            play_sound(
                                'resources/sounds/explosion_sound.mp3', 0.1
                                )
                        if is_sound:
                            play_sound(
                                'resources/sounds/collision_sound.mp3',
                                0.03)
                        j.kill()
            if bs and not boss_death:
                # --- boss fight: ramming, player bullets, boss bullets ---
                collision = pygame.sprite.collide_rect(b, p)
                if collision and b.y > 0:
                    b.health_count -= 0.3
                    if is_sound:
                        play_sound('resources/sounds/collision_sound.mp3', 0.03
                            )
                    p.health_count -= 0.2
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05
                            )
                    # Flash the boss to its 'hit' sprite of the current phase.
                    if b.body == b.stay1 or b.body == b.stay2:
                        b.body = b.stay2
                    if b.body == b.stay3 or b.body == b.stay4:
                        b.body = b.stay4
                    if b.body == b.stay5 or b.body == b.stay6:
                        b.body = b.stay6
                    col_check += 1
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3',
                                0.1)
                            play_sound('resources/sounds/explosion_stun.mp3',
                                0.02)
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(b, j)
                    if collision and b.y > 0:
                        if b.body == b.stay1 or b.body == b.stay2:
                            b.body = b.stay2
                        if b.body == b.stay3 or b.body == b.stay4:
                            b.body = b.stay4
                        if b.body == b.stay5 or b.body == b.stay6:
                            b.body = b.stay6
                        col_check += 1
                        b.health_count -= 0.2
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                        j.kill()
                for h in boss_bullets_count:
                    collision = pygame.sprite.collide_rect(p, h)
                    if collision:
                        p.health_count -= 1
                        Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
                        if p.health_count > 0:
                            Damage(window_holes).taking_damage((random.
                                randint(50, 550), random.randint(50, 750)))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/window_crashed.mp3', 0.1)
                                play_sound(
                                    'resources/sounds/explosion_stun.mp3', 0.01
                                    )
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                        h.kill()
            # Animate the player according to its current movement state.
            p.update(FPS)
            if current_player_sprite == 'left':
                sprite = p.anim_left()
                screen.blit(sprite, (p.x, p.y))
                p.left_1 = not p.left_1
            elif current_player_sprite == 'right':
                sprite = p.anim_right()
                screen.blit(sprite, (p.x, p.y))
                p.right_1 = not p.right_1
            elif current_player_sprite == 'stay':
                sprite = p.anim_stay()
                screen.blit(sprite, (p.x, p.y))
                p.stay_1 = not p.stay_1
            if bs:
                # Switch to the boss theme once, when the boss appears.
                if battle_music:
                    ingame_music.stop()
                    ingame_music = pygame.mixer.Sound(
                        'resources/sounds/music/wagner_main_theme.mp3')
                    ingame_music.set_volume(ingame_music_sound)
                    ingame_music.play()
                    battle_music = False
                b.update()
                # One-time phase bonuses as the boss changes sprite phases.
                if b.body == b.stay3 and phase1_score:
                    game_score += 100
                    phase1_score = False
                if b.body == b.stay5 and phase2_score:
                    game_score += 100
                    phase2_score = False
                if b.body == b.stay7 and phase3_score:
                    game_score += 200
                    phase3_score = False
                now = pygame.time.get_ticks()
                if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=
                    b.stay7):
                    last_2 = now
                    play_sound('resources/sounds/boss_shot.mp3', 0.05)
                    Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)
                        )
                # Every 40 accumulated hits the boss advances a sprite.
                if col_check % 40 == 0:
                    b.change_sprite()
                else:
                    col_check += 1
                if b.health_count > 0:
                    screen.blit(b.body, (b.x, b.y))
                elif b.health_count <= 0 and phase4_score:
                    # Boss defeated: final bonus + explosion burst, once.
                    boss_done = True
                    phase4_score = False
                    game_score += 350
                    if is_sound:
                        play_sound('resources/sounds/boss_defeated.mp3', 0.2)
                    Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x, b.rect.y))
                    Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
                    Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    boss_death = True
        else:
            # --- player dead: crash animation, then mark the run finished ---
            if p.minimize == 0:
                if is_sound:
                    ingame_music.stop()
                    play_sound('resources/sounds/plane_crash.mp3', 0.05)
                p.minimize += 1
            if not death:
                if p.minimize <= 320:
                    p.death()
                    screen.blit(p.death_sp, (p.x, p.y))
                else:
                    death = True
                    wait = pygame.time.get_ticks()
                    Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect
                        .y + 25))
                    Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
                    Smallexplosions(small_booms).boom((p.rect.x - 22, p.
                        rect.y + 7))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    p.kill()
            # A surviving boss speeds up and 'wins' after the player dies.
            if bs and b.health_count > 0:
                b.speed += 0.02
                b.win = True
                screen.blit(b.body, (b.x, b.y))
                b.update()
        # Update and draw all sprite groups, then the HUD.
        window_holes.update()
        window_holes.draw(screen)
        enemies.update(FPS)
        enemies.draw(screen)
        bullets_count.update()
        bullets_count.draw(screen)
        boss_bullets_count.update()
        boss_bullets_count.draw(screen)
        small_booms.update()
        small_booms.draw(screen)
        mini_booms.update()
        mini_booms.draw(screen)
        draw_text('Player: {}'.format(player_name), font, (255, 255, 255),
            screen, 20, 20)
        # Zero-padded six-digit score display.
        if len(str(game_score)) < 2:
            draw_text('00000' + str(game_score), font, (255, 255, 255),
                screen, 430, 20)
        elif len(str(game_score)) < 3:
            draw_text('0000' + str(game_score), font, (255, 255, 255),
                screen, 430, 20)
        elif len(str(game_score)) < 4:
            draw_text('000' + str(game_score), font, (255, 255, 255),
                screen, 430, 20)
        elif len(str(game_score)) < 5:
            draw_text('00' + str(game_score), font, (255, 255, 255), screen,
                430, 20)
        elif len(str(game_score)) < 6:
            draw_text('0' + str(game_score), font, (255, 255, 255), screen,
                430, 20)
        elif len(str(game_score)) >= 6:
            draw_text('Max score', font, (255, 255, 255), screen, 510, 20)
        booms.update()
        booms.draw(screen)
        pygame.display.flip()
        clock.tick(FPS)
def death_screen():
    """End-of-run screen: staged stats reveal, letter rating, 'menu' button.

    Plays the victory or defeat theme depending on boss_done, reveals one
    stat line per second (driven by the USEREVENT timer) and returns once
    'menu' is clicked or ESC is pressed; also clears running_game.
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0
    color_counter = 0
    # One-second timer drives the staged reveal and the SSS colour cycle.
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # Rating: 1 point per 10 kills, +1 for firing under 800 bullets,
    # +2 (below) for defeating the boss.
    rating_kills = enemy_killed // 10
    if bullets_shot < 800:
        rating_shots = 1
    else:
        rating_shots = 0
    rating = rating_kills + rating_shots
    if boss_done:
        death_music = pygame.mixer.Sound(
            'resources/sounds/music/victory_theme.mp3')
        death_music.stop()
        death_music_sound = 0.1
        if not is_sound:
            death_music_sound = 0
        death_music.set_volume(death_music_sound)
        death_music.play()
        rating += 2
    else:
        death_music = pygame.mixer.Sound(
            'resources/sounds/music/loose_theme.mp3')
        death_music.stop()
        death_music_sound = 0.1
        if not is_sound:
            death_music_sound = 0
        death_music.set_volume(death_music_sound)
        death_music.play()
    # Left-pad the score with zeros to six characters (turns it into str).
    while True:
        if len(str(game_score)) < 6:
            game_score = '0' + str(game_score)
        else:
            break
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        button_menu = pygame.image.load('resources/sprites/button.png')
        button_menu = pygame.transform.scale(button_menu, (200, 70))
        b_menu_mask = button_menu.get_rect()
        b_menu_mask.x = 195
        b_menu_mask.y = 700
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        # Stat lines appear one per elapsed second.
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255,
                255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255),
                screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,
                255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,
                255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            # Letter grade F..SSS; the top SSS cycles red/green/blue.
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            elif color_counter == 0:
                draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
            elif color_counter == 1:
                draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
            else:
                draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                if is_sound:
                    if is_sound:
                        play_sound('resources/sounds/click_sound.mp3', 0.2)
                    else:
                        pass
                running = False
                running_game = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with *font_u* in *color* and blit it onto *surface*.

    The rendered text is anchored by the top-left corner of its bounding
    rect at (x, y).  Expects a pygame.freetype-style font whose render()
    returns a (surface, rect) pair.
    """
    rendered_surface, rendered_rect = font_u.render(text, color)
    rendered_rect.topleft = (x, y)
    surface.blit(rendered_surface, rendered_rect)
def play_sound(sound_p, volume_h=0.5, wait_t=0):
    """Load the sound file at *sound_p* and play it at volume *volume_h*.

    Playback is skipped when the global audio toggle ``is_sound`` is off,
    but the optional blocking wait of *wait_t* milliseconds always runs.
    """
    effect = pygame.mixer.Sound(sound_p)
    effect.set_volume(volume_h)
    if is_sound:
        effect.play()
    pygame.time.wait(wait_t)
# Initialise all pygame modules before any display/mixer use below.
pygame.init()
<|reserved_special_token_0|>
# Window icon and title for the game window.
pygame.display.set_icon(pygame.image.load(
    'resources/images/test_small_logo_1.bmp'))
pygame.display.set_caption('Death or Dishonour')
<|reserved_special_token_0|>
def draw_controls():
    """Render the static 'controls' help panel at the bottom of the menu."""
    # Outer white frame with a near-black interior.
    pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, (255, 255, 255), screen, 20, 430)
    # Movement keys: WASD illustration plus a SPACE key label.
    keys_image = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_1.png'), (243, 100))
    screen.blit(keys_image, (20, 470))
    pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)
    # Mouse illustration for shooting.
    mouse_image = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_2.png'), (90, 100))
    screen.blit(mouse_image, (153, 590))
    draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)
def draw_leaderboard():
    """Render the top-7 highscore table fetched from the sqlite database."""
    rows = list(cur.execute(
        'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7'))
    # Table frame: filled box, outline, header separator, name/score divider.
    pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))
    pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)
    pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)
    pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)
    row_height = 40
    # Horizontal grid lines, one per table row.
    for step in range(1, 8):
        line_y = 124 + row_height * step
        pygame.draw.line(screen, (255, 255, 255), (310, line_y),
            (560, line_y), 3)
    draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 80)
    # Fill the name/score cells top-down in fetched (score-descending) order.
    for index, row in enumerate(rows):
        cell_y = 140 + row_height * index
        draw_text(str(row[0]), font_table, (255, 255, 255), screen, 350,
            cell_y)
        draw_text(str(row[1]), font_table, (255, 255, 255), screen, 450,
            cell_y)
def main_menu():
    """Title-screen loop with play/options/quit buttons.

    Runs forever (leaves only via pygame.quit()/sys.exit() or by calling
    into game_screen()/options_menu()); redraws the controls panel and
    leaderboard every frame at 10 FPS.
    """
    click = False
    # Silence anything still playing from a previous screen.
    pygame.mixer.stop()
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # Title bar.
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)
        # NOTE(review): button images are re-loaded from disk every frame;
        # tolerable at 10 FPS, but could be hoisted out of the loop.
        button_play = pygame.image.load('resources/sprites/button.png')
        button_play = pygame.transform.scale(button_play, (222, 105))
        b_play_mask = button_play.get_rect()
        b_play_mask.x = 50
        b_play_mask.y = 70
        screen.blit(button_play, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, (255, 255, 255), screen, 113, 100)
        button_options = pygame.image.load('resources/sprites/button.png')
        button_options = pygame.transform.scale(button_options, (222, 105))
        b_options_mask = button_options.get_rect()
        b_options_mask.x = 50
        b_options_mask.y = 185
        screen.blit(button_options, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, (255, 255, 255), screen, 78, 215)
        button_exit = pygame.image.load('resources/sprites/button.png')
        button_exit = pygame.transform.scale(button_exit, (222, 105))
        b_exit_mask = button_exit.get_rect()
        b_exit_mask.x = 50
        b_exit_mask.y = 300
        screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_leaderboard()
        # Hover highlight + click dispatch for each button; `click` was set
        # by the previous frame's MOUSEBUTTONDOWN event.
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen loop: nickname entry (up to 4 chars) and audio toggle.

    Mutates the module globals player_name, line_counter and is_sound.
    Returns to the caller when 'back' is clicked or ESC is pressed.
    """
    global player_name, line_counter, is_sound
    running = True
    click = False
    numlock = False
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # Title bar.
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        # Button 1 displays the current nickname.
        button_1 = pygame.image.load('resources/sprites/button.png')
        button_1 = pygame.transform.scale(button_1, (222, 105))
        b_1_mask = button_1.get_rect()
        b_1_mask.x = 50
        b_1_mask.y = 70
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        # Button 2 toggles audio on/off.
        button_2 = pygame.image.load('resources/sprites/button.png')
        button_2 = pygame.transform.scale(button_2, (222, 105))
        b_2_mask = button_2.get_rect()
        b_2_mask.x = 50
        b_2_mask.y = 185
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        button_back = pygame.image.load('resources/sprites/button.png')
        button_back = pygame.transform.scale(button_back, (222, 105))
        b_back_mask = button_back.get_rect()
        b_back_mask.x = 50
        b_back_mask.y = 300
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        # Prompt until a nickname has been typed.
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        # Warn the player to switch Num Lock off (it interferes with entry).
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                # Flip the audio flag and pause/resume all mixer channels.
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                # Nickname editing: backspace deletes; navigation/space keys
                # are ignored; any other unmodified key appends upper-cased.
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                elif player_name == 'NON':
                    pass
                elif event.key == pygame.K_SPACE:
                    pass
                elif event.key == pygame.K_UP:
                    pass
                elif event.key == pygame.K_DOWN:
                    pass
                elif event.key == pygame.K_LEFT:
                    pass
                elif event.key == pygame.K_RIGHT:
                    pass
                elif event.key == pygame.K_RETURN:
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        pygame.display.update()
        clock.tick(10)
def game_screen():
global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
game_score = 0
enemy_killed = 0
bullets_shot = 0
boss_done = False
if player_name == '':
player_name = 'NON'
track_count = 0
battle_tracks = ['resources/sounds/music/battle_music_1.mp3',
'resources/sounds/music/battle_music_2.mp3',
'resources/sounds/music/battle_music_3.mp3',
'resources/sounds/music/battle_music_4.mp3',
'resources/sounds/music/battle_music_5.mp3',
'resources/sounds/music/battle_music_6.mp3']
ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
ingame_music.stop()
ingame_music_sound = 0.1
if not is_sound:
ingame_music_sound = 0
ingame_music.set_volume(ingame_music_sound)
ingame_music.play()
bs = False
running_game = True
pygame.time.set_timer(pygame.USEREVENT, 1000)
enemies = pygame.sprite.Group()
death = False
p = Player()
window_holes = pygame.sprite.Group()
bullets_count = pygame.sprite.Group()
boss_bullets_count = pygame.sprite.Group()
booms = pygame.sprite.Group()
small_booms = pygame.sprite.Group()
mini_booms = pygame.sprite.Group()
phase1_score = True
phase2_score = True
phase3_score = True
battle_music = True
phase4_score = True
col_check = 1
boss_death = False
level_bckgd_pos = -23800
current_player_sprite = 'stay'
current_level_background = pygame.image.load(
'resources/level_pictures/first_level_bckgd.jpg')
screen.blit(current_level_background, (0, 0))
wait = 0
last = pygame.time.get_ticks()
last_2 = pygame.time.get_ticks()
boss_cooldown = 1000
cooldown = 100
while running_game:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
ingame_music.stop()
track_count += 1
if track_count > 5:
track_count = 0
ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
ingame_music.set_volume(ingame_music_sound)
ingame_music.play()
if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
ingame_music_sound += 0.05
if ingame_music_sound >= 1.5:
ingame_music_sound = 1.4
ingame_music.set_volume(ingame_music_sound)
if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
ingame_music_sound -= 0.05
if ingame_music_sound < 0:
ingame_music_sound = 0
ingame_music.set_volume(ingame_music_sound)
if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or
event.key == pygame.K_LEFT) and not p.moving_right:
current_player_sprite = 'left'
p.moving_right = False
p.moving_left = True
elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or
event.key == pygame.K_RIGHT) and not p.moving_left:
current_player_sprite = 'right'
p.moving_left = False
p.moving_right = True
if event.type == pygame.KEYUP and (event.key == pygame.K_a or
event.key == pygame.K_LEFT):
current_player_sprite = 'stay'
p.moving_right = False
p.moving_left = False
if event.type == pygame.KEYUP and (event.key == pygame.K_d or
event.key == pygame.K_RIGHT):
current_player_sprite = 'stay'
p.moving_right = False
p.moving_left = False
if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or
event.key == pygame.K_UP) and not p.moving_down:
p.moving_down = False
p.moving_up = True
elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or
event.key == pygame.K_DOWN) and not p.moving_up:
p.moving_up = False
p.moving_down = True
if event.type == pygame.KEYUP and (event.key == pygame.K_w or
event.key == pygame.K_UP):
current_player_sprite = 'stay'
p.moving_down = False
p.moving_up = False
if event.type == pygame.KEYUP and (event.key == pygame.K_s or
event.key == pygame.K_DOWN):
current_player_sprite = 'stay'
p.moving_down = False
p.moving_up = False
if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and
p.health_count > 0):
now = pygame.time.get_ticks()
if now - last >= cooldown:
last = now
Bullets(bullets_count).shot((p.x + 21, p.y - 25))
Bullets(bullets_count).shot((p.x + 76, p.y - 25))
if is_sound:
play_sound('resources/sounds/shot_sound.mp3', 0.1)
Bullets.shooting = True
bullets_shot += 2
elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:
now = pygame.time.get_ticks()
if now - last >= cooldown:
last = now
Bullets(bullets_count).shot((p.x + 21, p.y - 25))
Bullets(bullets_count).shot((p.x + 76, p.y - 25))
if is_sound:
play_sound('resources/sounds/shot_sound.mp3', 0.1)
Bullets.shooting = True
bullets_shot += 2
if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and
not bs):
bs = True
b = Boss()
if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
Enemy(enemies)
if (event.type == pygame.USEREVENT and death and pygame.time.
get_ticks() - wait > 2000 or level_bckgd_pos > -801):
ingame_music.stop()
death_screen()
while True:
if len(str(game_score)) < 6:
game_score = '0' + str(game_score)
else:
break
var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(
player_name, game_score)
cur.execute(var)
con.commit()
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
pause_screen()
if not running_game:
ingame_music.stop()
while True:
if len(str(game_score)) < 6:
game_score = '0' + str(game_score)
else:
break
var = ("INSERT INTO highest_score VALUES ('{}', '{}')".
format(player_name, game_score))
cur.execute(var)
con.commit()
level_bckgd_pos += speed_bckgd
if level_bckgd_pos >= 0:
screen.fill((0, 0, 0))
screen.blit(current_level_background, (0, level_bckgd_pos))
if level_bckgd_pos > -805:
death = True
if p.health_count > 0:
for i in enemies:
collision = pygame.sprite.collide_rect(p, i)
if collision:
Explosion(booms).boom((i.rect.x, i.rect.y))
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3', 0.1)
if i.health_count - 2 <= 0:
game_score += 10
i.kill()
Explosion(booms).boom((i.rect.x, i.rect.y))
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3',
0.1)
enemy_killed += 1
else:
i.health_count -= 2
if is_sound:
play_sound('resources/sounds/collision_sound.mp3',
0.03)
p.health_count -= 1
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3', 0.05
)
if p.health_count > 0:
Damage(window_holes).taking_damage((random.randint(
50, 550), random.randint(50, 750)))
if is_sound:
play_sound('resources/sounds/window_crashed.mp3', 1
)
play_sound('resources/sounds/explosion_stun.mp3',
0.02)
for j in bullets_count:
collision = pygame.sprite.collide_rect(j, i)
if collision:
if i.health_count - 1 <= 0:
game_score += 5
i.kill()
Explosion(booms).boom((i.rect.x, i.rect.y))
if is_sound:
play_sound(
'resources/sounds/explosion_sound.mp3', 0.1
)
enemy_killed += 1
else:
i.health_count -= 1
Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)
)
if is_sound:
play_sound(
'resources/sounds/explosion_sound.mp3', 0.1
)
if is_sound:
play_sound(
'resources/sounds/collision_sound.mp3',
0.03)
j.kill()
if bs and not boss_death:
collision = pygame.sprite.collide_rect(b, p)
if collision and b.y > 0:
b.health_count -= 0.3
if is_sound:
play_sound('resources/sounds/collision_sound.mp3', 0.03
)
p.health_count -= 0.2
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3', 0.05
)
if b.body == b.stay1 or b.body == b.stay2:
b.body = b.stay2
if b.body == b.stay3 or b.body == b.stay4:
b.body = b.stay4
if b.body == b.stay5 or b.body == b.stay6:
b.body = b.stay6
col_check += 1
if p.health_count > 0:
Damage(window_holes).taking_damage((random.randint(
50, 550), random.randint(50, 750)))
if is_sound:
play_sound('resources/sounds/window_crashed.mp3',
0.1)
play_sound('resources/sounds/explosion_stun.mp3',
0.02)
for j in bullets_count:
collision = pygame.sprite.collide_rect(b, j)
if collision and b.y > 0:
if b.body == b.stay1 or b.body == b.stay2:
b.body = b.stay2
if b.body == b.stay3 or b.body == b.stay4:
b.body = b.stay4
if b.body == b.stay5 or b.body == b.stay6:
b.body = b.stay6
col_check += 1
b.health_count -= 0.2
Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3',
0.1)
if is_sound:
play_sound('resources/sounds/collision_sound.mp3',
0.03)
j.kill()
for h in boss_bullets_count:
collision = pygame.sprite.collide_rect(p, h)
if collision:
p.health_count -= 1
Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
if p.health_count > 0:
Damage(window_holes).taking_damage((random.
randint(50, 550), random.randint(50, 750)))
if is_sound:
play_sound(
'resources/sounds/window_crashed.mp3', 0.1)
play_sound(
'resources/sounds/explosion_stun.mp3', 0.01
)
if is_sound:
play_sound('resources/sounds/collision_sound.mp3',
0.03)
h.kill()
p.update(FPS)
if current_player_sprite == 'left':
sprite = p.anim_left()
screen.blit(sprite, (p.x, p.y))
p.left_1 = not p.left_1
elif current_player_sprite == 'right':
sprite = p.anim_right()
screen.blit(sprite, (p.x, p.y))
p.right_1 = not p.right_1
elif current_player_sprite == 'stay':
sprite = p.anim_stay()
screen.blit(sprite, (p.x, p.y))
p.stay_1 = not p.stay_1
if bs:
if battle_music:
ingame_music.stop()
ingame_music = pygame.mixer.Sound(
'resources/sounds/music/wagner_main_theme.mp3')
ingame_music.set_volume(ingame_music_sound)
ingame_music.play()
battle_music = False
b.update()
if b.body == b.stay3 and phase1_score:
game_score += 100
phase1_score = False
if b.body == b.stay5 and phase2_score:
game_score += 100
phase2_score = False
if b.body == b.stay7 and phase3_score:
game_score += 200
phase3_score = False
now = pygame.time.get_ticks()
if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=
b.stay7):
last_2 = now
play_sound('resources/sounds/boss_shot.mp3', 0.05)
Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)
)
if col_check % 40 == 0:
b.change_sprite()
else:
col_check += 1
if b.health_count > 0:
screen.blit(b.body, (b.x, b.y))
elif b.health_count <= 0 and phase4_score:
boss_done = True
phase4_score = False
game_score += 350
if is_sound:
play_sound('resources/sounds/boss_defeated.mp3', 0.2)
Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
Explosion(booms).boom((b.rect.x, b.rect.y))
Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3', 0.1)
boss_death = True
else:
if p.minimize == 0:
if is_sound:
ingame_music.stop()
play_sound('resources/sounds/plane_crash.mp3', 0.05)
p.minimize += 1
if not death:
if p.minimize <= 320:
p.death()
screen.blit(p.death_sp, (p.x, p.y))
else:
death = True
wait = pygame.time.get_ticks()
Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect
.y + 25))
Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
Smallexplosions(small_booms).boom((p.rect.x - 22, p.
rect.y + 7))
if is_sound:
play_sound('resources/sounds/explosion_sound.mp3', 0.1)
p.kill()
if bs and b.health_count > 0:
b.speed += 0.02
b.win = True
screen.blit(b.body, (b.x, b.y))
b.update()
window_holes.update()
window_holes.draw(screen)
enemies.update(FPS)
enemies.draw(screen)
bullets_count.update()
bullets_count.draw(screen)
boss_bullets_count.update()
boss_bullets_count.draw(screen)
small_booms.update()
small_booms.draw(screen)
mini_booms.update()
mini_booms.draw(screen)
draw_text('Player: {}'.format(player_name), font, (255, 255, 255),
screen, 20, 20)
if len(str(game_score)) < 2:
draw_text('00000' + str(game_score), font, (255, 255, 255),
screen, 430, 20)
elif len(str(game_score)) < 3:
draw_text('0000' + str(game_score), font, (255, 255, 255),
screen, 430, 20)
elif len(str(game_score)) < 4:
draw_text('000' + str(game_score), font, (255, 255, 255),
screen, 430, 20)
elif len(str(game_score)) < 5:
draw_text('00' + str(game_score), font, (255, 255, 255), screen,
430, 20)
elif len(str(game_score)) < 6:
draw_text('0' + str(game_score), font, (255, 255, 255), screen,
430, 20)
elif len(str(game_score)) >= 6:
draw_text('Max score', font, (255, 255, 255), screen, 510, 20)
booms.update()
booms.draw(screen)
pygame.display.flip()
clock.tick(FPS)
def death_screen():
    """Show the end-of-run summary: stats, a letter rating and a menu button.

    Plays the victory or defeat theme depending on ``boss_done``, reveals one
    stat line per second, and sets the global ``running_game`` to False when
    the player leaves so the caller's game loop also exits.  Pads the global
    ``game_score`` to six characters for display/storage.
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0   # one more stat line is revealed every second
    color_counter = 0  # cycles the 'SSS' rating colour
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # One rating point per ten kills, plus one for firing fewer than 800 bullets.
    rating_kills = enemy_killed // 10
    rating_shots = 1 if bullets_shot < 800 else 0
    rating = rating_kills + rating_shots
    if boss_done:
        track = 'resources/sounds/music/victory_theme.mp3'
        rating += 2  # bonus for defeating the boss
    else:
        track = 'resources/sounds/music/loose_theme.mp3'
    death_music = pygame.mixer.Sound(track)
    death_music.stop()
    death_music.set_volume(0.1 if is_sound else 0)
    death_music.play()
    # Zero-pad the score to six characters (replaces the manual prepend loop).
    if len(str(game_score)) < 6:
        game_score = str(game_score).zfill(6)
    # Load and scale the button art once, not on every frame.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 700
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255,
                255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255),
                screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,
                255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,
                255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            # 14+: 'SSS' flashes red / green / blue, driven by color_counter.
            elif color_counter == 0:
                draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
            elif color_counter == 1:
                draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
            else:
                draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                # The original double-checked is_sound here; one check suffices.
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
                running_game = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
def pause_screen():
    """Modal pause overlay with 'resume' and 'menu' buttons.

    Sets the global ``running_game`` to False (exit to menu) when 'menu'
    is clicked; simply returns when 'resume' or ESC is chosen.
    """
    global running_game
    running = True
    click = False
    # Load and scale the button art once instead of on every frame.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 410
    button_resume = pygame.image.load('resources/sprites/button.png')
    button_resume = pygame.transform.scale(button_resume, (200, 70))
    b_resume_mask = button_resume.get_rect()
    b_resume_mask.x = 195
    b_resume_mask.y = 300
    while running:
        screen.fill((0, 0, 0))
        mx, my = pygame.mouse.get_pos()
        pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))
        pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)
        pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))
        pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)
        draw_text('Pause', font, (255, 255, 255), screen, 235, 205)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 440)
        screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))
        draw_text('resume', font, (255, 255, 255), screen, 225, 330)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)
            if click:
                running = False
                running_game = False
        if b_resume_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)
            if click:
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
# Entry point: run the main menu loop, then shut pygame down when it returns.
if __name__ == '__main__':
    main_menu()
    pygame.quit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with freetype font *font_u* in *color* and blit it
    onto *surface* with its top-left corner at (x, y)."""
    rendered, bounds = font_u.render(text, color)
    bounds.topleft = x, y
    surface.blit(rendered, bounds)
def play_sound(sound_p, volume_h=0.5, wait_t=0):
    """Load the sound file *sound_p*, set its volume to *volume_h*, and play
    it if the global ``is_sound`` flag is on.

    Always waits *wait_t* milliseconds afterwards — even when muted — so
    timing is identical with sound on or off.
    """
    effect = pygame.mixer.Sound(sound_p)
    effect.set_volume(volume_h)
    if is_sound:
        effect.play()
    pygame.time.wait(wait_t)
pygame.init()
# Gameplay tuning constants.
speed_bckgd = 2
running_game = True
is_sound = True
menu = True
boss_done = False
# Per-run statistics (reset by game_screen at the start of each run).
game_score = 0
bullets_shot = 0
line_counter = 0
enemy_killed = 0
speed = 2
FPS = 100
width = 600
height = 800
player_name = ''
# Persistent leaderboard storage.
con = sqlite3.connect('resources/db/leaderboard.db')
# Freetype fonts at the sizes used by the various screens.
font = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)
font_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)
font_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)
font_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)
# Window setup: 600x800 portrait playfield.
pygame.display.set_icon(pygame.image.load(
    'resources/images/test_small_logo_1.bmp'))
pygame.display.set_caption('Death or Dishonour')
screen = pygame.display.set_mode((600, 800))
clock = pygame.time.Clock()
cur = con.cursor()
def draw_controls():
    """Draw the framed 'controls' help panel at the bottom of the screen."""
    white = (255, 255, 255)
    pygame.draw.rect(screen, white, (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, white, screen, 20, 430)
    # Keyboard (WASD) picture plus the SPACE label.
    keys_img = pygame.image.load('resources/sprites/controls_1.png')
    keys_img = pygame.transform.scale(keys_img, (243, 100))
    screen.blit(keys_img, (20, 470))
    pygame.draw.rect(screen, white, (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, white, screen, 270, 522)
    # Mouse picture for the shoot binding.
    mouse_img = pygame.image.load('resources/sprites/controls_2.png')
    mouse_img = pygame.transform.scale(mouse_img, (90, 100))
    screen.blit(mouse_img, (153, 590))
    draw_text(' - shoot', font, white, screen, 270, 640)
def draw_leaderboard():
    """Query the seven highest scores and draw the leaderboard table."""
    white = (255, 255, 255)
    rows = list(cur.execute(
        'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7'))
    # Table frame, header separator and the name/score column divider.
    pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))
    pygame.draw.rect(screen, white, (310, 70, 250, 335), 3)
    pygame.draw.line(screen, white, (310, 124), (560, 124), 3)
    pygame.draw.line(screen, white, (435, 124), (435, 405), 3)
    row_height = 40
    y = 124
    for _ in range(7):
        y += row_height
        pygame.draw.line(screen, white, (310, y), (560, y), 3)
    draw_text('leaderboard', font_table, white, screen, 362, 80)
    x = 350
    y = 140
    for name, score in rows:
        draw_text(str(name), font_table, white, screen, x, y)
        draw_text(str(score), font_table, white, screen, x + 100, y)
        y += row_height
def main_menu():
    """Top-level menu loop: 'play', 'options' and 'quit' buttons.

    Runs until the window is closed, ESC is pressed, or 'quit' is clicked
    (all of which terminate the process via pygame.quit()/sys.exit()).
    """
    click = False
    pygame.mixer.stop()
    white = (255, 255, 255)
    # All three buttons share one image; load and scale it once instead of
    # reloading it from disk on every frame of the 10 FPS menu loop.
    button = pygame.image.load('resources/sprites/button.png')
    button = pygame.transform.scale(button, (222, 105))
    b_play_mask = button.get_rect()
    b_play_mask.x = 50
    b_play_mask.y = 70
    b_options_mask = button.get_rect()
    b_options_mask.x = 50
    b_options_mask.y = 185
    b_exit_mask = button.get_rect()
    b_exit_mask.x = 50
    b_exit_mask.y = 300
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, white, (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, white, screen, 85, 20)
        screen.blit(button, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, white, screen, 113, 100)
        screen.blit(button, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, white, screen, 78, 215)
        screen.blit(button, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, white, screen, 113, 330)
        draw_controls()
        draw_leaderboard()
        # Hover highlight + click action for each button.
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen: type a nickname and toggle audio.

    Mutates the globals ``player_name`` (the typed nickname, upper-cased,
    max 3 characters tracked via ``line_counter``) and ``is_sound``.
    Returns when 'back' or ESC is pressed.
    """
    global player_name, line_counter, is_sound
    running = True
    click = False
    # Set when NumLock is pressed, to warn the player that keypad digits
    # will not be typed into the nickname.
    numlock = False
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        # Button 1 displays the nickname being typed.
        button_1 = pygame.image.load('resources/sprites/button.png')
        button_1 = pygame.transform.scale(button_1, (222, 105))
        b_1_mask = button_1.get_rect()
        b_1_mask.x = 50
        b_1_mask.y = 70
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        # Button 2 toggles audio on/off.
        button_2 = pygame.image.load('resources/sprites/button.png')
        button_2 = pygame.transform.scale(button_2, (222, 105))
        b_2_mask = button_2.get_rect()
        b_2_mask.x = 50
        b_2_mask.y = 185
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        button_back = pygame.image.load('resources/sprites/button.png')
        button_back = pygame.transform.scale(button_back, (222, 105))
        b_back_mask = button_back.get_rect()
        b_back_mask.x = 50
        b_back_mask.y = 300
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        # Prompt until the player has typed something ('NON' is the
        # placeholder name assigned when the field was left empty).
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                # Toggle audio; pause/unpause anything currently playing.
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                # The remaining branches ignore keys that must never be
                # typed into the nickname (whitespace, arrows, Enter).
                elif player_name == 'NON':
                    pass
                elif event.key == pygame.K_SPACE:
                    pass
                elif event.key == pygame.K_UP:
                    pass
                elif event.key == pygame.K_DOWN:
                    pass
                elif event.key == pygame.K_LEFT:
                    pass
                elif event.key == pygame.K_RIGHT:
                    pass
                elif event.key == pygame.K_RETURN:
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                # Any other unmodified key (except TAB) is appended,
                # upper-cased, up to 3 characters.
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        pygame.display.update()
        clock.tick(10)
def game_screen():
    """Run one full play session: scrolling level, enemies, boss, HUD.

    Resets the per-run globals, loops at FPS until the run ends (death,
    level completed, or exit via the pause menu), then records the score
    in the ``highest_score`` table and returns to the caller (main_menu).
    """
    global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
    game_score = 0
    enemy_killed = 0
    bullets_shot = 0
    boss_done = False
    if player_name == '':
        player_name = 'NON'
    track_count = 0
    battle_tracks = ['resources/sounds/music/battle_music_1.mp3',
                     'resources/sounds/music/battle_music_2.mp3',
                     'resources/sounds/music/battle_music_3.mp3',
                     'resources/sounds/music/battle_music_4.mp3',
                     'resources/sounds/music/battle_music_5.mp3',
                     'resources/sounds/music/battle_music_6.mp3']
    ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
    ingame_music.stop()
    ingame_music_sound = 0.1
    if not is_sound:
        ingame_music_sound = 0
    ingame_music.set_volume(ingame_music_sound)
    ingame_music.play()
    bs = False  # True once the boss has spawned
    running_game = True
    pygame.time.set_timer(pygame.USEREVENT, 1000)  # 1 s spawn/phase timer
    enemies = pygame.sprite.Group()
    death = False
    p = Player()
    window_holes = pygame.sprite.Group()
    bullets_count = pygame.sprite.Group()
    boss_bullets_count = pygame.sprite.Group()
    booms = pygame.sprite.Group()
    small_booms = pygame.sprite.Group()
    mini_booms = pygame.sprite.Group()
    # One-shot flags so each boss phase bonus is awarded only once.
    phase1_score = True
    phase2_score = True
    phase3_score = True
    battle_music = True
    phase4_score = True
    col_check = 1
    boss_death = False
    level_bckgd_pos = -23800  # background scroll position; 0 = fully scrolled
    current_player_sprite = 'stay'
    current_level_background = pygame.image.load(
        'resources/level_pictures/first_level_bckgd.jpg')
    screen.blit(current_level_background, (0, 0))
    wait = 0
    last = pygame.time.get_ticks()      # last player shot, ms
    last_2 = pygame.time.get_ticks()    # last boss shot, ms
    boss_cooldown = 1000
    cooldown = 100
    while running_game:
        for event in pygame.event.get():
            # '=' cycles the battle soundtrack.
            if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
                ingame_music.stop()
                track_count += 1
                if track_count > 5:
                    track_count = 0
                ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
                ingame_music.set_volume(ingame_music_sound)
                ingame_music.play()
            # Numpad +/- adjust the music volume.
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
                ingame_music_sound += 0.05
                if ingame_music_sound >= 1.5:
                    ingame_music_sound = 1.4
                ingame_music.set_volume(ingame_music_sound)
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
                ingame_music_sound -= 0.05
                if ingame_music_sound < 0:
                    ingame_music_sound = 0
                ingame_music.set_volume(ingame_music_sound)
            # Horizontal movement (A/D or arrow keys); opposite direction
            # must be released first.
            if (event.type == pygame.KEYDOWN and (event.key == pygame.K_a or
                    event.key == pygame.K_LEFT) and not p.moving_right):
                current_player_sprite = 'left'
                p.moving_right = False
                p.moving_left = True
            elif (event.type == pygame.KEYDOWN and (event.key == pygame.K_d or
                    event.key == pygame.K_RIGHT) and not p.moving_left):
                current_player_sprite = 'right'
                p.moving_left = False
                p.moving_right = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_a or
                    event.key == pygame.K_LEFT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_d or
                    event.key == pygame.K_RIGHT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            # Vertical movement (W/S or arrow keys).
            if (event.type == pygame.KEYDOWN and (event.key == pygame.K_w or
                    event.key == pygame.K_UP) and not p.moving_down):
                p.moving_down = False
                p.moving_up = True
            elif (event.type == pygame.KEYDOWN and (event.key == pygame.K_s or
                    event.key == pygame.K_DOWN) and not p.moving_up):
                p.moving_up = False
                p.moving_down = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_w or
                    event.key == pygame.K_UP):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_s or
                    event.key == pygame.K_DOWN):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            # Fire twin bullets with the left mouse button or SPACE,
            # rate-limited by `cooldown` ms.
            if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and
                    p.health_count > 0):
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            elif (event.type == pygame.KEYDOWN and
                    event.key == pygame.K_SPACE and p.health_count > 0):
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # Every second: spawn the boss once the level has scrolled far
            # enough, otherwise spawn a regular enemy.
            if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and
                    not bs):
                bs = True
                b = Boss()
            if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
                Enemy(enemies)
            # End of run: the player died 2 s ago, or the level scrolled out.
            if ((event.type == pygame.USEREVENT and death and
                    pygame.time.get_ticks() - wait > 2000) or
                    level_bckgd_pos > -801):
                ingame_music.stop()
                death_screen()
                if len(str(game_score)) < 6:
                    game_score = str(game_score).zfill(6)
                # Parameterized query: player_name is user-controlled, so it
                # must never be spliced into the SQL string.
                cur.execute('INSERT INTO highest_score VALUES (?, ?)',
                            (player_name, game_score))
                con.commit()
                # Return immediately: the original fell through to the
                # `if not running_game:` block below and inserted the same
                # score a second time.
                return
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pause_screen()
        # Exit requested from the pause menu: save the score and fall out.
        if not running_game:
            ingame_music.stop()
            if len(str(game_score)) < 6:
                game_score = str(game_score).zfill(6)
            cur.execute('INSERT INTO highest_score VALUES (?, ?)',
                        (player_name, game_score))
            con.commit()
        # Scroll the background upward.
        level_bckgd_pos += speed_bckgd
        if level_bckgd_pos >= 0:
            screen.fill((0, 0, 0))
        screen.blit(current_level_background, (0, level_bckgd_pos))
        if level_bckgd_pos > -805:
            death = True
        if p.health_count > 0:
            for i in enemies:
                # Ramming an enemy damages both sides.
                collision = pygame.sprite.collide_rect(p, i)
                if collision:
                    Explosion(booms).boom((i.rect.x, i.rect.y))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    if i.health_count - 2 <= 0:
                        game_score += 10
                        i.kill()
                        Explosion(booms).boom((i.rect.x, i.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                       0.1)
                        enemy_killed += 1
                    else:
                        i.health_count -= 2
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                       0.03)
                    p.health_count -= 1
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 1)
                            play_sound('resources/sounds/explosion_stun.mp3',
                                       0.02)
                # Player bullets vs this enemy.
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(j, i)
                    if collision:
                        if i.health_count - 1 <= 0:
                            game_score += 5
                            i.kill()
                            Explosion(booms).boom((i.rect.x, i.rect.y))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/explosion_sound.mp3', 0.1)
                            enemy_killed += 1
                        else:
                            i.health_count -= 1
                            Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/explosion_sound.mp3', 0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                       0.03)
                        j.kill()
            if bs and not boss_death:
                # Ramming the boss damages both sides and flashes its sprite.
                collision = pygame.sprite.collide_rect(b, p)
                if collision and b.y > 0:
                    b.health_count -= 0.3
                    if is_sound:
                        play_sound('resources/sounds/collision_sound.mp3', 0.03)
                    p.health_count -= 0.2
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    if b.body == b.stay1 or b.body == b.stay2:
                        b.body = b.stay2
                    if b.body == b.stay3 or b.body == b.stay4:
                        b.body = b.stay4
                    if b.body == b.stay5 or b.body == b.stay6:
                        b.body = b.stay6
                    col_check += 1
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3',
                                       0.1)
                            play_sound('resources/sounds/explosion_stun.mp3',
                                       0.02)
                # Player bullets vs boss.
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(b, j)
                    if collision and b.y > 0:
                        if b.body == b.stay1 or b.body == b.stay2:
                            b.body = b.stay2
                        if b.body == b.stay3 or b.body == b.stay4:
                            b.body = b.stay4
                        if b.body == b.stay5 or b.body == b.stay6:
                            b.body = b.stay6
                        col_check += 1
                        b.health_count -= 0.2
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                       0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                       0.03)
                        j.kill()
                # Boss bullets vs player.
                for h in boss_bullets_count:
                    collision = pygame.sprite.collide_rect(p, h)
                    if collision:
                        p.health_count -= 1
                        Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
                        if p.health_count > 0:
                            Damage(window_holes).taking_damage((random.randint(
                                50, 550), random.randint(50, 750)))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/window_crashed.mp3', 0.1)
                                play_sound(
                                    'resources/sounds/explosion_stun.mp3', 0.01)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                       0.03)
                        h.kill()
            p.update(FPS)
            # Draw the player with the animation frame for current movement.
            if current_player_sprite == 'left':
                sprite = p.anim_left()
                screen.blit(sprite, (p.x, p.y))
                p.left_1 = not p.left_1
            elif current_player_sprite == 'right':
                sprite = p.anim_right()
                screen.blit(sprite, (p.x, p.y))
                p.right_1 = not p.right_1
            elif current_player_sprite == 'stay':
                sprite = p.anim_stay()
                screen.blit(sprite, (p.x, p.y))
                p.stay_1 = not p.stay_1
            if bs:
                # Switch to the boss theme exactly once.
                if battle_music:
                    ingame_music.stop()
                    ingame_music = pygame.mixer.Sound(
                        'resources/sounds/music/wagner_main_theme.mp3')
                    ingame_music.set_volume(ingame_music_sound)
                    ingame_music.play()
                    battle_music = False
                b.update()
                # One-shot score bonuses as the boss changes phase.
                if b.body == b.stay3 and phase1_score:
                    game_score += 100
                    phase1_score = False
                if b.body == b.stay5 and phase2_score:
                    game_score += 100
                    phase2_score = False
                if b.body == b.stay7 and phase3_score:
                    game_score += 200
                    phase3_score = False
                now = pygame.time.get_ticks()
                if (now - last_2 >= boss_cooldown and b.y > 0 and
                        b.body != b.stay7):
                    last_2 = now
                    play_sound('resources/sounds/boss_shot.mp3', 0.05)
                    Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155))
                if col_check % 40 == 0:
                    b.change_sprite()
                else:
                    col_check += 1
                if b.health_count > 0:
                    screen.blit(b.body, (b.x, b.y))
                elif b.health_count <= 0 and phase4_score:
                    # Boss defeated: award the final bonus exactly once.
                    boss_done = True
                    phase4_score = False
                    game_score += 350
                    if is_sound:
                        play_sound('resources/sounds/boss_defeated.mp3', 0.2)
                    Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x, b.rect.y))
                    Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
                    Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    boss_death = True
        else:
            # Player is dead: play the crash animation, then explode.
            if p.minimize == 0:
                if is_sound:
                    ingame_music.stop()
                    play_sound('resources/sounds/plane_crash.mp3', 0.05)
            p.minimize += 1
            if not death:
                if p.minimize <= 320:
                    p.death()
                    screen.blit(p.death_sp, (p.x, p.y))
                else:
                    death = True
                    wait = pygame.time.get_ticks()
                    Smallexplosions(small_booms).boom((p.rect.x + 3,
                                                       p.rect.y + 25))
                    Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
                    Smallexplosions(small_booms).boom((p.rect.x - 22,
                                                       p.rect.y + 7))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    p.kill()
            if bs and b.health_count > 0:
                # The surviving boss flies off victorious.
                b.speed += 0.02
                b.win = True
                screen.blit(b.body, (b.x, b.y))
                b.update()
        # Update and draw every sprite group, back-to-front.
        window_holes.update()
        window_holes.draw(screen)
        enemies.update(FPS)
        enemies.draw(screen)
        bullets_count.update()
        bullets_count.draw(screen)
        boss_bullets_count.update()
        boss_bullets_count.draw(screen)
        small_booms.update()
        small_booms.draw(screen)
        mini_booms.update()
        mini_booms.draw(screen)
        draw_text('Player: {}'.format(player_name), font, (255, 255, 255),
                  screen, 20, 20)
        # HUD score: zero-padded to six digits, capped at 'Max score'.
        score_str = str(game_score)
        if len(score_str) < 6:
            draw_text(score_str.zfill(6), font, (255, 255, 255), screen,
                      430, 20)
        else:
            draw_text('Max score', font, (255, 255, 255), screen, 510, 20)
        booms.update()
        booms.draw(screen)
        pygame.display.flip()
        clock.tick(FPS)
def death_screen():
    """Show the end-of-run results screen (score, stats, letter rating).

    Plays the victory or loss theme depending on whether the boss was
    defeated, reveals one stat line per second (driven by USEREVENT) and
    returns when the player clicks "menu" or presses Escape.  Sets
    running_game = False so game_screen() also unwinds.
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0      # how many stat lines have been revealed so far
    color_counter = 0     # cycles 0..2 to animate the 'SSS' rating colour
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # Rating: one point per 10 kills, one bonus for firing < 800 bullets,
    # and two more for defeating the boss.
    rating_kills = enemy_killed // 10
    rating_shots = 1 if bullets_shot < 800 else 0
    rating = rating_kills + rating_shots
    # Both themes share the same setup; the original duplicated it.
    if boss_done:
        theme_path = 'resources/sounds/music/victory_theme.mp3'
        rating += 2
    else:
        theme_path = 'resources/sounds/music/loose_theme.mp3'
    death_music = pygame.mixer.Sound(theme_path)
    death_music.stop()
    death_music.set_volume(0.1 if is_sound else 0)
    death_music.play()
    # Zero-pad the score to six characters (e.g. 250 -> '000250'); this
    # replaces the original one-character-at-a-time while loop.
    game_score = str(game_score).zfill(6)
    # Load the button art once instead of on every frame.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 700
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        # Reveal one stat line per elapsed second.
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255,
                255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255),
                screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,
                255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,
                255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            # Top rating: flash 'SSS' red/green/blue once a second.
            elif color_counter == 0:
                draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
            elif color_counter == 1:
                draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
            else:
                draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                # BUG FIX: the original nested `if is_sound:` twice with a
                # dead `else: pass`; a single check is sufficient.
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
                running_game = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
def pause_screen():
    """Overlay pause menu: 'resume' returns to the game, 'menu' aborts it.

    Sets running_game = False when the player quits to the menu so the
    caller (game_screen) also exits its loop.
    """
    global running_game
    running = True
    click = False
    # PERF: load and position the static button art once, not every frame
    # as the original did.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 410
    button_resume = pygame.image.load('resources/sprites/button.png')
    button_resume = pygame.transform.scale(button_resume, (200, 70))
    b_resume_mask = button_resume.get_rect()
    b_resume_mask.x = 195
    b_resume_mask.y = 300
    while running:
        screen.fill((0, 0, 0))
        mx, my = pygame.mouse.get_pos()
        pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))
        pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)
        pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))
        pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)
        draw_text('Pause', font, (255, 255, 255), screen, 235, 205)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 440)
        screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))
        draw_text('resume', font, (255, 255, 255), screen, 225, 330)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)
            if click:
                running = False
                running_game = False
        if b_resume_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)
            if click:
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
if __name__ == '__main__':
    # Entry point: run the menu loop until the player quits, then shut
    # pygame down cleanly.
    main_menu()
    pygame.quit()
# ---- duplicate-copy boundary (extraction artifact) ----
import pygame
import pygame.freetype
import sys
import sqlite3
from data.player_class import Player
from data.explosion_class import Explosion
from data.objects_class import Bullets, Damage
from data.enemy_class import Enemy
from data.enemy_class import Boss
from data.death_animation import Smallexplosions
from data.explosion_class import Miniexplosion
from data.objects_class import Bossbullets
import random
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with the freetype font *font_u* and blit it at (x, y)."""
    # pygame.freetype's render() returns a (surface, bounding_rect) pair.
    rendered, bounds = font_u.render(text, color)
    bounds.topleft = (x, y)
    surface.blit(rendered, bounds)
def play_sound(sound_p, volume_h=0.5, wait_t=0):
    """Load *sound_p*, set its volume and play it if audio is enabled.

    When the global `is_sound` toggle is off the call only loads the
    sound and returns.  *wait_t* blocks for that many milliseconds after
    playback starts.
    """
    effect = pygame.mixer.Sound(sound_p)
    effect.set_volume(volume_h)
    if not is_sound:
        return
    effect.play()
    pygame.time.wait(wait_t)
pygame.init()
# --- tuning constants ---------------------------------------------------
speed_bckgd = 2  # background scroll speed, pixels per frame
running_game = True
is_sound = True  # global audio toggle, flipped in options_menu()
menu = True
boss_done = False  # set by game_screen() when the boss is defeated
# --- per-run statistics (reset by game_screen) --------------------------
game_score = 0
bullets_shot = 0
line_counter = 0  # characters typed for the player name (capped at 3)
enemy_killed = 0
speed = 2
FPS = 100
width = 600
height = 800
player_name = ''
# --- shared resources: leaderboard DB, fonts, window, clock -------------
con = sqlite3.connect('resources/db/leaderboard.db')
font = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)
font_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)
font_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)
font_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)
pygame.display.set_icon(pygame.image.load(
    'resources/images/test_small_logo_1.bmp'))
pygame.display.set_caption('Death or Dishonour')
screen = pygame.display.set_mode((600, 800))
clock = pygame.time.Clock()
cur = con.cursor()
def draw_controls():
    """Paint the 'controls' help panel in the lower part of the screen."""
    # Panel frame and background.
    pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, (255, 255, 255), screen, 20, 430)
    # Keyboard picture plus the SPACE key badge.
    keys_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_1.png'), (243, 100))
    screen.blit(keys_img, (20, 470))
    pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)
    # Mouse picture.
    mouse_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_2.png'), (90, 100))
    screen.blit(mouse_img, (153, 590))
    draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)
def draw_leaderboard():
    """Draw the top-7 high-score table read from the sqlite leaderboard."""
    # fetchall() replaces the original manual cursor-append loop.
    rows = cur.execute(
        'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7').fetchall()
    # Table frame, header separator and column separator.
    pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))
    pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)
    pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)
    pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)
    charge = 40  # row height in pixels
    # Horizontal grid lines for the seven rows.
    y = 124
    for _ in range(7):
        y += charge
        pygame.draw.line(screen, (255, 255, 255), (310, y), (560, y), 3)
    draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 80)
    # Fill in the rows: column 0 on the left, column 1 on the right.
    x = 350
    y = 140
    for row in rows:
        draw_text(str(row[0]), font_table, (255, 255, 255), screen, x, y)
        draw_text(str(row[1]), font_table, (255, 255, 255), screen, x + 100, y)
        y += charge
def main_menu():
    """Top-level menu loop: play / options / quit, plus help + leaderboard.

    Runs forever; 'quit', Escape or the window close button terminate the
    process via sys.exit().
    """
    click = False
    pygame.mixer.stop()
    # PERF: the button art is static -- load and position it once instead
    # of reloading three images on every frame as the original did.
    button_play = pygame.image.load('resources/sprites/button.png')
    button_play = pygame.transform.scale(button_play, (222, 105))
    b_play_mask = button_play.get_rect()
    b_play_mask.x = 50
    b_play_mask.y = 70
    button_options = pygame.image.load('resources/sprites/button.png')
    button_options = pygame.transform.scale(button_options, (222, 105))
    b_options_mask = button_options.get_rect()
    b_options_mask.x = 50
    b_options_mask.y = 185
    button_exit = pygame.image.load('resources/sprites/button.png')
    button_exit = pygame.transform.scale(button_exit, (222, 105))
    b_exit_mask = button_exit.get_rect()
    b_exit_mask.x = 50
    b_exit_mask.y = 300
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)
        screen.blit(button_play, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, (255, 255, 255), screen, 113, 100)
        screen.blit(button_options, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, (255, 255, 255), screen, 78, 215)
        screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_leaderboard()
        # Hover highlight + click handling for each button.
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen: enter a player name (max 3 chars) and toggle audio.

    Any unmodified keypress appends its uppercased character to the name;
    backspace deletes.  `line_counter` tracks how many characters were
    typed so entry stops at three.  Pressing NumLock shows a warning
    (modified keys are rejected by the `event.mod` check below).
    """
    global player_name, line_counter, is_sound
    running = True
    click = False
    numlock = False  # True once the user pressed NumLock; shows a warning
    # PERF: static button art -- load once, not on every frame.
    button_1 = pygame.image.load('resources/sprites/button.png')
    button_1 = pygame.transform.scale(button_1, (222, 105))
    b_1_mask = button_1.get_rect()
    b_1_mask.x = 50
    b_1_mask.y = 70
    button_2 = pygame.image.load('resources/sprites/button.png')
    button_2 = pygame.transform.scale(button_2, (222, 105))
    b_2_mask = button_2.get_rect()
    b_2_mask.x = 50
    b_2_mask.y = 185
    button_back = pygame.image.load('resources/sprites/button.png')
    button_back = pygame.transform.scale(button_back, (222, 105))
    b_back_mask = button_back.get_rect()
    b_back_mask.x = 50
    b_back_mask.y = 300
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                # Toggle audio; pause running channels when turning sound
                # off, resume them when turning it back on.
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                elif player_name == 'NON':
                    # Placeholder name blocks typing until it is deleted.
                    pass
                elif event.key in (pygame.K_SPACE, pygame.K_UP,
                        pygame.K_DOWN, pygame.K_LEFT, pygame.K_RIGHT,
                        pygame.K_RETURN):
                    # Navigation/whitespace keys are not name characters;
                    # this collapses the original six-branch `pass` ladder.
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        pygame.display.update()
        clock.tick(10)
def game_screen():
    """Run one play session: scrolling level, enemy waves and boss fight.

    Resets the per-run globals, loops at `FPS` until the run ends (death
    animation finished, level scrolled to its end, or quit via the pause
    menu) and records the final score in the sqlite leaderboard.
    """
    global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
    game_score = 0
    enemy_killed = 0
    bullets_shot = 0
    boss_done = False
    if player_name == '':
        player_name = 'NON'
    # Background playlist; the '=' key cycles through it in-game.
    track_count = 0
    battle_tracks = ['resources/sounds/music/battle_music_1.mp3',
        'resources/sounds/music/battle_music_2.mp3',
        'resources/sounds/music/battle_music_3.mp3',
        'resources/sounds/music/battle_music_4.mp3',
        'resources/sounds/music/battle_music_5.mp3',
        'resources/sounds/music/battle_music_6.mp3']
    ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
    ingame_music.stop()
    ingame_music_sound = 0.1 if is_sound else 0
    ingame_music.set_volume(ingame_music_sound)
    ingame_music.play()
    bs = False                # True once the boss has spawned
    running_game = True
    pygame.time.set_timer(pygame.USEREVENT, 1000)  # 1 Hz spawn/heartbeat
    enemies = pygame.sprite.Group()
    death = False             # player death animation has finished
    p = Player()
    window_holes = pygame.sprite.Group()
    bullets_count = pygame.sprite.Group()
    boss_bullets_count = pygame.sprite.Group()
    booms = pygame.sprite.Group()
    small_booms = pygame.sprite.Group()
    mini_booms = pygame.sprite.Group()
    phase1_score = True       # one-shot score bonus per boss phase
    phase2_score = True
    phase3_score = True
    battle_music = True       # switch to the boss theme exactly once
    phase4_score = True
    col_check = 1             # boss hit counter; every 40th hit flips sprite
    boss_death = False
    level_bckgd_pos = -23800  # background scroll offset; run ends near 0
    current_player_sprite = 'stay'
    current_level_background = pygame.image.load(
        'resources/level_pictures/first_level_bckgd.jpg')
    screen.blit(current_level_background, (0, 0))
    wait = 0                  # tick when the death animation completed
    last = pygame.time.get_ticks()    # last player shot (rate limit)
    last_2 = pygame.time.get_ticks()  # last boss shot
    boss_cooldown = 1000
    cooldown = 100

    def fire():
        # Fire two bullets from the wing tips; shared by mouse and SPACE
        # (the original duplicated this whole body in both branches).
        # Rate limited to one volley per `cooldown` milliseconds.
        nonlocal last
        global bullets_shot
        now = pygame.time.get_ticks()
        if now - last >= cooldown:
            last = now
            Bullets(bullets_count).shot((p.x + 21, p.y - 25))
            Bullets(bullets_count).shot((p.x + 76, p.y - 25))
            if is_sound:
                play_sound('resources/sounds/shot_sound.mp3', 0.1)
            Bullets.shooting = True
            bullets_shot += 2

    def save_score():
        # Zero-pad the score to six characters and persist it.  SECURITY
        # FIX: the original interpolated player_name (user input) directly
        # into the SQL text with str.format; use a parameterized query.
        global game_score
        game_score = str(game_score).zfill(6)
        cur.execute('INSERT INTO highest_score VALUES (?, ?)',
            (player_name, game_score))
        con.commit()

    while running_game:
        for event in pygame.event.get():
            # --- music controls: '=' next track, keypad +/- volume ------
            if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
                ingame_music.stop()
                track_count = (track_count + 1) % len(battle_tracks)
                ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
                ingame_music.set_volume(ingame_music_sound)
                ingame_music.play()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
                ingame_music_sound += 0.05
                if ingame_music_sound >= 1.5:
                    ingame_music_sound = 1.4
                ingame_music.set_volume(ingame_music_sound)
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
                ingame_music_sound -= 0.05
                if ingame_music_sound < 0:
                    ingame_music_sound = 0
                ingame_music.set_volume(ingame_music_sound)
            # --- movement: WASD / arrows; opposite direction blocks -----
            if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or
                    event.key == pygame.K_LEFT) and not p.moving_right:
                current_player_sprite = 'left'
                p.moving_right = False
                p.moving_left = True
            elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or
                    event.key == pygame.K_RIGHT) and not p.moving_left:
                current_player_sprite = 'right'
                p.moving_left = False
                p.moving_right = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_a or
                    event.key == pygame.K_LEFT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_d or
                    event.key == pygame.K_RIGHT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or
                    event.key == pygame.K_UP) and not p.moving_down:
                p.moving_down = False
                p.moving_up = True
            elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or
                    event.key == pygame.K_DOWN) and not p.moving_up:
                p.moving_up = False
                p.moving_down = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_w or
                    event.key == pygame.K_UP):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_s or
                    event.key == pygame.K_DOWN):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            # --- shooting (mouse button or SPACE) -----------------------
            if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and
                    p.health_count > 0):
                fire()
            elif (event.type == pygame.KEYDOWN and
                    event.key == pygame.K_SPACE and p.health_count > 0):
                fire()
            # --- spawning: boss near the level end, enemy waves before --
            if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and
                    not bs):
                bs = True
                b = Boss()
            if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
                Enemy(enemies)
            # End of run: death animation done for 2 s, or the level has
            # scrolled to its end (the `or` binds last, so the level-end
            # case triggers regardless of the event type).
            if (event.type == pygame.USEREVENT and death and
                    pygame.time.get_ticks() - wait > 2000 or
                    level_bckgd_pos > -801):
                ingame_music.stop()
                death_screen()
                save_score()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pause_screen()
                if not running_game:
                    # Player quit to the menu from the pause screen.
                    ingame_music.stop()
                    save_score()
        # --- scroll the background ---------------------------------------
        level_bckgd_pos += speed_bckgd
        if level_bckgd_pos >= 0:
            screen.fill((0, 0, 0))
        screen.blit(current_level_background, (0, level_bckgd_pos))
        if level_bckgd_pos > -805:
            death = True
        if p.health_count > 0:
            # --- collisions: player vs enemies, bullets vs enemies ------
            for i in enemies:
                collision = pygame.sprite.collide_rect(p, i)
                if collision:
                    Explosion(booms).boom((i.rect.x, i.rect.y))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    if i.health_count - 2 <= 0:
                        game_score += 10
                        i.kill()
                        Explosion(booms).boom((i.rect.x, i.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                0.1)
                        enemy_killed += 1
                    else:
                        i.health_count -= 2
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                    p.health_count -= 1
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3',
                            0.05)
                    if p.health_count > 0:
                        # Ram damage cracks the "window" at a random spot.
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3',
                                1)
                            play_sound('resources/sounds/explosion_stun.mp3',
                                0.02)
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(j, i)
                    if collision:
                        if i.health_count - 1 <= 0:
                            game_score += 5
                            i.kill()
                            Explosion(booms).boom((i.rect.x, i.rect.y))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/explosion_sound.mp3',
                                    0.1)
                            enemy_killed += 1
                        else:
                            i.health_count -= 1
                            Miniexplosion(mini_booms).boom(
                                (j.rect.x, j.rect.y))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/explosion_sound.mp3',
                                    0.1)
                            if is_sound:
                                play_sound(
                                    'resources/sounds/collision_sound.mp3',
                                    0.03)
                        j.kill()
            if bs and not boss_death:
                # --- boss collisions: ram, player bullets, boss bullets -
                collision = pygame.sprite.collide_rect(b, p)
                if collision and b.y > 0:
                    b.health_count -= 0.3
                    if is_sound:
                        play_sound('resources/sounds/collision_sound.mp3',
                            0.03)
                    p.health_count -= 0.2
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3',
                            0.05)
                    # Switch the boss to the "damaged" frame of its phase.
                    if b.body == b.stay1 or b.body == b.stay2:
                        b.body = b.stay2
                    if b.body == b.stay3 or b.body == b.stay4:
                        b.body = b.stay4
                    if b.body == b.stay5 or b.body == b.stay6:
                        b.body = b.stay6
                    col_check += 1
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(
                            50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3',
                                0.1)
                            play_sound('resources/sounds/explosion_stun.mp3',
                                0.02)
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(b, j)
                    if collision and b.y > 0:
                        if b.body == b.stay1 or b.body == b.stay2:
                            b.body = b.stay2
                        if b.body == b.stay3 or b.body == b.stay4:
                            b.body = b.stay4
                        if b.body == b.stay5 or b.body == b.stay6:
                            b.body = b.stay6
                        col_check += 1
                        b.health_count -= 0.2
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3',
                                0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                        j.kill()
                for h in boss_bullets_count:
                    collision = pygame.sprite.collide_rect(p, h)
                    if collision:
                        p.health_count -= 1
                        Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
                        if p.health_count > 0:
                            Damage(window_holes).taking_damage((random.randint(
                                50, 550), random.randint(50, 750)))
                            if is_sound:
                                play_sound(
                                    'resources/sounds/window_crashed.mp3',
                                    0.1)
                                play_sound(
                                    'resources/sounds/explosion_stun.mp3',
                                    0.01)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3',
                                0.03)
                        h.kill()
            # --- player sprite animation --------------------------------
            p.update(FPS)
            if current_player_sprite == 'left':
                sprite = p.anim_left()
                screen.blit(sprite, (p.x, p.y))
                p.left_1 = not p.left_1
            elif current_player_sprite == 'right':
                sprite = p.anim_right()
                screen.blit(sprite, (p.x, p.y))
                p.right_1 = not p.right_1
            elif current_player_sprite == 'stay':
                sprite = p.anim_stay()
                screen.blit(sprite, (p.x, p.y))
                p.stay_1 = not p.stay_1
            if bs:
                # --- boss behaviour: theme, phase bonuses, shooting -----
                if battle_music:
                    ingame_music.stop()
                    ingame_music = pygame.mixer.Sound(
                        'resources/sounds/music/wagner_main_theme.mp3')
                    ingame_music.set_volume(ingame_music_sound)
                    ingame_music.play()
                    battle_music = False
                b.update()
                if b.body == b.stay3 and phase1_score:
                    game_score += 100
                    phase1_score = False
                if b.body == b.stay5 and phase2_score:
                    game_score += 100
                    phase2_score = False
                if b.body == b.stay7 and phase3_score:
                    game_score += 200
                    phase3_score = False
                now = pygame.time.get_ticks()
                if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=
                        b.stay7):
                    last_2 = now
                    play_sound('resources/sounds/boss_shot.mp3', 0.05)
                    Bossbullets(boss_bullets_count).shot(
                        (b.x + 170, b.y + 155))
                if col_check % 40 == 0:
                    b.change_sprite()
                else:
                    col_check += 1
                if b.health_count > 0:
                    screen.blit(b.body, (b.x, b.y))
                elif b.health_count <= 0 and phase4_score:
                    # Boss defeated: one-time bonus and explosion burst.
                    boss_done = True
                    phase4_score = False
                    game_score += 350
                    if is_sound:
                        play_sound('resources/sounds/boss_defeated.mp3', 0.2)
                    Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x, b.rect.y))
                    Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
                    Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    boss_death = True
        else:
            # --- player is dead: crash animation, boss flies off --------
            if p.minimize == 0:
                if is_sound:
                    ingame_music.stop()
                    play_sound('resources/sounds/plane_crash.mp3', 0.05)
                p.minimize += 1
            if not death:
                if p.minimize <= 320:
                    p.death()
                    screen.blit(p.death_sp, (p.x, p.y))
                else:
                    death = True
                    wait = pygame.time.get_ticks()
                    Smallexplosions(small_booms).boom(
                        (p.rect.x + 3, p.rect.y + 25))
                    Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
                    Smallexplosions(small_booms).boom(
                        (p.rect.x - 22, p.rect.y + 7))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    p.kill()
            if bs and b.health_count > 0:
                b.speed += 0.02
                b.win = True
                screen.blit(b.body, (b.x, b.y))
                b.update()
        # --- draw sprite groups and the HUD ------------------------------
        window_holes.update()
        window_holes.draw(screen)
        enemies.update(FPS)
        enemies.draw(screen)
        bullets_count.update()
        bullets_count.draw(screen)
        boss_bullets_count.update()
        boss_bullets_count.draw(screen)
        small_booms.update()
        small_booms.draw(screen)
        mini_booms.update()
        mini_booms.draw(screen)
        draw_text('Player: {}'.format(player_name), font, (255, 255, 255),
            screen, 20, 20)
        # Score display: zero-padded to six digits (replaces the original
        # five-branch padding chain); longer scores show 'Max score'.
        score_str = str(game_score)
        if len(score_str) < 6:
            draw_text(score_str.zfill(6), font, (255, 255, 255), screen,
                430, 20)
        else:
            draw_text('Max score', font, (255, 255, 255), screen, 510, 20)
        booms.update()
        booms.draw(screen)
        pygame.display.flip()
        clock.tick(FPS)
def death_screen():
    """Show the end-of-run results screen (score, stats, letter rating).

    Plays the victory or loss theme depending on whether the boss was
    defeated, reveals one stat line per second (driven by USEREVENT) and
    returns when the player clicks "menu" or presses Escape.  Sets
    running_game = False so game_screen() also unwinds.
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0      # how many stat lines have been revealed so far
    color_counter = 0     # cycles 0..2 to animate the 'SSS' rating colour
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # Rating: one point per 10 kills, one bonus for firing < 800 bullets,
    # and two more for defeating the boss.
    rating_kills = enemy_killed // 10
    rating_shots = 1 if bullets_shot < 800 else 0
    rating = rating_kills + rating_shots
    # Both themes share the same setup; the original duplicated it.
    if boss_done:
        theme_path = 'resources/sounds/music/victory_theme.mp3'
        rating += 2
    else:
        theme_path = 'resources/sounds/music/loose_theme.mp3'
    death_music = pygame.mixer.Sound(theme_path)
    death_music.stop()
    death_music.set_volume(0.1 if is_sound else 0)
    death_music.play()
    # Zero-pad the score to six characters (e.g. 250 -> '000250'); this
    # replaces the original one-character-at-a-time while loop.
    game_score = str(game_score).zfill(6)
    # Load the button art once instead of on every frame.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 700
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        # Reveal one stat line per elapsed second.
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255,
                255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255),
                screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,
                255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,
                255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            # Top rating: flash 'SSS' red/green/blue once a second.
            elif color_counter == 0:
                draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
            elif color_counter == 1:
                draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
            else:
                draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                # BUG FIX: the original nested `if is_sound:` twice with a
                # dead `else: pass`; a single check is sufficient.
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
                running_game = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
def pause_screen():
    """Overlay pause menu: 'resume' returns to the game, 'menu' aborts it.

    Sets running_game = False when the player quits to the menu so the
    caller (game_screen) also exits its loop.
    """
    global running_game
    running = True
    click = False
    # PERF: load and position the static button art once, not every frame
    # as the original did.
    button_menu = pygame.image.load('resources/sprites/button.png')
    button_menu = pygame.transform.scale(button_menu, (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 410
    button_resume = pygame.image.load('resources/sprites/button.png')
    button_resume = pygame.transform.scale(button_resume, (200, 70))
    b_resume_mask = button_resume.get_rect()
    b_resume_mask.x = 195
    b_resume_mask.y = 300
    while running:
        screen.fill((0, 0, 0))
        mx, my = pygame.mouse.get_pos()
        pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))
        pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)
        pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))
        pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)
        draw_text('Pause', font, (255, 255, 255), screen, 235, 205)
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 440)
        screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))
        draw_text('resume', font, (255, 255, 255), screen, 225, 330)
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)
            if click:
                running = False
                running_game = False
        if b_resume_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)
            if click:
                running = False
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        clock.tick(10)
if __name__ == '__main__':
    # Entry point: run the menu loop until the player quits, then shut
    # pygame down cleanly.
    main_menu()
    pygame.quit()
# ---- duplicate-copy boundary (extraction artifact) ----
import pygame
import pygame.freetype
import sys
import sqlite3
from data.player_class import Player
from data.explosion_class import Explosion
from data.objects_class import Bullets, Damage
from data.enemy_class import Enemy
from data.enemy_class import Boss
from data.death_animation import Smallexplosions
from data.explosion_class import Miniexplosion
from data.objects_class import Bossbullets
import random
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with the freetype font *font_u* and blit it at (x, y)."""
    # pygame.freetype's render() returns a (surface, bounding_rect) pair.
    rendered, bounds = font_u.render(text, color)
    bounds.topleft = (x, y)
    surface.blit(rendered, bounds)
def play_sound(sound_p, volume_h=0.5, wait_t=0):
    """Load *sound_p*, set its volume and play it if audio is enabled.

    When the global `is_sound` toggle is off the call only loads the
    sound and returns.  *wait_t* blocks for that many milliseconds after
    playback starts.
    """
    effect = pygame.mixer.Sound(sound_p)
    effect.set_volume(volume_h)
    if not is_sound:
        return
    effect.play()
    pygame.time.wait(wait_t)
pygame.init()
# --- tuning constants ---------------------------------------------------
speed_bckgd = 2  # background scroll speed, pixels per frame
running_game = True
is_sound = True  # global audio toggle, flipped in options_menu()
menu = True
boss_done = False  # set by game_screen() when the boss is defeated
# --- per-run statistics (reset by game_screen) --------------------------
game_score = 0
bullets_shot = 0
line_counter = 0  # characters typed for the player name (capped at 3)
enemy_killed = 0
speed = 2
FPS = 100
width = 600
height = 800
player_name = ''
# --- shared resources: leaderboard DB, fonts, window, clock -------------
con = sqlite3.connect('resources/db/leaderboard.db')
font = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)
font_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)
font_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)
font_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)
pygame.display.set_icon(pygame.image.load('resources/images/test_small_logo_1.bmp'))
pygame.display.set_caption('Death or Dishonour')
screen = pygame.display.set_mode((600, 800))
clock = pygame.time.Clock()
cur = con.cursor()
def draw_controls():
    """Draw the controls help panel in the lower part of the screen."""
    white = (255, 255, 255)
    # panel frame and dark interior
    pygame.draw.rect(screen, white, (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, white, screen, 20, 430)
    # WASD keys illustration
    keys_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_1.png'), (243, 100))
    screen.blit(keys_img, (20, 470))
    pygame.draw.rect(screen, white, (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, white, screen, 270, 522)
    # mouse illustration
    mouse_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_2.png'), (90, 100))
    screen.blit(mouse_img, (153, 590))
    draw_text(' - shoot', font, white, screen, 270, 640)
def draw_leaderboard():
    """Render the top-7 leaderboard box from the SQLite database."""
    rows = list(cur.execute("""SELECT * FROM highest_score ORDER BY score DESC LIMIT 7"""))
    white = (255, 255, 255)
    # box, header separator and centre column divider
    pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))
    pygame.draw.rect(screen, white, (310, 70, 250, 335), 3)
    pygame.draw.line(screen, white, (310, 124), (560, 124), 3)
    pygame.draw.line(screen, white, (435, 124), (435, 405), 3)
    row_height = 40
    # horizontal row separators
    for step in range(1, 8):
        pygame.draw.line(screen, white, (310, 124 + row_height * step),
                         (560, 124 + row_height * step), 3)
    draw_text('leaderboard', font_table, white, screen, 362, 80)
    # one (name, score) pair per row
    for idx, row in enumerate(rows):
        row_y = 140 + row_height * idx
        draw_text(str(row[0]), font_table, white, screen, 350, row_y)
        draw_text(str(row[1]), font_table, white, screen, 450, row_y)
def main_menu():
    """Top-level menu loop.

    Draws the title bar, the play/options/quit buttons, the controls help
    panel and the leaderboard, and dispatches to game_screen() or
    options_menu() on click. Runs at 10 FPS until the program exits.
    """
    click = False
    pygame.mixer.stop()
    # Load and scale the button sprite ONCE: the original reloaded the same
    # file from disk three times per frame inside the loop below.
    button_image = pygame.transform.scale(
        pygame.image.load('resources/sprites/button.png'), (222, 105))
    # Button hit-boxes are loop-invariant too.
    b_play_mask = button_image.get_rect()
    b_play_mask.x = 50
    b_play_mask.y = 70
    b_options_mask = button_image.get_rect()
    b_options_mask.x = 50
    b_options_mask.y = 185
    b_exit_mask = button_image.get_rect()
    b_exit_mask.x = 50
    b_exit_mask.y = 300
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)
        # ------------------------------------------ play button
        screen.blit(button_image, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, (255, 255, 255), screen, 113, 100)
        # ------------------------------------------ options button
        screen.blit(button_image, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, (255, 255, 255), screen, 78, 215)
        # ------------------------------------------ quit button
        screen.blit(button_image, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, (255, 255, 255), screen, 113, 330)
        # ------------------------------------------ draw
        draw_controls()
        draw_leaderboard()
        # ------------------------------------------ collide
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen: nickname entry (max 3 characters), sound on/off toggle
    and a back button. Runs its own 10 FPS loop until back/ESC is pressed.

    Mutates the module globals player_name, line_counter and is_sound.
    """
    global player_name, line_counter, is_sound
    running = True
    click = False
    numlock = False  # set once NUM LOCK is pressed, to warn the player
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        # ------------------------------------------ button nick
        button_1 = pygame.image.load('resources/sprites/button.png')
        button_1 = pygame.transform.scale(button_1, (222, 105))
        b_1_mask = button_1.get_rect()
        b_1_mask.x = 50
        b_1_mask.y = 70
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        # ------------------------------------------ button sound
        button_2 = pygame.image.load('resources/sprites/button.png')
        button_2 = pygame.transform.scale(button_2, (222, 105))
        b_2_mask = button_2.get_rect()
        b_2_mask.x = 50
        b_2_mask.y = 185
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        # ------------------------------------------ button back
        button_back = pygame.image.load('resources/sprites/button.png')
        button_back = pygame.transform.scale(button_back, (222, 105))
        b_back_mask = button_back.get_rect()
        b_back_mask.x = 50
        b_back_mask.y = 300
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        # ------------------------------------------ draw
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        # prompt until a nickname has been typed
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        # ask the player to switch NUM LOCK off
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        # ------------------------------------------ collide
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                # toggle sound, pausing/resuming any playing channels
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                # while the name is the 'NON' placeholder, further keys are ignored
                elif player_name == 'NON':
                    pass
                elif event.key == pygame.K_SPACE:
                    pass
                elif event.key == pygame.K_UP:
                    pass
                elif event.key == pygame.K_DOWN:
                    pass
                elif event.key == pygame.K_LEFT:
                    pass
                elif event.key == pygame.K_RIGHT:
                    pass
                elif event.key == pygame.K_RETURN:
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                # any unmodified key except TAB is typed into the nickname
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
def game_screen():
    """Run one play-through of the level.

    Scrolls the background, spawns enemy waves, runs the boss fight, handles
    shooting/collisions/scoring and draws the HUD, looping at FPS until the
    player dies, the level ends, or the run is aborted. On end-of-run the
    score is written to the ``highest_score`` table.

    Mutates the module globals game_score, player_name, running_game,
    enemy_killed, bullets_shot and boss_done.
    """
    global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
    game_score = 0
    enemy_killed = 0
    bullets_shot = 0
    boss_done = False
    # fall back to the 'NON' placeholder when no nickname was entered
    if player_name == '':
        player_name = 'NON'
    track_count = 0
    battle_tracks = ['resources/sounds/music/battle_music_1.mp3', 'resources/sounds/music/battle_music_2.mp3',
                     'resources/sounds/music/battle_music_3.mp3', 'resources/sounds/music/battle_music_4.mp3',
                     'resources/sounds/music/battle_music_5.mp3', 'resources/sounds/music/battle_music_6.mp3']
    ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
    ingame_music.stop()
    ingame_music_sound = 0.1
    if not is_sound:
        ingame_music_sound = 0
    ingame_music.set_volume(ingame_music_sound)
    ingame_music.play()
    bs = False  # True once the boss has spawned
    running_game = True
    # fires USEREVENT once per second: drives enemy spawning and end-of-run checks
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    enemies = pygame.sprite.Group()
    death = False
    p = Player()
    window_holes = pygame.sprite.Group()
    bullets_count = pygame.sprite.Group()
    boss_bullets_count = pygame.sprite.Group()
    booms = pygame.sprite.Group()
    small_booms = pygame.sprite.Group()
    mini_booms = pygame.sprite.Group()
    # one-shot flags so each boss phase awards its bonus only once
    phase1_score = True
    phase2_score = True
    phase3_score = True
    battle_music = True
    phase4_score = True
    col_check = 1  # counts boss hits; every 40th triggers a sprite change
    boss_death = False
    level_bckgd_pos = -23800  # background y offset; scrolls towards 0
    current_player_sprite = 'stay'
    current_level_background = pygame.image.load('resources/level_pictures/first_level_bckgd.jpg')
    screen.blit(current_level_background, (0, 0))
    wait = 0  # timestamp of the player's death animation finishing
    last = pygame.time.get_ticks()    # last player shot (rate limiting)
    last_2 = pygame.time.get_ticks()  # last boss shot (rate limiting)
    boss_cooldown = 1000
    cooldown = 100
    while running_game:
        # ---------------------------------------- input handling
        for event in pygame.event.get():  # process the messages sent by the user
            # '=' key: skip to the next battle track
            if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
                ingame_music.stop()
                track_count += 1
                if track_count > 5:
                    track_count = 0
                ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
                ingame_music.set_volume(ingame_music_sound)
                ingame_music.play()
            # keypad +/-: music volume up/down
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
                ingame_music_sound += 0.05
                if ingame_music_sound >= 1.5:
                    ingame_music_sound = 1.4
                ingame_music.set_volume(ingame_music_sound)
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
                ingame_music_sound -= 0.05
                if ingame_music_sound < 0:
                    ingame_music_sound = 0
                ingame_music.set_volume(ingame_music_sound)
            # left/right movement (ignored while already moving the other way)
            if event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_a or event.key == pygame.K_LEFT) and not p.moving_right:
                current_player_sprite = 'left'
                p.moving_right = False
                p.moving_left = True
            elif event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_d or event.key == pygame.K_RIGHT) and not p.moving_left:
                current_player_sprite = 'right'
                p.moving_left = False
                p.moving_right = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_a or event.key == pygame.K_LEFT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_d or event.key == pygame.K_RIGHT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            # up/down movement
            if event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_w or event.key == pygame.K_UP) and not p.moving_down:
                p.moving_down = False
                p.moving_up = True
            elif event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_s or event.key == pygame.K_DOWN) and not p.moving_up:
                p.moving_up = False
                p.moving_down = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_w or event.key == pygame.K_UP):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_s or event.key == pygame.K_DOWN):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            # shot calculation (mouse): two bullets per shot, rate-limited by `cooldown`
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and p.health_count > 0:
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # shot calculation, but for the space bar
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # enemy spawning: boss once past the threshold, regular waves before
            if event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and not bs:
                bs = True
                b = Boss()
            if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
                Enemy(enemies)
            # end of run: player dead for 2 s, or the background scrolled to the end.
            # NOTE(review): `and` binds tighter than `or`, so the `or` clause fires
            # on ANY event type once level_bckgd_pos > -801 — confirm this is intended.
            if event.type == pygame.USEREVENT and death and pygame.time.get_ticks()\
                    - wait > 2000 or level_bckgd_pos > -801:
                ingame_music.stop()
                death_screen()
                # zero-pad the score to at least six characters
                while True:
                    if len(str(game_score)) < 6:
                        game_score = '0' + str(game_score)
                    else:
                        break
                # NOTE(review): string-formatted SQL with user-typed player_name —
                # prefer a parameterized query (cur.execute with ? placeholders)
                var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(player_name, game_score)
                cur.execute(var)
                con.commit()
            # if the user closes the program, the game shuts down
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            # exit to the menu via the pause screen
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pause_screen()
                if not running_game:
                    ingame_music.stop()
                    while True:
                        if len(str(game_score)) < 6:
                            game_score = '0' + str(game_score)
                        else:
                            break
                    # NOTE(review): same string-formatted SQL as above — parameterize
                    var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(player_name, game_score)
                    cur.execute(var)
                    con.commit()
        # background scrolling
        level_bckgd_pos += speed_bckgd
        if level_bckgd_pos >= 0:
            screen.fill((0, 0, 0))
        screen.blit(current_level_background, (0, level_bckgd_pos))
        if level_bckgd_pos > -805:
            death = True
        # player movement
        if p.health_count > 0:
            # collision checks between enemies, the player and bullets
            for i in enemies:
                collision = pygame.sprite.collide_rect(p, i)
                if collision:
                    Explosion(booms).boom((i.rect.x, i.rect.y))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    # ramming deals 2 damage to the enemy, 1 to the player
                    if i.health_count - 2 <= 0:
                        game_score += 10
                        i.kill()
                        Explosion(booms).boom((i.rect.x, i.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        enemy_killed += 1
                    else:
                        i.health_count -= 2
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                    p.health_count -= 1
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    if p.health_count > 0:
                        # visual "cracked window" damage marker at a random spot
                        Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 1)
                            play_sound('resources/sounds/explosion_stun.mp3', 0.02)
                # player bullets hitting this enemy
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(j, i)
                    if collision:
                        if i.health_count - 1 <= 0:
                            game_score += 5
                            i.kill()
                            Explosion(booms).boom((i.rect.x, i.rect.y))
                            if is_sound:
                                play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                            enemy_killed += 1
                        else:
                            i.health_count -= 1
                            Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                            if is_sound:
                                play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        j.kill()
            if bs and not boss_death:
                # boss ramming the player
                collision = pygame.sprite.collide_rect(b, p)
                if collision and b.y > 0:
                    b.health_count -= 0.3
                    if is_sound:
                        play_sound('resources/sounds/collision_sound.mp3', 0.03)
                    p.health_count -= 0.2
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    # show the "hit" variant of whichever phase sprite is active
                    if b.body == b.stay1 or b.body == b.stay2:
                        b.body = b.stay2
                    if b.body == b.stay3 or b.body == b.stay4:
                        b.body = b.stay4
                    if b.body == b.stay5 or b.body == b.stay6:
                        b.body = b.stay6
                    col_check += 1
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 0.1)
                            play_sound('resources/sounds/explosion_stun.mp3', 0.02)
                # player bullets hitting the boss
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(b, j)
                    if collision and b.y > 0:
                        if b.body == b.stay1 or b.body == b.stay2:
                            b.body = b.stay2
                        if b.body == b.stay3 or b.body == b.stay4:
                            b.body = b.stay4
                        if b.body == b.stay5 or b.body == b.stay6:
                            b.body = b.stay6
                        col_check += 1
                        b.health_count -= 0.2
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        j.kill()
                # boss bullets hitting the player
                for h in boss_bullets_count:
                    collision = pygame.sprite.collide_rect(p, h)
                    if collision:
                        p.health_count -= 1
                        Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
                        if p.health_count > 0:
                            Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                            if is_sound:
                                play_sound('resources/sounds/window_crashed.mp3', 0.1)
                                play_sound('resources/sounds/explosion_stun.mp3', 0.01)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        h.kill()
            p.update(FPS)
            # switching player textures
            if current_player_sprite == 'left':
                sprite = p.anim_left()
                screen.blit(sprite, (p.x, p.y))
                p.left_1 = not p.left_1
            elif current_player_sprite == 'right':
                sprite = p.anim_right()
                screen.blit(sprite, (p.x, p.y))
                p.right_1 = not p.right_1
            elif current_player_sprite == 'stay':
                sprite = p.anim_stay()
                screen.blit(sprite, (p.x, p.y))
                p.stay_1 = not p.stay_1
            if bs:
                # switch to the boss theme once (on first boss frame)
                if battle_music:
                    ingame_music.stop()
                    ingame_music = pygame.mixer.Sound('resources/sounds/music/wagner_main_theme.mp3')
                    ingame_music.set_volume(ingame_music_sound)
                    ingame_music.play()
                    battle_music = False
                b.update()
                # one-time phase bonuses
                if b.body == b.stay3 and phase1_score:
                    game_score += 100
                    phase1_score = False
                if b.body == b.stay5 and phase2_score:
                    game_score += 100
                    phase2_score = False
                if b.body == b.stay7 and phase3_score:
                    game_score += 200
                    phase3_score = False
                # boss shooting, rate-limited, disabled in the final phase
                now = pygame.time.get_ticks()
                if now - last_2 >= boss_cooldown and b.y > 0 and b.body != b.stay7:
                    last_2 = now
                    play_sound('resources/sounds/boss_shot.mp3', 0.05)
                    Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155))
                if col_check % 40 == 0:
                    b.change_sprite()
                else:
                    col_check += 1
                if b.health_count > 0:
                    screen.blit(b.body, (b.x, b.y))
                elif b.health_count <= 0 and phase4_score:
                    # boss defeated: award the kill bonus and detonate (once)
                    boss_done = True
                    phase4_score = False
                    game_score += 350
                    if is_sound:
                        play_sound('resources/sounds/boss_defeated.mp3', 0.2)
                    Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x, b.rect.y))
                    Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
                    Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    boss_death = True
        else:
            # player is dead: crash sound once, then the death animation
            if p.minimize == 0:
                if is_sound:
                    ingame_music.stop()
                    play_sound('resources/sounds/plane_crash.mp3', 0.05)
                p.minimize += 1
            if not death:
                if p.minimize <= 320:
                    p.death()
                    screen.blit(p.death_sp, (p.x, p.y))
                else:
                    # animation finished: mark death time and blow the wreck up
                    death = True
                    wait = pygame.time.get_ticks()
                    Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect.y + 25))
                    Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
                    Smallexplosions(small_booms).boom((p.rect.x - 22, p.rect.y + 7))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    p.kill()
            # a surviving boss speeds up and gloats over the wreck
            if bs and b.health_count > 0:
                b.speed += 0.02
                b.win = True
                screen.blit(b.body, (b.x, b.y))
                b.update()
        # enemy movement
        window_holes.update()
        window_holes.draw(screen)
        enemies.update(FPS)
        # drawing the enemies
        enemies.draw(screen)
        # bullet movement
        bullets_count.update()
        bullets_count.draw(screen)
        boss_bullets_count.update()
        boss_bullets_count.draw(screen)
        small_booms.update()
        small_booms.draw(screen)
        mini_booms.update()
        mini_booms.draw(screen)
        # player nickname
        draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 20, 20)
        # player score (zero-padded to six digits in the HUD)
        if len(str(game_score)) < 2:
            draw_text('00000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 3:
            draw_text('0000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 4:
            draw_text('000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 5:
            draw_text('00' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 6:
            draw_text('0' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) >= 6:
            draw_text("Max score", font, (255, 255, 255), screen, 510, 20)
        # explosion at the killed enemy's position
        booms.update()
        booms.draw(screen)
        pygame.display.flip()
        clock.tick(FPS)
def death_screen():
    """Show the end-of-run screen: run stats revealed line by line, a letter
    rating, and a 'menu' button that returns to the main menu.

    Plays the victory theme when the boss was beaten, the defeat theme
    otherwise. Mutates the globals ``running_game`` (cleared on exit) and
    ``game_score`` (zero-padded to at least six characters).
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0   # stat lines revealed so far (one more per USEREVENT tick)
    color_counter = 0  # 0..2, cycles the 'SSS' rating colour every second
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # Rating: +1 per 10 kills, +1 for firing fewer than 800 bullets,
    # +2 for defeating the boss.
    rating_kills = enemy_killed // 10
    rating_shots = 1 if bullets_shot < 800 else 0
    rating = rating_kills + rating_shots
    # The two branches previously duplicated the whole music setup;
    # only the track (and the boss bonus) actually differ.
    if boss_done:
        death_music = pygame.mixer.Sound('resources/sounds/music/victory_theme.mp3')
        rating += 2
    else:
        death_music = pygame.mixer.Sound('resources/sounds/music/loose_theme.mp3')
    death_music.stop()
    death_music_sound = 0.1 if is_sound else 0
    death_music.set_volume(death_music_sound)
    death_music.play()
    # Zero-pad the score for display and the leaderboard.
    while True:
        if len(str(game_score)) < 6:
            game_score = '0' + str(game_score)
        else:
            break
    # The button sprite never changes: load and scale it once, not per frame.
    button_menu = pygame.transform.scale(
        pygame.image.load('resources/sprites/button.png'), (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 700
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        # ------------------------------------------ button menu
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        # ------------------------------------------ stats, one line per tick
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255), screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255, 255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255, 255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            else:
                # top rank: colour cycles red/green/blue
                if color_counter == 0:
                    draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
                elif color_counter == 1:
                    draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
                else:
                    draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        # ------------------------------------------ collide
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                # was a redundantly nested `if is_sound: if is_sound: ... else: pass`
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
                running_game = False
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
def pause_screen():
    """Modal pause overlay shown on top of the game.

    Blocks in its own event loop until the player clicks "resume" (or presses
    ESC), or clicks "menu". Side effects: draws on the global ``screen``, and
    clears the global ``running_game`` flag when the player chooses "menu" so
    the caller's game loop exits back to the main menu.
    """
    global running_game
    running = True
    click = False
    # Load and scale the button sprites ONCE, before the frame loop.
    # The original reloaded both PNGs from disk every frame, which is
    # pure wasted I/O; hoisting the loads does not change behavior.
    button_menu = pygame.transform.scale(
        pygame.image.load('resources/sprites/button.png'), (200, 70))
    b_menu_mask = button_menu.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 410
    button_resume = pygame.transform.scale(
        pygame.image.load('resources/sprites/button.png'), (200, 70))
    b_resume_mask = button_resume.get_rect()
    b_resume_mask.x = 195
    b_resume_mask.y = 300
    while running:
        screen.fill((0, 0, 0))
        mx, my = pygame.mouse.get_pos()
        # ------------------------------------------ panel frame
        pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))
        pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))
        pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)
        draw_text('Pause', font, (255, 255, 255), screen, 235, 205)
        # ------------------------------------------ button menu
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 440)
        # ------------------------------------------ button resume
        screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))
        draw_text('resume', font, (255, 255, 255), screen, 225, 330)
        # ------------------------------------------ collide
        if b_menu_mask.collidepoint((mx, my)):
            # Highlight on hover; a click exits both this overlay and the game.
            pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)
            if click:
                running = False
                running_game = False
        if b_resume_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)
            if click:
                running = False
        # ------------------------------------------ events
        click = False  # consumed above; re-armed by MOUSEBUTTONDOWN below
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
if __name__ == '__main__':
    # Entry point: run the game starting at the main menu, then shut
    # pygame down cleanly once the menu loop returns.
    main_menu()
    pygame.quit()
|
flexible
|
{
"blob_id": "d00fa29c502cc0311c54deb657b37c3c3caac7ca",
"index": 3755,
"step-1": "<mask token>\n\n\ndef draw_text(text, font_u, color, surface, x, y):\n text_object = font_u.render(text, color)\n textrect = text_object[1]\n textrect.topleft = x, y\n surface.blit(text_object[0], textrect)\n\n\n<mask token>\n\n\ndef draw_controls():\n pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)\n pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))\n draw_text('controls:', font, (255, 255, 255), screen, 20, 430)\n wasd = pygame.image.load('resources/sprites/controls_1.png')\n wasd = pygame.transform.scale(wasd, (243, 100))\n screen.blit(wasd, (20, 470))\n pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))\n draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)\n draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)\n mouse = pygame.image.load('resources/sprites/controls_2.png')\n mouse = pygame.transform.scale(mouse, (90, 100))\n screen.blit(mouse, (153, 590))\n draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)\n\n\n<mask token>\n\n\ndef main_menu():\n click = False\n pygame.mixer.stop()\n while True:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)\n button_play = pygame.image.load('resources/sprites/button.png')\n button_play = pygame.transform.scale(button_play, (222, 105))\n b_play_mask = button_play.get_rect()\n b_play_mask.x = 50\n b_play_mask.y = 70\n screen.blit(button_play, (b_play_mask.x, b_play_mask.y))\n draw_text('play', font, (255, 255, 255), screen, 113, 100)\n button_options = pygame.image.load('resources/sprites/button.png')\n button_options = pygame.transform.scale(button_options, (222, 105))\n b_options_mask = button_options.get_rect()\n b_options_mask.x = 50\n b_options_mask.y = 185\n screen.blit(button_options, (b_options_mask.x, b_options_mask.y))\n 
draw_text('options', font, (255, 255, 255), screen, 78, 215)\n button_exit = pygame.image.load('resources/sprites/button.png')\n button_exit = pygame.transform.scale(button_exit, (222, 105))\n b_exit_mask = button_exit.get_rect()\n b_exit_mask.x = 50\n b_exit_mask.y = 300\n screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))\n draw_text('quit', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_leaderboard()\n if b_play_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n game_screen()\n if b_options_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n options_menu()\n if b_exit_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n pygame.quit()\n sys.exit()\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n pygame.quit()\n sys.exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\ndef options_menu():\n global player_name, line_counter, is_sound\n running = True\n click = False\n numlock = False\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Options', font, (255, 255, 255), screen, 215, 20)\n button_1 = pygame.image.load('resources/sprites/button.png')\n button_1 = pygame.transform.scale(button_1, (222, 105))\n b_1_mask = button_1.get_rect()\n b_1_mask.x = 50\n b_1_mask.y = 70\n screen.blit(button_1, 
(b_1_mask.x, b_1_mask.y))\n draw_text(player_name, font, (255, 255, 255), screen, 125, 100)\n button_2 = pygame.image.load('resources/sprites/button.png')\n button_2 = pygame.transform.scale(button_2, (222, 105))\n b_2_mask = button_2.get_rect()\n b_2_mask.x = 50\n b_2_mask.y = 185\n screen.blit(button_2, (b_2_mask.x, b_2_mask.y))\n button_back = pygame.image.load('resources/sprites/button.png')\n button_back = pygame.transform.scale(button_back, (222, 105))\n b_back_mask = button_back.get_rect()\n b_back_mask.x = 50\n b_back_mask.y = 300\n screen.blit(button_back, (b_back_mask.x, b_back_mask.y))\n draw_text('back', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_text('audio:', font, (255, 255, 255), screen, 60, 195)\n if is_sound:\n draw_text('on', font, (255, 255, 255), screen, 190, 245)\n else:\n draw_text('off', font, (255, 255, 255), screen, 175, 230)\n if line_counter == 0 or player_name == 'NON':\n draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)\n draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)\n if numlock:\n draw_text('OFF', font, (255, 0, 0), screen, 500, 90)\n draw_text('NUM', font, (255, 0, 0), screen, 500, 120)\n draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)\n if b_2_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n if is_sound:\n is_sound = not is_sound\n pygame.mixer.pause()\n else:\n is_sound = not is_sound\n pygame.mixer.unpause()\n if b_back_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_BACKSPACE:\n player_name = player_name[:-1]\n if 
line_counter != 0:\n line_counter -= 1\n elif player_name == 'NON':\n pass\n elif event.key == pygame.K_SPACE:\n pass\n elif event.key == pygame.K_UP:\n pass\n elif event.key == pygame.K_DOWN:\n pass\n elif event.key == pygame.K_LEFT:\n pass\n elif event.key == pygame.K_RIGHT:\n pass\n elif event.key == pygame.K_RETURN:\n pass\n elif event.key == pygame.K_NUMLOCK:\n numlock = True\n elif event.key == pygame.K_ESCAPE:\n running = False\n elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:\n if line_counter != 3:\n line_counter += 1\n player_name += str(event.unicode).upper()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n clock.tick(10)\n\n\ndef game_screen():\n global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done\n game_score = 0\n enemy_killed = 0\n bullets_shot = 0\n boss_done = False\n if player_name == '':\n player_name = 'NON'\n track_count = 0\n battle_tracks = ['resources/sounds/music/battle_music_1.mp3',\n 'resources/sounds/music/battle_music_2.mp3',\n 'resources/sounds/music/battle_music_3.mp3',\n 'resources/sounds/music/battle_music_4.mp3',\n 'resources/sounds/music/battle_music_5.mp3',\n 'resources/sounds/music/battle_music_6.mp3']\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.stop()\n ingame_music_sound = 0.1\n if not is_sound:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n bs = False\n running_game = True\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n enemies = pygame.sprite.Group()\n death = False\n p = Player()\n window_holes = pygame.sprite.Group()\n bullets_count = pygame.sprite.Group()\n boss_bullets_count = pygame.sprite.Group()\n booms = pygame.sprite.Group()\n small_booms = pygame.sprite.Group()\n mini_booms = pygame.sprite.Group()\n phase1_score = True\n phase2_score = True\n phase3_score = True\n battle_music = True\n phase4_score = True\n col_check = 1\n boss_death = False\n 
level_bckgd_pos = -23800\n current_player_sprite = 'stay'\n current_level_background = pygame.image.load(\n 'resources/level_pictures/first_level_bckgd.jpg')\n screen.blit(current_level_background, (0, 0))\n wait = 0\n last = pygame.time.get_ticks()\n last_2 = pygame.time.get_ticks()\n boss_cooldown = 1000\n cooldown = 100\n while running_game:\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:\n ingame_music.stop()\n track_count += 1\n if track_count > 5:\n track_count = 0\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:\n ingame_music_sound += 0.05\n if ingame_music_sound >= 1.5:\n ingame_music_sound = 1.4\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:\n ingame_music_sound -= 0.05\n if ingame_music_sound < 0:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or\n event.key == pygame.K_LEFT) and not p.moving_right:\n current_player_sprite = 'left'\n p.moving_right = False\n p.moving_left = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or\n event.key == pygame.K_RIGHT) and not p.moving_left:\n current_player_sprite = 'right'\n p.moving_left = False\n p.moving_right = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_a or \n event.key == pygame.K_LEFT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_d or \n event.key == pygame.K_RIGHT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or\n event.key == pygame.K_UP) and not p.moving_down:\n p.moving_down = False\n p.moving_up = True\n elif 
event.type == pygame.KEYDOWN and (event.key == pygame.K_s or\n event.key == pygame.K_DOWN) and not p.moving_up:\n p.moving_up = False\n p.moving_down = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_w or \n event.key == pygame.K_UP):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_s or \n event.key == pygame.K_DOWN):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and\n p.health_count > 0):\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and\n not bs):\n bs = True\n b = Boss()\n if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:\n Enemy(enemies)\n if (event.type == pygame.USEREVENT and death and pygame.time.\n get_ticks() - wait > 2000 or level_bckgd_pos > -801):\n ingame_music.stop()\n death_screen()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', '{}')\".format(\n player_name, game_score)\n cur.execute(var)\n con.commit()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n pause_screen()\n if not 
running_game:\n ingame_music.stop()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = (\"INSERT INTO highest_score VALUES ('{}', '{}')\".\n format(player_name, game_score))\n cur.execute(var)\n con.commit()\n level_bckgd_pos += speed_bckgd\n if level_bckgd_pos >= 0:\n screen.fill((0, 0, 0))\n screen.blit(current_level_background, (0, level_bckgd_pos))\n if level_bckgd_pos > -805:\n death = True\n if p.health_count > 0:\n for i in enemies:\n collision = pygame.sprite.collide_rect(p, i)\n if collision:\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if i.health_count - 2 <= 0:\n game_score += 10\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 2\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n p.health_count -= 1\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 1\n )\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(j, i)\n if collision:\n if i.health_count - 1 <= 0:\n game_score += 5\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n enemy_killed += 1\n else:\n i.health_count -= 1\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)\n )\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n if is_sound:\n play_sound(\n 'resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n if bs and not boss_death:\n collision = pygame.sprite.collide_rect(b, p)\n if collision and b.y > 0:\n 
b.health_count -= 0.3\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03\n )\n p.health_count -= 0.2\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3',\n 0.1)\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(b, j)\n if collision and b.y > 0:\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n b.health_count -= 0.2\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n for h in boss_bullets_count:\n collision = pygame.sprite.collide_rect(p, h)\n if collision:\n p.health_count -= 1\n Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.\n randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound(\n 'resources/sounds/window_crashed.mp3', 0.1)\n play_sound(\n 'resources/sounds/explosion_stun.mp3', 0.01\n )\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n h.kill()\n p.update(FPS)\n if current_player_sprite == 'left':\n sprite = p.anim_left()\n screen.blit(sprite, (p.x, p.y))\n p.left_1 = not p.left_1\n elif current_player_sprite == 'right':\n sprite = p.anim_right()\n screen.blit(sprite, (p.x, p.y))\n p.right_1 = not p.right_1\n elif 
current_player_sprite == 'stay':\n sprite = p.anim_stay()\n screen.blit(sprite, (p.x, p.y))\n p.stay_1 = not p.stay_1\n if bs:\n if battle_music:\n ingame_music.stop()\n ingame_music = pygame.mixer.Sound(\n 'resources/sounds/music/wagner_main_theme.mp3')\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n battle_music = False\n b.update()\n if b.body == b.stay3 and phase1_score:\n game_score += 100\n phase1_score = False\n if b.body == b.stay5 and phase2_score:\n game_score += 100\n phase2_score = False\n if b.body == b.stay7 and phase3_score:\n game_score += 200\n phase3_score = False\n now = pygame.time.get_ticks()\n if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=\n b.stay7):\n last_2 = now\n play_sound('resources/sounds/boss_shot.mp3', 0.05)\n Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)\n )\n if col_check % 40 == 0:\n b.change_sprite()\n else:\n col_check += 1\n if b.health_count > 0:\n screen.blit(b.body, (b.x, b.y))\n elif b.health_count <= 0 and phase4_score:\n boss_done = True\n phase4_score = False\n game_score += 350\n if is_sound:\n play_sound('resources/sounds/boss_defeated.mp3', 0.2)\n Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x, b.rect.y))\n Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))\n Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n boss_death = True\n else:\n if p.minimize == 0:\n if is_sound:\n ingame_music.stop()\n play_sound('resources/sounds/plane_crash.mp3', 0.05)\n p.minimize += 1\n if not death:\n if p.minimize <= 320:\n p.death()\n screen.blit(p.death_sp, (p.x, p.y))\n else:\n death = True\n wait = pygame.time.get_ticks()\n Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect\n .y + 25))\n Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))\n Smallexplosions(small_booms).boom((p.rect.x - 22, p.\n 
rect.y + 7))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n p.kill()\n if bs and b.health_count > 0:\n b.speed += 0.02\n b.win = True\n screen.blit(b.body, (b.x, b.y))\n b.update()\n window_holes.update()\n window_holes.draw(screen)\n enemies.update(FPS)\n enemies.draw(screen)\n bullets_count.update()\n bullets_count.draw(screen)\n boss_bullets_count.update()\n boss_bullets_count.draw(screen)\n small_booms.update()\n small_booms.draw(screen)\n mini_booms.update()\n mini_booms.draw(screen)\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255),\n screen, 20, 20)\n if len(str(game_score)) < 2:\n draw_text('00000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 3:\n draw_text('0000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 4:\n draw_text('000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 5:\n draw_text('00' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) < 6:\n draw_text('0' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) >= 6:\n draw_text('Max score', font, (255, 255, 255), screen, 510, 20)\n booms.update()\n booms.draw(screen)\n pygame.display.flip()\n clock.tick(FPS)\n\n\ndef death_screen():\n global running_game, game_score\n running = True\n click = False\n draw_counter = 0\n color_counter = 0\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n rating_kills = enemy_killed // 10\n if bullets_shot < 800:\n rating_shots = 1\n else:\n rating_shots = 0\n rating = rating_kills + rating_shots\n if boss_done:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/victory_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n rating += 2\n else:\n death_music = pygame.mixer.Sound(\n 
'resources/sounds/music/loose_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))\n pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)\n draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 700\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 730)\n if draw_counter >= 1:\n draw_text('Player: {}'.format(player_name), font, (255, 255, \n 255), screen, 50, 150)\n if draw_counter >= 2:\n draw_text('Score: {}'.format(game_score), font, (255, 255, 255),\n screen, 50, 230)\n if draw_counter >= 3:\n draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,\n 255, 255), screen, 50, 310)\n if draw_counter >= 4:\n draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,\n 255, 255), screen, 50, 390)\n if draw_counter >= 5:\n draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)\n if draw_counter >= 6:\n if rating <= 6:\n draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)\n elif rating == 7:\n draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)\n elif rating == 8:\n draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)\n elif rating == 9:\n draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)\n elif rating == 10:\n draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)\n elif rating == 11:\n draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)\n elif rating <= 13:\n 
draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 0:\n draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 1:\n draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)\n else:\n draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)\n if click:\n if is_sound:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n else:\n pass\n running = False\n running_game = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.USEREVENT:\n draw_counter += 1\n color_counter += 1\n if color_counter == 3:\n color_counter = 0\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n running_game = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n death_music.stop()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef draw_text(text, font_u, color, surface, x, y):\n text_object = font_u.render(text, color)\n textrect = text_object[1]\n textrect.topleft = x, y\n surface.blit(text_object[0], textrect)\n\n\ndef play_sound(sound_p, volume_h=0.5, wait_t=0):\n pl_sound = pygame.mixer.Sound(sound_p)\n pl_sound.set_volume(volume_h)\n if is_sound:\n pl_sound.play()\n pygame.time.wait(wait_t)\n\n\npygame.init()\n<mask token>\npygame.display.set_icon(pygame.image.load(\n 'resources/images/test_small_logo_1.bmp'))\npygame.display.set_caption('Death or Dishonour')\n<mask token>\n\n\ndef draw_controls():\n pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)\n pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))\n draw_text('controls:', font, (255, 255, 255), screen, 20, 430)\n wasd = pygame.image.load('resources/sprites/controls_1.png')\n wasd = pygame.transform.scale(wasd, (243, 100))\n screen.blit(wasd, (20, 470))\n pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))\n draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)\n draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)\n mouse = pygame.image.load('resources/sprites/controls_2.png')\n mouse = pygame.transform.scale(mouse, (90, 100))\n screen.blit(mouse, (153, 590))\n draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)\n\n\ndef draw_leaderboard():\n table = []\n result = cur.execute(\n 'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7')\n for elem in result:\n table.append(elem)\n pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))\n pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)\n pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)\n pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)\n charge = 40\n y = 124\n for i in range(1, 8):\n y += charge\n pygame.draw.line(screen, (255, 255, 255), (310, y), (560, y), 3)\n draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 
80)\n x = 350\n y = 140\n for i in table:\n draw_text(str(i[0]), font_table, (255, 255, 255), screen, x, y)\n draw_text(str(i[1]), font_table, (255, 255, 255), screen, x + 100, y)\n y += charge\n\n\ndef main_menu():\n click = False\n pygame.mixer.stop()\n while True:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)\n button_play = pygame.image.load('resources/sprites/button.png')\n button_play = pygame.transform.scale(button_play, (222, 105))\n b_play_mask = button_play.get_rect()\n b_play_mask.x = 50\n b_play_mask.y = 70\n screen.blit(button_play, (b_play_mask.x, b_play_mask.y))\n draw_text('play', font, (255, 255, 255), screen, 113, 100)\n button_options = pygame.image.load('resources/sprites/button.png')\n button_options = pygame.transform.scale(button_options, (222, 105))\n b_options_mask = button_options.get_rect()\n b_options_mask.x = 50\n b_options_mask.y = 185\n screen.blit(button_options, (b_options_mask.x, b_options_mask.y))\n draw_text('options', font, (255, 255, 255), screen, 78, 215)\n button_exit = pygame.image.load('resources/sprites/button.png')\n button_exit = pygame.transform.scale(button_exit, (222, 105))\n b_exit_mask = button_exit.get_rect()\n b_exit_mask.x = 50\n b_exit_mask.y = 300\n screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))\n draw_text('quit', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_leaderboard()\n if b_play_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n game_screen()\n if b_options_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n 
options_menu()\n if b_exit_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n pygame.quit()\n sys.exit()\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n pygame.quit()\n sys.exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\ndef options_menu():\n global player_name, line_counter, is_sound\n running = True\n click = False\n numlock = False\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Options', font, (255, 255, 255), screen, 215, 20)\n button_1 = pygame.image.load('resources/sprites/button.png')\n button_1 = pygame.transform.scale(button_1, (222, 105))\n b_1_mask = button_1.get_rect()\n b_1_mask.x = 50\n b_1_mask.y = 70\n screen.blit(button_1, (b_1_mask.x, b_1_mask.y))\n draw_text(player_name, font, (255, 255, 255), screen, 125, 100)\n button_2 = pygame.image.load('resources/sprites/button.png')\n button_2 = pygame.transform.scale(button_2, (222, 105))\n b_2_mask = button_2.get_rect()\n b_2_mask.x = 50\n b_2_mask.y = 185\n screen.blit(button_2, (b_2_mask.x, b_2_mask.y))\n button_back = pygame.image.load('resources/sprites/button.png')\n button_back = pygame.transform.scale(button_back, (222, 105))\n b_back_mask = button_back.get_rect()\n b_back_mask.x = 50\n b_back_mask.y = 300\n screen.blit(button_back, (b_back_mask.x, b_back_mask.y))\n draw_text('back', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_text('audio:', font, (255, 255, 255), screen, 60, 195)\n if is_sound:\n draw_text('on', font, (255, 255, 255), screen, 190, 245)\n else:\n 
draw_text('off', font, (255, 255, 255), screen, 175, 230)\n if line_counter == 0 or player_name == 'NON':\n draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)\n draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)\n if numlock:\n draw_text('OFF', font, (255, 0, 0), screen, 500, 90)\n draw_text('NUM', font, (255, 0, 0), screen, 500, 120)\n draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)\n if b_2_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n if is_sound:\n is_sound = not is_sound\n pygame.mixer.pause()\n else:\n is_sound = not is_sound\n pygame.mixer.unpause()\n if b_back_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_BACKSPACE:\n player_name = player_name[:-1]\n if line_counter != 0:\n line_counter -= 1\n elif player_name == 'NON':\n pass\n elif event.key == pygame.K_SPACE:\n pass\n elif event.key == pygame.K_UP:\n pass\n elif event.key == pygame.K_DOWN:\n pass\n elif event.key == pygame.K_LEFT:\n pass\n elif event.key == pygame.K_RIGHT:\n pass\n elif event.key == pygame.K_RETURN:\n pass\n elif event.key == pygame.K_NUMLOCK:\n numlock = True\n elif event.key == pygame.K_ESCAPE:\n running = False\n elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:\n if line_counter != 3:\n line_counter += 1\n player_name += str(event.unicode).upper()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n clock.tick(10)\n\n\ndef game_screen():\n global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done\n game_score = 0\n 
enemy_killed = 0\n bullets_shot = 0\n boss_done = False\n if player_name == '':\n player_name = 'NON'\n track_count = 0\n battle_tracks = ['resources/sounds/music/battle_music_1.mp3',\n 'resources/sounds/music/battle_music_2.mp3',\n 'resources/sounds/music/battle_music_3.mp3',\n 'resources/sounds/music/battle_music_4.mp3',\n 'resources/sounds/music/battle_music_5.mp3',\n 'resources/sounds/music/battle_music_6.mp3']\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.stop()\n ingame_music_sound = 0.1\n if not is_sound:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n bs = False\n running_game = True\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n enemies = pygame.sprite.Group()\n death = False\n p = Player()\n window_holes = pygame.sprite.Group()\n bullets_count = pygame.sprite.Group()\n boss_bullets_count = pygame.sprite.Group()\n booms = pygame.sprite.Group()\n small_booms = pygame.sprite.Group()\n mini_booms = pygame.sprite.Group()\n phase1_score = True\n phase2_score = True\n phase3_score = True\n battle_music = True\n phase4_score = True\n col_check = 1\n boss_death = False\n level_bckgd_pos = -23800\n current_player_sprite = 'stay'\n current_level_background = pygame.image.load(\n 'resources/level_pictures/first_level_bckgd.jpg')\n screen.blit(current_level_background, (0, 0))\n wait = 0\n last = pygame.time.get_ticks()\n last_2 = pygame.time.get_ticks()\n boss_cooldown = 1000\n cooldown = 100\n while running_game:\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:\n ingame_music.stop()\n track_count += 1\n if track_count > 5:\n track_count = 0\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:\n ingame_music_sound += 0.05\n if ingame_music_sound >= 1.5:\n ingame_music_sound = 1.4\n 
ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:\n ingame_music_sound -= 0.05\n if ingame_music_sound < 0:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or\n event.key == pygame.K_LEFT) and not p.moving_right:\n current_player_sprite = 'left'\n p.moving_right = False\n p.moving_left = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or\n event.key == pygame.K_RIGHT) and not p.moving_left:\n current_player_sprite = 'right'\n p.moving_left = False\n p.moving_right = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_a or \n event.key == pygame.K_LEFT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_d or \n event.key == pygame.K_RIGHT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or\n event.key == pygame.K_UP) and not p.moving_down:\n p.moving_down = False\n p.moving_up = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or\n event.key == pygame.K_DOWN) and not p.moving_up:\n p.moving_up = False\n p.moving_down = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_w or \n event.key == pygame.K_UP):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_s or \n event.key == pygame.K_DOWN):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and\n p.health_count > 0):\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n 
play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and\n not bs):\n bs = True\n b = Boss()\n if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:\n Enemy(enemies)\n if (event.type == pygame.USEREVENT and death and pygame.time.\n get_ticks() - wait > 2000 or level_bckgd_pos > -801):\n ingame_music.stop()\n death_screen()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', '{}')\".format(\n player_name, game_score)\n cur.execute(var)\n con.commit()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n pause_screen()\n if not running_game:\n ingame_music.stop()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = (\"INSERT INTO highest_score VALUES ('{}', '{}')\".\n format(player_name, game_score))\n cur.execute(var)\n con.commit()\n level_bckgd_pos += speed_bckgd\n if level_bckgd_pos >= 0:\n screen.fill((0, 0, 0))\n screen.blit(current_level_background, (0, level_bckgd_pos))\n if level_bckgd_pos > -805:\n death = True\n if p.health_count > 0:\n for i in enemies:\n collision = pygame.sprite.collide_rect(p, i)\n if collision:\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if i.health_count - 2 <= 0:\n game_score += 10\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if 
is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 2\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n p.health_count -= 1\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 1\n )\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(j, i)\n if collision:\n if i.health_count - 1 <= 0:\n game_score += 5\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n enemy_killed += 1\n else:\n i.health_count -= 1\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)\n )\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n if is_sound:\n play_sound(\n 'resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n if bs and not boss_death:\n collision = pygame.sprite.collide_rect(b, p)\n if collision and b.y > 0:\n b.health_count -= 0.3\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03\n )\n p.health_count -= 0.2\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3',\n 0.1)\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(b, j)\n if collision and b.y > 0:\n if b.body == b.stay1 or b.body == 
b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n b.health_count -= 0.2\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n for h in boss_bullets_count:\n collision = pygame.sprite.collide_rect(p, h)\n if collision:\n p.health_count -= 1\n Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.\n randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound(\n 'resources/sounds/window_crashed.mp3', 0.1)\n play_sound(\n 'resources/sounds/explosion_stun.mp3', 0.01\n )\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n h.kill()\n p.update(FPS)\n if current_player_sprite == 'left':\n sprite = p.anim_left()\n screen.blit(sprite, (p.x, p.y))\n p.left_1 = not p.left_1\n elif current_player_sprite == 'right':\n sprite = p.anim_right()\n screen.blit(sprite, (p.x, p.y))\n p.right_1 = not p.right_1\n elif current_player_sprite == 'stay':\n sprite = p.anim_stay()\n screen.blit(sprite, (p.x, p.y))\n p.stay_1 = not p.stay_1\n if bs:\n if battle_music:\n ingame_music.stop()\n ingame_music = pygame.mixer.Sound(\n 'resources/sounds/music/wagner_main_theme.mp3')\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n battle_music = False\n b.update()\n if b.body == b.stay3 and phase1_score:\n game_score += 100\n phase1_score = False\n if b.body == b.stay5 and phase2_score:\n game_score += 100\n phase2_score = False\n if b.body == b.stay7 and phase3_score:\n game_score += 200\n phase3_score = False\n now = pygame.time.get_ticks()\n if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=\n b.stay7):\n last_2 = now\n play_sound('resources/sounds/boss_shot.mp3', 0.05)\n 
Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)\n )\n if col_check % 40 == 0:\n b.change_sprite()\n else:\n col_check += 1\n if b.health_count > 0:\n screen.blit(b.body, (b.x, b.y))\n elif b.health_count <= 0 and phase4_score:\n boss_done = True\n phase4_score = False\n game_score += 350\n if is_sound:\n play_sound('resources/sounds/boss_defeated.mp3', 0.2)\n Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x, b.rect.y))\n Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))\n Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n boss_death = True\n else:\n if p.minimize == 0:\n if is_sound:\n ingame_music.stop()\n play_sound('resources/sounds/plane_crash.mp3', 0.05)\n p.minimize += 1\n if not death:\n if p.minimize <= 320:\n p.death()\n screen.blit(p.death_sp, (p.x, p.y))\n else:\n death = True\n wait = pygame.time.get_ticks()\n Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect\n .y + 25))\n Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))\n Smallexplosions(small_booms).boom((p.rect.x - 22, p.\n rect.y + 7))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n p.kill()\n if bs and b.health_count > 0:\n b.speed += 0.02\n b.win = True\n screen.blit(b.body, (b.x, b.y))\n b.update()\n window_holes.update()\n window_holes.draw(screen)\n enemies.update(FPS)\n enemies.draw(screen)\n bullets_count.update()\n bullets_count.draw(screen)\n boss_bullets_count.update()\n boss_bullets_count.draw(screen)\n small_booms.update()\n small_booms.draw(screen)\n mini_booms.update()\n mini_booms.draw(screen)\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255),\n screen, 20, 20)\n if len(str(game_score)) < 2:\n draw_text('00000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 3:\n draw_text('0000' + 
str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 4:\n draw_text('000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 5:\n draw_text('00' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) < 6:\n draw_text('0' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) >= 6:\n draw_text('Max score', font, (255, 255, 255), screen, 510, 20)\n booms.update()\n booms.draw(screen)\n pygame.display.flip()\n clock.tick(FPS)\n\n\ndef death_screen():\n global running_game, game_score\n running = True\n click = False\n draw_counter = 0\n color_counter = 0\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n rating_kills = enemy_killed // 10\n if bullets_shot < 800:\n rating_shots = 1\n else:\n rating_shots = 0\n rating = rating_kills + rating_shots\n if boss_done:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/victory_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n rating += 2\n else:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/loose_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))\n pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)\n draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 700\n 
screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 730)\n if draw_counter >= 1:\n draw_text('Player: {}'.format(player_name), font, (255, 255, \n 255), screen, 50, 150)\n if draw_counter >= 2:\n draw_text('Score: {}'.format(game_score), font, (255, 255, 255),\n screen, 50, 230)\n if draw_counter >= 3:\n draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,\n 255, 255), screen, 50, 310)\n if draw_counter >= 4:\n draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,\n 255, 255), screen, 50, 390)\n if draw_counter >= 5:\n draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)\n if draw_counter >= 6:\n if rating <= 6:\n draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)\n elif rating == 7:\n draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)\n elif rating == 8:\n draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)\n elif rating == 9:\n draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)\n elif rating == 10:\n draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)\n elif rating == 11:\n draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)\n elif rating <= 13:\n draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 0:\n draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 1:\n draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)\n else:\n draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)\n if click:\n if is_sound:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n else:\n pass\n running = False\n running_game = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.USEREVENT:\n draw_counter += 1\n color_counter += 1\n if color_counter == 3:\n color_counter = 0\n if event.type == pygame.QUIT:\n 
pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n running_game = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n death_music.stop()\n\n\ndef pause_screen():\n global running_game\n running = True\n click = False\n while running:\n screen.fill((0, 0, 0))\n mx, my = pygame.mouse.get_pos()\n pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))\n pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)\n pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))\n pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)\n draw_text('Pause', font, (255, 255, 255), screen, 235, 205)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 410\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 440)\n button_resume = pygame.image.load('resources/sprites/button.png')\n button_resume = pygame.transform.scale(button_resume, (200, 70))\n b_resume_mask = button_resume.get_rect()\n b_resume_mask.x = 195\n b_resume_mask.y = 300\n screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))\n draw_text('resume', font, (255, 255, 255), screen, 225, 330)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)\n if click:\n running = False\n running_game = False\n if b_resume_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)\n if click:\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button 
== 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\nif __name__ == '__main__':\n main_menu()\n pygame.quit()\n",
"step-3": "<mask token>\n\n\ndef draw_text(text, font_u, color, surface, x, y):\n text_object = font_u.render(text, color)\n textrect = text_object[1]\n textrect.topleft = x, y\n surface.blit(text_object[0], textrect)\n\n\ndef play_sound(sound_p, volume_h=0.5, wait_t=0):\n pl_sound = pygame.mixer.Sound(sound_p)\n pl_sound.set_volume(volume_h)\n if is_sound:\n pl_sound.play()\n pygame.time.wait(wait_t)\n\n\npygame.init()\nspeed_bckgd = 2\nrunning_game = True\nis_sound = True\nmenu = True\nboss_done = False\ngame_score = 0\nbullets_shot = 0\nline_counter = 0\nenemy_killed = 0\nspeed = 2\nFPS = 100\nwidth = 600\nheight = 800\nplayer_name = ''\ncon = sqlite3.connect('resources/db/leaderboard.db')\nfont = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)\nfont_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)\nfont_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)\nfont_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)\npygame.display.set_icon(pygame.image.load(\n 'resources/images/test_small_logo_1.bmp'))\npygame.display.set_caption('Death or Dishonour')\nscreen = pygame.display.set_mode((600, 800))\nclock = pygame.time.Clock()\ncur = con.cursor()\n\n\ndef draw_controls():\n pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)\n pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))\n draw_text('controls:', font, (255, 255, 255), screen, 20, 430)\n wasd = pygame.image.load('resources/sprites/controls_1.png')\n wasd = pygame.transform.scale(wasd, (243, 100))\n screen.blit(wasd, (20, 470))\n pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))\n draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)\n draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)\n mouse = pygame.image.load('resources/sprites/controls_2.png')\n mouse = pygame.transform.scale(mouse, (90, 100))\n screen.blit(mouse, (153, 590))\n draw_text(' - shoot', font, (255, 255, 255), screen, 270, 
640)\n\n\ndef draw_leaderboard():\n table = []\n result = cur.execute(\n 'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7')\n for elem in result:\n table.append(elem)\n pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))\n pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)\n pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)\n pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)\n charge = 40\n y = 124\n for i in range(1, 8):\n y += charge\n pygame.draw.line(screen, (255, 255, 255), (310, y), (560, y), 3)\n draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 80)\n x = 350\n y = 140\n for i in table:\n draw_text(str(i[0]), font_table, (255, 255, 255), screen, x, y)\n draw_text(str(i[1]), font_table, (255, 255, 255), screen, x + 100, y)\n y += charge\n\n\ndef main_menu():\n click = False\n pygame.mixer.stop()\n while True:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)\n button_play = pygame.image.load('resources/sprites/button.png')\n button_play = pygame.transform.scale(button_play, (222, 105))\n b_play_mask = button_play.get_rect()\n b_play_mask.x = 50\n b_play_mask.y = 70\n screen.blit(button_play, (b_play_mask.x, b_play_mask.y))\n draw_text('play', font, (255, 255, 255), screen, 113, 100)\n button_options = pygame.image.load('resources/sprites/button.png')\n button_options = pygame.transform.scale(button_options, (222, 105))\n b_options_mask = button_options.get_rect()\n b_options_mask.x = 50\n b_options_mask.y = 185\n screen.blit(button_options, (b_options_mask.x, b_options_mask.y))\n draw_text('options', font, (255, 255, 255), screen, 78, 215)\n button_exit = pygame.image.load('resources/sprites/button.png')\n button_exit = pygame.transform.scale(button_exit, (222, 105))\n 
b_exit_mask = button_exit.get_rect()\n b_exit_mask.x = 50\n b_exit_mask.y = 300\n screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))\n draw_text('quit', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_leaderboard()\n if b_play_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n game_screen()\n if b_options_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n options_menu()\n if b_exit_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n pygame.quit()\n sys.exit()\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n pygame.quit()\n sys.exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\ndef options_menu():\n global player_name, line_counter, is_sound\n running = True\n click = False\n numlock = False\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Options', font, (255, 255, 255), screen, 215, 20)\n button_1 = pygame.image.load('resources/sprites/button.png')\n button_1 = pygame.transform.scale(button_1, (222, 105))\n b_1_mask = button_1.get_rect()\n b_1_mask.x = 50\n b_1_mask.y = 70\n screen.blit(button_1, (b_1_mask.x, b_1_mask.y))\n draw_text(player_name, font, (255, 255, 255), screen, 125, 100)\n button_2 = pygame.image.load('resources/sprites/button.png')\n button_2 = 
pygame.transform.scale(button_2, (222, 105))\n b_2_mask = button_2.get_rect()\n b_2_mask.x = 50\n b_2_mask.y = 185\n screen.blit(button_2, (b_2_mask.x, b_2_mask.y))\n button_back = pygame.image.load('resources/sprites/button.png')\n button_back = pygame.transform.scale(button_back, (222, 105))\n b_back_mask = button_back.get_rect()\n b_back_mask.x = 50\n b_back_mask.y = 300\n screen.blit(button_back, (b_back_mask.x, b_back_mask.y))\n draw_text('back', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_text('audio:', font, (255, 255, 255), screen, 60, 195)\n if is_sound:\n draw_text('on', font, (255, 255, 255), screen, 190, 245)\n else:\n draw_text('off', font, (255, 255, 255), screen, 175, 230)\n if line_counter == 0 or player_name == 'NON':\n draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)\n draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)\n if numlock:\n draw_text('OFF', font, (255, 0, 0), screen, 500, 90)\n draw_text('NUM', font, (255, 0, 0), screen, 500, 120)\n draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)\n if b_2_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n if is_sound:\n is_sound = not is_sound\n pygame.mixer.pause()\n else:\n is_sound = not is_sound\n pygame.mixer.unpause()\n if b_back_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_BACKSPACE:\n player_name = player_name[:-1]\n if line_counter != 0:\n line_counter -= 1\n elif player_name == 'NON':\n pass\n elif event.key == pygame.K_SPACE:\n pass\n elif event.key == pygame.K_UP:\n pass\n elif event.key 
== pygame.K_DOWN:\n pass\n elif event.key == pygame.K_LEFT:\n pass\n elif event.key == pygame.K_RIGHT:\n pass\n elif event.key == pygame.K_RETURN:\n pass\n elif event.key == pygame.K_NUMLOCK:\n numlock = True\n elif event.key == pygame.K_ESCAPE:\n running = False\n elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:\n if line_counter != 3:\n line_counter += 1\n player_name += str(event.unicode).upper()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n clock.tick(10)\n\n\ndef game_screen():\n global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done\n game_score = 0\n enemy_killed = 0\n bullets_shot = 0\n boss_done = False\n if player_name == '':\n player_name = 'NON'\n track_count = 0\n battle_tracks = ['resources/sounds/music/battle_music_1.mp3',\n 'resources/sounds/music/battle_music_2.mp3',\n 'resources/sounds/music/battle_music_3.mp3',\n 'resources/sounds/music/battle_music_4.mp3',\n 'resources/sounds/music/battle_music_5.mp3',\n 'resources/sounds/music/battle_music_6.mp3']\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.stop()\n ingame_music_sound = 0.1\n if not is_sound:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n bs = False\n running_game = True\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n enemies = pygame.sprite.Group()\n death = False\n p = Player()\n window_holes = pygame.sprite.Group()\n bullets_count = pygame.sprite.Group()\n boss_bullets_count = pygame.sprite.Group()\n booms = pygame.sprite.Group()\n small_booms = pygame.sprite.Group()\n mini_booms = pygame.sprite.Group()\n phase1_score = True\n phase2_score = True\n phase3_score = True\n battle_music = True\n phase4_score = True\n col_check = 1\n boss_death = False\n level_bckgd_pos = -23800\n current_player_sprite = 'stay'\n current_level_background = pygame.image.load(\n 'resources/level_pictures/first_level_bckgd.jpg')\n 
screen.blit(current_level_background, (0, 0))\n wait = 0\n last = pygame.time.get_ticks()\n last_2 = pygame.time.get_ticks()\n boss_cooldown = 1000\n cooldown = 100\n while running_game:\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:\n ingame_music.stop()\n track_count += 1\n if track_count > 5:\n track_count = 0\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:\n ingame_music_sound += 0.05\n if ingame_music_sound >= 1.5:\n ingame_music_sound = 1.4\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:\n ingame_music_sound -= 0.05\n if ingame_music_sound < 0:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or\n event.key == pygame.K_LEFT) and not p.moving_right:\n current_player_sprite = 'left'\n p.moving_right = False\n p.moving_left = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or\n event.key == pygame.K_RIGHT) and not p.moving_left:\n current_player_sprite = 'right'\n p.moving_left = False\n p.moving_right = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_a or \n event.key == pygame.K_LEFT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_d or \n event.key == pygame.K_RIGHT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or\n event.key == pygame.K_UP) and not p.moving_down:\n p.moving_down = False\n p.moving_up = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or\n event.key == pygame.K_DOWN) and not p.moving_up:\n p.moving_up = False\n p.moving_down = True\n if 
event.type == pygame.KEYUP and (event.key == pygame.K_w or \n event.key == pygame.K_UP):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_s or \n event.key == pygame.K_DOWN):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and\n p.health_count > 0):\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and\n not bs):\n bs = True\n b = Boss()\n if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:\n Enemy(enemies)\n if (event.type == pygame.USEREVENT and death and pygame.time.\n get_ticks() - wait > 2000 or level_bckgd_pos > -801):\n ingame_music.stop()\n death_screen()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', '{}')\".format(\n player_name, game_score)\n cur.execute(var)\n con.commit()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n pause_screen()\n if not running_game:\n ingame_music.stop()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = (\"INSERT INTO 
highest_score VALUES ('{}', '{}')\".\n format(player_name, game_score))\n cur.execute(var)\n con.commit()\n level_bckgd_pos += speed_bckgd\n if level_bckgd_pos >= 0:\n screen.fill((0, 0, 0))\n screen.blit(current_level_background, (0, level_bckgd_pos))\n if level_bckgd_pos > -805:\n death = True\n if p.health_count > 0:\n for i in enemies:\n collision = pygame.sprite.collide_rect(p, i)\n if collision:\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if i.health_count - 2 <= 0:\n game_score += 10\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 2\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n p.health_count -= 1\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 1\n )\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(j, i)\n if collision:\n if i.health_count - 1 <= 0:\n game_score += 5\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n enemy_killed += 1\n else:\n i.health_count -= 1\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)\n )\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n if is_sound:\n play_sound(\n 'resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n if bs and not boss_death:\n collision = pygame.sprite.collide_rect(b, p)\n if collision and b.y > 0:\n b.health_count -= 0.3\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03\n )\n p.health_count -= 0.2\n if is_sound:\n 
play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3',\n 0.1)\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(b, j)\n if collision and b.y > 0:\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n b.health_count -= 0.2\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n for h in boss_bullets_count:\n collision = pygame.sprite.collide_rect(p, h)\n if collision:\n p.health_count -= 1\n Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.\n randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound(\n 'resources/sounds/window_crashed.mp3', 0.1)\n play_sound(\n 'resources/sounds/explosion_stun.mp3', 0.01\n )\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n h.kill()\n p.update(FPS)\n if current_player_sprite == 'left':\n sprite = p.anim_left()\n screen.blit(sprite, (p.x, p.y))\n p.left_1 = not p.left_1\n elif current_player_sprite == 'right':\n sprite = p.anim_right()\n screen.blit(sprite, (p.x, p.y))\n p.right_1 = not p.right_1\n elif current_player_sprite == 'stay':\n sprite = p.anim_stay()\n screen.blit(sprite, (p.x, p.y))\n p.stay_1 = not p.stay_1\n if bs:\n if 
battle_music:\n ingame_music.stop()\n ingame_music = pygame.mixer.Sound(\n 'resources/sounds/music/wagner_main_theme.mp3')\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n battle_music = False\n b.update()\n if b.body == b.stay3 and phase1_score:\n game_score += 100\n phase1_score = False\n if b.body == b.stay5 and phase2_score:\n game_score += 100\n phase2_score = False\n if b.body == b.stay7 and phase3_score:\n game_score += 200\n phase3_score = False\n now = pygame.time.get_ticks()\n if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=\n b.stay7):\n last_2 = now\n play_sound('resources/sounds/boss_shot.mp3', 0.05)\n Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)\n )\n if col_check % 40 == 0:\n b.change_sprite()\n else:\n col_check += 1\n if b.health_count > 0:\n screen.blit(b.body, (b.x, b.y))\n elif b.health_count <= 0 and phase4_score:\n boss_done = True\n phase4_score = False\n game_score += 350\n if is_sound:\n play_sound('resources/sounds/boss_defeated.mp3', 0.2)\n Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x, b.rect.y))\n Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))\n Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n boss_death = True\n else:\n if p.minimize == 0:\n if is_sound:\n ingame_music.stop()\n play_sound('resources/sounds/plane_crash.mp3', 0.05)\n p.minimize += 1\n if not death:\n if p.minimize <= 320:\n p.death()\n screen.blit(p.death_sp, (p.x, p.y))\n else:\n death = True\n wait = pygame.time.get_ticks()\n Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect\n .y + 25))\n Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))\n Smallexplosions(small_booms).boom((p.rect.x - 22, p.\n rect.y + 7))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n p.kill()\n if bs and b.health_count > 0:\n 
b.speed += 0.02\n b.win = True\n screen.blit(b.body, (b.x, b.y))\n b.update()\n window_holes.update()\n window_holes.draw(screen)\n enemies.update(FPS)\n enemies.draw(screen)\n bullets_count.update()\n bullets_count.draw(screen)\n boss_bullets_count.update()\n boss_bullets_count.draw(screen)\n small_booms.update()\n small_booms.draw(screen)\n mini_booms.update()\n mini_booms.draw(screen)\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255),\n screen, 20, 20)\n if len(str(game_score)) < 2:\n draw_text('00000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 3:\n draw_text('0000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 4:\n draw_text('000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 5:\n draw_text('00' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) < 6:\n draw_text('0' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) >= 6:\n draw_text('Max score', font, (255, 255, 255), screen, 510, 20)\n booms.update()\n booms.draw(screen)\n pygame.display.flip()\n clock.tick(FPS)\n\n\ndef death_screen():\n global running_game, game_score\n running = True\n click = False\n draw_counter = 0\n color_counter = 0\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n rating_kills = enemy_killed // 10\n if bullets_shot < 800:\n rating_shots = 1\n else:\n rating_shots = 0\n rating = rating_kills + rating_shots\n if boss_done:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/victory_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n rating += 2\n else:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/loose_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n 
death_music.set_volume(death_music_sound)\n death_music.play()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))\n pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)\n draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 700\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 730)\n if draw_counter >= 1:\n draw_text('Player: {}'.format(player_name), font, (255, 255, \n 255), screen, 50, 150)\n if draw_counter >= 2:\n draw_text('Score: {}'.format(game_score), font, (255, 255, 255),\n screen, 50, 230)\n if draw_counter >= 3:\n draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,\n 255, 255), screen, 50, 310)\n if draw_counter >= 4:\n draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,\n 255, 255), screen, 50, 390)\n if draw_counter >= 5:\n draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)\n if draw_counter >= 6:\n if rating <= 6:\n draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)\n elif rating == 7:\n draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)\n elif rating == 8:\n draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)\n elif rating == 9:\n draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)\n elif rating == 10:\n draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)\n elif rating == 11:\n draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)\n elif rating <= 13:\n draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 0:\n draw_text('SSS', font_rating, (255, 0, 0), 
screen, 300, 470)\n elif color_counter == 1:\n draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)\n else:\n draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)\n if click:\n if is_sound:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n else:\n pass\n running = False\n running_game = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.USEREVENT:\n draw_counter += 1\n color_counter += 1\n if color_counter == 3:\n color_counter = 0\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n running_game = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n death_music.stop()\n\n\ndef pause_screen():\n global running_game\n running = True\n click = False\n while running:\n screen.fill((0, 0, 0))\n mx, my = pygame.mouse.get_pos()\n pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))\n pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)\n pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))\n pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)\n draw_text('Pause', font, (255, 255, 255), screen, 235, 205)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 410\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 440)\n button_resume = pygame.image.load('resources/sprites/button.png')\n button_resume = pygame.transform.scale(button_resume, (200, 70))\n b_resume_mask = button_resume.get_rect()\n b_resume_mask.x = 195\n b_resume_mask.y = 300\n screen.blit(button_resume, 
(b_resume_mask.x, b_resume_mask.y))\n draw_text('resume', font, (255, 255, 255), screen, 225, 330)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)\n if click:\n running = False\n running_game = False\n if b_resume_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)\n if click:\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\nif __name__ == '__main__':\n main_menu()\n pygame.quit()\n",
"step-4": "import pygame\nimport pygame.freetype\nimport sys\nimport sqlite3\nfrom data.player_class import Player\nfrom data.explosion_class import Explosion\nfrom data.objects_class import Bullets, Damage\nfrom data.enemy_class import Enemy\nfrom data.enemy_class import Boss\nfrom data.death_animation import Smallexplosions\nfrom data.explosion_class import Miniexplosion\nfrom data.objects_class import Bossbullets\nimport random\n\n\ndef draw_text(text, font_u, color, surface, x, y):\n text_object = font_u.render(text, color)\n textrect = text_object[1]\n textrect.topleft = x, y\n surface.blit(text_object[0], textrect)\n\n\ndef play_sound(sound_p, volume_h=0.5, wait_t=0):\n pl_sound = pygame.mixer.Sound(sound_p)\n pl_sound.set_volume(volume_h)\n if is_sound:\n pl_sound.play()\n pygame.time.wait(wait_t)\n\n\npygame.init()\nspeed_bckgd = 2\nrunning_game = True\nis_sound = True\nmenu = True\nboss_done = False\ngame_score = 0\nbullets_shot = 0\nline_counter = 0\nenemy_killed = 0\nspeed = 2\nFPS = 100\nwidth = 600\nheight = 800\nplayer_name = ''\ncon = sqlite3.connect('resources/db/leaderboard.db')\nfont = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)\nfont_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)\nfont_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)\nfont_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)\npygame.display.set_icon(pygame.image.load(\n 'resources/images/test_small_logo_1.bmp'))\npygame.display.set_caption('Death or Dishonour')\nscreen = pygame.display.set_mode((600, 800))\nclock = pygame.time.Clock()\ncur = con.cursor()\n\n\ndef draw_controls():\n pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)\n pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))\n draw_text('controls:', font, (255, 255, 255), screen, 20, 430)\n wasd = pygame.image.load('resources/sprites/controls_1.png')\n wasd = pygame.transform.scale(wasd, (243, 100))\n screen.blit(wasd, (20, 470))\n 
pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))\n draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)\n draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)\n mouse = pygame.image.load('resources/sprites/controls_2.png')\n mouse = pygame.transform.scale(mouse, (90, 100))\n screen.blit(mouse, (153, 590))\n draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)\n\n\ndef draw_leaderboard():\n table = []\n result = cur.execute(\n 'SELECT * FROM highest_score ORDER BY score DESC LIMIT 7')\n for elem in result:\n table.append(elem)\n pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))\n pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)\n pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)\n pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)\n charge = 40\n y = 124\n for i in range(1, 8):\n y += charge\n pygame.draw.line(screen, (255, 255, 255), (310, y), (560, y), 3)\n draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 80)\n x = 350\n y = 140\n for i in table:\n draw_text(str(i[0]), font_table, (255, 255, 255), screen, x, y)\n draw_text(str(i[1]), font_table, (255, 255, 255), screen, x + 100, y)\n y += charge\n\n\ndef main_menu():\n click = False\n pygame.mixer.stop()\n while True:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)\n button_play = pygame.image.load('resources/sprites/button.png')\n button_play = pygame.transform.scale(button_play, (222, 105))\n b_play_mask = button_play.get_rect()\n b_play_mask.x = 50\n b_play_mask.y = 70\n screen.blit(button_play, (b_play_mask.x, b_play_mask.y))\n draw_text('play', font, (255, 255, 255), screen, 113, 100)\n button_options = pygame.image.load('resources/sprites/button.png')\n button_options = 
pygame.transform.scale(button_options, (222, 105))\n b_options_mask = button_options.get_rect()\n b_options_mask.x = 50\n b_options_mask.y = 185\n screen.blit(button_options, (b_options_mask.x, b_options_mask.y))\n draw_text('options', font, (255, 255, 255), screen, 78, 215)\n button_exit = pygame.image.load('resources/sprites/button.png')\n button_exit = pygame.transform.scale(button_exit, (222, 105))\n b_exit_mask = button_exit.get_rect()\n b_exit_mask.x = 50\n b_exit_mask.y = 300\n screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))\n draw_text('quit', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_leaderboard()\n if b_play_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n game_screen()\n if b_options_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n options_menu()\n if b_exit_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n pygame.quit()\n sys.exit()\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n pygame.quit()\n sys.exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\ndef options_menu():\n global player_name, line_counter, is_sound\n running = True\n click = False\n numlock = False\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Options', font, (255, 255, 255), screen, 215, 20)\n 
button_1 = pygame.image.load('resources/sprites/button.png')\n button_1 = pygame.transform.scale(button_1, (222, 105))\n b_1_mask = button_1.get_rect()\n b_1_mask.x = 50\n b_1_mask.y = 70\n screen.blit(button_1, (b_1_mask.x, b_1_mask.y))\n draw_text(player_name, font, (255, 255, 255), screen, 125, 100)\n button_2 = pygame.image.load('resources/sprites/button.png')\n button_2 = pygame.transform.scale(button_2, (222, 105))\n b_2_mask = button_2.get_rect()\n b_2_mask.x = 50\n b_2_mask.y = 185\n screen.blit(button_2, (b_2_mask.x, b_2_mask.y))\n button_back = pygame.image.load('resources/sprites/button.png')\n button_back = pygame.transform.scale(button_back, (222, 105))\n b_back_mask = button_back.get_rect()\n b_back_mask.x = 50\n b_back_mask.y = 300\n screen.blit(button_back, (b_back_mask.x, b_back_mask.y))\n draw_text('back', font, (255, 255, 255), screen, 113, 330)\n draw_controls()\n draw_text('audio:', font, (255, 255, 255), screen, 60, 195)\n if is_sound:\n draw_text('on', font, (255, 255, 255), screen, 190, 245)\n else:\n draw_text('off', font, (255, 255, 255), screen, 175, 230)\n if line_counter == 0 or player_name == 'NON':\n draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)\n draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)\n if numlock:\n draw_text('OFF', font, (255, 0, 0), screen, 500, 90)\n draw_text('NUM', font, (255, 0, 0), screen, 500, 120)\n draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)\n if b_2_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n if is_sound:\n is_sound = not is_sound\n pygame.mixer.pause()\n else:\n is_sound = not is_sound\n pygame.mixer.unpause()\n if b_back_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n running = False\n click = False\n for event in 
pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_BACKSPACE:\n player_name = player_name[:-1]\n if line_counter != 0:\n line_counter -= 1\n elif player_name == 'NON':\n pass\n elif event.key == pygame.K_SPACE:\n pass\n elif event.key == pygame.K_UP:\n pass\n elif event.key == pygame.K_DOWN:\n pass\n elif event.key == pygame.K_LEFT:\n pass\n elif event.key == pygame.K_RIGHT:\n pass\n elif event.key == pygame.K_RETURN:\n pass\n elif event.key == pygame.K_NUMLOCK:\n numlock = True\n elif event.key == pygame.K_ESCAPE:\n running = False\n elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:\n if line_counter != 3:\n line_counter += 1\n player_name += str(event.unicode).upper()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n clock.tick(10)\n\n\ndef game_screen():\n global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done\n game_score = 0\n enemy_killed = 0\n bullets_shot = 0\n boss_done = False\n if player_name == '':\n player_name = 'NON'\n track_count = 0\n battle_tracks = ['resources/sounds/music/battle_music_1.mp3',\n 'resources/sounds/music/battle_music_2.mp3',\n 'resources/sounds/music/battle_music_3.mp3',\n 'resources/sounds/music/battle_music_4.mp3',\n 'resources/sounds/music/battle_music_5.mp3',\n 'resources/sounds/music/battle_music_6.mp3']\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.stop()\n ingame_music_sound = 0.1\n if not is_sound:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n bs = False\n running_game = True\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n enemies = pygame.sprite.Group()\n death = False\n p = Player()\n window_holes = pygame.sprite.Group()\n bullets_count = pygame.sprite.Group()\n boss_bullets_count = pygame.sprite.Group()\n booms = pygame.sprite.Group()\n 
small_booms = pygame.sprite.Group()\n mini_booms = pygame.sprite.Group()\n phase1_score = True\n phase2_score = True\n phase3_score = True\n battle_music = True\n phase4_score = True\n col_check = 1\n boss_death = False\n level_bckgd_pos = -23800\n current_player_sprite = 'stay'\n current_level_background = pygame.image.load(\n 'resources/level_pictures/first_level_bckgd.jpg')\n screen.blit(current_level_background, (0, 0))\n wait = 0\n last = pygame.time.get_ticks()\n last_2 = pygame.time.get_ticks()\n boss_cooldown = 1000\n cooldown = 100\n while running_game:\n for event in pygame.event.get():\n if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:\n ingame_music.stop()\n track_count += 1\n if track_count > 5:\n track_count = 0\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:\n ingame_music_sound += 0.05\n if ingame_music_sound >= 1.5:\n ingame_music_sound = 1.4\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:\n ingame_music_sound -= 0.05\n if ingame_music_sound < 0:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_a or\n event.key == pygame.K_LEFT) and not p.moving_right:\n current_player_sprite = 'left'\n p.moving_right = False\n p.moving_left = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_d or\n event.key == pygame.K_RIGHT) and not p.moving_left:\n current_player_sprite = 'right'\n p.moving_left = False\n p.moving_right = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_a or \n event.key == pygame.K_LEFT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_d or \n event.key == pygame.K_RIGHT):\n current_player_sprite = 
'stay'\n p.moving_right = False\n p.moving_left = False\n if event.type == pygame.KEYDOWN and (event.key == pygame.K_w or\n event.key == pygame.K_UP) and not p.moving_down:\n p.moving_down = False\n p.moving_up = True\n elif event.type == pygame.KEYDOWN and (event.key == pygame.K_s or\n event.key == pygame.K_DOWN) and not p.moving_up:\n p.moving_up = False\n p.moving_down = True\n if event.type == pygame.KEYUP and (event.key == pygame.K_w or \n event.key == pygame.K_UP):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if event.type == pygame.KEYUP and (event.key == pygame.K_s or \n event.key == pygame.K_DOWN):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n if (event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and\n p.health_count > 0):\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n if (event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and\n not bs):\n bs = True\n b = Boss()\n if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:\n Enemy(enemies)\n if (event.type == pygame.USEREVENT and death and pygame.time.\n get_ticks() - wait > 2000 or level_bckgd_pos > -801):\n ingame_music.stop()\n death_screen()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', 
'{}')\".format(\n player_name, game_score)\n cur.execute(var)\n con.commit()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n pause_screen()\n if not running_game:\n ingame_music.stop()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = (\"INSERT INTO highest_score VALUES ('{}', '{}')\".\n format(player_name, game_score))\n cur.execute(var)\n con.commit()\n level_bckgd_pos += speed_bckgd\n if level_bckgd_pos >= 0:\n screen.fill((0, 0, 0))\n screen.blit(current_level_background, (0, level_bckgd_pos))\n if level_bckgd_pos > -805:\n death = True\n if p.health_count > 0:\n for i in enemies:\n collision = pygame.sprite.collide_rect(p, i)\n if collision:\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if i.health_count - 2 <= 0:\n game_score += 10\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 2\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n p.health_count -= 1\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 1\n )\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(j, i)\n if collision:\n if i.health_count - 1 <= 0:\n game_score += 5\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound(\n 'resources/sounds/explosion_sound.mp3', 0.1\n )\n enemy_killed += 1\n else:\n i.health_count -= 1\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y)\n )\n if is_sound:\n play_sound(\n 
'resources/sounds/explosion_sound.mp3', 0.1\n )\n if is_sound:\n play_sound(\n 'resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n if bs and not boss_death:\n collision = pygame.sprite.collide_rect(b, p)\n if collision and b.y > 0:\n b.health_count -= 0.3\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03\n )\n p.health_count -= 0.2\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05\n )\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(\n 50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3',\n 0.1)\n play_sound('resources/sounds/explosion_stun.mp3',\n 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(b, j)\n if collision and b.y > 0:\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n b.health_count -= 0.2\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3',\n 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n j.kill()\n for h in boss_bullets_count:\n collision = pygame.sprite.collide_rect(p, h)\n if collision:\n p.health_count -= 1\n Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.\n randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound(\n 'resources/sounds/window_crashed.mp3', 0.1)\n play_sound(\n 'resources/sounds/explosion_stun.mp3', 0.01\n )\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3',\n 0.03)\n h.kill()\n p.update(FPS)\n if 
current_player_sprite == 'left':\n sprite = p.anim_left()\n screen.blit(sprite, (p.x, p.y))\n p.left_1 = not p.left_1\n elif current_player_sprite == 'right':\n sprite = p.anim_right()\n screen.blit(sprite, (p.x, p.y))\n p.right_1 = not p.right_1\n elif current_player_sprite == 'stay':\n sprite = p.anim_stay()\n screen.blit(sprite, (p.x, p.y))\n p.stay_1 = not p.stay_1\n if bs:\n if battle_music:\n ingame_music.stop()\n ingame_music = pygame.mixer.Sound(\n 'resources/sounds/music/wagner_main_theme.mp3')\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n battle_music = False\n b.update()\n if b.body == b.stay3 and phase1_score:\n game_score += 100\n phase1_score = False\n if b.body == b.stay5 and phase2_score:\n game_score += 100\n phase2_score = False\n if b.body == b.stay7 and phase3_score:\n game_score += 200\n phase3_score = False\n now = pygame.time.get_ticks()\n if (now - last_2 >= boss_cooldown and b.y > 0 and b.body !=\n b.stay7):\n last_2 = now\n play_sound('resources/sounds/boss_shot.mp3', 0.05)\n Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155)\n )\n if col_check % 40 == 0:\n b.change_sprite()\n else:\n col_check += 1\n if b.health_count > 0:\n screen.blit(b.body, (b.x, b.y))\n elif b.health_count <= 0 and phase4_score:\n boss_done = True\n phase4_score = False\n game_score += 350\n if is_sound:\n play_sound('resources/sounds/boss_defeated.mp3', 0.2)\n Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x, b.rect.y))\n Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))\n Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n boss_death = True\n else:\n if p.minimize == 0:\n if is_sound:\n ingame_music.stop()\n play_sound('resources/sounds/plane_crash.mp3', 0.05)\n p.minimize += 1\n if not death:\n if p.minimize <= 320:\n p.death()\n screen.blit(p.death_sp, 
(p.x, p.y))\n else:\n death = True\n wait = pygame.time.get_ticks()\n Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect\n .y + 25))\n Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))\n Smallexplosions(small_booms).boom((p.rect.x - 22, p.\n rect.y + 7))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n p.kill()\n if bs and b.health_count > 0:\n b.speed += 0.02\n b.win = True\n screen.blit(b.body, (b.x, b.y))\n b.update()\n window_holes.update()\n window_holes.draw(screen)\n enemies.update(FPS)\n enemies.draw(screen)\n bullets_count.update()\n bullets_count.draw(screen)\n boss_bullets_count.update()\n boss_bullets_count.draw(screen)\n small_booms.update()\n small_booms.draw(screen)\n mini_booms.update()\n mini_booms.draw(screen)\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255),\n screen, 20, 20)\n if len(str(game_score)) < 2:\n draw_text('00000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 3:\n draw_text('0000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 4:\n draw_text('000' + str(game_score), font, (255, 255, 255),\n screen, 430, 20)\n elif len(str(game_score)) < 5:\n draw_text('00' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) < 6:\n draw_text('0' + str(game_score), font, (255, 255, 255), screen,\n 430, 20)\n elif len(str(game_score)) >= 6:\n draw_text('Max score', font, (255, 255, 255), screen, 510, 20)\n booms.update()\n booms.draw(screen)\n pygame.display.flip()\n clock.tick(FPS)\n\n\ndef death_screen():\n global running_game, game_score\n running = True\n click = False\n draw_counter = 0\n color_counter = 0\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n rating_kills = enemy_killed // 10\n if bullets_shot < 800:\n rating_shots = 1\n else:\n rating_shots = 0\n rating = rating_kills + rating_shots\n if boss_done:\n death_music = pygame.mixer.Sound(\n 
'resources/sounds/music/victory_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n rating += 2\n else:\n death_music = pygame.mixer.Sound(\n 'resources/sounds/music/loose_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))\n pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)\n draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 700\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 730)\n if draw_counter >= 1:\n draw_text('Player: {}'.format(player_name), font, (255, 255, \n 255), screen, 50, 150)\n if draw_counter >= 2:\n draw_text('Score: {}'.format(game_score), font, (255, 255, 255),\n screen, 50, 230)\n if draw_counter >= 3:\n draw_text('Enemies killed: {}'.format(enemy_killed), font, (255,\n 255, 255), screen, 50, 310)\n if draw_counter >= 4:\n draw_text('Bullets fired: {}'.format(bullets_shot), font, (255,\n 255, 255), screen, 50, 390)\n if draw_counter >= 5:\n draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)\n if draw_counter >= 6:\n if rating <= 6:\n draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)\n elif rating == 7:\n draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)\n elif rating == 8:\n draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)\n elif rating == 
9:\n draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)\n elif rating == 10:\n draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)\n elif rating == 11:\n draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)\n elif rating <= 13:\n draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 0:\n draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 1:\n draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)\n else:\n draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)\n if click:\n if is_sound:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n else:\n pass\n running = False\n running_game = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.USEREVENT:\n draw_counter += 1\n color_counter += 1\n if color_counter == 3:\n color_counter = 0\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n running_game = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n death_music.stop()\n\n\ndef pause_screen():\n global running_game\n running = True\n click = False\n while running:\n screen.fill((0, 0, 0))\n mx, my = pygame.mouse.get_pos()\n pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))\n pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)\n pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))\n pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)\n draw_text('Pause', font, (255, 255, 255), screen, 235, 205)\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 
410\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 440)\n button_resume = pygame.image.load('resources/sprites/button.png')\n button_resume = pygame.transform.scale(button_resume, (200, 70))\n b_resume_mask = button_resume.get_rect()\n b_resume_mask.x = 195\n b_resume_mask.y = 300\n screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))\n draw_text('resume', font, (255, 255, 255), screen, 225, 330)\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)\n if click:\n running = False\n running_game = False\n if b_resume_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)\n if click:\n running = False\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n pygame.display.update()\n clock.tick(10)\n\n\nif __name__ == '__main__':\n main_menu()\n pygame.quit()\n",
"step-5": "import pygame\nimport pygame.freetype\nimport sys\nimport sqlite3\nfrom data.player_class import Player\nfrom data.explosion_class import Explosion\nfrom data.objects_class import Bullets, Damage\nfrom data.enemy_class import Enemy\nfrom data.enemy_class import Boss\nfrom data.death_animation import Smallexplosions\nfrom data.explosion_class import Miniexplosion\nfrom data.objects_class import Bossbullets\nimport random\n\n\ndef draw_text(text, font_u, color, surface, x, y):\n text_object = font_u.render(text, color)\n textrect = text_object[1]\n textrect.topleft = (x, y)\n surface.blit(text_object[0], textrect)\n\n\ndef play_sound(sound_p, volume_h=0.5, wait_t=0):\n pl_sound = pygame.mixer.Sound(sound_p)\n pl_sound.set_volume(volume_h)\n if is_sound:\n pl_sound.play()\n pygame.time.wait(wait_t)\n\n\npygame.init()\nspeed_bckgd = 2\nrunning_game = True\nis_sound = True\nmenu = True\nboss_done = False\ngame_score = 0\nbullets_shot = 0\nline_counter = 0\nenemy_killed = 0\nspeed = 2\nFPS = 100\nwidth = 600\nheight = 800\nplayer_name = ''\ncon = sqlite3.connect('resources/db/leaderboard.db')\nfont = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)\nfont_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)\nfont_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)\nfont_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)\npygame.display.set_icon(pygame.image.load('resources/images/test_small_logo_1.bmp'))\npygame.display.set_caption('Death or Dishonour')\nscreen = pygame.display.set_mode((600, 800))\nclock = pygame.time.Clock()\ncur = con.cursor()\n\n\ndef draw_controls():\n pygame.draw.rect(screen, (255, 255, 255), (0, 420, 600, 380), 4)\n pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))\n\n draw_text('controls:', font, (255, 255, 255), screen, 20, 430)\n\n wasd = pygame.image.load('resources/sprites/controls_1.png')\n wasd = pygame.transform.scale(wasd, (243, 100))\n screen.blit(wasd, (20, 
470))\n pygame.draw.rect(screen, (255, 255, 255), (20, 646, 130, 25))\n draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)\n draw_text(' - movement', font, (255, 255, 255), screen, 270, 522)\n mouse = pygame.image.load('resources/sprites/controls_2.png')\n mouse = pygame.transform.scale(mouse, (90, 100))\n screen.blit(mouse, (153, 590))\n draw_text(' - shoot', font, (255, 255, 255), screen, 270, 640)\n\n\ndef draw_leaderboard():\n table = []\n result = cur.execute(\"\"\"SELECT * FROM highest_score ORDER BY score DESC LIMIT 7\"\"\")\n for elem in result:\n table.append(elem)\n pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))\n pygame.draw.rect(screen, (255, 255, 255), (310, 70, 250, 335), 3)\n pygame.draw.line(screen, (255, 255, 255), (310, 124), (560, 124), 3)\n pygame.draw.line(screen, (255, 255, 255), (435, 124), (435, 405), 3)\n charge = 40\n y = 124\n for i in range(1, 8):\n y += charge\n pygame.draw.line(screen, (255, 255, 255), (310, y), (560, y), 3)\n draw_text('leaderboard', font_table, (255, 255, 255), screen, 362, 80)\n x = 350\n y = 140\n for i in table:\n draw_text(str(i[0]), font_table, (255, 255, 255), screen, x, y)\n draw_text(str(i[1]), font_table, (255, 255, 255), screen, x + 100, y)\n y += charge\n\n\ndef main_menu():\n click = False\n pygame.mixer.stop()\n while True:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n # ------------------------------------------ name zone draw\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)\n # ------------------------------------------ play button\n button_play = pygame.image.load('resources/sprites/button.png')\n button_play = pygame.transform.scale(button_play, (222, 105))\n b_play_mask = button_play.get_rect()\n b_play_mask.x = 50\n b_play_mask.y = 70\n screen.blit(button_play, (b_play_mask.x, b_play_mask.y))\n draw_text('play', font, 
(255, 255, 255), screen, 113, 100)\n # ------------------------------------------ options button\n button_options = pygame.image.load('resources/sprites/button.png')\n button_options = pygame.transform.scale(button_options, (222, 105))\n b_options_mask = button_options.get_rect()\n b_options_mask.x = 50\n b_options_mask.y = 185\n screen.blit(button_options, (b_options_mask.x, b_options_mask.y))\n draw_text('options', font, (255, 255, 255), screen, 78, 215)\n # ------------------------------------------ quit button\n button_exit = pygame.image.load('resources/sprites/button.png')\n button_exit = pygame.transform.scale(button_exit, (222, 105))\n b_exit_mask = button_exit.get_rect()\n b_exit_mask.x = 50\n b_exit_mask.y = 300\n screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))\n draw_text('quit', font, (255, 255, 255), screen, 113, 330)\n # ------------------------------------------ draw\n\n draw_controls()\n draw_leaderboard()\n # ------------------------------------------ collide\n if b_play_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n game_screen()\n if b_options_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n options_menu()\n if b_exit_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n pygame.quit()\n sys.exit()\n # ------------------------------------------ events\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n pygame.quit()\n sys.exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n # 
------------------------------------------ update\n pygame.display.update()\n clock.tick(10)\n\n\ndef options_menu():\n global player_name, line_counter, is_sound\n running = True\n click = False\n numlock = False\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n # ------------------------------------------ name zone draw\n pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))\n pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)\n draw_text('Options', font, (255, 255, 255), screen, 215, 20)\n # ------------------------------------------ button nick\n button_1 = pygame.image.load('resources/sprites/button.png')\n button_1 = pygame.transform.scale(button_1, (222, 105))\n b_1_mask = button_1.get_rect()\n b_1_mask.x = 50\n b_1_mask.y = 70\n screen.blit(button_1, (b_1_mask.x, b_1_mask.y))\n draw_text(player_name, font, (255, 255, 255), screen, 125, 100)\n # ------------------------------------------ button sound\n button_2 = pygame.image.load('resources/sprites/button.png')\n button_2 = pygame.transform.scale(button_2, (222, 105))\n b_2_mask = button_2.get_rect()\n b_2_mask.x = 50\n b_2_mask.y = 185\n screen.blit(button_2, (b_2_mask.x, b_2_mask.y))\n # ------------------------------------------ button back\n button_back = pygame.image.load('resources/sprites/button.png')\n button_back = pygame.transform.scale(button_back, (222, 105))\n b_back_mask = button_back.get_rect()\n b_back_mask.x = 50\n b_back_mask.y = 300\n screen.blit(button_back, (b_back_mask.x, b_back_mask.y))\n draw_text('back', font, (255, 255, 255), screen, 113, 330)\n # ------------------------------------------ draw\n draw_controls()\n draw_text('audio:', font, (255, 255, 255), screen, 60, 195)\n if is_sound:\n draw_text('on', font, (255, 255, 255), screen, 190, 245)\n else:\n draw_text('off', font, (255, 255, 255), screen, 175, 230)\n if line_counter == 0 or player_name == 'NON':\n draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)\n draw_text('NICKNAME', 
font, (255, 0, 0), screen, 280, 120)\n if numlock:\n draw_text('OFF', font, (255, 0, 0), screen, 500, 90)\n draw_text('NUM', font, (255, 0, 0), screen, 500, 120)\n draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)\n # ------------------------------------------ collide\n if b_2_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n if is_sound:\n is_sound = not is_sound\n pygame.mixer.pause()\n else:\n is_sound = not is_sound\n pygame.mixer.unpause()\n if b_back_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)\n if click:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n running = False\n # ------------------------------------------ events\n click = False\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_BACKSPACE:\n player_name = player_name[:-1]\n if line_counter != 0:\n line_counter -= 1\n elif player_name == 'NON':\n pass\n elif event.key == pygame.K_SPACE:\n pass\n elif event.key == pygame.K_UP:\n pass\n elif event.key == pygame.K_DOWN:\n pass\n elif event.key == pygame.K_LEFT:\n pass\n elif event.key == pygame.K_RIGHT:\n pass\n elif event.key == pygame.K_RETURN:\n pass\n elif event.key == pygame.K_NUMLOCK:\n numlock = True\n elif event.key == pygame.K_ESCAPE:\n running = False\n elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:\n if line_counter != 3:\n line_counter += 1\n player_name += str(event.unicode).upper()\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n # ------------------------------------------ update\n pygame.display.update()\n clock.tick(10)\n\n\ndef game_screen():\n global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done\n game_score = 0\n enemy_killed = 0\n bullets_shot 
= 0\n boss_done = False\n if player_name == '':\n player_name = 'NON'\n track_count = 0\n battle_tracks = ['resources/sounds/music/battle_music_1.mp3', 'resources/sounds/music/battle_music_2.mp3',\n 'resources/sounds/music/battle_music_3.mp3', 'resources/sounds/music/battle_music_4.mp3',\n 'resources/sounds/music/battle_music_5.mp3', 'resources/sounds/music/battle_music_6.mp3']\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.stop()\n ingame_music_sound = 0.1\n if not is_sound:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n bs = False\n running_game = True\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n enemies = pygame.sprite.Group()\n death = False\n p = Player()\n window_holes = pygame.sprite.Group()\n bullets_count = pygame.sprite.Group()\n boss_bullets_count = pygame.sprite.Group()\n booms = pygame.sprite.Group()\n small_booms = pygame.sprite.Group()\n mini_booms = pygame.sprite.Group()\n phase1_score = True\n phase2_score = True\n phase3_score = True\n battle_music = True\n phase4_score = True\n col_check = 1\n boss_death = False\n level_bckgd_pos = -23800\n current_player_sprite = 'stay'\n current_level_background = pygame.image.load('resources/level_pictures/first_level_bckgd.jpg')\n screen.blit(current_level_background, (0, 0))\n wait = 0\n last = pygame.time.get_ticks()\n last_2 = pygame.time.get_ticks()\n boss_cooldown = 1000\n cooldown = 100\n while running_game:\n # ---------------------------------------- управление\n for event in pygame.event.get(): # в этом цикле мы принимаем сообщения, отправленные пользователем\n\n if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:\n ingame_music.stop()\n track_count += 1\n if track_count > 5:\n track_count = 0\n ingame_music = pygame.mixer.Sound(battle_tracks[track_count])\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:\n 
ingame_music_sound += 0.05\n if ingame_music_sound >= 1.5:\n ingame_music_sound = 1.4\n ingame_music.set_volume(ingame_music_sound)\n\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:\n ingame_music_sound -= 0.05\n if ingame_music_sound < 0:\n ingame_music_sound = 0\n ingame_music.set_volume(ingame_music_sound)\n\n if event.type == pygame.KEYDOWN and (\n event.key == pygame.K_a or event.key == pygame.K_LEFT) and not p.moving_right:\n current_player_sprite = 'left'\n p.moving_right = False\n p.moving_left = True\n\n elif event.type == pygame.KEYDOWN and (\n event.key == pygame.K_d or event.key == pygame.K_RIGHT) and not p.moving_left:\n current_player_sprite = 'right'\n p.moving_left = False\n p.moving_right = True\n\n if event.type == pygame.KEYUP and (event.key == pygame.K_a or event.key == pygame.K_LEFT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n\n if event.type == pygame.KEYUP and (event.key == pygame.K_d or event.key == pygame.K_RIGHT):\n current_player_sprite = 'stay'\n p.moving_right = False\n p.moving_left = False\n\n if event.type == pygame.KEYDOWN and (\n event.key == pygame.K_w or event.key == pygame.K_UP) and not p.moving_down:\n p.moving_down = False\n p.moving_up = True\n\n elif event.type == pygame.KEYDOWN and (\n event.key == pygame.K_s or event.key == pygame.K_DOWN) and not p.moving_up:\n p.moving_up = False\n p.moving_down = True\n\n if event.type == pygame.KEYUP and (event.key == pygame.K_w or event.key == pygame.K_UP):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n\n if event.type == pygame.KEYUP and (event.key == pygame.K_s or event.key == pygame.K_DOWN):\n current_player_sprite = 'stay'\n p.moving_down = False\n p.moving_up = False\n\n # просчет выстрела\n if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 
21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n\n # просчет выстрела, но для пробела\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:\n now = pygame.time.get_ticks()\n if now - last >= cooldown:\n last = now\n Bullets(bullets_count).shot((p.x + 21, p.y - 25))\n Bullets(bullets_count).shot((p.x + 76, p.y - 25))\n if is_sound:\n play_sound('resources/sounds/shot_sound.mp3', 0.1)\n Bullets.shooting = True\n bullets_shot += 2\n\n # спавн врагов\n if event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and not bs:\n bs = True\n b = Boss()\n if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:\n Enemy(enemies)\n if event.type == pygame.USEREVENT and death and pygame.time.get_ticks()\\\n - wait > 2000 or level_bckgd_pos > -801:\n ingame_music.stop()\n death_screen()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', '{}')\".format(player_name, game_score)\n cur.execute(var)\n con.commit()\n # если пользователь закроет программу, игра завершится\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n # выход в меню\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n pause_screen()\n if not running_game:\n ingame_music.stop()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n var = \"INSERT INTO highest_score VALUES ('{}', '{}')\".format(player_name, game_score)\n cur.execute(var)\n con.commit()\n\n # передвижение заднего фона\n level_bckgd_pos += speed_bckgd\n if level_bckgd_pos >= 0:\n screen.fill((0, 0, 0))\n screen.blit(current_level_background, (0, level_bckgd_pos))\n if level_bckgd_pos > -805:\n death = True\n # передвижение игрока\n if p.health_count > 0:\n\n # проверка коллизии врага, игрока и пули\n for i 
in enemies:\n collision = pygame.sprite.collide_rect(p, i)\n if collision:\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if i.health_count - 2 <= 0:\n game_score += 10\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 2\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03)\n p.health_count -= 1\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05)\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 1)\n play_sound('resources/sounds/explosion_stun.mp3', 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(j, i)\n if collision:\n if i.health_count - 1 <= 0:\n game_score += 5\n i.kill()\n Explosion(booms).boom((i.rect.x, i.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n enemy_killed += 1\n else:\n i.health_count -= 1\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03)\n j.kill()\n\n if bs and not boss_death:\n collision = pygame.sprite.collide_rect(b, p)\n if collision and b.y > 0:\n b.health_count -= 0.3\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03)\n p.health_count -= 0.2\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.05)\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))\n if 
is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 0.1)\n play_sound('resources/sounds/explosion_stun.mp3', 0.02)\n for j in bullets_count:\n collision = pygame.sprite.collide_rect(b, j)\n if collision and b.y > 0:\n if b.body == b.stay1 or b.body == b.stay2:\n b.body = b.stay2\n if b.body == b.stay3 or b.body == b.stay4:\n b.body = b.stay4\n if b.body == b.stay5 or b.body == b.stay6:\n b.body = b.stay6\n col_check += 1\n b.health_count -= 0.2\n Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03)\n j.kill()\n for h in boss_bullets_count:\n collision = pygame.sprite.collide_rect(p, h)\n if collision:\n p.health_count -= 1\n Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))\n if p.health_count > 0:\n Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))\n if is_sound:\n play_sound('resources/sounds/window_crashed.mp3', 0.1)\n play_sound('resources/sounds/explosion_stun.mp3', 0.01)\n if is_sound:\n play_sound('resources/sounds/collision_sound.mp3', 0.03)\n h.kill()\n\n p.update(FPS)\n # смена текстур игрока\n if current_player_sprite == 'left':\n sprite = p.anim_left()\n screen.blit(sprite, (p.x, p.y))\n p.left_1 = not p.left_1\n elif current_player_sprite == 'right':\n sprite = p.anim_right()\n screen.blit(sprite, (p.x, p.y))\n p.right_1 = not p.right_1\n elif current_player_sprite == 'stay':\n sprite = p.anim_stay()\n screen.blit(sprite, (p.x, p.y))\n p.stay_1 = not p.stay_1\n\n if bs:\n if battle_music:\n ingame_music.stop()\n ingame_music = pygame.mixer.Sound('resources/sounds/music/wagner_main_theme.mp3')\n ingame_music.set_volume(ingame_music_sound)\n ingame_music.play()\n battle_music = False\n b.update()\n\n if b.body == b.stay3 and phase1_score:\n game_score += 100\n phase1_score = False\n if b.body == b.stay5 and phase2_score:\n game_score += 100\n phase2_score = 
False\n if b.body == b.stay7 and phase3_score:\n game_score += 200\n phase3_score = False\n\n now = pygame.time.get_ticks()\n if now - last_2 >= boss_cooldown and b.y > 0 and b.body != b.stay7:\n last_2 = now\n play_sound('resources/sounds/boss_shot.mp3', 0.05)\n Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155))\n\n if col_check % 40 == 0:\n b.change_sprite()\n else:\n col_check += 1\n if b.health_count > 0:\n screen.blit(b.body, (b.x, b.y))\n elif b.health_count <= 0 and phase4_score:\n boss_done = True\n phase4_score = False\n game_score += 350\n if is_sound:\n play_sound('resources/sounds/boss_defeated.mp3', 0.2)\n\n Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x, b.rect.y))\n Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))\n Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))\n Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n boss_death = True\n\n else:\n if p.minimize == 0:\n if is_sound:\n ingame_music.stop()\n play_sound('resources/sounds/plane_crash.mp3', 0.05)\n p.minimize += 1\n if not death:\n if p.minimize <= 320:\n p.death()\n screen.blit(p.death_sp, (p.x, p.y))\n else:\n death = True\n wait = pygame.time.get_ticks()\n Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect.y + 25))\n Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))\n Smallexplosions(small_booms).boom((p.rect.x - 22, p.rect.y + 7))\n if is_sound:\n play_sound('resources/sounds/explosion_sound.mp3', 0.1)\n p.kill()\n if bs and b.health_count > 0:\n b.speed += 0.02\n b.win = True\n screen.blit(b.body, (b.x, b.y))\n b.update()\n # передвижение врагов\n window_holes.update()\n window_holes.draw(screen)\n\n enemies.update(FPS)\n # отрисовка врагов\n enemies.draw(screen)\n # передвижение пули\n bullets_count.update()\n bullets_count.draw(screen)\n\n boss_bullets_count.update()\n boss_bullets_count.draw(screen)\n\n small_booms.update()\n 
small_booms.draw(screen)\n\n mini_booms.update()\n mini_booms.draw(screen)\n\n # ник игрока\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 20, 20)\n # cчет игрока\n if len(str(game_score)) < 2:\n draw_text('00000' + str(game_score), font, (255, 255, 255), screen, 430, 20)\n elif len(str(game_score)) < 3:\n draw_text('0000' + str(game_score), font, (255, 255, 255), screen, 430, 20)\n elif len(str(game_score)) < 4:\n draw_text('000' + str(game_score), font, (255, 255, 255), screen, 430, 20)\n elif len(str(game_score)) < 5:\n draw_text('00' + str(game_score), font, (255, 255, 255), screen, 430, 20)\n elif len(str(game_score)) < 6:\n draw_text('0' + str(game_score), font, (255, 255, 255), screen, 430, 20)\n elif len(str(game_score)) >= 6:\n draw_text(\"Max score\", font, (255, 255, 255), screen, 510, 20)\n\n # взрыв на месте убитого врага\n booms.update()\n booms.draw(screen)\n pygame.display.flip()\n clock.tick(FPS)\n\n\ndef death_screen():\n global running_game, game_score\n running = True\n click = False\n draw_counter = 0\n color_counter = 0\n pygame.time.set_timer(pygame.USEREVENT, 1000)\n rating_kills = enemy_killed//10\n if bullets_shot < 800:\n rating_shots = 1\n else:\n rating_shots = 0\n rating = rating_kills + rating_shots\n\n if boss_done:\n death_music = pygame.mixer.Sound('resources/sounds/music/victory_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n rating += 2\n else:\n death_music = pygame.mixer.Sound('resources/sounds/music/loose_theme.mp3')\n death_music.stop()\n death_music_sound = 0.1\n if not is_sound:\n death_music_sound = 0\n death_music.set_volume(death_music_sound)\n death_music.play()\n while True:\n if len(str(game_score)) < 6:\n game_score = '0' + str(game_score)\n else:\n break\n while running:\n mx, my = pygame.mouse.get_pos()\n screen.fill((0, 0, 0))\n # 
------------------------------------------ name zone draw\n pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))\n pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)\n draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)\n # ------------------------------------------ button menu\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 700\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 730)\n # ------------------------------------------ draw\n if draw_counter >= 1:\n draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 50, 150)\n if draw_counter >= 2:\n draw_text('Score: {}'.format(game_score), font, (255, 255, 255), screen, 50, 230)\n if draw_counter >= 3:\n draw_text('Enemies killed: {}'.format(enemy_killed), font, (255, 255, 255), screen, 50, 310)\n if draw_counter >= 4:\n draw_text('Bullets fired: {}'.format(bullets_shot), font, (255, 255, 255), screen, 50, 390)\n if draw_counter >= 5:\n draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)\n if draw_counter >= 6:\n if rating <= 6:\n draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)\n elif rating == 7:\n draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)\n elif rating == 8:\n draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)\n elif rating == 9:\n draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)\n elif rating == 10:\n draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)\n elif rating == 11:\n draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)\n elif rating <= 13:\n draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)\n else:\n if color_counter == 0:\n draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)\n elif color_counter == 1:\n draw_text('SSS', font_rating, 
(0, 255, 0), screen, 300, 470)\n else:\n draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)\n # ------------------------------------------ collide\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)\n if click:\n if is_sound:\n if is_sound:\n play_sound('resources/sounds/click_sound.mp3', 0.2)\n else:\n pass\n running = False\n running_game = False\n # ------------------------------------------ events\n click = False\n for event in pygame.event.get():\n if event.type == pygame.USEREVENT:\n draw_counter += 1\n color_counter += 1\n if color_counter == 3:\n color_counter = 0\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n running_game = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n # ------------------------------------------ update\n pygame.display.update()\n clock.tick(10)\n death_music.stop()\n\n\ndef pause_screen():\n global running_game\n running = True\n click = False\n while running:\n screen.fill((0, 0, 0))\n mx, my = pygame.mouse.get_pos()\n\n pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))\n pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)\n # ------------------------------------------ name zone draw\n pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))\n pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)\n draw_text('Pause', font, (255, 255, 255), screen, 235, 205)\n # ------------------------------------------ button menu\n button_menu = pygame.image.load('resources/sprites/button.png')\n button_menu = pygame.transform.scale(button_menu, (200, 70))\n b_menu_mask = button_menu.get_rect()\n b_menu_mask.x = 195\n b_menu_mask.y = 410\n screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))\n draw_text('menu', font, (255, 255, 255), screen, 245, 440)\n # ------------------------------------------ button 
resume\n button_resume = pygame.image.load('resources/sprites/button.png')\n button_resume = pygame.transform.scale(button_resume, (200, 70))\n b_resume_mask = button_resume.get_rect()\n b_resume_mask.x = 195\n b_resume_mask.y = 300\n screen.blit(button_resume, (b_resume_mask.x, b_resume_mask.y))\n draw_text('resume', font, (255, 255, 255), screen, 225, 330)\n # ------------------------------------------ collide\n if b_menu_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)\n if click:\n running = False\n running_game = False\n if b_resume_mask.collidepoint((mx, my)):\n pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)\n if click:\n running = False\n # ------------------------------------------ events\n click = False\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n running = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n # ------------------------------------------ update\n pygame.display.update()\n clock.tick(10)\n\n\nif __name__ == '__main__':\n main_menu()\n\n pygame.quit()\n",
"step-ids": [
6,
10,
11,
12,
13
]
}
|
[
6,
10,
11,
12,
13
] |
<|reserved_special_token_0|>
def main():
website = input('Enter the website you want to download file from: ')
div = input('Enter the div/span (be as specific as you can): ')
classTag = input('Enter the class/id tag you want to extract link from: ')
className = input('Enter the class/id name: ')
specific1 = input('Enter specific tag a, li, : ')
specific2 = input('Enter specific tag inside specific1 : ')
contents = downloader(website, div, classTag, className, specific1,
specific2)
print(contents)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def downloader(url, div, classTag, className, specificData1, specificData2):
website = urlopen(url)
data = bs(website, 'lxml')
contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className
) + '}'})
contents_list = []
names_list = []
for file in contents:
contents_list.append(file.find(specificData1['"' + specificData2 +
'"']))
names_list.append(file.get_text())
print(contents_list)
return contents_list
def main():
website = input('Enter the website you want to download file from: ')
div = input('Enter the div/span (be as specific as you can): ')
classTag = input('Enter the class/id tag you want to extract link from: ')
className = input('Enter the class/id name: ')
specific1 = input('Enter specific tag a, li, : ')
specific2 = input('Enter specific tag inside specific1 : ')
contents = downloader(website, div, classTag, className, specific1,
specific2)
print(contents)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def downloader(url, div, classTag, className, specificData1, specificData2):
website = urlopen(url)
data = bs(website, 'lxml')
contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className
) + '}'})
contents_list = []
names_list = []
for file in contents:
contents_list.append(file.find(specificData1['"' + specificData2 +
'"']))
names_list.append(file.get_text())
print(contents_list)
return contents_list
def main():
website = input('Enter the website you want to download file from: ')
div = input('Enter the div/span (be as specific as you can): ')
classTag = input('Enter the class/id tag you want to extract link from: ')
className = input('Enter the class/id name: ')
specific1 = input('Enter specific tag a, li, : ')
specific2 = input('Enter specific tag inside specific1 : ')
contents = downloader(website, div, classTag, className, specific1,
specific2)
print(contents)
main()
<|reserved_special_token_1|>
from urllib.request import urlopen
from bs4 import BeautifulSoup as bs
import lxml
import urllib.request
import webbrowser
def downloader(url, div, classTag, className, specificData1, specificData2):
website = urlopen(url)
data = bs(website, 'lxml')
contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className
) + '}'})
contents_list = []
names_list = []
for file in contents:
contents_list.append(file.find(specificData1['"' + specificData2 +
'"']))
names_list.append(file.get_text())
print(contents_list)
return contents_list
def main():
website = input('Enter the website you want to download file from: ')
div = input('Enter the div/span (be as specific as you can): ')
classTag = input('Enter the class/id tag you want to extract link from: ')
className = input('Enter the class/id name: ')
specific1 = input('Enter specific tag a, li, : ')
specific2 = input('Enter specific tag inside specific1 : ')
contents = downloader(website, div, classTag, className, specific1,
specific2)
print(contents)
main()
<|reserved_special_token_1|>
# website = urlopen("https://webservices.ulm.edu/forms/forms-list")
# data = bs(website, "lxml")
# forms = data.findAll("span", {"class": "file"})
# forms_list = []
# names = []
# for f in forms:
# forms_list.append(f.find("a")["href"])
# names.append(f.get_text())
# # print(forms_list)
# for f in forms_list:
# webbrowser.open(f)
from urllib.request import urlopen
from bs4 import BeautifulSoup as bs
import lxml
import urllib.request
import webbrowser
# download function
def downloader(url, div, classTag, className, specificData1, specificData2):
website = urlopen(url)
data = bs(website, "lxml")
contents = data.findAll(div, {"+" + str(classTag) +":" + str(className) + "}"})
contents_list = []
names_list = []
for file in contents:
contents_list.append(file.find(specificData1['"' + specificData2 + '"']))
names_list.append(file.get_text())
print(contents_list)
return contents_list
def main():
website = input("Enter the website you want to download file from: ")
div = input("Enter the div/span (be as specific as you can): ")
classTag = input("Enter the class/id tag you want to extract link from: ")
className = input("Enter the class/id name: ")
specific1 = input("Enter specific tag a, li, : ")
specific2 = input("Enter specific tag inside specific1 : ")
# download the content
contents = downloader(website, div, classTag, className, specific1, specific2)
print(contents)
main()
|
flexible
|
{
"blob_id": "a61f351391ca1b18359323fd9e49f1efa4c7513c",
"index": 4007,
"step-1": "<mask token>\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\nmain()\n",
"step-4": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup as bs\nimport lxml\nimport urllib.request\nimport webbrowser\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\nmain()\n",
"step-5": "\n\n# website = urlopen(\"https://webservices.ulm.edu/forms/forms-list\")\n# data = bs(website, \"lxml\")\n\n# forms = data.findAll(\"span\", {\"class\": \"file\"})\n\n# forms_list = []\n# names = []\n# for f in forms:\n# forms_list.append(f.find(\"a\")[\"href\"])\n# names.append(f.get_text())\n\n# # print(forms_list)\n\n# for f in forms_list:\n# webbrowser.open(f)\n\n\nfrom urllib.request import urlopen\nfrom bs4 import BeautifulSoup as bs\nimport lxml\nimport urllib.request\nimport webbrowser\n\n# download function\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, \"lxml\")\n\n contents = data.findAll(div, {\"+\" + str(classTag) +\":\" + str(className) + \"}\"})\n\n contents_list = []\n names_list = []\n\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 + '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n \ndef main():\n website = input(\"Enter the website you want to download file from: \")\n div = input(\"Enter the div/span (be as specific as you can): \")\n classTag = input(\"Enter the class/id tag you want to extract link from: \")\n className = input(\"Enter the class/id name: \")\n specific1 = input(\"Enter specific tag a, li, : \")\n specific2 = input(\"Enter specific tag inside specific1 : \")\n\n # download the content\n contents = downloader(website, div, classTag, className, specific1, specific2)\n print(contents)\n\nmain()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 8 17:14:14 2018
@author: Winry
"""
import pandas as pd
# 显示所有的列
pd.set_option('display.max_columns', None)
# 读取数据
file_name = "data_11_8.csv"
file_open = open(file_name)
df = pd.read_csv(file_open)
file_open.close()
Newtaxiout_time = df['Newtaxiout_time']
time = df['time']
file_name2 = "df_append.csv"
file_open2 = open(file_name2)
df2 = pd.read_csv(file_open2)
# append1
append1_res = []
for i in range(len(df)):
count = []
count = df2["Newappend1"][(df2["Newappend1"] > Newtaxiout_time[i]) & (df2["Newappend1"] < time[i]*60+Newtaxiout_time[i])]
append1_res.append(len(count))
# append2
append2_res = []
for i in range(len(df)):
count = []
count = df2["Newappend2"][(df2["Newappend2"] > Newtaxiout_time[i]) & (df2["Newappend2"] < time[i]*60+Newtaxiout_time[i])]
append2_res.append(len(count))
df['append1_res'] = append1_res
df['append2_res'] = append2_res
df.to_csv('df_11_9.csv',index=False)
|
normal
|
{
"blob_id": "f5a474cdc8aa22322b252b980c0334a9db21bd5c",
"index": 9300,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npd.set_option('display.max_columns', None)\n<mask token>\nfile_open.close()\n<mask token>\nfor i in range(len(df)):\n count = []\n count = df2['Newappend1'][(df2['Newappend1'] > Newtaxiout_time[i]) & (\n df2['Newappend1'] < time[i] * 60 + Newtaxiout_time[i])]\n append1_res.append(len(count))\n<mask token>\nfor i in range(len(df)):\n count = []\n count = df2['Newappend2'][(df2['Newappend2'] > Newtaxiout_time[i]) & (\n df2['Newappend2'] < time[i] * 60 + Newtaxiout_time[i])]\n append2_res.append(len(count))\n<mask token>\ndf.to_csv('df_11_9.csv', index=False)\n",
"step-3": "<mask token>\npd.set_option('display.max_columns', None)\nfile_name = 'data_11_8.csv'\nfile_open = open(file_name)\ndf = pd.read_csv(file_open)\nfile_open.close()\nNewtaxiout_time = df['Newtaxiout_time']\ntime = df['time']\nfile_name2 = 'df_append.csv'\nfile_open2 = open(file_name2)\ndf2 = pd.read_csv(file_open2)\nappend1_res = []\nfor i in range(len(df)):\n count = []\n count = df2['Newappend1'][(df2['Newappend1'] > Newtaxiout_time[i]) & (\n df2['Newappend1'] < time[i] * 60 + Newtaxiout_time[i])]\n append1_res.append(len(count))\nappend2_res = []\nfor i in range(len(df)):\n count = []\n count = df2['Newappend2'][(df2['Newappend2'] > Newtaxiout_time[i]) & (\n df2['Newappend2'] < time[i] * 60 + Newtaxiout_time[i])]\n append2_res.append(len(count))\ndf['append1_res'] = append1_res\ndf['append2_res'] = append2_res\ndf.to_csv('df_11_9.csv', index=False)\n",
"step-4": "<mask token>\nimport pandas as pd\npd.set_option('display.max_columns', None)\nfile_name = 'data_11_8.csv'\nfile_open = open(file_name)\ndf = pd.read_csv(file_open)\nfile_open.close()\nNewtaxiout_time = df['Newtaxiout_time']\ntime = df['time']\nfile_name2 = 'df_append.csv'\nfile_open2 = open(file_name2)\ndf2 = pd.read_csv(file_open2)\nappend1_res = []\nfor i in range(len(df)):\n count = []\n count = df2['Newappend1'][(df2['Newappend1'] > Newtaxiout_time[i]) & (\n df2['Newappend1'] < time[i] * 60 + Newtaxiout_time[i])]\n append1_res.append(len(count))\nappend2_res = []\nfor i in range(len(df)):\n count = []\n count = df2['Newappend2'][(df2['Newappend2'] > Newtaxiout_time[i]) & (\n df2['Newappend2'] < time[i] * 60 + Newtaxiout_time[i])]\n append2_res.append(len(count))\ndf['append1_res'] = append1_res\ndf['append2_res'] = append2_res\ndf.to_csv('df_11_9.csv', index=False)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 8 17:14:14 2018\n\n@author: Winry\n\"\"\"\n\nimport pandas as pd\n# 显示所有的列\npd.set_option('display.max_columns', None)\n\n# 读取数据\nfile_name = \"data_11_8.csv\"\nfile_open = open(file_name)\ndf = pd.read_csv(file_open)\nfile_open.close()\n\nNewtaxiout_time = df['Newtaxiout_time']\ntime = df['time']\n\nfile_name2 = \"df_append.csv\"\nfile_open2 = open(file_name2)\ndf2 = pd.read_csv(file_open2)\n\n# append1\n\nappend1_res = []\nfor i in range(len(df)):\n count = []\n count = df2[\"Newappend1\"][(df2[\"Newappend1\"] > Newtaxiout_time[i]) & (df2[\"Newappend1\"] < time[i]*60+Newtaxiout_time[i])]\n append1_res.append(len(count))\n\n\n# append2\nappend2_res = []\nfor i in range(len(df)):\n count = []\n count = df2[\"Newappend2\"][(df2[\"Newappend2\"] > Newtaxiout_time[i]) & (df2[\"Newappend2\"] < time[i]*60+Newtaxiout_time[i])]\n append2_res.append(len(count))\n \ndf['append1_res'] = append1_res\ndf['append2_res'] = append2_res\ndf.to_csv('df_11_9.csv',index=False)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: utf-8
num = int(input())
str = input().split()
table = [int(i) for i in str]
list.sort(table)
print(table[num-1] - table[0])
|
normal
|
{
"blob_id": "d853964d424e628d6331b27123ad045f8d945dc0",
"index": 4026,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nlist.sort(table)\nprint(table[num - 1] - table[0])\n",
"step-3": "num = int(input())\nstr = input().split()\ntable = [int(i) for i in str]\nlist.sort(table)\nprint(table[num - 1] - table[0])\n",
"step-4": "# coding: utf-8\n\nnum = int(input())\nstr = input().split()\ntable = [int(i) for i in str]\nlist.sort(table)\nprint(table[num-1] - table[0])",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def matriz_laplaciana(N, t=np.single):
e = np.eye(N) - np.eye(N, N, 1)
return t(e + e.T)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def matriz_laplaciana(N, t=np.single):
e = np.eye(N) - np.eye(N, N, 1)
return t(e + e.T)
<|reserved_special_token_0|>
for corrida in range(corridas):
tiempo = []
memoria = []
name = f'single{corrida}.txt'
fid = open(name, 'w')
for i in Ns:
print(f'i = {i}')
A = matriz_laplaciana(i)
t1 = perf_counter()
invertir(A)
t2 = perf_counter()
dt = t2 - t1
size = 3 * i ** 2 * 32
tiempo.append(dt)
memoria.append(size)
fid.write(f'{i} {dt} {size}\n')
print(f'Tiempo transcurrido = {dt} s')
print(f'Mmoria usada = {size} bytes')
fid.flush()
fid.close()
<|reserved_special_token_0|>
for n in range(10):
dimension = []
time = []
memory = []
with open(f'single{n}.txt', 'r') as f:
lineas = [linea.split() for linea in f]
for i in lineas:
dimension.append(int(i[0]))
time.append(float(i[1]))
memory.append(int(i[2]))
dim.append(dimension)
tim.append(time)
mem.append(memory)
plt.subplot(2, 1, 1)
plt.plot(dim[0], tim[0], '-o')
plt.plot(dim[0], tim[1], '-o')
plt.plot(dim[0], tim[2], '-o')
plt.plot(dim[0], tim[3], '-o')
plt.plot(dim[0], tim[4], '-o')
plt.plot(dim[0], tim[5], '-o')
plt.plot(dim[0], tim[6], '-o')
plt.plot(dim[0], tim[7], '-o')
plt.plot(dim[0], tim[8], '-o')
plt.plot(dim[0], tim[9], '-o')
plt.yscale('log')
plt.xscale('log')
<|reserved_special_token_0|>
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext)
plt.title('Rendimiento caso1_single')
plt.ylabel('Tiempo transcurrido (s)')
plt.grid(True)
plt.subplot(2, 1, 2)
plt.plot(Ns, memoria, '-ob')
plt.yscale('log')
plt.xscale('log')
<|reserved_special_token_0|>
plt.axhline(y=4000000000, linestyle='--', color='black')
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext, rotation=45)
plt.xlabel('Tamaño matriz N')
plt.ylabel('Uso memoria (bytes)')
plt.grid(True)
plt.savefig('Rendimiento caso1_single.png')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def matriz_laplaciana(N, t=np.single):
e = np.eye(N) - np.eye(N, N, 1)
return t(e + e.T)
Ns = [2, 5, 10, 12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200,
250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]
corridas = 10
for corrida in range(corridas):
tiempo = []
memoria = []
name = f'single{corrida}.txt'
fid = open(name, 'w')
for i in Ns:
print(f'i = {i}')
A = matriz_laplaciana(i)
t1 = perf_counter()
invertir(A)
t2 = perf_counter()
dt = t2 - t1
size = 3 * i ** 2 * 32
tiempo.append(dt)
memoria.append(size)
fid.write(f'{i} {dt} {size}\n')
print(f'Tiempo transcurrido = {dt} s')
print(f'Mmoria usada = {size} bytes')
fid.flush()
fid.close()
dim = []
tim = []
mem = []
for n in range(10):
dimension = []
time = []
memory = []
with open(f'single{n}.txt', 'r') as f:
lineas = [linea.split() for linea in f]
for i in lineas:
dimension.append(int(i[0]))
time.append(float(i[1]))
memory.append(int(i[2]))
dim.append(dimension)
tim.append(time)
mem.append(memory)
plt.subplot(2, 1, 1)
plt.plot(dim[0], tim[0], '-o')
plt.plot(dim[0], tim[1], '-o')
plt.plot(dim[0], tim[2], '-o')
plt.plot(dim[0], tim[3], '-o')
plt.plot(dim[0], tim[4], '-o')
plt.plot(dim[0], tim[5], '-o')
plt.plot(dim[0], tim[6], '-o')
plt.plot(dim[0], tim[7], '-o')
plt.plot(dim[0], tim[8], '-o')
plt.plot(dim[0], tim[9], '-o')
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = ['', '', '', '', '', '', '', '', '', '', '']
yticks = [0.1 / 1000, 1 / 1000, 10 / 1000, 0.1, 1, 10, 60, 600]
ytext = ['0.1 ms', '1 ms', '10 ms', '0.1 s', '1 s', '10 s', '1 min', '10 min']
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext)
plt.title('Rendimiento caso1_single')
plt.ylabel('Tiempo transcurrido (s)')
plt.grid(True)
plt.subplot(2, 1, 2)
plt.plot(Ns, memoria, '-ob')
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
yticks = [1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000,
100000000000]
ytext = ['1 KB ', '10 KB', '100 KB', '1 MB', '10 MB', '100 MB', '1 GB', '10 GB'
]
plt.axhline(y=4000000000, linestyle='--', color='black')
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext, rotation=45)
plt.xlabel('Tamaño matriz N')
plt.ylabel('Uso memoria (bytes)')
plt.grid(True)
plt.savefig('Rendimiento caso1_single.png')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import matplotlib.pyplot as plt
from numpy.linalg import inv as invertir
from time import perf_counter
import numpy as np
def matriz_laplaciana(N, t=np.single):
e = np.eye(N) - np.eye(N, N, 1)
return t(e + e.T)
Ns = [2, 5, 10, 12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200,
250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]
corridas = 10
for corrida in range(corridas):
tiempo = []
memoria = []
name = f'single{corrida}.txt'
fid = open(name, 'w')
for i in Ns:
print(f'i = {i}')
A = matriz_laplaciana(i)
t1 = perf_counter()
invertir(A)
t2 = perf_counter()
dt = t2 - t1
size = 3 * i ** 2 * 32
tiempo.append(dt)
memoria.append(size)
fid.write(f'{i} {dt} {size}\n')
print(f'Tiempo transcurrido = {dt} s')
print(f'Mmoria usada = {size} bytes')
fid.flush()
fid.close()
dim = []
tim = []
mem = []
for n in range(10):
dimension = []
time = []
memory = []
with open(f'single{n}.txt', 'r') as f:
lineas = [linea.split() for linea in f]
for i in lineas:
dimension.append(int(i[0]))
time.append(float(i[1]))
memory.append(int(i[2]))
dim.append(dimension)
tim.append(time)
mem.append(memory)
plt.subplot(2, 1, 1)
plt.plot(dim[0], tim[0], '-o')
plt.plot(dim[0], tim[1], '-o')
plt.plot(dim[0], tim[2], '-o')
plt.plot(dim[0], tim[3], '-o')
plt.plot(dim[0], tim[4], '-o')
plt.plot(dim[0], tim[5], '-o')
plt.plot(dim[0], tim[6], '-o')
plt.plot(dim[0], tim[7], '-o')
plt.plot(dim[0], tim[8], '-o')
plt.plot(dim[0], tim[9], '-o')
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = ['', '', '', '', '', '', '', '', '', '', '']
yticks = [0.1 / 1000, 1 / 1000, 10 / 1000, 0.1, 1, 10, 60, 600]
ytext = ['0.1 ms', '1 ms', '10 ms', '0.1 s', '1 s', '10 s', '1 min', '10 min']
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext)
plt.title('Rendimiento caso1_single')
plt.ylabel('Tiempo transcurrido (s)')
plt.grid(True)
plt.subplot(2, 1, 2)
plt.plot(Ns, memoria, '-ob')
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
yticks = [1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000,
100000000000]
ytext = ['1 KB ', '10 KB', '100 KB', '1 MB', '10 MB', '100 MB', '1 GB', '10 GB'
]
plt.axhline(y=4000000000, linestyle='--', color='black')
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext, rotation=45)
plt.xlabel('Tamaño matriz N')
plt.ylabel('Uso memoria (bytes)')
plt.grid(True)
plt.savefig('Rendimiento caso1_single.png')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 18 16:11:44 2021
@author: ignacio
"""
import matplotlib.pyplot as plt
from numpy.linalg import inv as invertir
from time import perf_counter
import numpy as np
def matriz_laplaciana(N, t=np.single): # funcion obtenida de clase
e=np.eye(N)-np.eye(N,N,1)
return t(e+e.T)
Ns = [2, 5, 10,12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200, 250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]
corridas = 10
for corrida in range(corridas):
tiempo = []
memoria = []
name = (f"single{corrida}.txt")
fid = open(name,"w")
for i in Ns:
print(f"i = {i}")
A = matriz_laplaciana(i)
t1 = perf_counter()
invertir(A)
t2 = perf_counter()
dt = t2 - t1
size = 3 * (i**2) * 32
tiempo.append(dt)
memoria.append(size)
fid.write(f"{i} {dt} {size}\n")
print(f"Tiempo transcurrido = {dt} s")
print(f"Mmoria usada = {size} bytes")
fid.flush()
fid.close()
dim = []
tim = []
mem = []
for n in range(10):
dimension = []
time = []
memory = []
with open(f"single{n}.txt", "r") as f:
lineas = [linea.split() for linea in f]
for i in lineas:
dimension.append(int(i[0]))
time.append(float(i[1]))
memory.append(int(i[2]))
dim.append(dimension)
tim.append(time)
mem.append(memory)
#Grafico superior
plt.subplot(2, 1, 1)
plt.plot(dim[0],tim[0],"-o")
plt.plot(dim[0],tim[1],"-o")
plt.plot(dim[0],tim[2],"-o")
plt.plot(dim[0],tim[3],"-o")
plt.plot(dim[0],tim[4],"-o")
plt.plot(dim[0],tim[5],"-o")
plt.plot(dim[0],tim[6],"-o")
plt.plot(dim[0],tim[7],"-o")
plt.plot(dim[0],tim[8],"-o")
plt.plot(dim[0],tim[9],"-o")
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = ["", "", "", "", "", "", "", "", "", "", ""]
yticks = [0.1/1000, 1/1000, 10/1000, 0.1, 1, 10, 60, 600]
ytext = ["0.1 ms", "1 ms", "10 ms", "0.1 s", "1 s", "10 s", "1 min", "10 min"]
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext)
plt.title("Rendimiento caso1_single")
plt.ylabel("Tiempo transcurrido (s)")
plt.grid(True)
#Grafico inferior
plt.subplot(2, 1, 2)
plt.plot(Ns,memoria,'-ob')
plt.yscale('log')
plt.xscale('log')
xticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
xtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]
yticks = [1000,10000, 100000, 1000000, 10000000, 100000000, 1000000000, 100000000000]
ytext = ["1 KB ", "10 KB", "100 KB", "1 MB", "10 MB", "100 MB", "1 GB", "10 GB"]
plt.axhline(y=4000000000, linestyle="--",color="black") # RAM 4 GB
plt.yticks(yticks, ytext)
plt.xticks(xticks, xtext, rotation=45)
plt.xlabel("Tamaño matriz N")
plt.ylabel("Uso memoria (bytes)")
plt.grid(True)
plt.savefig("Rendimiento caso1_single.png")
|
flexible
|
{
"blob_id": "86345702bcd423bc31e29b1d28aa9c438629297d",
"index": 7331,
"step-1": "<mask token>\n\n\ndef matriz_laplaciana(N, t=np.single):\n e = np.eye(N) - np.eye(N, N, 1)\n return t(e + e.T)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef matriz_laplaciana(N, t=np.single):\n e = np.eye(N) - np.eye(N, N, 1)\n return t(e + e.T)\n\n\n<mask token>\nfor corrida in range(corridas):\n tiempo = []\n memoria = []\n name = f'single{corrida}.txt'\n fid = open(name, 'w')\n for i in Ns:\n print(f'i = {i}')\n A = matriz_laplaciana(i)\n t1 = perf_counter()\n invertir(A)\n t2 = perf_counter()\n dt = t2 - t1\n size = 3 * i ** 2 * 32\n tiempo.append(dt)\n memoria.append(size)\n fid.write(f'{i} {dt} {size}\\n')\n print(f'Tiempo transcurrido = {dt} s')\n print(f'Mmoria usada = {size} bytes')\n fid.flush()\nfid.close()\n<mask token>\nfor n in range(10):\n dimension = []\n time = []\n memory = []\n with open(f'single{n}.txt', 'r') as f:\n lineas = [linea.split() for linea in f]\n for i in lineas:\n dimension.append(int(i[0]))\n time.append(float(i[1]))\n memory.append(int(i[2]))\n dim.append(dimension)\n tim.append(time)\n mem.append(memory)\nplt.subplot(2, 1, 1)\nplt.plot(dim[0], tim[0], '-o')\nplt.plot(dim[0], tim[1], '-o')\nplt.plot(dim[0], tim[2], '-o')\nplt.plot(dim[0], tim[3], '-o')\nplt.plot(dim[0], tim[4], '-o')\nplt.plot(dim[0], tim[5], '-o')\nplt.plot(dim[0], tim[6], '-o')\nplt.plot(dim[0], tim[7], '-o')\nplt.plot(dim[0], tim[8], '-o')\nplt.plot(dim[0], tim[9], '-o')\nplt.yscale('log')\nplt.xscale('log')\n<mask token>\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext)\nplt.title('Rendimiento caso1_single')\nplt.ylabel('Tiempo transcurrido (s)')\nplt.grid(True)\nplt.subplot(2, 1, 2)\nplt.plot(Ns, memoria, '-ob')\nplt.yscale('log')\nplt.xscale('log')\n<mask token>\nplt.axhline(y=4000000000, linestyle='--', color='black')\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext, rotation=45)\nplt.xlabel('Tamaño matriz N')\nplt.ylabel('Uso memoria (bytes)')\nplt.grid(True)\nplt.savefig('Rendimiento caso1_single.png')\n",
"step-3": "<mask token>\n\n\ndef matriz_laplaciana(N, t=np.single):\n e = np.eye(N) - np.eye(N, N, 1)\n return t(e + e.T)\n\n\nNs = [2, 5, 10, 12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200,\n 250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]\ncorridas = 10\nfor corrida in range(corridas):\n tiempo = []\n memoria = []\n name = f'single{corrida}.txt'\n fid = open(name, 'w')\n for i in Ns:\n print(f'i = {i}')\n A = matriz_laplaciana(i)\n t1 = perf_counter()\n invertir(A)\n t2 = perf_counter()\n dt = t2 - t1\n size = 3 * i ** 2 * 32\n tiempo.append(dt)\n memoria.append(size)\n fid.write(f'{i} {dt} {size}\\n')\n print(f'Tiempo transcurrido = {dt} s')\n print(f'Mmoria usada = {size} bytes')\n fid.flush()\nfid.close()\ndim = []\ntim = []\nmem = []\nfor n in range(10):\n dimension = []\n time = []\n memory = []\n with open(f'single{n}.txt', 'r') as f:\n lineas = [linea.split() for linea in f]\n for i in lineas:\n dimension.append(int(i[0]))\n time.append(float(i[1]))\n memory.append(int(i[2]))\n dim.append(dimension)\n tim.append(time)\n mem.append(memory)\nplt.subplot(2, 1, 1)\nplt.plot(dim[0], tim[0], '-o')\nplt.plot(dim[0], tim[1], '-o')\nplt.plot(dim[0], tim[2], '-o')\nplt.plot(dim[0], tim[3], '-o')\nplt.plot(dim[0], tim[4], '-o')\nplt.plot(dim[0], tim[5], '-o')\nplt.plot(dim[0], tim[6], '-o')\nplt.plot(dim[0], tim[7], '-o')\nplt.plot(dim[0], tim[8], '-o')\nplt.plot(dim[0], tim[9], '-o')\nplt.yscale('log')\nplt.xscale('log')\nxticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\nxtext = ['', '', '', '', '', '', '', '', '', '', '']\nyticks = [0.1 / 1000, 1 / 1000, 10 / 1000, 0.1, 1, 10, 60, 600]\nytext = ['0.1 ms', '1 ms', '10 ms', '0.1 s', '1 s', '10 s', '1 min', '10 min']\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext)\nplt.title('Rendimiento caso1_single')\nplt.ylabel('Tiempo transcurrido (s)')\nplt.grid(True)\nplt.subplot(2, 1, 2)\nplt.plot(Ns, memoria, '-ob')\nplt.yscale('log')\nplt.xscale('log')\nxticks = [10, 20, 50, 100, 200, 
500, 1000, 2000, 5000, 10000, 20000]\nxtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\nyticks = [1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000, \n 100000000000]\nytext = ['1 KB ', '10 KB', '100 KB', '1 MB', '10 MB', '100 MB', '1 GB', '10 GB'\n ]\nplt.axhline(y=4000000000, linestyle='--', color='black')\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext, rotation=45)\nplt.xlabel('Tamaño matriz N')\nplt.ylabel('Uso memoria (bytes)')\nplt.grid(True)\nplt.savefig('Rendimiento caso1_single.png')\n",
"step-4": "<mask token>\nimport matplotlib.pyplot as plt\nfrom numpy.linalg import inv as invertir\nfrom time import perf_counter\nimport numpy as np\n\n\ndef matriz_laplaciana(N, t=np.single):\n e = np.eye(N) - np.eye(N, N, 1)\n return t(e + e.T)\n\n\nNs = [2, 5, 10, 12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200,\n 250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]\ncorridas = 10\nfor corrida in range(corridas):\n tiempo = []\n memoria = []\n name = f'single{corrida}.txt'\n fid = open(name, 'w')\n for i in Ns:\n print(f'i = {i}')\n A = matriz_laplaciana(i)\n t1 = perf_counter()\n invertir(A)\n t2 = perf_counter()\n dt = t2 - t1\n size = 3 * i ** 2 * 32\n tiempo.append(dt)\n memoria.append(size)\n fid.write(f'{i} {dt} {size}\\n')\n print(f'Tiempo transcurrido = {dt} s')\n print(f'Mmoria usada = {size} bytes')\n fid.flush()\nfid.close()\ndim = []\ntim = []\nmem = []\nfor n in range(10):\n dimension = []\n time = []\n memory = []\n with open(f'single{n}.txt', 'r') as f:\n lineas = [linea.split() for linea in f]\n for i in lineas:\n dimension.append(int(i[0]))\n time.append(float(i[1]))\n memory.append(int(i[2]))\n dim.append(dimension)\n tim.append(time)\n mem.append(memory)\nplt.subplot(2, 1, 1)\nplt.plot(dim[0], tim[0], '-o')\nplt.plot(dim[0], tim[1], '-o')\nplt.plot(dim[0], tim[2], '-o')\nplt.plot(dim[0], tim[3], '-o')\nplt.plot(dim[0], tim[4], '-o')\nplt.plot(dim[0], tim[5], '-o')\nplt.plot(dim[0], tim[6], '-o')\nplt.plot(dim[0], tim[7], '-o')\nplt.plot(dim[0], tim[8], '-o')\nplt.plot(dim[0], tim[9], '-o')\nplt.yscale('log')\nplt.xscale('log')\nxticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\nxtext = ['', '', '', '', '', '', '', '', '', '', '']\nyticks = [0.1 / 1000, 1 / 1000, 10 / 1000, 0.1, 1, 10, 60, 600]\nytext = ['0.1 ms', '1 ms', '10 ms', '0.1 s', '1 s', '10 s', '1 min', '10 min']\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext)\nplt.title('Rendimiento caso1_single')\nplt.ylabel('Tiempo transcurrido 
(s)')\nplt.grid(True)\nplt.subplot(2, 1, 2)\nplt.plot(Ns, memoria, '-ob')\nplt.yscale('log')\nplt.xscale('log')\nxticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\nxtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\nyticks = [1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000, \n 100000000000]\nytext = ['1 KB ', '10 KB', '100 KB', '1 MB', '10 MB', '100 MB', '1 GB', '10 GB'\n ]\nplt.axhline(y=4000000000, linestyle='--', color='black')\nplt.yticks(yticks, ytext)\nplt.xticks(xticks, xtext, rotation=45)\nplt.xlabel('Tamaño matriz N')\nplt.ylabel('Uso memoria (bytes)')\nplt.grid(True)\nplt.savefig('Rendimiento caso1_single.png')\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Aug 18 16:11:44 2021\r\n\r\n@author: ignacio\r\n\"\"\"\r\n\r\nimport matplotlib.pyplot as plt\r\nfrom numpy.linalg import inv as invertir\r\nfrom time import perf_counter\r\nimport numpy as np\r\n\r\ndef matriz_laplaciana(N, t=np.single): # funcion obtenida de clase\r\n e=np.eye(N)-np.eye(N,N,1)\r\n return t(e+e.T)\r\n\r\n\r\nNs = [2, 5, 10,12, 15, 20, 30, 40, 45, 50, 55, 60, 75, 100, 125, 160, 200, 250, 350, 500, 600, 800, 1000, 2000, 5000, 10000]\r\n\r\ncorridas = 10\r\n\r\nfor corrida in range(corridas):\r\n \r\n tiempo = []\r\n memoria = []\r\n \r\n name = (f\"single{corrida}.txt\")\r\n\r\n fid = open(name,\"w\")\r\n \r\n for i in Ns:\r\n\r\n print(f\"i = {i}\") \r\n \r\n A = matriz_laplaciana(i)\r\n \r\n t1 = perf_counter()\r\n\r\n invertir(A)\r\n \r\n t2 = perf_counter()\r\n\r\n dt = t2 - t1\r\n \r\n size = 3 * (i**2) * 32\r\n\r\n tiempo.append(dt) \r\n memoria.append(size)\r\n \r\n fid.write(f\"{i} {dt} {size}\\n\")\r\n \r\n print(f\"Tiempo transcurrido = {dt} s\")\r\n print(f\"Mmoria usada = {size} bytes\")\r\n\r\n fid.flush()\r\n \r\n \r\nfid.close()\r\n\r\n\r\n\r\ndim = []\r\ntim = []\r\nmem = []\r\n\r\nfor n in range(10):\r\n dimension = []\r\n time = []\r\n memory = []\r\n with open(f\"single{n}.txt\", \"r\") as f:\r\n lineas = [linea.split() for linea in f]\r\n \r\n for i in lineas:\r\n dimension.append(int(i[0]))\r\n time.append(float(i[1]))\r\n memory.append(int(i[2]))\r\n \r\n dim.append(dimension)\r\n tim.append(time)\r\n mem.append(memory)\r\n\r\n#Grafico superior\r\n\r\nplt.subplot(2, 1, 1)\r\nplt.plot(dim[0],tim[0],\"-o\")\r\nplt.plot(dim[0],tim[1],\"-o\")\r\nplt.plot(dim[0],tim[2],\"-o\")\r\nplt.plot(dim[0],tim[3],\"-o\")\r\nplt.plot(dim[0],tim[4],\"-o\")\r\nplt.plot(dim[0],tim[5],\"-o\")\r\nplt.plot(dim[0],tim[6],\"-o\")\r\nplt.plot(dim[0],tim[7],\"-o\")\r\nplt.plot(dim[0],tim[8],\"-o\")\r\nplt.plot(dim[0],tim[9],\"-o\")\r\n \r\nplt.yscale('log')\r\nplt.xscale('log')\r\n\r\nxticks 
= [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\r\nxtext = [\"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\"]\r\n\r\nyticks = [0.1/1000, 1/1000, 10/1000, 0.1, 1, 10, 60, 600]\r\nytext = [\"0.1 ms\", \"1 ms\", \"10 ms\", \"0.1 s\", \"1 s\", \"10 s\", \"1 min\", \"10 min\"]\r\n\r\nplt.yticks(yticks, ytext)\r\nplt.xticks(xticks, xtext)\r\n\r\nplt.title(\"Rendimiento caso1_single\")\r\nplt.ylabel(\"Tiempo transcurrido (s)\")\r\nplt.grid(True)\r\n\r\n#Grafico inferior \r\n\r\nplt.subplot(2, 1, 2)\r\n\r\nplt.plot(Ns,memoria,'-ob')\r\n\r\nplt.yscale('log')\r\nplt.xscale('log')\r\n\r\nxticks = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\r\nxtext = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000]\r\n\r\nyticks = [1000,10000, 100000, 1000000, 10000000, 100000000, 1000000000, 100000000000]\r\nytext = [\"1 KB \", \"10 KB\", \"100 KB\", \"1 MB\", \"10 MB\", \"100 MB\", \"1 GB\", \"10 GB\"]\r\n\r\nplt.axhline(y=4000000000, linestyle=\"--\",color=\"black\") # RAM 4 GB\r\n\r\nplt.yticks(yticks, ytext)\r\nplt.xticks(xticks, xtext, rotation=45)\r\n\r\nplt.xlabel(\"Tamaño matriz N\")\r\nplt.ylabel(\"Uso memoria (bytes)\")\r\nplt.grid(True)\r\nplt.savefig(\"Rendimiento caso1_single.png\")",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success('Shader debugging not enabled, skipping test')
return
failed = False
test_marker: rd.ActionDescription = self.find_action('sm_5_0')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed
test_marker: rd.ActionDescription = self.find_action('sm_5_1')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed
rdtest.log.begin_section('Resource array tests')
test_marker: rd.ActionDescription = self.find_action('ResArray')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'ResArray({},{})'.format(x, y)) or failed
rdtest.log.end_section('Resource array tests')
rdtest.log.begin_section('Bindless tests')
test_marker: rd.ActionDescription = self.find_action('Bindless')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'Bindless({},{})'.format(x, y)) or failed
rdtest.log.end_section('Bindless tests')
if failed:
raise rdtest.TestFailureException('Some tests were not as expected'
)
rdtest.log.success('All tests matched')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
<|reserved_special_token_0|>
def test_debug_pixel(self, x, y, test_name):
pipe: rd.PipeState = self.controller.GetPipelineState()
if not pipe.GetShaderReflection(rd.ShaderStage.Pixel
).debugInfo.debuggable:
rdtest.log.print('Skipping undebuggable shader at {}.'.format(
test_name))
return
trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.
ReplayController.NoPreference, rd.ReplayController.NoPreference)
cycles, variables = self.process_trace(trace)
output = self.find_output_source_var(trace, rd.ShaderBuiltin.
ColorOutput, 0)
debugged = self.evaluate_source_var(output, variables)
try:
self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,
y, debugged.value.f32v[0:4])
except rdtest.TestFailureException as ex:
rdtest.log.error('Test {} did not match. {}'.format(test_name,
str(ex)))
return False
finally:
self.controller.FreeTrace(trace)
rdtest.log.success('Test {} matched as expected'.format(test_name))
return True
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success('Shader debugging not enabled, skipping test')
return
failed = False
test_marker: rd.ActionDescription = self.find_action('sm_5_0')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed
test_marker: rd.ActionDescription = self.find_action('sm_5_1')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed
rdtest.log.begin_section('Resource array tests')
test_marker: rd.ActionDescription = self.find_action('ResArray')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'ResArray({},{})'.format(x, y)) or failed
rdtest.log.end_section('Resource array tests')
rdtest.log.begin_section('Bindless tests')
test_marker: rd.ActionDescription = self.find_action('Bindless')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'Bindless({},{})'.format(x, y)) or failed
rdtest.log.end_section('Bindless tests')
if failed:
raise rdtest.TestFailureException('Some tests were not as expected'
)
rdtest.log.success('All tests matched')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
demos_test_name = 'D3D12_Resource_Mapping_Zoo'
def test_debug_pixel(self, x, y, test_name):
pipe: rd.PipeState = self.controller.GetPipelineState()
if not pipe.GetShaderReflection(rd.ShaderStage.Pixel
).debugInfo.debuggable:
rdtest.log.print('Skipping undebuggable shader at {}.'.format(
test_name))
return
trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.
ReplayController.NoPreference, rd.ReplayController.NoPreference)
cycles, variables = self.process_trace(trace)
output = self.find_output_source_var(trace, rd.ShaderBuiltin.
ColorOutput, 0)
debugged = self.evaluate_source_var(output, variables)
try:
self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,
y, debugged.value.f32v[0:4])
except rdtest.TestFailureException as ex:
rdtest.log.error('Test {} did not match. {}'.format(test_name,
str(ex)))
return False
finally:
self.controller.FreeTrace(trace)
rdtest.log.success('Test {} matched as expected'.format(test_name))
return True
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success('Shader debugging not enabled, skipping test')
return
failed = False
test_marker: rd.ActionDescription = self.find_action('sm_5_0')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed
test_marker: rd.ActionDescription = self.find_action('sm_5_1')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed
rdtest.log.begin_section('Resource array tests')
test_marker: rd.ActionDescription = self.find_action('ResArray')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'ResArray({},{})'.format(x, y)) or failed
rdtest.log.end_section('Resource array tests')
rdtest.log.begin_section('Bindless tests')
test_marker: rd.ActionDescription = self.find_action('Bindless')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'Bindless({},{})'.format(x, y)) or failed
rdtest.log.end_section('Bindless tests')
if failed:
raise rdtest.TestFailureException('Some tests were not as expected'
)
rdtest.log.success('All tests matched')
<|reserved_special_token_1|>
import renderdoc as rd
from typing import List
import rdtest
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
demos_test_name = 'D3D12_Resource_Mapping_Zoo'
def test_debug_pixel(self, x, y, test_name):
pipe: rd.PipeState = self.controller.GetPipelineState()
if not pipe.GetShaderReflection(rd.ShaderStage.Pixel
).debugInfo.debuggable:
rdtest.log.print('Skipping undebuggable shader at {}.'.format(
test_name))
return
trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.
ReplayController.NoPreference, rd.ReplayController.NoPreference)
cycles, variables = self.process_trace(trace)
output = self.find_output_source_var(trace, rd.ShaderBuiltin.
ColorOutput, 0)
debugged = self.evaluate_source_var(output, variables)
try:
self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,
y, debugged.value.f32v[0:4])
except rdtest.TestFailureException as ex:
rdtest.log.error('Test {} did not match. {}'.format(test_name,
str(ex)))
return False
finally:
self.controller.FreeTrace(trace)
rdtest.log.success('Test {} matched as expected'.format(test_name))
return True
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success('Shader debugging not enabled, skipping test')
return
failed = False
test_marker: rd.ActionDescription = self.find_action('sm_5_0')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed
test_marker: rd.ActionDescription = self.find_action('sm_5_1')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed
rdtest.log.begin_section('Resource array tests')
test_marker: rd.ActionDescription = self.find_action('ResArray')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'ResArray({},{})'.format(x, y)) or failed
rdtest.log.end_section('Resource array tests')
rdtest.log.begin_section('Bindless tests')
test_marker: rd.ActionDescription = self.find_action('Bindless')
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y,
'Bindless({},{})'.format(x, y)) or failed
rdtest.log.end_section('Bindless tests')
if failed:
raise rdtest.TestFailureException('Some tests were not as expected'
)
rdtest.log.success('All tests matched')
<|reserved_special_token_1|>
import renderdoc as rd
from typing import List
import rdtest
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
demos_test_name = 'D3D12_Resource_Mapping_Zoo'
def test_debug_pixel(self, x, y, test_name):
pipe: rd.PipeState = self.controller.GetPipelineState()
if not pipe.GetShaderReflection(rd.ShaderStage.Pixel).debugInfo.debuggable:
rdtest.log.print("Skipping undebuggable shader at {}.".format(test_name))
return
# Debug the shader
trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.ReplayController.NoPreference,
rd.ReplayController.NoPreference)
cycles, variables = self.process_trace(trace)
output = self.find_output_source_var(trace, rd.ShaderBuiltin.ColorOutput, 0)
debugged = self.evaluate_source_var(output, variables)
try:
self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x, y, debugged.value.f32v[0:4])
except rdtest.TestFailureException as ex:
rdtest.log.error("Test {} did not match. {}".format(test_name, str(ex)))
return False
finally:
self.controller.FreeTrace(trace)
rdtest.log.success("Test {} matched as expected".format(test_name))
return True
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success("Shader debugging not enabled, skipping test")
return
failed = False
test_marker: rd.ActionDescription = self.find_action("sm_5_0")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, "sm_5_0") or failed
test_marker: rd.ActionDescription = self.find_action("sm_5_1")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, "sm_5_1") or failed
rdtest.log.begin_section("Resource array tests")
test_marker: rd.ActionDescription = self.find_action("ResArray")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y, "ResArray({},{})".format(x, y)) or failed
rdtest.log.end_section("Resource array tests")
rdtest.log.begin_section("Bindless tests")
test_marker: rd.ActionDescription = self.find_action("Bindless")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y, "Bindless({},{})".format(x, y)) or failed
rdtest.log.end_section("Bindless tests")
if failed:
raise rdtest.TestFailureException("Some tests were not as expected")
rdtest.log.success("All tests matched")
|
flexible
|
{
"blob_id": "565888d771f53934805555390e48d4886a43bdb6",
"index": 189,
"step-1": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n <mask token>\n <mask token>\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-2": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n <mask token>\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 
'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-3": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + 
x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-4": "import renderdoc as rd\nfrom typing import List\nimport rdtest\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in 
range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-5": "import renderdoc as rd\nfrom typing import List\nimport rdtest\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel).debugInfo.debuggable:\n rdtest.log.print(\"Skipping undebuggable shader at {}.\".format(test_name))\n return\n\n # Debug the shader\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.ReplayController.NoPreference,\n rd.ReplayController.NoPreference)\n\n cycles, variables = self.process_trace(trace)\n\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.ColorOutput, 0)\n\n debugged = self.evaluate_source_var(output, variables)\n\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x, y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error(\"Test {} did not match. {}\".format(test_name, str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n\n rdtest.log.success(\"Test {} matched as expected\".format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success(\"Shader debugging not enabled, skipping test\")\n return\n\n failed = False\n\n test_marker: rd.ActionDescription = self.find_action(\"sm_5_0\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, \"sm_5_0\") or failed\n\n test_marker: rd.ActionDescription = self.find_action(\"sm_5_1\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, \"sm_5_1\") or failed\n\n rdtest.log.begin_section(\"Resource array tests\")\n test_marker: rd.ActionDescription = self.find_action(\"ResArray\")\n action = test_marker.next\n 
self.controller.SetFrameEvent(action.eventId, False)\n\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y, \"ResArray({},{})\".format(x, y)) or failed\n\n rdtest.log.end_section(\"Resource array tests\")\n\n rdtest.log.begin_section(\"Bindless tests\")\n test_marker: rd.ActionDescription = self.find_action(\"Bindless\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y, \"Bindless({},{})\".format(x, y)) or failed\n\n rdtest.log.end_section(\"Bindless tests\")\n\n if failed:\n raise rdtest.TestFailureException(\"Some tests were not as expected\")\n\n rdtest.log.success(\"All tests matched\")\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.
join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):
with pd.ExcelWriter(excel_file_path, mode='w') as writer:
for ts in time_stamps:
print(f'Evaluating results for time stamp: {ts}')
full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'full_result_dict_{ts}.p')
full_results_dict = load_from_pickle(full_results_dict_path)
for run_id, results_dict in full_results_dict.items():
only_eval_dict = {cur_xval: [decode_class(data[3]) for data in
data_list] for cur_xval, data_list in results_dict.items()}
df = pd.DataFrame(only_eval_dict)
df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'xVal_results_{run_id}.csv'), index=False, header=False)
df.to_excel(writer, run_id)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.
join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):
with pd.ExcelWriter(excel_file_path, mode='w') as writer:
for ts in time_stamps:
print(f'Evaluating results for time stamp: {ts}')
full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'full_result_dict_{ts}.p')
full_results_dict = load_from_pickle(full_results_dict_path)
for run_id, results_dict in full_results_dict.items():
only_eval_dict = {cur_xval: [decode_class(data[3]) for data in
data_list] for cur_xval, data_list in results_dict.items()}
df = pd.DataFrame(only_eval_dict)
df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'xVal_results_{run_id}.csv'), index=False, header=False)
df.to_excel(writer, run_id)
if __name__ == '__main__':
time_stamps_to_eval = ['1616007514.9154973']
eval_results(time_stamps_to_eval)
metric = 'f1score'
score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,
RESULTS_FOLDER, 'checkpoints'), metric)
print(f'{metric}: {[s for s, p in score_path_list]}')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ROOT = FLAGS.ROOT
RESULTS_FOLDER = FLAGS.RESULTS_FOLDER
FULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, 'checkpoints')
def eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.
join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):
with pd.ExcelWriter(excel_file_path, mode='w') as writer:
for ts in time_stamps:
print(f'Evaluating results for time stamp: {ts}')
full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'full_result_dict_{ts}.p')
full_results_dict = load_from_pickle(full_results_dict_path)
for run_id, results_dict in full_results_dict.items():
only_eval_dict = {cur_xval: [decode_class(data[3]) for data in
data_list] for cur_xval, data_list in results_dict.items()}
df = pd.DataFrame(only_eval_dict)
df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'xVal_results_{run_id}.csv'), index=False, header=False)
df.to_excel(writer, run_id)
if __name__ == '__main__':
time_stamps_to_eval = ['1616007514.9154973']
eval_results(time_stamps_to_eval)
metric = 'f1score'
score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,
RESULTS_FOLDER, 'checkpoints'), metric)
print(f'{metric}: {[s for s, p in score_path_list]}')
<|reserved_special_token_1|>
import os
from typing import Union, Tuple, List
import pandas as pd
from flags import FLAGS
from helpers import load_from_pickle, decode_class, sort_results_by_metric
ROOT = FLAGS.ROOT
RESULTS_FOLDER = FLAGS.RESULTS_FOLDER
FULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, 'checkpoints')
def eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.
join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):
with pd.ExcelWriter(excel_file_path, mode='w') as writer:
for ts in time_stamps:
print(f'Evaluating results for time stamp: {ts}')
full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'full_result_dict_{ts}.p')
full_results_dict = load_from_pickle(full_results_dict_path)
for run_id, results_dict in full_results_dict.items():
only_eval_dict = {cur_xval: [decode_class(data[3]) for data in
data_list] for cur_xval, data_list in results_dict.items()}
df = pd.DataFrame(only_eval_dict)
df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,
f'xVal_results_{run_id}.csv'), index=False, header=False)
df.to_excel(writer, run_id)
if __name__ == '__main__':
time_stamps_to_eval = ['1616007514.9154973']
eval_results(time_stamps_to_eval)
metric = 'f1score'
score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,
RESULTS_FOLDER, 'checkpoints'), metric)
print(f'{metric}: {[s for s, p in score_path_list]}')
<|reserved_special_token_1|>
import os
from typing import Union, Tuple, List
import pandas as pd
from flags import FLAGS
from helpers import load_from_pickle, decode_class, sort_results_by_metric
ROOT = FLAGS.ROOT
RESULTS_FOLDER = FLAGS.RESULTS_FOLDER
FULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, "checkpoints")
def eval_results(time_stamps: Union[Tuple, List],
excel_file_path=os.path.join(FULL_PATH_TO_CHECKPOINTS, f"xVal_results.xlsx")):
with pd.ExcelWriter(excel_file_path, mode="w") as writer:
for ts in time_stamps:
print(f"Evaluating results for time stamp: {ts}")
full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS, f"full_result_dict_{ts}.p")
full_results_dict = load_from_pickle(full_results_dict_path)
for run_id, results_dict in full_results_dict.items():
only_eval_dict = {cur_xval: [decode_class(data[3]) for data in data_list]
for cur_xval, data_list in results_dict.items()}
# convert to pandas dataframe
df = pd.DataFrame(only_eval_dict)
df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS, f"xVal_results_{run_id}.csv"), index=False, header=False)
df.to_excel(writer, run_id)
if __name__ == '__main__':
time_stamps_to_eval = ["1616007514.9154973"]
eval_results(time_stamps_to_eval)
metric = "f1score"
score_path_list, _ = sort_results_by_metric(os.path.join(ROOT, RESULTS_FOLDER, "checkpoints"), metric)
print(f"{metric}: {[s for s, p in score_path_list]}")
|
flexible
|
{
"blob_id": "5447bd3b08c22913ae50ee66ee81554d2357ef3e",
"index": 3991,
"step-1": "<mask token>\n\n\ndef eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.\n join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):\n with pd.ExcelWriter(excel_file_path, mode='w') as writer:\n for ts in time_stamps:\n print(f'Evaluating results for time stamp: {ts}')\n full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'full_result_dict_{ts}.p')\n full_results_dict = load_from_pickle(full_results_dict_path)\n for run_id, results_dict in full_results_dict.items():\n only_eval_dict = {cur_xval: [decode_class(data[3]) for data in\n data_list] for cur_xval, data_list in results_dict.items()}\n df = pd.DataFrame(only_eval_dict)\n df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'xVal_results_{run_id}.csv'), index=False, header=False)\n df.to_excel(writer, run_id)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.\n join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):\n with pd.ExcelWriter(excel_file_path, mode='w') as writer:\n for ts in time_stamps:\n print(f'Evaluating results for time stamp: {ts}')\n full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'full_result_dict_{ts}.p')\n full_results_dict = load_from_pickle(full_results_dict_path)\n for run_id, results_dict in full_results_dict.items():\n only_eval_dict = {cur_xval: [decode_class(data[3]) for data in\n data_list] for cur_xval, data_list in results_dict.items()}\n df = pd.DataFrame(only_eval_dict)\n df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'xVal_results_{run_id}.csv'), index=False, header=False)\n df.to_excel(writer, run_id)\n\n\nif __name__ == '__main__':\n time_stamps_to_eval = ['1616007514.9154973']\n eval_results(time_stamps_to_eval)\n metric = 'f1score'\n score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,\n RESULTS_FOLDER, 'checkpoints'), metric)\n print(f'{metric}: {[s for s, p in score_path_list]}')\n",
"step-3": "<mask token>\nROOT = FLAGS.ROOT\nRESULTS_FOLDER = FLAGS.RESULTS_FOLDER\nFULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, 'checkpoints')\n\n\ndef eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.\n join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):\n with pd.ExcelWriter(excel_file_path, mode='w') as writer:\n for ts in time_stamps:\n print(f'Evaluating results for time stamp: {ts}')\n full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'full_result_dict_{ts}.p')\n full_results_dict = load_from_pickle(full_results_dict_path)\n for run_id, results_dict in full_results_dict.items():\n only_eval_dict = {cur_xval: [decode_class(data[3]) for data in\n data_list] for cur_xval, data_list in results_dict.items()}\n df = pd.DataFrame(only_eval_dict)\n df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'xVal_results_{run_id}.csv'), index=False, header=False)\n df.to_excel(writer, run_id)\n\n\nif __name__ == '__main__':\n time_stamps_to_eval = ['1616007514.9154973']\n eval_results(time_stamps_to_eval)\n metric = 'f1score'\n score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,\n RESULTS_FOLDER, 'checkpoints'), metric)\n print(f'{metric}: {[s for s, p in score_path_list]}')\n",
"step-4": "import os\nfrom typing import Union, Tuple, List\nimport pandas as pd\nfrom flags import FLAGS\nfrom helpers import load_from_pickle, decode_class, sort_results_by_metric\nROOT = FLAGS.ROOT\nRESULTS_FOLDER = FLAGS.RESULTS_FOLDER\nFULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, 'checkpoints')\n\n\ndef eval_results(time_stamps: Union[Tuple, List], excel_file_path=os.path.\n join(FULL_PATH_TO_CHECKPOINTS, f'xVal_results.xlsx')):\n with pd.ExcelWriter(excel_file_path, mode='w') as writer:\n for ts in time_stamps:\n print(f'Evaluating results for time stamp: {ts}')\n full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'full_result_dict_{ts}.p')\n full_results_dict = load_from_pickle(full_results_dict_path)\n for run_id, results_dict in full_results_dict.items():\n only_eval_dict = {cur_xval: [decode_class(data[3]) for data in\n data_list] for cur_xval, data_list in results_dict.items()}\n df = pd.DataFrame(only_eval_dict)\n df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS,\n f'xVal_results_{run_id}.csv'), index=False, header=False)\n df.to_excel(writer, run_id)\n\n\nif __name__ == '__main__':\n time_stamps_to_eval = ['1616007514.9154973']\n eval_results(time_stamps_to_eval)\n metric = 'f1score'\n score_path_list, _ = sort_results_by_metric(os.path.join(ROOT,\n RESULTS_FOLDER, 'checkpoints'), metric)\n print(f'{metric}: {[s for s, p in score_path_list]}')\n",
"step-5": "import os\nfrom typing import Union, Tuple, List\n\nimport pandas as pd\n\nfrom flags import FLAGS\nfrom helpers import load_from_pickle, decode_class, sort_results_by_metric\n\nROOT = FLAGS.ROOT\nRESULTS_FOLDER = FLAGS.RESULTS_FOLDER\n\nFULL_PATH_TO_CHECKPOINTS = os.path.join(ROOT, RESULTS_FOLDER, \"checkpoints\")\n\n\ndef eval_results(time_stamps: Union[Tuple, List],\n excel_file_path=os.path.join(FULL_PATH_TO_CHECKPOINTS, f\"xVal_results.xlsx\")):\n with pd.ExcelWriter(excel_file_path, mode=\"w\") as writer:\n for ts in time_stamps:\n print(f\"Evaluating results for time stamp: {ts}\")\n full_results_dict_path = os.path.join(FULL_PATH_TO_CHECKPOINTS, f\"full_result_dict_{ts}.p\")\n\n full_results_dict = load_from_pickle(full_results_dict_path)\n\n for run_id, results_dict in full_results_dict.items():\n only_eval_dict = {cur_xval: [decode_class(data[3]) for data in data_list]\n for cur_xval, data_list in results_dict.items()}\n # convert to pandas dataframe\n df = pd.DataFrame(only_eval_dict)\n df.to_csv(os.path.join(FULL_PATH_TO_CHECKPOINTS, f\"xVal_results_{run_id}.csv\"), index=False, header=False)\n df.to_excel(writer, run_id)\n\n\nif __name__ == '__main__':\n time_stamps_to_eval = [\"1616007514.9154973\"]\n eval_results(time_stamps_to_eval)\n\n metric = \"f1score\"\n\n score_path_list, _ = sort_results_by_metric(os.path.join(ROOT, RESULTS_FOLDER, \"checkpoints\"), metric)\n\n print(f\"{metric}: {[s for s, p in score_path_list]}\")\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,
texture_cond):
if normal_map_cond and texture_cond:
return torch.cat((textured_rndr, norm_map), dim=1)
elif normal_map_cond:
return norm_map
elif texture_cond:
return textured_rndr
else:
return flm_params
def corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,
pose_sigma):
corrupted_flame = deepcopy(flm_params)
if corruption_type == 'shape' or corruption_type == 'all':
corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.
normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma
).astype('float32')
if corruption_type == 'exp_jaw' or corruption_type == 'all':
corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.
random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *
sigma, 3 * sigma).astype('float32')
corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,
jaw_sigma, corrupted_flame.shape[0])
if corruption_type == 'pose' or corruption_type == 'all':
pose_perturbation = np.random.normal(0, pose_sigma, (
corrupted_flame.shape[0],))
corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(
pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)
return corrupted_flame
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append('../')
<|reserved_special_token_0|>
def ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,
texture_cond):
if normal_map_cond and texture_cond:
return torch.cat((textured_rndr, norm_map), dim=1)
elif normal_map_cond:
return norm_map
elif texture_cond:
return textured_rndr
else:
return flm_params
def corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,
pose_sigma):
corrupted_flame = deepcopy(flm_params)
if corruption_type == 'shape' or corruption_type == 'all':
corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.
normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma
).astype('float32')
if corruption_type == 'exp_jaw' or corruption_type == 'all':
corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.
random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *
sigma, 3 * sigma).astype('float32')
corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,
jaw_sigma, corrupted_flame.shape[0])
if corruption_type == 'pose' or corruption_type == 'all':
pose_perturbation = np.random.normal(0, pose_sigma, (
corrupted_flame.shape[0],))
corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(
pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)
return corrupted_flame
<|reserved_special_token_0|>
np.random.seed(2)
for i, key in enumerate(fl_param_dict):
flame_param = fl_param_dict[key]
shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))
).astype('float32')
exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))
).astype('float32')
pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.
random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')
texture = np.random.normal(0, 1, [50]).astype('float32')
flame_param = np.hstack((shape_params, exp_params, pose, flame_param[
'cam'], texture, flame_param['lit'].flatten()))
flm_params[i, :] = flame_param.astype('float32')
if i == num_smpl_to_eval_on - 1:
break
<|reserved_special_token_0|>
for run_idx in run_ids_1:
generator_1 = torch.nn.DataParallel(StyledGenerator(
embedding_vocab_size=69158, rendered_flame_ascondition=
settings_for_runs[run_idx]['rendered_flame_as_condition'],
normal_maps_as_cond=settings_for_runs[run_idx][
'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs
[run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=
core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()
model_idx = settings_for_runs[run_idx]['model_idx']
ckpt1 = torch.load(
f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')
generator_1.load_state_dict(ckpt1['generator_running'])
generator_1 = generator_1.eval()
params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],
'light_code': [], 'texture_code': [], 'identity_indices': []}
for i, sigma in enumerate(corruption_sigma):
images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)
).astype('float32')
flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,
resolution)).astype('float32')
pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))
pbar.set_description('Generating_images')
for batch_idx in pbar:
flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]
flm_batch = torch.from_numpy(flm_batch).cuda()
flm_batch = position_to_given_location(flame_decoder, flm_batch)
if settings_for_runs[run_idx]['normal_maps_as_cond'
] or settings_for_runs[run_idx]['rendered_flame_as_condition']:
batch_size_true = flm_batch.shape[0]
cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.
DECA_IDX['cam'][1]]
shape = flm_batch[:, constants.INDICES['SHAPE'][0]:
constants.INDICES['SHAPE'][1]]
exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.
INDICES['EXP'][1]]
pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.
INDICES['POSE'][1]]
light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:
constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))
texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:
constants.DECA_IDX['tex'][1]]
params_to_save['cam'].append(cam.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['exp'].append(exp.cpu().detach().numpy())
params_to_save['pose'].append(pose.cpu().detach().numpy())
params_to_save['light_code'].append(light_code.cpu().detach
().numpy())
params_to_save['texture_code'].append(texture_code.cpu().
detach().numpy())
norma_map_img, _, _, _, rend_flm = (overlay_visualizer.
get_rendered_mesh(flame_params=(shape, exp, pose,
light_code, texture_code), camera_params=cam))
rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1
norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1
rend_flm = fast_image_reshape(rend_flm, height_out=256,
width_out=256, mode='bilinear')
norma_map_img = fast_image_reshape(norma_map_img,
height_out=256, width_out=256, mode='bilinear')
norma_map_img_to_save, _, _, _, rend_flm_to_save = (
overlay_visualizer.get_rendered_mesh(flame_params=(
shape, exp, pose, light_code, texture_code),
camera_params=cam, cull_backfaces=False,
constant_albedo=0.6))
rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1
rend_flm_to_save = fast_image_reshape(rend_flm_to_save,
height_out=256, width_out=256, mode='bilinear')
else:
rend_flm = None
norma_map_img = None
gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,
settings_for_runs[run_idx]['normal_maps_as_cond'],
settings_for_runs[run_idx]['rendered_flame_as_condition'])
identity_embeddings = torch.randint(low=0, high=69158, size=(
gen_1_in.shape[0],), dtype=torch.long, device='cuda')
mdl_1_gen_images = generic_utils.get_images_from_flame_params(
flame_params=gen_1_in.cpu().numpy(), pose=None, model=
generator_1, step=step_max, alpha=alpha, input_indices=
identity_embeddings.cpu().numpy())
params_to_save['identity_indices'].append(identity_embeddings.
cpu().detach().numpy())
images[batch_idx:batch_idx + batch_size_true] = torch.clamp(
mdl_1_gen_images, -1, 1).cpu().numpy()
flame_mesh_imgs[batch_idx:batch_idx + batch_size_true
] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()
if save_images:
mdl_name = settings_for_runs[run_idx]['name']
for key in params_to_save.keys():
params_to_save[key] = np.concatenate(params_to_save[key],
axis=0)
save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx
), f'random_samples_q_eval_{mdl_name}')
os.makedirs(save_dir, exist_ok=True)
np.save(os.path.join(save_dir, 'params.npy'), params_to_save)
save_path_current_id = os.path.join(save_dir, 'images')
save_set_of_images(path=save_path_current_id, prefix='', images
=(images + 1) / 2, show_prog_bar=True)
save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'
)
save_set_of_images(path=save_path_current_id_flm_rndr, prefix=
'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append('../')
<|reserved_special_token_0|>
os.environ['PYTHONHASHSEED'] = '2'
<|reserved_special_token_0|>
def ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,
texture_cond):
if normal_map_cond and texture_cond:
return torch.cat((textured_rndr, norm_map), dim=1)
elif normal_map_cond:
return norm_map
elif texture_cond:
return textured_rndr
else:
return flm_params
def corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,
pose_sigma):
corrupted_flame = deepcopy(flm_params)
if corruption_type == 'shape' or corruption_type == 'all':
corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.
normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma
).astype('float32')
if corruption_type == 'exp_jaw' or corruption_type == 'all':
corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.
random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *
sigma, 3 * sigma).astype('float32')
corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,
jaw_sigma, corrupted_flame.shape[0])
if corruption_type == 'pose' or corruption_type == 'all':
pose_perturbation = np.random.normal(0, pose_sigma, (
corrupted_flame.shape[0],))
corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(
pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)
return corrupted_flame
save_images = True
code_size = 236
use_inst_norm = True
core_tensor_res = 4
resolution = 256
alpha = 1
step_max = int(np.log2(resolution) - 2)
num_smpl_to_eval_on = 128
use_styled_conv_stylegan2 = True
flength = 5000
cam_t = np.array([0.0, 0.0, 0])
camera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)
run_ids_1 = [29]
settings_for_runs = {(24): {'name': 'vector_cond', 'model_idx': '216000_1',
'normal_maps_as_cond': False, 'rendered_flame_as_condition': False,
'apply_sqrt2_fac_in_eq_lin': False}, (29): {'name': 'full_model',
'model_idx': '294000_1', 'normal_maps_as_cond': True,
'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},
(7): {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1',
'normal_maps_as_cond': False, 'rendered_flame_as_condition': True,
'apply_sqrt2_fac_in_eq_lin': False}, (3): {'name': 'norm_mp_tex_interp',
'model_idx': '203000_1', 'normal_maps_as_cond': True,
'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin':
False}, (8): {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx':
'009000_1', 'normal_maps_as_cond': True, 'rendered_flame_as_condition':
True, 'apply_sqrt2_fac_in_eq_lin': False}}
overlay_visualizer = OverLayViz()
flm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')
fl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()
np.random.seed(2)
for i, key in enumerate(fl_param_dict):
flame_param = fl_param_dict[key]
shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))
).astype('float32')
exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))
).astype('float32')
pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.
random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')
texture = np.random.normal(0, 1, [50]).astype('float32')
flame_param = np.hstack((shape_params, exp_params, pose, flame_param[
'cam'], texture, flame_param['lit'].flatten()))
flm_params[i, :] = flame_param.astype('float32')
if i == num_smpl_to_eval_on - 1:
break
batch_size = 32
num_sigmas = 1
corruption_sigma = np.linspace(0, 1.5, num_sigmas)
jaw_rot_range = 0, np.pi / 8
jaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0]) / 6,
num_sigmas)
pose_range = -np.pi / 3, np.pi / 3
pose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0]) / 6, num_sigmas)
config_obj = util.dict2obj(cnst.flame_config)
flame_decoder = FLAME.FLAME(config_obj).cuda().eval()
for run_idx in run_ids_1:
generator_1 = torch.nn.DataParallel(StyledGenerator(
embedding_vocab_size=69158, rendered_flame_ascondition=
settings_for_runs[run_idx]['rendered_flame_as_condition'],
normal_maps_as_cond=settings_for_runs[run_idx][
'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs
[run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=
core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()
model_idx = settings_for_runs[run_idx]['model_idx']
ckpt1 = torch.load(
f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')
generator_1.load_state_dict(ckpt1['generator_running'])
generator_1 = generator_1.eval()
params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],
'light_code': [], 'texture_code': [], 'identity_indices': []}
for i, sigma in enumerate(corruption_sigma):
images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)
).astype('float32')
flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,
resolution)).astype('float32')
pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))
pbar.set_description('Generating_images')
for batch_idx in pbar:
flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]
flm_batch = torch.from_numpy(flm_batch).cuda()
flm_batch = position_to_given_location(flame_decoder, flm_batch)
if settings_for_runs[run_idx]['normal_maps_as_cond'
] or settings_for_runs[run_idx]['rendered_flame_as_condition']:
batch_size_true = flm_batch.shape[0]
cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.
DECA_IDX['cam'][1]]
shape = flm_batch[:, constants.INDICES['SHAPE'][0]:
constants.INDICES['SHAPE'][1]]
exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.
INDICES['EXP'][1]]
pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.
INDICES['POSE'][1]]
light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:
constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))
texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:
constants.DECA_IDX['tex'][1]]
params_to_save['cam'].append(cam.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['exp'].append(exp.cpu().detach().numpy())
params_to_save['pose'].append(pose.cpu().detach().numpy())
params_to_save['light_code'].append(light_code.cpu().detach
().numpy())
params_to_save['texture_code'].append(texture_code.cpu().
detach().numpy())
norma_map_img, _, _, _, rend_flm = (overlay_visualizer.
get_rendered_mesh(flame_params=(shape, exp, pose,
light_code, texture_code), camera_params=cam))
rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1
norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1
rend_flm = fast_image_reshape(rend_flm, height_out=256,
width_out=256, mode='bilinear')
norma_map_img = fast_image_reshape(norma_map_img,
height_out=256, width_out=256, mode='bilinear')
norma_map_img_to_save, _, _, _, rend_flm_to_save = (
overlay_visualizer.get_rendered_mesh(flame_params=(
shape, exp, pose, light_code, texture_code),
camera_params=cam, cull_backfaces=False,
constant_albedo=0.6))
rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1
rend_flm_to_save = fast_image_reshape(rend_flm_to_save,
height_out=256, width_out=256, mode='bilinear')
else:
rend_flm = None
norma_map_img = None
gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,
settings_for_runs[run_idx]['normal_maps_as_cond'],
settings_for_runs[run_idx]['rendered_flame_as_condition'])
identity_embeddings = torch.randint(low=0, high=69158, size=(
gen_1_in.shape[0],), dtype=torch.long, device='cuda')
mdl_1_gen_images = generic_utils.get_images_from_flame_params(
flame_params=gen_1_in.cpu().numpy(), pose=None, model=
generator_1, step=step_max, alpha=alpha, input_indices=
identity_embeddings.cpu().numpy())
params_to_save['identity_indices'].append(identity_embeddings.
cpu().detach().numpy())
images[batch_idx:batch_idx + batch_size_true] = torch.clamp(
mdl_1_gen_images, -1, 1).cpu().numpy()
flame_mesh_imgs[batch_idx:batch_idx + batch_size_true
] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()
if save_images:
mdl_name = settings_for_runs[run_idx]['name']
for key in params_to_save.keys():
params_to_save[key] = np.concatenate(params_to_save[key],
axis=0)
save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx
), f'random_samples_q_eval_{mdl_name}')
os.makedirs(save_dir, exist_ok=True)
np.save(os.path.join(save_dir, 'params.npy'), params_to_save)
save_path_current_id = os.path.join(save_dir, 'images')
save_set_of_images(path=save_path_current_id, prefix='', images
=(images + 1) / 2, show_prog_bar=True)
save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'
)
save_set_of_images(path=save_path_current_id_flm_rndr, prefix=
'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)
<|reserved_special_token_1|>
import sys
sys.path.append('../')
import constants as cnst
import os
os.environ['PYTHONHASHSEED'] = '2'
import tqdm
from model.stg2_generator import StyledGenerator
import numpy as np
from my_utils.visualize_flame_overlay import OverLayViz
from my_utils.flm_dynamic_fit_overlay import camera_ringnetpp
from my_utils.generic_utils import save_set_of_images
import constants
from dataset_loaders import fast_image_reshape
import torch
from my_utils import generic_utils
from my_utils.eye_centering import position_to_given_location
from copy import deepcopy
from my_utils.photometric_optimization.models import FLAME
from my_utils.photometric_optimization import util
def ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,
texture_cond):
if normal_map_cond and texture_cond:
return torch.cat((textured_rndr, norm_map), dim=1)
elif normal_map_cond:
return norm_map
elif texture_cond:
return textured_rndr
else:
return flm_params
def corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,
pose_sigma):
corrupted_flame = deepcopy(flm_params)
if corruption_type == 'shape' or corruption_type == 'all':
corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.
normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma
).astype('float32')
if corruption_type == 'exp_jaw' or corruption_type == 'all':
corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.
random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *
sigma, 3 * sigma).astype('float32')
corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,
jaw_sigma, corrupted_flame.shape[0])
if corruption_type == 'pose' or corruption_type == 'all':
pose_perturbation = np.random.normal(0, pose_sigma, (
corrupted_flame.shape[0],))
corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(
pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)
return corrupted_flame
save_images = True
code_size = 236
use_inst_norm = True
core_tensor_res = 4
resolution = 256
alpha = 1
step_max = int(np.log2(resolution) - 2)
num_smpl_to_eval_on = 128
use_styled_conv_stylegan2 = True
flength = 5000
cam_t = np.array([0.0, 0.0, 0])
camera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)
run_ids_1 = [29]
settings_for_runs = {(24): {'name': 'vector_cond', 'model_idx': '216000_1',
'normal_maps_as_cond': False, 'rendered_flame_as_condition': False,
'apply_sqrt2_fac_in_eq_lin': False}, (29): {'name': 'full_model',
'model_idx': '294000_1', 'normal_maps_as_cond': True,
'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},
(7): {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1',
'normal_maps_as_cond': False, 'rendered_flame_as_condition': True,
'apply_sqrt2_fac_in_eq_lin': False}, (3): {'name': 'norm_mp_tex_interp',
'model_idx': '203000_1', 'normal_maps_as_cond': True,
'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin':
False}, (8): {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx':
'009000_1', 'normal_maps_as_cond': True, 'rendered_flame_as_condition':
True, 'apply_sqrt2_fac_in_eq_lin': False}}
overlay_visualizer = OverLayViz()
flm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')
fl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()
np.random.seed(2)
for i, key in enumerate(fl_param_dict):
flame_param = fl_param_dict[key]
shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))
).astype('float32')
exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))
).astype('float32')
pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.
random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')
texture = np.random.normal(0, 1, [50]).astype('float32')
flame_param = np.hstack((shape_params, exp_params, pose, flame_param[
'cam'], texture, flame_param['lit'].flatten()))
flm_params[i, :] = flame_param.astype('float32')
if i == num_smpl_to_eval_on - 1:
break
batch_size = 32
num_sigmas = 1
corruption_sigma = np.linspace(0, 1.5, num_sigmas)
jaw_rot_range = 0, np.pi / 8
jaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0]) / 6,
num_sigmas)
pose_range = -np.pi / 3, np.pi / 3
pose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0]) / 6, num_sigmas)
config_obj = util.dict2obj(cnst.flame_config)
flame_decoder = FLAME.FLAME(config_obj).cuda().eval()
for run_idx in run_ids_1:
generator_1 = torch.nn.DataParallel(StyledGenerator(
embedding_vocab_size=69158, rendered_flame_ascondition=
settings_for_runs[run_idx]['rendered_flame_as_condition'],
normal_maps_as_cond=settings_for_runs[run_idx][
'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs
[run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=
core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()
model_idx = settings_for_runs[run_idx]['model_idx']
ckpt1 = torch.load(
f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')
generator_1.load_state_dict(ckpt1['generator_running'])
generator_1 = generator_1.eval()
params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],
'light_code': [], 'texture_code': [], 'identity_indices': []}
for i, sigma in enumerate(corruption_sigma):
images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)
).astype('float32')
flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,
resolution)).astype('float32')
pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))
pbar.set_description('Generating_images')
for batch_idx in pbar:
flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]
flm_batch = torch.from_numpy(flm_batch).cuda()
flm_batch = position_to_given_location(flame_decoder, flm_batch)
if settings_for_runs[run_idx]['normal_maps_as_cond'
] or settings_for_runs[run_idx]['rendered_flame_as_condition']:
batch_size_true = flm_batch.shape[0]
cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.
DECA_IDX['cam'][1]]
shape = flm_batch[:, constants.INDICES['SHAPE'][0]:
constants.INDICES['SHAPE'][1]]
exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.
INDICES['EXP'][1]]
pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.
INDICES['POSE'][1]]
light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:
constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))
texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:
constants.DECA_IDX['tex'][1]]
params_to_save['cam'].append(cam.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['shape'].append(shape.cpu().detach().numpy())
params_to_save['exp'].append(exp.cpu().detach().numpy())
params_to_save['pose'].append(pose.cpu().detach().numpy())
params_to_save['light_code'].append(light_code.cpu().detach
().numpy())
params_to_save['texture_code'].append(texture_code.cpu().
detach().numpy())
norma_map_img, _, _, _, rend_flm = (overlay_visualizer.
get_rendered_mesh(flame_params=(shape, exp, pose,
light_code, texture_code), camera_params=cam))
rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1
norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1
rend_flm = fast_image_reshape(rend_flm, height_out=256,
width_out=256, mode='bilinear')
norma_map_img = fast_image_reshape(norma_map_img,
height_out=256, width_out=256, mode='bilinear')
norma_map_img_to_save, _, _, _, rend_flm_to_save = (
overlay_visualizer.get_rendered_mesh(flame_params=(
shape, exp, pose, light_code, texture_code),
camera_params=cam, cull_backfaces=False,
constant_albedo=0.6))
rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1
rend_flm_to_save = fast_image_reshape(rend_flm_to_save,
height_out=256, width_out=256, mode='bilinear')
else:
rend_flm = None
norma_map_img = None
gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,
settings_for_runs[run_idx]['normal_maps_as_cond'],
settings_for_runs[run_idx]['rendered_flame_as_condition'])
identity_embeddings = torch.randint(low=0, high=69158, size=(
gen_1_in.shape[0],), dtype=torch.long, device='cuda')
mdl_1_gen_images = generic_utils.get_images_from_flame_params(
flame_params=gen_1_in.cpu().numpy(), pose=None, model=
generator_1, step=step_max, alpha=alpha, input_indices=
identity_embeddings.cpu().numpy())
params_to_save['identity_indices'].append(identity_embeddings.
cpu().detach().numpy())
images[batch_idx:batch_idx + batch_size_true] = torch.clamp(
mdl_1_gen_images, -1, 1).cpu().numpy()
flame_mesh_imgs[batch_idx:batch_idx + batch_size_true
] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()
if save_images:
mdl_name = settings_for_runs[run_idx]['name']
for key in params_to_save.keys():
params_to_save[key] = np.concatenate(params_to_save[key],
axis=0)
save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx
), f'random_samples_q_eval_{mdl_name}')
os.makedirs(save_dir, exist_ok=True)
np.save(os.path.join(save_dir, 'params.npy'), params_to_save)
save_path_current_id = os.path.join(save_dir, 'images')
save_set_of_images(path=save_path_current_id, prefix='', images
=(images + 1) / 2, show_prog_bar=True)
save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'
)
save_set_of_images(path=save_path_current_id_flm_rndr, prefix=
'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)
<|reserved_special_token_1|>
import sys
sys.path.append('../')
import constants as cnst
import os
os.environ['PYTHONHASHSEED'] = '2'
import tqdm
from model.stg2_generator import StyledGenerator
import numpy as np
from my_utils.visualize_flame_overlay import OverLayViz
from my_utils.flm_dynamic_fit_overlay import camera_ringnetpp
from my_utils.generic_utils import save_set_of_images
import constants
from dataset_loaders import fast_image_reshape
import torch
from my_utils import generic_utils
from my_utils.eye_centering import position_to_given_location
from copy import deepcopy
from my_utils.photometric_optimization.models import FLAME
from my_utils.photometric_optimization import util
def ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond, texture_cond):
if normal_map_cond and texture_cond:
return torch.cat((textured_rndr, norm_map), dim=1)
elif normal_map_cond:
return norm_map
elif texture_cond:
return textured_rndr
else:
return flm_params
def corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma, pose_sigma):
    """Return a copy of ``flm_params`` with Gaussian noise on selected slices.

    ``corruption_type`` selects which parameter groups are perturbed:
    'shape' (cols 0:10), 'exp_jaw' (cols 100:110 plus jaw pose col 153),
    'pose' (yaw col 151), or 'all' for every group. Shape/expression/yaw
    noise is clipped to +-3 sigma; jaw noise is left unclipped, matching
    the original sampling scheme. The input array is never modified.
    """
    noisy = deepcopy(flm_params)
    corrupt_all = corruption_type == 'all'
    if corrupt_all or corruption_type == 'shape':
        draw = np.random.normal(0, sigma, flm_params[:, :10].shape)
        noisy[:, :10] = flm_params[:, :10] + \
            np.clip(draw, -3 * sigma, 3 * sigma).astype('float32')
    if corrupt_all or corruption_type == 'exp_jaw':
        # Expression coefficients.
        draw = np.random.normal(0, sigma, flm_params[:, 100:110].shape)
        noisy[:, 100:110] = flm_params[:, 100:110] + \
            np.clip(draw, -3 * sigma, 3 * sigma).astype('float32')
        # Jaw pose (one scalar per sample, unclipped).
        noisy[:, 153] = flm_params[:, 153] + \
            np.random.normal(0, jaw_sigma, noisy.shape[0])
    if corrupt_all or corruption_type == 'pose':
        # Head yaw (one scalar per sample).
        draw = np.random.normal(0, pose_sigma, (noisy.shape[0],))
        noisy[:, 151] = flm_params[:, 151] + \
            np.clip(draw, -3 * pose_sigma, 3 * pose_sigma)
    return noisy
# General settings
save_images = True
# Length of the packed FLAME parameter vector fed to the generator
# (shape + expression + pose + cam + texture + lighting).
code_size = 236
use_inst_norm = True
# Spatial resolution of the generator's core (input) tensor.
core_tensor_res = 4
resolution = 256
alpha = 1
# StyleGAN progressive-growing step matching `resolution` (256 -> step 6).
step_max = int(np.log2(resolution) - 2)
num_smpl_to_eval_on = 128
use_styled_conv_stylegan2 = True
# Camera for the ring-net++ projection: focal length and zero translation.
flength = 5000
cam_t = np.array([0., 0., 0])
camera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)
# Uncomment the appropriate run_id
run_ids_1 = [29, ]  # with sqrt(2)
# run_ids_1 = [7, 24, 8, 3]
# run_ids_1 = [7, 8, 3]
# run_ids_1 = [7]
# Per-run configuration: output name, checkpoint id, and conditioning flags
# controlling whether normal maps and/or rendered FLAME images are fed in.
settings_for_runs = \
    {24: {'name': 'vector_cond', 'model_idx': '216000_1', 'normal_maps_as_cond': False,
          'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': False},
     29: {'name': 'full_model', 'model_idx': '294000_1', 'normal_maps_as_cond': True,
          'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},
     7: {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1', 'normal_maps_as_cond': False,
         'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': False},
     3: {'name': 'norm_mp_tex_interp', 'model_idx': '203000_1', 'normal_maps_as_cond': True,
         'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': False},
     8: {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx': '009000_1', 'normal_maps_as_cond': True,
         'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': False},}
overlay_visualizer = OverLayViz()
# overlay_visualizer.setup_renderer(mesh_file=None)
# Build num_smpl_to_eval_on FLAME vectors: random shape/expression/pose/
# texture, while cam and lighting come from the fitted parameter file.
flm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')
fl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()
np.random.seed(2)
for i, key in enumerate(fl_param_dict):
    flame_param = fl_param_dict[key]
    # Only the first 3 shape/expression coefficients are randomised; the
    # remaining coefficients are held at zero.
    shape_params = np.concatenate((np.random.normal(0, 1, [3,]), np.zeros(97))).astype('float32')
    exp_params = np.concatenate((np.random.normal(0, 1, [3,]), np.zeros(47))).astype('float32')
    # +- pi/4 for bad samples +- pi/8 for good samples
    # pose = np.array([0, np.random.uniform(-np.pi/4, np.pi/4, 1), 0,
    #                  np.random.uniform(0, np.pi/12, 1), 0, 0]).astype('float32')
    pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0,
                     np.random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')
    texture = np.random.normal(0, 1, [50]).astype('float32')
    # texture = flame_param['tex']
    # Pack all sub-parameters into one flat vector of length code_size.
    flame_param = np.hstack((shape_params, exp_params, pose, flame_param['cam'],
                             texture, flame_param['lit'].flatten()))
    # tz = camera_params['f'][0] / (camera_params['c'][0] * flame_param[:, 156:157])
    # flame_param[:, 156:159] = np.concatenate((flame_param[:, 157:], tz), axis=1)
    # import ipdb; ipdb.set_trace()
    flm_params[i, :] = flame_param.astype('float32')
    if i == num_smpl_to_eval_on - 1:
        break
batch_size = 32
# Corruption schedule. NOTE(review): with num_sigmas == 1, np.linspace yields
# a single value of 0, i.e. no corruption is applied in this configuration.
num_sigmas = 1
corruption_sigma = np.linspace(0, 1.5, num_sigmas)
jaw_rot_range = (0, np.pi/8)
jaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0])/6, num_sigmas)
pose_range = (-np.pi/3, np.pi/3)
pose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0])/6, num_sigmas)
# FLAME decoder used to re-position batches via the eye-centering helper.
config_obj = util.dict2obj(cnst.flame_config)
flame_decoder = FLAME.FLAME(config_obj).cuda().eval()
# ---------------------------------------------------------------------------
# Evaluation loop: for every configured run, rebuild the generator, load its
# checkpoint and render num_smpl_to_eval_on images from the sampled FLAME
# parameters, saving images, conditioning renders and the parameter vectors.
# ---------------------------------------------------------------------------
for run_idx in run_ids_1:
    # Generator wired per-run; DataParallel mirrors the training-time setup.
    generator_1 = torch.nn.DataParallel(
        StyledGenerator(embedding_vocab_size=69158,
                        rendered_flame_ascondition=settings_for_runs[run_idx]['rendered_flame_as_condition'],
                        normal_maps_as_cond=settings_for_runs[run_idx]['normal_maps_as_cond'],
                        apply_sqrt2_fac_in_eq_lin=settings_for_runs[run_idx]['apply_sqrt2_fac_in_eq_lin'],
                        core_tensor_res=core_tensor_res,
                        w_truncation_factor=1.0,
                        n_mlp=8)).cuda()
    model_idx = settings_for_runs[run_idx]['model_idx']
    ckpt1 = torch.load(f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')
    generator_1.load_state_dict(ckpt1['generator_running'])
    generator_1 = generator_1.eval()

    # Per-batch parameter slices are collected here and concatenated into one
    # array per key before being written to params.npy.
    params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [], 'light_code': [], 'texture_code': [],
                      'identity_indices': []}

    for i, sigma in enumerate(corruption_sigma):
        images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)).astype('float32')
        flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)).astype('float32')
        pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))
        pbar.set_description('Generating_images')
        for batch_idx in pbar:
            flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]
            flm_batch = torch.from_numpy(flm_batch).cuda()
            # Re-position the head so the eye centres land at a fixed location.
            flm_batch = position_to_given_location(flame_decoder, flm_batch)

            # NOTE(review): batch_size_true and rend_flm_to_save are defined
            # only inside this branch; a run with both conditioning flags off
            # would raise NameError further down. Every configured run enables
            # at least one flag, so behaviour is preserved as-is.
            if settings_for_runs[run_idx]['normal_maps_as_cond'] or \
                    settings_for_runs[run_idx]['rendered_flame_as_condition']:
                batch_size_true = flm_batch.shape[0]
                # Unpack the flat FLAME vector into its named sub-parameters.
                cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.DECA_IDX['cam'][1]:]
                shape = flm_batch[:, constants.INDICES['SHAPE'][0]:constants.INDICES['SHAPE'][1]]
                exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.INDICES['EXP'][1]]
                pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.INDICES['POSE'][1]]
                light_code = \
                    flm_batch[:, constants.DECA_IDX['lit'][0]:constants.DECA_IDX['lit'][1]:].view((batch_size_true, 9, 3))
                texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:constants.DECA_IDX['tex'][1]:]

                params_to_save['cam'].append(cam.cpu().detach().numpy())
                # Bug fix: 'shape' was previously appended twice per batch,
                # which doubled the saved 'shape' array's row count relative
                # to every other key in params.npy.
                params_to_save['shape'].append(shape.cpu().detach().numpy())
                params_to_save['exp'].append(exp.cpu().detach().numpy())
                params_to_save['pose'].append(pose.cpu().detach().numpy())
                params_to_save['light_code'].append(light_code.cpu().detach().numpy())
                params_to_save['texture_code'].append(texture_code.cpu().detach().numpy())

                # Conditioning render: normal map + textured render, mapped
                # from [0, 1] into the generator's [-1, 1] range.
                norma_map_img, _, _, _, rend_flm = \
                    overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, light_code, texture_code),
                                                         camera_params=cam)
                rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1
                norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1
                rend_flm = fast_image_reshape(rend_flm, height_out=256, width_out=256, mode='bilinear')
                norma_map_img = fast_image_reshape(norma_map_img, height_out=256, width_out=256, mode='bilinear')

                # Second render purely for visualisation (constant albedo).
                # Back-face culling is temporarily unavailable, hence False.
                norma_map_img_to_save, _, _, _, rend_flm_to_save = \
                    overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, light_code, texture_code),
                                                         camera_params=cam, cull_backfaces=False, constant_albedo=0.6)
                rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1
                rend_flm_to_save = fast_image_reshape(rend_flm_to_save, height_out=256, width_out=256, mode='bilinear')
            else:
                rend_flm = None
                norma_map_img = None

            gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img, settings_for_runs[run_idx]['normal_maps_as_cond'],
                                 settings_for_runs[run_idx]['rendered_flame_as_condition'])

            # One random identity-embedding index per sample (vocab 69158,
            # matching the generator's embedding table).
            identity_embeddings = torch.randint(low=0, high=69158, size=(gen_1_in.shape[0], ), dtype=torch.long,
                                                device='cuda')

            mdl_1_gen_images = generic_utils.get_images_from_flame_params(
                flame_params=gen_1_in.cpu().numpy(), pose=None,
                model=generator_1,
                step=step_max, alpha=alpha,
                input_indices=identity_embeddings.cpu().numpy())

            params_to_save['identity_indices'].append(identity_embeddings.cpu().detach().numpy())
            images[batch_idx:batch_idx + batch_size_true] = torch.clamp(mdl_1_gen_images, -1, 1).cpu().numpy()
            flame_mesh_imgs[batch_idx:batch_idx + batch_size_true] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()

    if save_images:
        mdl_name = settings_for_runs[run_idx]['name']
        # Fuse the per-batch slices into one array per parameter key.
        for key in params_to_save.keys():
            params_to_save[key] = np.concatenate(params_to_save[key], axis=0)

        save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx), f'random_samples_q_eval_{mdl_name}')
        os.makedirs(save_dir, exist_ok=True)
        np.save(os.path.join(save_dir, 'params.npy'), params_to_save)

        # Generated output lives in [-1, 1]; images are saved in [0, 1].
        save_path_current_id = os.path.join(save_dir, 'images')
        save_set_of_images(path=save_path_current_id, prefix='', images=(images + 1) / 2, show_prog_bar=True)

        # Save the FLAME conditioning renders alongside the generations.
        save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions')
        save_set_of_images(path=save_path_current_id_flm_rndr, prefix='mesh', images=(flame_mesh_imgs + 1) / 2,
                           show_prog_bar=True)
|
flexible
|
{
"blob_id": "d0991d8ea47379a0c1de836b5d215c99166ad049",
"index": 5936,
"step-1": "<mask token>\n\n\ndef ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,\n texture_cond):\n if normal_map_cond and texture_cond:\n return torch.cat((textured_rndr, norm_map), dim=1)\n elif normal_map_cond:\n return norm_map\n elif texture_cond:\n return textured_rndr\n else:\n return flm_params\n\n\ndef corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,\n pose_sigma):\n corrupted_flame = deepcopy(flm_params)\n if corruption_type == 'shape' or corruption_type == 'all':\n corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.\n normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma\n ).astype('float32')\n if corruption_type == 'exp_jaw' or corruption_type == 'all':\n corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.\n random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *\n sigma, 3 * sigma).astype('float32')\n corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,\n jaw_sigma, corrupted_flame.shape[0])\n if corruption_type == 'pose' or corruption_type == 'all':\n pose_perturbation = np.random.normal(0, pose_sigma, (\n corrupted_flame.shape[0],))\n corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(\n pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)\n return corrupted_flame\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../')\n<mask token>\n\n\ndef ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,\n texture_cond):\n if normal_map_cond and texture_cond:\n return torch.cat((textured_rndr, norm_map), dim=1)\n elif normal_map_cond:\n return norm_map\n elif texture_cond:\n return textured_rndr\n else:\n return flm_params\n\n\ndef corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,\n pose_sigma):\n corrupted_flame = deepcopy(flm_params)\n if corruption_type == 'shape' or corruption_type == 'all':\n corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.\n normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma\n ).astype('float32')\n if corruption_type == 'exp_jaw' or corruption_type == 'all':\n corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.\n random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *\n sigma, 3 * sigma).astype('float32')\n corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,\n jaw_sigma, corrupted_flame.shape[0])\n if corruption_type == 'pose' or corruption_type == 'all':\n pose_perturbation = np.random.normal(0, pose_sigma, (\n corrupted_flame.shape[0],))\n corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(\n pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)\n return corrupted_flame\n\n\n<mask token>\nnp.random.seed(2)\nfor i, key in enumerate(fl_param_dict):\n flame_param = fl_param_dict[key]\n shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))\n ).astype('float32')\n exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))\n ).astype('float32')\n pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.\n random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')\n texture = np.random.normal(0, 1, [50]).astype('float32')\n flame_param = np.hstack((shape_params, exp_params, pose, flame_param[\n 'cam'], texture, flame_param['lit'].flatten()))\n flm_params[i, :] = 
flame_param.astype('float32')\n if i == num_smpl_to_eval_on - 1:\n break\n<mask token>\nfor run_idx in run_ids_1:\n generator_1 = torch.nn.DataParallel(StyledGenerator(\n embedding_vocab_size=69158, rendered_flame_ascondition=\n settings_for_runs[run_idx]['rendered_flame_as_condition'],\n normal_maps_as_cond=settings_for_runs[run_idx][\n 'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs\n [run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=\n core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()\n model_idx = settings_for_runs[run_idx]['model_idx']\n ckpt1 = torch.load(\n f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')\n generator_1.load_state_dict(ckpt1['generator_running'])\n generator_1 = generator_1.eval()\n params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],\n 'light_code': [], 'texture_code': [], 'identity_indices': []}\n for i, sigma in enumerate(corruption_sigma):\n images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)\n ).astype('float32')\n flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,\n resolution)).astype('float32')\n pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))\n pbar.set_description('Generating_images')\n for batch_idx in pbar:\n flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]\n flm_batch = torch.from_numpy(flm_batch).cuda()\n flm_batch = position_to_given_location(flame_decoder, flm_batch)\n if settings_for_runs[run_idx]['normal_maps_as_cond'\n ] or settings_for_runs[run_idx]['rendered_flame_as_condition']:\n batch_size_true = flm_batch.shape[0]\n cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.\n DECA_IDX['cam'][1]]\n shape = flm_batch[:, constants.INDICES['SHAPE'][0]:\n constants.INDICES['SHAPE'][1]]\n exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.\n INDICES['EXP'][1]]\n pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.\n INDICES['POSE'][1]]\n light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:\n 
constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))\n texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:\n constants.DECA_IDX['tex'][1]]\n params_to_save['cam'].append(cam.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['exp'].append(exp.cpu().detach().numpy())\n params_to_save['pose'].append(pose.cpu().detach().numpy())\n params_to_save['light_code'].append(light_code.cpu().detach\n ().numpy())\n params_to_save['texture_code'].append(texture_code.cpu().\n detach().numpy())\n norma_map_img, _, _, _, rend_flm = (overlay_visualizer.\n get_rendered_mesh(flame_params=(shape, exp, pose,\n light_code, texture_code), camera_params=cam))\n rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1\n norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1\n rend_flm = fast_image_reshape(rend_flm, height_out=256,\n width_out=256, mode='bilinear')\n norma_map_img = fast_image_reshape(norma_map_img,\n height_out=256, width_out=256, mode='bilinear')\n norma_map_img_to_save, _, _, _, rend_flm_to_save = (\n overlay_visualizer.get_rendered_mesh(flame_params=(\n shape, exp, pose, light_code, texture_code),\n camera_params=cam, cull_backfaces=False,\n constant_albedo=0.6))\n rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1\n rend_flm_to_save = fast_image_reshape(rend_flm_to_save,\n height_out=256, width_out=256, mode='bilinear')\n else:\n rend_flm = None\n norma_map_img = None\n gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,\n settings_for_runs[run_idx]['normal_maps_as_cond'],\n settings_for_runs[run_idx]['rendered_flame_as_condition'])\n identity_embeddings = torch.randint(low=0, high=69158, size=(\n gen_1_in.shape[0],), dtype=torch.long, device='cuda')\n mdl_1_gen_images = generic_utils.get_images_from_flame_params(\n flame_params=gen_1_in.cpu().numpy(), pose=None, model=\n generator_1, step=step_max, alpha=alpha, input_indices=\n 
identity_embeddings.cpu().numpy())\n params_to_save['identity_indices'].append(identity_embeddings.\n cpu().detach().numpy())\n images[batch_idx:batch_idx + batch_size_true] = torch.clamp(\n mdl_1_gen_images, -1, 1).cpu().numpy()\n flame_mesh_imgs[batch_idx:batch_idx + batch_size_true\n ] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()\n if save_images:\n mdl_name = settings_for_runs[run_idx]['name']\n for key in params_to_save.keys():\n params_to_save[key] = np.concatenate(params_to_save[key],\n axis=0)\n save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx\n ), f'random_samples_q_eval_{mdl_name}')\n os.makedirs(save_dir, exist_ok=True)\n np.save(os.path.join(save_dir, 'params.npy'), params_to_save)\n save_path_current_id = os.path.join(save_dir, 'images')\n save_set_of_images(path=save_path_current_id, prefix='', images\n =(images + 1) / 2, show_prog_bar=True)\n save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'\n )\n save_set_of_images(path=save_path_current_id_flm_rndr, prefix=\n 'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)\n",
"step-3": "<mask token>\nsys.path.append('../')\n<mask token>\nos.environ['PYTHONHASHSEED'] = '2'\n<mask token>\n\n\ndef ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,\n texture_cond):\n if normal_map_cond and texture_cond:\n return torch.cat((textured_rndr, norm_map), dim=1)\n elif normal_map_cond:\n return norm_map\n elif texture_cond:\n return textured_rndr\n else:\n return flm_params\n\n\ndef corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,\n pose_sigma):\n corrupted_flame = deepcopy(flm_params)\n if corruption_type == 'shape' or corruption_type == 'all':\n corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.\n normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma\n ).astype('float32')\n if corruption_type == 'exp_jaw' or corruption_type == 'all':\n corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.\n random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *\n sigma, 3 * sigma).astype('float32')\n corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,\n jaw_sigma, corrupted_flame.shape[0])\n if corruption_type == 'pose' or corruption_type == 'all':\n pose_perturbation = np.random.normal(0, pose_sigma, (\n corrupted_flame.shape[0],))\n corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(\n pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)\n return corrupted_flame\n\n\nsave_images = True\ncode_size = 236\nuse_inst_norm = True\ncore_tensor_res = 4\nresolution = 256\nalpha = 1\nstep_max = int(np.log2(resolution) - 2)\nnum_smpl_to_eval_on = 128\nuse_styled_conv_stylegan2 = True\nflength = 5000\ncam_t = np.array([0.0, 0.0, 0])\ncamera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)\nrun_ids_1 = [29]\nsettings_for_runs = {(24): {'name': 'vector_cond', 'model_idx': '216000_1',\n 'normal_maps_as_cond': False, 'rendered_flame_as_condition': False,\n 'apply_sqrt2_fac_in_eq_lin': False}, (29): {'name': 'full_model',\n 'model_idx': '294000_1', 
'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},\n (7): {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1',\n 'normal_maps_as_cond': False, 'rendered_flame_as_condition': True,\n 'apply_sqrt2_fac_in_eq_lin': False}, (3): {'name': 'norm_mp_tex_interp',\n 'model_idx': '203000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': \n False}, (8): {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx':\n '009000_1', 'normal_maps_as_cond': True, 'rendered_flame_as_condition':\n True, 'apply_sqrt2_fac_in_eq_lin': False}}\noverlay_visualizer = OverLayViz()\nflm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')\nfl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()\nnp.random.seed(2)\nfor i, key in enumerate(fl_param_dict):\n flame_param = fl_param_dict[key]\n shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))\n ).astype('float32')\n exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))\n ).astype('float32')\n pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.\n random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')\n texture = np.random.normal(0, 1, [50]).astype('float32')\n flame_param = np.hstack((shape_params, exp_params, pose, flame_param[\n 'cam'], texture, flame_param['lit'].flatten()))\n flm_params[i, :] = flame_param.astype('float32')\n if i == num_smpl_to_eval_on - 1:\n break\nbatch_size = 32\nnum_sigmas = 1\ncorruption_sigma = np.linspace(0, 1.5, num_sigmas)\njaw_rot_range = 0, np.pi / 8\njaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0]) / 6,\n num_sigmas)\npose_range = -np.pi / 3, np.pi / 3\npose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0]) / 6, num_sigmas)\nconfig_obj = util.dict2obj(cnst.flame_config)\nflame_decoder = FLAME.FLAME(config_obj).cuda().eval()\nfor run_idx in run_ids_1:\n generator_1 = 
torch.nn.DataParallel(StyledGenerator(\n embedding_vocab_size=69158, rendered_flame_ascondition=\n settings_for_runs[run_idx]['rendered_flame_as_condition'],\n normal_maps_as_cond=settings_for_runs[run_idx][\n 'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs\n [run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=\n core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()\n model_idx = settings_for_runs[run_idx]['model_idx']\n ckpt1 = torch.load(\n f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')\n generator_1.load_state_dict(ckpt1['generator_running'])\n generator_1 = generator_1.eval()\n params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],\n 'light_code': [], 'texture_code': [], 'identity_indices': []}\n for i, sigma in enumerate(corruption_sigma):\n images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)\n ).astype('float32')\n flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,\n resolution)).astype('float32')\n pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))\n pbar.set_description('Generating_images')\n for batch_idx in pbar:\n flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]\n flm_batch = torch.from_numpy(flm_batch).cuda()\n flm_batch = position_to_given_location(flame_decoder, flm_batch)\n if settings_for_runs[run_idx]['normal_maps_as_cond'\n ] or settings_for_runs[run_idx]['rendered_flame_as_condition']:\n batch_size_true = flm_batch.shape[0]\n cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.\n DECA_IDX['cam'][1]]\n shape = flm_batch[:, constants.INDICES['SHAPE'][0]:\n constants.INDICES['SHAPE'][1]]\n exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.\n INDICES['EXP'][1]]\n pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.\n INDICES['POSE'][1]]\n light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:\n constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))\n texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:\n 
constants.DECA_IDX['tex'][1]]\n params_to_save['cam'].append(cam.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['exp'].append(exp.cpu().detach().numpy())\n params_to_save['pose'].append(pose.cpu().detach().numpy())\n params_to_save['light_code'].append(light_code.cpu().detach\n ().numpy())\n params_to_save['texture_code'].append(texture_code.cpu().\n detach().numpy())\n norma_map_img, _, _, _, rend_flm = (overlay_visualizer.\n get_rendered_mesh(flame_params=(shape, exp, pose,\n light_code, texture_code), camera_params=cam))\n rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1\n norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1\n rend_flm = fast_image_reshape(rend_flm, height_out=256,\n width_out=256, mode='bilinear')\n norma_map_img = fast_image_reshape(norma_map_img,\n height_out=256, width_out=256, mode='bilinear')\n norma_map_img_to_save, _, _, _, rend_flm_to_save = (\n overlay_visualizer.get_rendered_mesh(flame_params=(\n shape, exp, pose, light_code, texture_code),\n camera_params=cam, cull_backfaces=False,\n constant_albedo=0.6))\n rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1\n rend_flm_to_save = fast_image_reshape(rend_flm_to_save,\n height_out=256, width_out=256, mode='bilinear')\n else:\n rend_flm = None\n norma_map_img = None\n gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,\n settings_for_runs[run_idx]['normal_maps_as_cond'],\n settings_for_runs[run_idx]['rendered_flame_as_condition'])\n identity_embeddings = torch.randint(low=0, high=69158, size=(\n gen_1_in.shape[0],), dtype=torch.long, device='cuda')\n mdl_1_gen_images = generic_utils.get_images_from_flame_params(\n flame_params=gen_1_in.cpu().numpy(), pose=None, model=\n generator_1, step=step_max, alpha=alpha, input_indices=\n identity_embeddings.cpu().numpy())\n params_to_save['identity_indices'].append(identity_embeddings.\n cpu().detach().numpy())\n 
images[batch_idx:batch_idx + batch_size_true] = torch.clamp(\n mdl_1_gen_images, -1, 1).cpu().numpy()\n flame_mesh_imgs[batch_idx:batch_idx + batch_size_true\n ] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()\n if save_images:\n mdl_name = settings_for_runs[run_idx]['name']\n for key in params_to_save.keys():\n params_to_save[key] = np.concatenate(params_to_save[key],\n axis=0)\n save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx\n ), f'random_samples_q_eval_{mdl_name}')\n os.makedirs(save_dir, exist_ok=True)\n np.save(os.path.join(save_dir, 'params.npy'), params_to_save)\n save_path_current_id = os.path.join(save_dir, 'images')\n save_set_of_images(path=save_path_current_id, prefix='', images\n =(images + 1) / 2, show_prog_bar=True)\n save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'\n )\n save_set_of_images(path=save_path_current_id_flm_rndr, prefix=\n 'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)\n",
"step-4": "import sys\nsys.path.append('../')\nimport constants as cnst\nimport os\nos.environ['PYTHONHASHSEED'] = '2'\nimport tqdm\nfrom model.stg2_generator import StyledGenerator\nimport numpy as np\nfrom my_utils.visualize_flame_overlay import OverLayViz\nfrom my_utils.flm_dynamic_fit_overlay import camera_ringnetpp\nfrom my_utils.generic_utils import save_set_of_images\nimport constants\nfrom dataset_loaders import fast_image_reshape\nimport torch\nfrom my_utils import generic_utils\nfrom my_utils.eye_centering import position_to_given_location\nfrom copy import deepcopy\nfrom my_utils.photometric_optimization.models import FLAME\nfrom my_utils.photometric_optimization import util\n\n\ndef ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond,\n texture_cond):\n if normal_map_cond and texture_cond:\n return torch.cat((textured_rndr, norm_map), dim=1)\n elif normal_map_cond:\n return norm_map\n elif texture_cond:\n return textured_rndr\n else:\n return flm_params\n\n\ndef corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma,\n pose_sigma):\n corrupted_flame = deepcopy(flm_params)\n if corruption_type == 'shape' or corruption_type == 'all':\n corrupted_flame[:, :10] = flm_params[:, :10] + np.clip(np.random.\n normal(0, sigma, flm_params[:, :10].shape), -3 * sigma, 3 * sigma\n ).astype('float32')\n if corruption_type == 'exp_jaw' or corruption_type == 'all':\n corrupted_flame[:, 100:110] = flm_params[:, 100:110] + np.clip(np.\n random.normal(0, sigma, flm_params[:, 100:110].shape), -3 *\n sigma, 3 * sigma).astype('float32')\n corrupted_flame[:, 153] = flm_params[:, 153] + np.random.normal(0,\n jaw_sigma, corrupted_flame.shape[0])\n if corruption_type == 'pose' or corruption_type == 'all':\n pose_perturbation = np.random.normal(0, pose_sigma, (\n corrupted_flame.shape[0],))\n corrupted_flame[:, 151] = flm_params[:, 151] + np.clip(\n pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)\n return corrupted_flame\n\n\nsave_images = 
True\ncode_size = 236\nuse_inst_norm = True\ncore_tensor_res = 4\nresolution = 256\nalpha = 1\nstep_max = int(np.log2(resolution) - 2)\nnum_smpl_to_eval_on = 128\nuse_styled_conv_stylegan2 = True\nflength = 5000\ncam_t = np.array([0.0, 0.0, 0])\ncamera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)\nrun_ids_1 = [29]\nsettings_for_runs = {(24): {'name': 'vector_cond', 'model_idx': '216000_1',\n 'normal_maps_as_cond': False, 'rendered_flame_as_condition': False,\n 'apply_sqrt2_fac_in_eq_lin': False}, (29): {'name': 'full_model',\n 'model_idx': '294000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},\n (7): {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1',\n 'normal_maps_as_cond': False, 'rendered_flame_as_condition': True,\n 'apply_sqrt2_fac_in_eq_lin': False}, (3): {'name': 'norm_mp_tex_interp',\n 'model_idx': '203000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': \n False}, (8): {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx':\n '009000_1', 'normal_maps_as_cond': True, 'rendered_flame_as_condition':\n True, 'apply_sqrt2_fac_in_eq_lin': False}}\noverlay_visualizer = OverLayViz()\nflm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')\nfl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()\nnp.random.seed(2)\nfor i, key in enumerate(fl_param_dict):\n flame_param = fl_param_dict[key]\n shape_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(97))\n ).astype('float32')\n exp_params = np.concatenate((np.random.normal(0, 1, [3]), np.zeros(47))\n ).astype('float32')\n pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0, np.\n random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')\n texture = np.random.normal(0, 1, [50]).astype('float32')\n flame_param = np.hstack((shape_params, exp_params, pose, flame_param[\n 'cam'], texture, 
flame_param['lit'].flatten()))\n flm_params[i, :] = flame_param.astype('float32')\n if i == num_smpl_to_eval_on - 1:\n break\nbatch_size = 32\nnum_sigmas = 1\ncorruption_sigma = np.linspace(0, 1.5, num_sigmas)\njaw_rot_range = 0, np.pi / 8\njaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0]) / 6,\n num_sigmas)\npose_range = -np.pi / 3, np.pi / 3\npose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0]) / 6, num_sigmas)\nconfig_obj = util.dict2obj(cnst.flame_config)\nflame_decoder = FLAME.FLAME(config_obj).cuda().eval()\nfor run_idx in run_ids_1:\n generator_1 = torch.nn.DataParallel(StyledGenerator(\n embedding_vocab_size=69158, rendered_flame_ascondition=\n settings_for_runs[run_idx]['rendered_flame_as_condition'],\n normal_maps_as_cond=settings_for_runs[run_idx][\n 'normal_maps_as_cond'], apply_sqrt2_fac_in_eq_lin=settings_for_runs\n [run_idx]['apply_sqrt2_fac_in_eq_lin'], core_tensor_res=\n core_tensor_res, w_truncation_factor=1.0, n_mlp=8)).cuda()\n model_idx = settings_for_runs[run_idx]['model_idx']\n ckpt1 = torch.load(\n f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')\n generator_1.load_state_dict(ckpt1['generator_running'])\n generator_1 = generator_1.eval()\n params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [],\n 'light_code': [], 'texture_code': [], 'identity_indices': []}\n for i, sigma in enumerate(corruption_sigma):\n images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)\n ).astype('float32')\n flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution,\n resolution)).astype('float32')\n pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))\n pbar.set_description('Generating_images')\n for batch_idx in pbar:\n flm_batch = flm_params[batch_idx:batch_idx + batch_size, :]\n flm_batch = torch.from_numpy(flm_batch).cuda()\n flm_batch = position_to_given_location(flame_decoder, flm_batch)\n if settings_for_runs[run_idx]['normal_maps_as_cond'\n ] or 
settings_for_runs[run_idx]['rendered_flame_as_condition']:\n batch_size_true = flm_batch.shape[0]\n cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.\n DECA_IDX['cam'][1]]\n shape = flm_batch[:, constants.INDICES['SHAPE'][0]:\n constants.INDICES['SHAPE'][1]]\n exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.\n INDICES['EXP'][1]]\n pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.\n INDICES['POSE'][1]]\n light_code = flm_batch[:, constants.DECA_IDX['lit'][0]:\n constants.DECA_IDX['lit'][1]].view((batch_size_true, 9, 3))\n texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:\n constants.DECA_IDX['tex'][1]]\n params_to_save['cam'].append(cam.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['exp'].append(exp.cpu().detach().numpy())\n params_to_save['pose'].append(pose.cpu().detach().numpy())\n params_to_save['light_code'].append(light_code.cpu().detach\n ().numpy())\n params_to_save['texture_code'].append(texture_code.cpu().\n detach().numpy())\n norma_map_img, _, _, _, rend_flm = (overlay_visualizer.\n get_rendered_mesh(flame_params=(shape, exp, pose,\n light_code, texture_code), camera_params=cam))\n rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1\n norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1\n rend_flm = fast_image_reshape(rend_flm, height_out=256,\n width_out=256, mode='bilinear')\n norma_map_img = fast_image_reshape(norma_map_img,\n height_out=256, width_out=256, mode='bilinear')\n norma_map_img_to_save, _, _, _, rend_flm_to_save = (\n overlay_visualizer.get_rendered_mesh(flame_params=(\n shape, exp, pose, light_code, texture_code),\n camera_params=cam, cull_backfaces=False,\n constant_albedo=0.6))\n rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1\n rend_flm_to_save = fast_image_reshape(rend_flm_to_save,\n height_out=256, width_out=256, mode='bilinear')\n else:\n rend_flm = None\n 
norma_map_img = None\n gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img,\n settings_for_runs[run_idx]['normal_maps_as_cond'],\n settings_for_runs[run_idx]['rendered_flame_as_condition'])\n identity_embeddings = torch.randint(low=0, high=69158, size=(\n gen_1_in.shape[0],), dtype=torch.long, device='cuda')\n mdl_1_gen_images = generic_utils.get_images_from_flame_params(\n flame_params=gen_1_in.cpu().numpy(), pose=None, model=\n generator_1, step=step_max, alpha=alpha, input_indices=\n identity_embeddings.cpu().numpy())\n params_to_save['identity_indices'].append(identity_embeddings.\n cpu().detach().numpy())\n images[batch_idx:batch_idx + batch_size_true] = torch.clamp(\n mdl_1_gen_images, -1, 1).cpu().numpy()\n flame_mesh_imgs[batch_idx:batch_idx + batch_size_true\n ] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()\n if save_images:\n mdl_name = settings_for_runs[run_idx]['name']\n for key in params_to_save.keys():\n params_to_save[key] = np.concatenate(params_to_save[key],\n axis=0)\n save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx\n ), f'random_samples_q_eval_{mdl_name}')\n os.makedirs(save_dir, exist_ok=True)\n np.save(os.path.join(save_dir, 'params.npy'), params_to_save)\n save_path_current_id = os.path.join(save_dir, 'images')\n save_set_of_images(path=save_path_current_id, prefix='', images\n =(images + 1) / 2, show_prog_bar=True)\n save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions'\n )\n save_set_of_images(path=save_path_current_id_flm_rndr, prefix=\n 'mesh', images=(flame_mesh_imgs + 1) / 2, show_prog_bar=True)\n",
"step-5": "import sys\nsys.path.append('../')\nimport constants as cnst\nimport os\nos.environ['PYTHONHASHSEED'] = '2'\nimport tqdm\nfrom model.stg2_generator import StyledGenerator\nimport numpy as np\nfrom my_utils.visualize_flame_overlay import OverLayViz\nfrom my_utils.flm_dynamic_fit_overlay import camera_ringnetpp\nfrom my_utils.generic_utils import save_set_of_images\nimport constants\nfrom dataset_loaders import fast_image_reshape\nimport torch\nfrom my_utils import generic_utils\nfrom my_utils.eye_centering import position_to_given_location\nfrom copy import deepcopy\nfrom my_utils.photometric_optimization.models import FLAME\nfrom my_utils.photometric_optimization import util\n\n\ndef ge_gen_in(flm_params, textured_rndr, norm_map, normal_map_cond, texture_cond):\n if normal_map_cond and texture_cond:\n return torch.cat((textured_rndr, norm_map), dim=1)\n elif normal_map_cond:\n return norm_map\n elif texture_cond:\n return textured_rndr\n else:\n return flm_params\n\n\ndef corrupt_flame_given_sigma(flm_params, corruption_type, sigma, jaw_sigma, pose_sigma):\n # import ipdb; ipdb.set_trace()\n # np.random.seed(2)\n corrupted_flame = deepcopy(flm_params)\n if corruption_type == 'shape' or corruption_type == 'all':\n corrupted_flame[:, :10] = flm_params[:, :10] + \\\n np.clip(np.random.normal(0, sigma, flm_params[:, :10].shape),\n -3 * sigma, 3 * sigma).astype('float32')\n if corruption_type == 'exp_jaw'or corruption_type == 'all':\n # Expression\n corrupted_flame[:, 100:110] = flm_params[:, 100:110] + \\\n np.clip(np.random.normal(0, sigma, flm_params[:, 100:110].shape),\n -3 * sigma, 3 * sigma).astype('float32')\n # Jaw pose\n corrupted_flame[:, 153] = flm_params[:, 153] + \\\n np.random.normal(0, jaw_sigma, corrupted_flame.shape[0])\n\n if corruption_type == 'pose' or corruption_type == 'all':\n # pose_perturbation = np.random.normal(0, pose_sigma[i], (corrupted_flame.shape[0], 3))\n # corrupted_flame[:, 150:153] += np.clip(pose_perturbation, -3 * 
pose_sigma[i], 3 * pose_sigma[i])\n pose_perturbation = np.random.normal(0, pose_sigma, (corrupted_flame.shape[0],))\n corrupted_flame[:, 151] = flm_params[:, 151] + \\\n np.clip(pose_perturbation, -3 * pose_sigma, 3 * pose_sigma)\n\n return corrupted_flame\n\n\n# General settings\nsave_images = True\ncode_size = 236\nuse_inst_norm = True\ncore_tensor_res = 4\nresolution = 256\nalpha = 1\nstep_max = int(np.log2(resolution) - 2)\nnum_smpl_to_eval_on = 128\nuse_styled_conv_stylegan2 = True\n\nflength = 5000\ncam_t = np.array([0., 0., 0])\ncamera_params = camera_ringnetpp((512, 512), trans=cam_t, focal=flength)\n\n# Uncomment the appropriate run_id\nrun_ids_1 = [29, ] # with sqrt(2)\n# run_ids_1 = [7, 24, 8, 3]\n# run_ids_1 = [7, 8, 3]\n# run_ids_1 = [7]\n\nsettings_for_runs = \\\n {24: {'name': 'vector_cond', 'model_idx': '216000_1', 'normal_maps_as_cond': False,\n 'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': False},\n 29: {'name': 'full_model', 'model_idx': '294000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': True},\n 7: {'name': 'flm_rndr_tex_interp', 'model_idx': '051000_1', 'normal_maps_as_cond': False,\n 'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': False},\n 3: {'name': 'norm_mp_tex_interp', 'model_idx': '203000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': False, 'apply_sqrt2_fac_in_eq_lin': False},\n 8: {'name': 'norm_map_rend_flm_no_tex_interp', 'model_idx': '009000_1', 'normal_maps_as_cond': True,\n 'rendered_flame_as_condition': True, 'apply_sqrt2_fac_in_eq_lin': False},}\n\n\noverlay_visualizer = OverLayViz()\n# overlay_visualizer.setup_renderer(mesh_file=None)\n\nflm_params = np.zeros((num_smpl_to_eval_on, code_size)).astype('float32')\nfl_param_dict = np.load(cnst.all_flame_params_file, allow_pickle=True).item()\nnp.random.seed(2)\nfor i, key in enumerate(fl_param_dict):\n flame_param = fl_param_dict[key]\n shape_params = 
np.concatenate((np.random.normal(0, 1, [3,]), np.zeros(97))).astype('float32')\n exp_params = np.concatenate((np.random.normal(0, 1, [3,]), np.zeros(47))).astype('float32')\n # +- pi/4 for bad samples +- pi/8 for good samples\n # pose = np.array([0, np.random.uniform(-np.pi/4, np.pi/4, 1), 0,\n # np.random.uniform(0, np.pi/12, 1), 0, 0]).astype('float32')\n pose = np.array([0, np.random.uniform(-np.pi / 8, np.pi / 8, 1), 0,\n np.random.uniform(0, np.pi / 12, 1), 0, 0]).astype('float32')\n texture = np.random.normal(0, 1, [50]).astype('float32')\n # texture = flame_param['tex']\n flame_param = np.hstack((shape_params, exp_params, pose, flame_param['cam'],\n texture, flame_param['lit'].flatten()))\n # tz = camera_params['f'][0] / (camera_params['c'][0] * flame_param[:, 156:157])\n # flame_param[:, 156:159] = np.concatenate((flame_param[:, 157:], tz), axis=1)\n\n # import ipdb; ipdb.set_trace()\n flm_params[i, :] = flame_param.astype('float32')\n if i == num_smpl_to_eval_on - 1:\n break\n\nbatch_size = 32\n\nnum_sigmas = 1\ncorruption_sigma = np.linspace(0, 1.5, num_sigmas)\njaw_rot_range = (0, np.pi/8)\njaw_rot_sigmas = np.linspace(0, (jaw_rot_range[1] - jaw_rot_range[0])/6, num_sigmas)\npose_range = (-np.pi/3, np.pi/3)\npose_sigmas = np.linspace(0, (pose_range[1] - pose_range[0])/6, num_sigmas)\nconfig_obj = util.dict2obj(cnst.flame_config)\nflame_decoder = FLAME.FLAME(config_obj).cuda().eval()\n\nfor run_idx in run_ids_1:\n # import ipdb; ipdb.set_trace()\n generator_1 = torch.nn.DataParallel(\n StyledGenerator(embedding_vocab_size=69158,\n rendered_flame_ascondition=settings_for_runs[run_idx]['rendered_flame_as_condition'],\n normal_maps_as_cond=settings_for_runs[run_idx]['normal_maps_as_cond'],\n apply_sqrt2_fac_in_eq_lin=settings_for_runs[run_idx]['apply_sqrt2_fac_in_eq_lin'],\n core_tensor_res=core_tensor_res,\n w_truncation_factor=1.0,\n n_mlp=8)).cuda()\n model_idx = settings_for_runs[run_idx]['model_idx']\n ckpt1 = 
torch.load(f'{cnst.output_root}checkpoint/{run_idx}/{model_idx}.model')\n generator_1.load_state_dict(ckpt1['generator_running'])\n generator_1 = generator_1.eval()\n\n params_to_save = {'cam': [], 'shape': [], 'exp': [], 'pose': [], 'light_code': [], 'texture_code': [],\n 'identity_indices': []}\n\n for i, sigma in enumerate(corruption_sigma):\n images = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)).astype('float32')\n flame_mesh_imgs = np.zeros((num_smpl_to_eval_on, 3, resolution, resolution)).astype('float32')\n pbar = tqdm.tqdm(range(0, num_smpl_to_eval_on, batch_size))\n pbar.set_description('Generating_images')\n # print(flm_params[1, :])\n for batch_idx in pbar:\n flm_batch = flm_params[batch_idx:batch_idx+batch_size, :]\n flm_batch = torch.from_numpy(flm_batch).cuda()\n # flm_batch = eye_cntr_reg.substitute_flame_batch_with_regressed_camera(flm_batch)\n flm_batch = position_to_given_location(flame_decoder, flm_batch)\n\n if settings_for_runs[run_idx]['normal_maps_as_cond'] or \\\n settings_for_runs[run_idx]['rendered_flame_as_condition']:\n\n batch_size_true = flm_batch.shape[0]\n cam = flm_batch[:, constants.DECA_IDX['cam'][0]:constants.DECA_IDX['cam'][1]:]\n shape = flm_batch[:, constants.INDICES['SHAPE'][0]:constants.INDICES['SHAPE'][1]]\n exp = flm_batch[:, constants.INDICES['EXP'][0]:constants.INDICES['EXP'][1]]\n pose = flm_batch[:, constants.INDICES['POSE'][0]:constants.INDICES['POSE'][1]]\n # import ipdb; ipdb.set_trace()\n light_code = \\\n flm_batch[:, constants.DECA_IDX['lit'][0]:constants.DECA_IDX['lit'][1]:].view((batch_size_true, 9, 3))\n texture_code = flm_batch[:, constants.DECA_IDX['tex'][0]:constants.DECA_IDX['tex'][1]:]\n\n params_to_save['cam'].append(cam.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['shape'].append(shape.cpu().detach().numpy())\n params_to_save['exp'].append(exp.cpu().detach().numpy())\n params_to_save['pose'].append(pose.cpu().detach().numpy())\n 
params_to_save['light_code'].append(light_code.cpu().detach().numpy())\n params_to_save['texture_code'].append(texture_code.cpu().detach().numpy())\n\n norma_map_img, _, _, _, rend_flm = \\\n overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, light_code, texture_code),\n camera_params=cam)\n # import ipdb; ipdb.set_trace()\n\n rend_flm = torch.clamp(rend_flm, 0, 1) * 2 - 1\n norma_map_img = torch.clamp(norma_map_img, 0, 1) * 2 - 1\n rend_flm = fast_image_reshape(rend_flm, height_out=256, width_out=256, mode='bilinear')\n norma_map_img = fast_image_reshape(norma_map_img, height_out=256, width_out=256, mode='bilinear')\n\n # Render the 2nd time to get backface culling and white texture\n # norma_map_img_to_save, _, _, _, rend_flm_to_save = \\\n # overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, light_code, texture_code),\n # camera_params=cam, cull_backfaces=True, constant_albedo=0.6)\n # Back face culling temporarily un-availabe\n\n norma_map_img_to_save, _, _, _, rend_flm_to_save = \\\n overlay_visualizer.get_rendered_mesh(flame_params=(shape, exp, pose, light_code, texture_code),\n camera_params=cam, cull_backfaces=False, constant_albedo=0.6)\n rend_flm_to_save = torch.clamp(rend_flm_to_save, 0, 1) * 2 - 1\n # rend_flm_to_save = rend_flm\n # norma_map_img_to_save = torch.clamp(norma_map_img, 0, 1) * 2 - 1\n rend_flm_to_save = fast_image_reshape(rend_flm_to_save, height_out=256, width_out=256, mode='bilinear')\n # norma_map_img_to_save = fast_image_reshape(norma_map_img, height_out=256, width_out=256, mode='bilinear')\n\n else:\n rend_flm = None\n norma_map_img = None\n\n gen_1_in = ge_gen_in(flm_batch, rend_flm, norma_map_img, settings_for_runs[run_idx]['normal_maps_as_cond'],\n settings_for_runs[run_idx]['rendered_flame_as_condition'])\n\n # torch.manual_seed(2)\n identity_embeddings = torch.randint(low=0, high=69158, size=(gen_1_in.shape[0], ), dtype=torch.long,\n device='cuda')\n mdl_1_gen_images = 
generic_utils.get_images_from_flame_params(\n flame_params=gen_1_in.cpu().numpy(), pose=None,\n model=generator_1,\n step=step_max, alpha=alpha,\n input_indices=identity_embeddings.cpu().numpy())\n\n params_to_save['identity_indices'].append(identity_embeddings.cpu().detach().numpy())\n # import ipdb; ipdb.set_trace()\n images[batch_idx:batch_idx+batch_size_true] = torch.clamp(mdl_1_gen_images, -1, 1).cpu().numpy()\n # if flame_mesh_imgs is None:\n flame_mesh_imgs[batch_idx:batch_idx+batch_size_true] = torch.clamp(rend_flm_to_save, -1, 1).cpu().numpy()\n\n if save_images:\n mdl_name = settings_for_runs[run_idx]['name']\n for key in params_to_save.keys():\n params_to_save[key] = np.concatenate(params_to_save[key], axis=0)\n\n save_dir = os.path.join(cnst.output_root, 'sample', str(run_idx), f'random_samples_q_eval_{mdl_name}')\n os.makedirs(save_dir, exist_ok=True)\n np.save(os.path.join(save_dir, 'params.npy'), params_to_save)\n\n save_path_current_id = os.path.join(save_dir, 'images')\n save_set_of_images(path=save_path_current_id, prefix='', images=(images + 1) / 2, show_prog_bar=True)\n\n #save flam rndr\n save_path_current_id_flm_rndr = os.path.join(save_dir, 'conditions')\n save_set_of_images(path=save_path_current_id_flm_rndr, prefix='mesh', images=(flame_mesh_imgs + 1) / 2,\n show_prog_bar=True)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Examples of Python operators.

# '+' addition: adds two numbers, or concatenates two strings.
a = 7 + 8
print(a)
b = "GOOD" + "Job"
print(b)

# '-' negation / subtraction: negates a number or subtracts two numbers.
c = -7
print(c)
print(19 - 1)

# '*' multiplication: multiplies two numbers, or repeats a string n times.
d = 4 * 7
print(d)
e = "hello" * 7
print(e)

# '/' true division: in Python 3 this always yields the exact float result.
f = 7 / 2
print(f)

# '**' exponentiation.
g = 2 ** 3
print(g)

# '<' less-than comparison: yields a boolean value.
h = 3 < 7
print(h)
normal
|
{
"blob_id": "d28f5f95b375a1e075fdfcbc0350c90cf96f0212",
"index": 9694,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(a)\n<mask token>\nprint(b)\n<mask token>\nprint(c)\nprint(19 - 1)\n<mask token>\nprint(d)\n<mask token>\nprint(e)\n<mask token>\nprint(f)\n<mask token>\nprint(g)\n<mask token>\nprint(h)\n",
"step-3": "a = 7 + 8\nprint(a)\nb = 'GOOD' + 'Job'\nprint(b)\nc = -7\nprint(c)\nprint(19 - 1)\nd = 4 * 7\nprint(d)\ne = 'hello' * 7\nprint(e)\nf = 7 / 2\nprint(f)\ng = 2 ** 3\nprint(g)\nh = 3 < 7\nprint(h)\n",
"step-4": "#python的运算符实例\n#'+'加号\n# 俩个对象相加(可以是俩个数字,也可以是俩个字符串(将俩个字符串连接))\na=7+8\nprint(a)\nb=\"GOOD\"+\"Job\"\nprint(b)\n\n#'-'减号\n#取一个数字的相反数或者实现俩个数字相减\nc=-7\nprint(c)\nprint(19-1)\n\n#'*'乘号\n#如果是数字则进行乘法运算,字符串则复制若干次\nd=4*7\nprint(d)\ne=\"hello\"*7\nprint(e)\n\n#'/'除号\n#表示俩个数字相除(Python 3.0中会直接输出正确的值)\nf=7/2\nprint(f)\n\n#'**'求幂运算\ng=2**3\nprint(g)\n\n#'<'小于号 返回一个布尔值\nh=3<7\nprint(h)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import requests
from lxml import etree
from pymongo import MongoClient
from lib.rabbitmq import Rabbit
from lib.log import LogHandler
from lib.proxy_iterator import Proxies
import yaml
import json
import datetime
import re
import time
# Load local configuration.  safe_load avoids constructing arbitrary Python
# objects from YAML tags, and the with-block closes the file handle that the
# original yaml.load(open(...)) leaked.  (Assumes the config uses only plain
# YAML types, which safe_load supports.)
with open('config_local.yaml') as _config_file:
    setting = yaml.safe_load(_config_file)

log = LogHandler('article_consumer')

# MongoDB: one shared client, authenticated against the admin database.
m = MongoClient(setting['mongo_config']['config_host'], setting['mongo_config']['port'])
m.admin.authenticate(setting['mongo_config']['user_name'], setting['mongo_config']['password'])
# Per-source detail-page parse rules (xpath patterns) live in this collection.
collection = m[setting['mongo_config']['config_db']][setting['mongo_config']['coll_detail']]
# Per-source regex cleaning rules for author/post_time/source_detail fields.
clean_coll = m[setting['mongo_config']['config_db']][setting['mongo_config']['clean']]

# RabbitMQ connection shared by the consumer class below.
rabbit = Rabbit(setting['rabbitmq_host'], setting['rabbitmq_port'])
connection = rabbit.connection
class CrawlerDetail:
    """Article detail-page consumer.

    Pulls detail URLs from the RabbitMQ ``usual_article`` queue, fetches each
    page through rotating proxies, extracts fields with per-source xpath rules
    stored in MongoDB, regex-cleans selected fields, and republishes the
    enriched message onto the ``article_body`` queue for body cleaning.
    """

    def __init__(self):
        # Rotating proxy iterator; each HTTP request uses next(self.proxy).
        self.proxy = Proxies()

    def start_consume(self):
        """Block forever consuming 'usual_article', one message at a time."""
        channel = connection.channel()
        channel.queue_declare(queue='usual_article')
        # prefetch_count=1: broker will not deliver a new message until the
        # current one has been acked, so slow pages never pile up.
        channel.basic_qos(prefetch_count=1)
        channel.basic_consume(self.consume_article_detail_url,
                              queue='usual_article',
                              no_ack=False)
        channel.start_consuming()

    def clean(self, message):
        """Regex-clean the author, post_time and source_detail fields in place.

        Cleaning patterns come from the per-source document in ``clean_coll``.
        A field whose pattern fails to match (or is missing/None) is reset to
        None so downstream consumers see an explicit "unknown".

        :param message: article dict; mutated in place.
        :return: the same (mutated) message dict.
        """
        clean = clean_coll.find_one({'source': message['source']})
        if clean['post_time'] is not None:
            try:
                post_time = re.search(clean['post_time'], message['post_time'], re.S | re.M).group(1)
                message['post_time'] = post_time
            except:
                log.info("post_time清洗失败{}".format(message['post_time']))
                message['post_time'] = None
        if clean['author'] is not None:
            try:
                author = re.search(clean['author'], message['author']).group(1)
                message['author'] = author
            except:
                log.info("author清洗失败{}".format(message['author']))
                message['author'] = None
        if clean['source_detail'] is not None:
            try:
                source_detail = re.search(clean['source_detail'], message['source_detail'], re.S | re.M).group(1)
                message['source_detail'] = source_detail
            except:
                log.info("source_detail清洗失败{}".format(message['source_detail']))
                message['source_detail'] = None
        return message

    def consume_article_detail_url(self, ch, method, properties, body):
        """Parse one article detail page and forward it for body cleaning.

        :param ch: pika channel the message arrived on (used for ack)
        :param method: pika delivery metadata (delivery_tag used for ack)
        :param properties: pika message properties (unused)
        :param body: JSON-encoded message bytes
        """
        message = json.loads(body.decode())

        # Fetch the page, retrying up to 10 times through rotating proxies.
        # BUG FIX: the original checked `if i == 10` after `for i in range(10)`,
        # which can never be true (i stops at 9), so total failure fell through
        # and crashed with a NameError on the undefined `html` (and lacked a
        # `return` even when it did log).  Track success explicitly instead.
        html = None
        for _ in range(10):
            try:
                html = requests.get(message['detail_url'], timeout=10, proxies=next(self.proxy))
                # Keep the RabbitMQ heartbeat alive during slow downloads.
                connection.process_data_events()
                if html.status_code == 200:
                    break
            except Exception:
                connection.process_data_events()
        if html is None or html.status_code != 200:
            log.error("请求文章详情页{}失败".format(message['detail_url']))
            ch.basic_ack(delivery_tag=method.delivery_tag)
            return

        # Decode as UTF-8 first, then fall back to GBK (common for zh sites).
        try:
            con = html.content.decode()
        except:
            try:
                con = html.content.decode('gbk')
            except:
                log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))
                ch.basic_ack(delivery_tag=method.delivery_tag)
                return
        page = etree.HTML(con)

        # Per-source xpath rules describing how to parse this detail page.
        detail_config_dict = collection.find_one({'source': message['source']})

        if detail_config_dict['body'] is not None:
            try:
                # Body patterns are tried in order; the first that matches wins.
                for pattern in detail_config_dict['body']:
                    if page.xpath(pattern):
                        article_body = page.xpath(pattern)[0]
                        message['body'] = etree.tounicode(article_body)
                        break
            except:
                log.error('xpath语句未能解析body')
                ch.basic_ack(delivery_tag=method.delivery_tag)
                return
        if detail_config_dict['comment_count'] is not None:
            message['comment_count'] = page.xpath(detail_config_dict['comment_count'])[0]
        if detail_config_dict['like_count'] is not None:
            message['like_count'] = page.xpath(detail_config_dict['like_count'])[0]
        if detail_config_dict['read_num'] is not None:
            message['read_num'] = page.xpath(detail_config_dict['read_num'])[0]
        if detail_config_dict['author'] is not None:
            try:
                message['author'] = page.xpath(detail_config_dict['author'])[0]
            except:
                log.info("没有提取到{}作者字段".format(message['detail_url']))
        if detail_config_dict['post_time'] is not None:
            try:
                message['post_time'] = page.xpath(detail_config_dict['post_time'])[0]
            except:
                log.info("没有提取到{}文章发表时间".format(message['detail_url']))
        if detail_config_dict['tag'] is not None:
            message['tag'] = page.xpath(detail_config_dict['tag'])[0]
        if detail_config_dict['source_detail'] is not None:
            try:
                message['source_detail'] = page.xpath(detail_config_dict['source_detail'])[0]
            except:
                log.info("没有提取到{}文章详细来源".format(message['detail_url']))

        # clean() mutates `message` in place; the return value is the same dict.
        self.clean(message)

        # Republish onto the body-cleaning queue, then ack the source message.
        produce_channel = connection.channel()
        produce_channel.queue_declare('article_body')
        article_text = json.dumps(message)
        produce_channel.basic_publish(exchange='',
                                      routing_key='article_body',
                                      body=article_text)
        log.info('{}已经放入清洗队列'.format(message['title']))
        ch.basic_ack(delivery_tag=method.delivery_tag)
        produce_channel.close()
|
normal
|
{
"blob_id": "cd1d8a73b6958775a212d80b50de74f4b4de18bf",
"index": 6319,
"step-1": "<mask token>\n\n\nclass CrawlerDetail:\n\n def __init__(self):\n self.proxy = Proxies()\n\n def start_consume(self):\n channel = connection.channel()\n channel.queue_declare(queue='usual_article')\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(self.consume_article_detail_url, queue=\n 'usual_article', no_ack=False)\n channel.start_consuming()\n <mask token>\n\n def consume_article_detail_url(self, ch, method, properties, body):\n \"\"\"\n 文章详情页解析\n :param ch:\n :param method:\n :param properties:\n :param body: json格式字符串\n :return:\n \"\"\"\n message = json.loads(body.decode())\n for i in range(10):\n try:\n html = requests.get(message['detail_url'], timeout=10,\n proxies=next(self.proxy))\n connection.process_data_events()\n if html.status_code == 200:\n break\n except Exception as e:\n connection.process_data_events()\n if i == 10:\n log.error('请求文章详情页{}失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n try:\n con = html.content.decode()\n except:\n try:\n con = html.content.decode('gbk')\n except:\n log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n page = etree.HTML(con)\n detail_config_dict = collection.find_one({'source': message['source']})\n if detail_config_dict['body'] is not None:\n try:\n for pattern in detail_config_dict['body']:\n if page.xpath(pattern):\n article_body = page.xpath(pattern)[0]\n message['body'] = etree.tounicode(article_body)\n break\n except:\n log.error('xpath语句未能解析body')\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n if detail_config_dict['comment_count'] is not None:\n message['comment_count'] = page.xpath(detail_config_dict[\n 'comment_count'])[0]\n if detail_config_dict['like_count'] is not None:\n message['like_count'] = page.xpath(detail_config_dict['like_count']\n )[0]\n if detail_config_dict['read_num'] is not None:\n message['read_num'] = 
page.xpath(detail_config_dict['read_num'])[0]\n if detail_config_dict['author'] is not None:\n try:\n message['author'] = page.xpath(detail_config_dict['author'])[0]\n except:\n log.info('没有提取到{}作者字段'.format(message['detail_url']))\n if detail_config_dict['post_time'] is not None:\n try:\n message['post_time'] = page.xpath(detail_config_dict[\n 'post_time'])[0]\n except:\n log.info('没有提取到{}文章发表时间'.format(message['detail_url']))\n if detail_config_dict['tag'] is not None:\n message['tag'] = page.xpath(detail_config_dict['tag'])[0]\n if detail_config_dict['source_detail'] is not None:\n try:\n message['source_detail'] = page.xpath(detail_config_dict[\n 'source_detail'])[0]\n except:\n log.info('没有提取到{}文章详细来源'.format(message['detail_url']))\n self.clean(message)\n produce_channel = connection.channel()\n produce_channel.queue_declare('article_body')\n article_text = json.dumps(message)\n produce_channel.basic_publish(exchange='', routing_key=\n 'article_body', body=article_text)\n log.info('{}已经放入清洗队列'.format(message['title']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n produce_channel.close()\n",
"step-2": "<mask token>\n\n\nclass CrawlerDetail:\n\n def __init__(self):\n self.proxy = Proxies()\n\n def start_consume(self):\n channel = connection.channel()\n channel.queue_declare(queue='usual_article')\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(self.consume_article_detail_url, queue=\n 'usual_article', no_ack=False)\n channel.start_consuming()\n\n def clean(self, message):\n \"\"\"\n 作者,发布时间,详细来源字段清洗\n :param message:\n :return:\n \"\"\"\n clean = clean_coll.find_one({'source': message['source']})\n if clean['post_time'] is not None:\n try:\n post_time = re.search(clean['post_time'], message[\n 'post_time'], re.S | re.M).group(1)\n message['post_time'] = post_time\n except:\n log.info('post_time清洗失败{}'.format(message['post_time']))\n message['post_time'] = None\n if clean['author'] is not None:\n try:\n author = re.search(clean['author'], message['author']).group(1)\n message['author'] = author\n except:\n log.info('author清洗失败{}'.format(message['author']))\n message['author'] = None\n if clean['source_detail'] is not None:\n try:\n source_detail = re.search(clean['source_detail'], message[\n 'source_detail'], re.S | re.M).group(1)\n message['source_detail'] = source_detail\n except:\n log.info('source_detail清洗失败{}'.format(message['source_detail'])\n )\n message['source_detail'] = None\n return message\n\n def consume_article_detail_url(self, ch, method, properties, body):\n \"\"\"\n 文章详情页解析\n :param ch:\n :param method:\n :param properties:\n :param body: json格式字符串\n :return:\n \"\"\"\n message = json.loads(body.decode())\n for i in range(10):\n try:\n html = requests.get(message['detail_url'], timeout=10,\n proxies=next(self.proxy))\n connection.process_data_events()\n if html.status_code == 200:\n break\n except Exception as e:\n connection.process_data_events()\n if i == 10:\n log.error('请求文章详情页{}失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n try:\n con = html.content.decode()\n except:\n try:\n con 
= html.content.decode('gbk')\n except:\n log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n page = etree.HTML(con)\n detail_config_dict = collection.find_one({'source': message['source']})\n if detail_config_dict['body'] is not None:\n try:\n for pattern in detail_config_dict['body']:\n if page.xpath(pattern):\n article_body = page.xpath(pattern)[0]\n message['body'] = etree.tounicode(article_body)\n break\n except:\n log.error('xpath语句未能解析body')\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n if detail_config_dict['comment_count'] is not None:\n message['comment_count'] = page.xpath(detail_config_dict[\n 'comment_count'])[0]\n if detail_config_dict['like_count'] is not None:\n message['like_count'] = page.xpath(detail_config_dict['like_count']\n )[0]\n if detail_config_dict['read_num'] is not None:\n message['read_num'] = page.xpath(detail_config_dict['read_num'])[0]\n if detail_config_dict['author'] is not None:\n try:\n message['author'] = page.xpath(detail_config_dict['author'])[0]\n except:\n log.info('没有提取到{}作者字段'.format(message['detail_url']))\n if detail_config_dict['post_time'] is not None:\n try:\n message['post_time'] = page.xpath(detail_config_dict[\n 'post_time'])[0]\n except:\n log.info('没有提取到{}文章发表时间'.format(message['detail_url']))\n if detail_config_dict['tag'] is not None:\n message['tag'] = page.xpath(detail_config_dict['tag'])[0]\n if detail_config_dict['source_detail'] is not None:\n try:\n message['source_detail'] = page.xpath(detail_config_dict[\n 'source_detail'])[0]\n except:\n log.info('没有提取到{}文章详细来源'.format(message['detail_url']))\n self.clean(message)\n produce_channel = connection.channel()\n produce_channel.queue_declare('article_body')\n article_text = json.dumps(message)\n produce_channel.basic_publish(exchange='', routing_key=\n 'article_body', body=article_text)\n log.info('{}已经放入清洗队列'.format(message['title']))\n 
ch.basic_ack(delivery_tag=method.delivery_tag)\n produce_channel.close()\n",
"step-3": "<mask token>\nm.admin.authenticate(setting['mongo_config']['user_name'], setting[\n 'mongo_config']['password'])\n<mask token>\n\n\nclass CrawlerDetail:\n\n def __init__(self):\n self.proxy = Proxies()\n\n def start_consume(self):\n channel = connection.channel()\n channel.queue_declare(queue='usual_article')\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(self.consume_article_detail_url, queue=\n 'usual_article', no_ack=False)\n channel.start_consuming()\n\n def clean(self, message):\n \"\"\"\n 作者,发布时间,详细来源字段清洗\n :param message:\n :return:\n \"\"\"\n clean = clean_coll.find_one({'source': message['source']})\n if clean['post_time'] is not None:\n try:\n post_time = re.search(clean['post_time'], message[\n 'post_time'], re.S | re.M).group(1)\n message['post_time'] = post_time\n except:\n log.info('post_time清洗失败{}'.format(message['post_time']))\n message['post_time'] = None\n if clean['author'] is not None:\n try:\n author = re.search(clean['author'], message['author']).group(1)\n message['author'] = author\n except:\n log.info('author清洗失败{}'.format(message['author']))\n message['author'] = None\n if clean['source_detail'] is not None:\n try:\n source_detail = re.search(clean['source_detail'], message[\n 'source_detail'], re.S | re.M).group(1)\n message['source_detail'] = source_detail\n except:\n log.info('source_detail清洗失败{}'.format(message['source_detail'])\n )\n message['source_detail'] = None\n return message\n\n def consume_article_detail_url(self, ch, method, properties, body):\n \"\"\"\n 文章详情页解析\n :param ch:\n :param method:\n :param properties:\n :param body: json格式字符串\n :return:\n \"\"\"\n message = json.loads(body.decode())\n for i in range(10):\n try:\n html = requests.get(message['detail_url'], timeout=10,\n proxies=next(self.proxy))\n connection.process_data_events()\n if html.status_code == 200:\n break\n except Exception as e:\n connection.process_data_events()\n if i == 10:\n 
log.error('请求文章详情页{}失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n try:\n con = html.content.decode()\n except:\n try:\n con = html.content.decode('gbk')\n except:\n log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n page = etree.HTML(con)\n detail_config_dict = collection.find_one({'source': message['source']})\n if detail_config_dict['body'] is not None:\n try:\n for pattern in detail_config_dict['body']:\n if page.xpath(pattern):\n article_body = page.xpath(pattern)[0]\n message['body'] = etree.tounicode(article_body)\n break\n except:\n log.error('xpath语句未能解析body')\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n if detail_config_dict['comment_count'] is not None:\n message['comment_count'] = page.xpath(detail_config_dict[\n 'comment_count'])[0]\n if detail_config_dict['like_count'] is not None:\n message['like_count'] = page.xpath(detail_config_dict['like_count']\n )[0]\n if detail_config_dict['read_num'] is not None:\n message['read_num'] = page.xpath(detail_config_dict['read_num'])[0]\n if detail_config_dict['author'] is not None:\n try:\n message['author'] = page.xpath(detail_config_dict['author'])[0]\n except:\n log.info('没有提取到{}作者字段'.format(message['detail_url']))\n if detail_config_dict['post_time'] is not None:\n try:\n message['post_time'] = page.xpath(detail_config_dict[\n 'post_time'])[0]\n except:\n log.info('没有提取到{}文章发表时间'.format(message['detail_url']))\n if detail_config_dict['tag'] is not None:\n message['tag'] = page.xpath(detail_config_dict['tag'])[0]\n if detail_config_dict['source_detail'] is not None:\n try:\n message['source_detail'] = page.xpath(detail_config_dict[\n 'source_detail'])[0]\n except:\n log.info('没有提取到{}文章详细来源'.format(message['detail_url']))\n self.clean(message)\n produce_channel = connection.channel()\n produce_channel.queue_declare('article_body')\n article_text = json.dumps(message)\n 
produce_channel.basic_publish(exchange='', routing_key=\n 'article_body', body=article_text)\n log.info('{}已经放入清洗队列'.format(message['title']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n produce_channel.close()\n",
"step-4": "import requests\nfrom lxml import etree\nfrom pymongo import MongoClient\nfrom lib.rabbitmq import Rabbit\nfrom lib.log import LogHandler\nfrom lib.proxy_iterator import Proxies\nimport yaml\nimport json\nimport datetime\nimport re\nimport time\nsetting = yaml.load(open('config_local.yaml'))\nlog = LogHandler('article_consumer')\nm = MongoClient(setting['mongo_config']['config_host'], setting[\n 'mongo_config']['port'])\nm.admin.authenticate(setting['mongo_config']['user_name'], setting[\n 'mongo_config']['password'])\ncollection = m[setting['mongo_config']['config_db']][setting['mongo_config'\n ]['coll_detail']]\nclean_coll = m[setting['mongo_config']['config_db']][setting['mongo_config'\n ]['clean']]\nrabbit = Rabbit(setting['rabbitmq_host'], setting['rabbitmq_port'])\nconnection = rabbit.connection\n\n\nclass CrawlerDetail:\n\n def __init__(self):\n self.proxy = Proxies()\n\n def start_consume(self):\n channel = connection.channel()\n channel.queue_declare(queue='usual_article')\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(self.consume_article_detail_url, queue=\n 'usual_article', no_ack=False)\n channel.start_consuming()\n\n def clean(self, message):\n \"\"\"\n 作者,发布时间,详细来源字段清洗\n :param message:\n :return:\n \"\"\"\n clean = clean_coll.find_one({'source': message['source']})\n if clean['post_time'] is not None:\n try:\n post_time = re.search(clean['post_time'], message[\n 'post_time'], re.S | re.M).group(1)\n message['post_time'] = post_time\n except:\n log.info('post_time清洗失败{}'.format(message['post_time']))\n message['post_time'] = None\n if clean['author'] is not None:\n try:\n author = re.search(clean['author'], message['author']).group(1)\n message['author'] = author\n except:\n log.info('author清洗失败{}'.format(message['author']))\n message['author'] = None\n if clean['source_detail'] is not None:\n try:\n source_detail = re.search(clean['source_detail'], message[\n 'source_detail'], re.S | re.M).group(1)\n 
message['source_detail'] = source_detail\n except:\n log.info('source_detail清洗失败{}'.format(message['source_detail'])\n )\n message['source_detail'] = None\n return message\n\n def consume_article_detail_url(self, ch, method, properties, body):\n \"\"\"\n 文章详情页解析\n :param ch:\n :param method:\n :param properties:\n :param body: json格式字符串\n :return:\n \"\"\"\n message = json.loads(body.decode())\n for i in range(10):\n try:\n html = requests.get(message['detail_url'], timeout=10,\n proxies=next(self.proxy))\n connection.process_data_events()\n if html.status_code == 200:\n break\n except Exception as e:\n connection.process_data_events()\n if i == 10:\n log.error('请求文章详情页{}失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n try:\n con = html.content.decode()\n except:\n try:\n con = html.content.decode('gbk')\n except:\n log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n page = etree.HTML(con)\n detail_config_dict = collection.find_one({'source': message['source']})\n if detail_config_dict['body'] is not None:\n try:\n for pattern in detail_config_dict['body']:\n if page.xpath(pattern):\n article_body = page.xpath(pattern)[0]\n message['body'] = etree.tounicode(article_body)\n break\n except:\n log.error('xpath语句未能解析body')\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n if detail_config_dict['comment_count'] is not None:\n message['comment_count'] = page.xpath(detail_config_dict[\n 'comment_count'])[0]\n if detail_config_dict['like_count'] is not None:\n message['like_count'] = page.xpath(detail_config_dict['like_count']\n )[0]\n if detail_config_dict['read_num'] is not None:\n message['read_num'] = page.xpath(detail_config_dict['read_num'])[0]\n if detail_config_dict['author'] is not None:\n try:\n message['author'] = page.xpath(detail_config_dict['author'])[0]\n except:\n log.info('没有提取到{}作者字段'.format(message['detail_url']))\n if 
detail_config_dict['post_time'] is not None:\n try:\n message['post_time'] = page.xpath(detail_config_dict[\n 'post_time'])[0]\n except:\n log.info('没有提取到{}文章发表时间'.format(message['detail_url']))\n if detail_config_dict['tag'] is not None:\n message['tag'] = page.xpath(detail_config_dict['tag'])[0]\n if detail_config_dict['source_detail'] is not None:\n try:\n message['source_detail'] = page.xpath(detail_config_dict[\n 'source_detail'])[0]\n except:\n log.info('没有提取到{}文章详细来源'.format(message['detail_url']))\n self.clean(message)\n produce_channel = connection.channel()\n produce_channel.queue_declare('article_body')\n article_text = json.dumps(message)\n produce_channel.basic_publish(exchange='', routing_key=\n 'article_body', body=article_text)\n log.info('{}已经放入清洗队列'.format(message['title']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n produce_channel.close()\n",
"step-5": "import requests\nfrom lxml import etree\nfrom pymongo import MongoClient\nfrom lib.rabbitmq import Rabbit\nfrom lib.log import LogHandler\nfrom lib.proxy_iterator import Proxies\nimport yaml\nimport json\nimport datetime\nimport re\nimport time\n\n\nsetting = yaml.load(open('config_local.yaml'))\nlog = LogHandler('article_consumer')\nm = MongoClient(setting['mongo_config']['config_host'], setting['mongo_config']['port'])\nm.admin.authenticate(setting['mongo_config']['user_name'],setting['mongo_config']['password'] )\ncollection = m[setting['mongo_config']['config_db']][setting['mongo_config']['coll_detail']]\nclean_coll = m[setting['mongo_config']['config_db']][setting['mongo_config']['clean']]\nrabbit = Rabbit(setting['rabbitmq_host'],setting['rabbitmq_port'])\nconnection = rabbit.connection\n\n\nclass CrawlerDetail:\n\n def __init__(self):\n self.proxy = Proxies()\n\n def start_consume(self):\n channel = connection.channel()\n channel.queue_declare(queue='usual_article')\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(self.consume_article_detail_url,\n queue='usual_article',\n no_ack=False)\n channel.start_consuming()\n\n def clean(self,message):\n \"\"\"\n 作者,发布时间,详细来源字段清洗\n :param message:\n :return:\n \"\"\"\n clean = clean_coll.find_one({'source': message['source']})\n if clean['post_time'] is not None:\n try:\n post_time = re.search(clean['post_time'],message['post_time'],re.S|re.M).group(1)\n message['post_time'] = post_time\n except:\n log.info(\"post_time清洗失败{}\".format(message['post_time']))\n message['post_time'] = None\n if clean['author'] is not None:\n try:\n author = re.search(clean['author'],message['author']).group(1)\n message['author'] = author\n except:\n log.info(\"author清洗失败{}\".format(message['author']))\n message['author'] = None\n\n if clean['source_detail'] is not None:\n try:\n source_detail = re.search(clean['source_detail'],message['source_detail'],re.S|re.M).group(1)\n message['source_detail'] = 
source_detail\n except:\n log.info(\"source_detail清洗失败{}\".format(message['source_detail']))\n message['source_detail'] = None\n\n return message\n\n\n def consume_article_detail_url(self,ch, method, properties, body):\n \"\"\"\n 文章详情页解析\n :param ch:\n :param method:\n :param properties:\n :param body: json格式字符串\n :return:\n \"\"\"\n message = json.loads(body.decode())\n for i in range(10):\n try:\n html = requests.get(message['detail_url'],timeout=10,proxies=next(self.proxy))\n connection.process_data_events()\n if html.status_code == 200:\n break\n except Exception as e:\n connection.process_data_events()\n if i == 10:\n log.error(\"请求文章详情页{}失败\".format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n try:\n con = html.content.decode()\n except:\n try:\n con = html.content.decode('gbk')\n except:\n log.error('{}utf-8,gbk编码解析失败'.format(message['detail_url']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n page = etree.HTML(con)\n\n # 获取详情页的解析方式\n detail_config_dict = collection.find_one({'source': message['source']})\n\n if detail_config_dict['body'] is not None:\n try:\n for pattern in detail_config_dict['body']:\n if page.xpath(pattern):\n article_body = page.xpath(pattern)[0]\n message['body'] = etree.tounicode(article_body)\n break\n except:\n log.error('xpath语句未能解析body')\n ch.basic_ack(delivery_tag=method.delivery_tag)\n return\n if detail_config_dict['comment_count'] is not None:\n message['comment_count'] = page.xpath(detail_config_dict['comment_count'])[0]\n if detail_config_dict['like_count'] is not None:\n message['like_count'] = page.xpath(detail_config_dict['like_count'])[0]\n if detail_config_dict['read_num'] is not None:\n message['read_num'] = page.xpath(detail_config_dict['read_num'])[0]\n if detail_config_dict['author'] is not None:\n try:\n message['author'] = page.xpath(detail_config_dict['author'])[0]\n except:\n log.info(\"没有提取到{}作者字段\".format(message['detail_url']))\n if detail_config_dict['post_time'] 
is not None:\n try:\n message['post_time'] = page.xpath(detail_config_dict['post_time'])[0]\n except:\n log.info(\"没有提取到{}文章发表时间\".format(message['detail_url']))\n if detail_config_dict['tag'] is not None:\n message['tag'] = page.xpath(detail_config_dict['tag'])[0]\n if detail_config_dict['source_detail'] is not None:\n try:\n message['source_detail'] = page.xpath(detail_config_dict['source_detail'])[0]\n except:\n log.info(\"没有提取到{}文章详细来源\".format(message['detail_url']))\n\n self.clean(message)\n\n # 放入消息队列做正文替换清洗\n produce_channel = connection.channel()\n produce_channel.queue_declare('article_body')\n article_text = json.dumps(message)\n produce_channel.basic_publish(exchange='',\n routing_key='article_body',\n body=article_text)\n log.info('{}已经放入清洗队列'.format(message['title']))\n ch.basic_ack(delivery_tag=method.delivery_tag)\n produce_channel.close()",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
from flask import Flask
from flask import render_template

# Creates a Flask application called 'app'.
# Bug fix: the template path must be a raw string. In a normal (non-raw)
# literal, '\U' in 'C:\Users\...' starts an 8-digit unicode escape and is a
# SyntaxError in Python 3, and other backslash pairs would silently corrupt
# the path.
app = Flask(__name__, template_folder=r'C:\Users\jwhitehead\Documents\Webdev\Angular Web App')


# The route to display the HTML template on.
@app.route('/')
def host():
    """Render and return the index.html template for the site root."""
    return render_template('index.html')


# Run the Flask application.
if __name__ == "__main__":
    # port is an int; the original passed the string '80', which happens to
    # be tolerated but is fragile.
    app.run(host='localhost', port=80)
|
normal
|
{
"blob_id": "3e1e2de555667bf09162cd6c62cad35dabbd0f54",
"index": 2482,
"step-1": "from flask import Flask\nfrom flask import render_template\n\n# Creates a Flask application called 'app'\napp = Flask(__name__, template_folder='C:\\Users\\jwhitehead\\Documents\\Webdev\\Angular Web App')\n\n# The route to display the HTML template on\n@app.route('/')\ndef host():\n return render_template('index.html')\n\n# Run the Flask application\nif __name__ == \"__main__\":\n app.run(host='localhost', port='80')\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Random number guessing game.
# 10 July 20
# CTI-110 P5HW1 - Random Number
# Thelma Majette
import random

randomNumber = random.randint(1, 100)


# main function
def main():
    """Prompt for guesses until the player finds the secret number."""
    while True:
        # Ask the user for a number in [1, 100].
        guess = int(input('\nGuess a number between 1 and 100: '))

        # Tell the player to go higher, lower, or congratulate and stop.
        if guess > randomNumber:
            print('\nToo high, try again.')
        elif guess < randomNumber:
            print('\nToo low, try again')
        else:
            print('\nCongratulations, you guessed the correct number!')
            break


main()
|
normal
|
{
"blob_id": "c09c02a36a64e9522cfc8c0951bd6c98f404f09c",
"index": 367,
"step-1": "<mask token>\n\n\ndef main():\n keep_going = 'y'\n while keep_going == 'y':\n guess = int(input('\\nGuess a number between 1 and 100: '))\n if guess > randomNumber:\n print('\\nToo high, try again.')\n elif guess < randomNumber:\n print('\\nToo low, try again')\n else:\n print('\\nCongratulations, you guessed the correct number!')\n keep_going = 'n'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n keep_going = 'y'\n while keep_going == 'y':\n guess = int(input('\\nGuess a number between 1 and 100: '))\n if guess > randomNumber:\n print('\\nToo high, try again.')\n elif guess < randomNumber:\n print('\\nToo low, try again')\n else:\n print('\\nCongratulations, you guessed the correct number!')\n keep_going = 'n'\n\n\nmain()\n",
"step-3": "<mask token>\nrandomNumber = random.randint(1, 100)\n\n\ndef main():\n keep_going = 'y'\n while keep_going == 'y':\n guess = int(input('\\nGuess a number between 1 and 100: '))\n if guess > randomNumber:\n print('\\nToo high, try again.')\n elif guess < randomNumber:\n print('\\nToo low, try again')\n else:\n print('\\nCongratulations, you guessed the correct number!')\n keep_going = 'n'\n\n\nmain()\n",
"step-4": "import random\nrandomNumber = random.randint(1, 100)\n\n\ndef main():\n keep_going = 'y'\n while keep_going == 'y':\n guess = int(input('\\nGuess a number between 1 and 100: '))\n if guess > randomNumber:\n print('\\nToo high, try again.')\n elif guess < randomNumber:\n print('\\nToo low, try again')\n else:\n print('\\nCongratulations, you guessed the correct number!')\n keep_going = 'n'\n\n\nmain()\n",
"step-5": "# Random number guessing game.\r\n# 10 July 20\r\n# CTI-110 P5HW1 - Random Number\r\n# Thelma Majette\r\n\r\nimport random\r\n\r\nrandomNumber = random.randint (1,100)\r\n\r\n# main function\r\ndef main():\r\n\r\n # Create a variable to control the loop.\r\n keep_going = 'y'\r\n while keep_going == 'y':\r\n\r\n # Ask user for a number ()\r\n guess = int(input('\\nGuess a number between 1 and 100: '))\r\n\r\n # Perform the selected action.\r\n if guess > randomNumber:\r\n print ('\\nToo high, try again.' )\r\n elif guess < randomNumber:\r\n print ('\\nToo low, try again' )\r\n else:\r\n print ('\\nCongratulations, you guessed the correct number!')\r\n keep_going ='n'\r\n \r\n \r\n \r\nmain () \r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from pyspark import SparkContext, SparkConf
import time
# Smoke-test script: start a Spark application, idle for 30 seconds, then
# exit — presumably so the app is observable while it runs; intent is not
# stated in the source.
# Create a basic configuration
conf = SparkConf().setAppName("myTestCopyApp")
# Create a SparkContext using the configuration
sc = SparkContext(conf=conf)
print("START")
# Keep the application alive for 30 seconds before terminating.
time.sleep(30)
print("END")
|
normal
|
{
"blob_id": "4b773fbf45d15dff27dc7bd51d6636c5f783477b",
"index": 9183,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('START')\ntime.sleep(30)\nprint('END')\n",
"step-3": "<mask token>\nconf = SparkConf().setAppName('myTestCopyApp')\nsc = SparkContext(conf=conf)\nprint('START')\ntime.sleep(30)\nprint('END')\n",
"step-4": "from pyspark import SparkContext, SparkConf\nimport time\nconf = SparkConf().setAppName('myTestCopyApp')\nsc = SparkContext(conf=conf)\nprint('START')\ntime.sleep(30)\nprint('END')\n",
"step-5": "\n\nfrom pyspark import SparkContext, SparkConf\nimport time \n\n# Create a basic configuration\nconf = SparkConf().setAppName(\"myTestCopyApp\")\n\n# Create a SparkContext using the configuration\nsc = SparkContext(conf=conf)\n\nprint(\"START\")\n\ntime.sleep(30)\n\nprint(\"END\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_nonce():
return hexencode(os.urandom(32))
<|reserved_special_token_1|>
import os
from CTFd.utils.encoding import hexencode
def generate_nonce():
    """Return a random nonce: 32 bytes of OS randomness, hex-encoded via CTFd's hexencode."""
    # 32 bytes of cryptographically secure randomness (256 bits).
    raw = os.urandom(32)
    return hexencode(raw)
|
flexible
|
{
"blob_id": "4f91c57ad42759654a87328d5c92de8da14ca5ea",
"index": 2966,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_nonce():\n return hexencode(os.urandom(32))\n",
"step-3": "import os\nfrom CTFd.utils.encoding import hexencode\n\n\ndef generate_nonce():\n return hexencode(os.urandom(32))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#Developer: Chritian D. Goyes
'''
this script show your name and your age.
'''
myName = 'Christian D. Goyes'
myDate = 1998  # birth year
year = 2020    # current year

# Age is the difference between the current year and the birth year.
age = year - myDate

# Bug fix: the original print put `age` where the name belonged and never
# printed `myName` at all ("yourname is: ", age, "and your are", "years old").
print("your name is:", myName, "and you are", age, "years old")
|
normal
|
{
"blob_id": "f5331b56abea41873bd3936028471d0da1c58236",
"index": 4986,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('yourname is: ', age, 'and your are', 'years old')\n",
"step-3": "<mask token>\nmyName = 'Christian D. Goyes'\nmyDate = 1998\nyear = 2020\nage = year - myDate\nprint('yourname is: ', age, 'and your are', 'years old')\n",
"step-4": "#Developer: Chritian D. Goyes \n'''\nthis script show your name and your age.\n'''\n\nmyName = 'Christian D. Goyes'\nmyDate = 1998\nyear = 2020\n\nage = year - myDate\n\nprint (\"yourname is: \", age, \"and your are\", \"years old\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""to get the all the module and its location"""
import sys
print(sys.modules)
|
normal
|
{
"blob_id": "20637e41df8a33e3837905a4729ae0b4a9f94dbb",
"index": 3128,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(sys.modules)\n",
"step-3": "<mask token>\nimport sys\nprint(sys.modules)\n",
"step-4": "\"\"\"to get the all the module and its location\"\"\"\r\nimport sys\r\nprint(sys.modules)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import urllib2
from io import StringIO

# NOTE(review): this is Python 2 code (print statements, urllib2); it will
# not run under Python 3 without porting.


def demo_polyfit0():
    # Fit a degree-1 polynomial (a line) to the two columns read from
    # stock.txt and print the coefficients joined by '-'.
    x, y = np.loadtxt('stock.txt', unpack=True)
    print '-'.join(map(str, np.polyfit(x, y, 1)))


def demo_polyfit1():
    # Download a space-delimited two-column dataset, echo it, and fit a
    # degree-5 polynomial to (column 0, column 1).
    d = urllib2.urlopen("http://www.qlcoder.com/download/145622513871043.txt").read().decode("utf-8")
    print d
    arr = np.genfromtxt(StringIO(d), delimiter=" ")
    z1 = np.polyfit(arr[:, 0], arr[:, 1], 5)
    print z1


if __name__ == '__main__':
    demo_polyfit0()
    demo_polyfit1()
|
normal
|
{
"blob_id": "61571ba9f647f430879b9fa5db884ec4c93c334f",
"index": 9659,
"step-1": "import numpy as np\nimport urllib2\nfrom io import StringIO\n\n\ndef demo_polyfit0():\n x, y = np.loadtxt('stock.txt', unpack=True)\n print '-'.join(map(str, np.polyfit(x, y, 1)))\n\n\ndef demo_polyfit1():\n d = urllib2.urlopen(\"http://www.qlcoder.com/download/145622513871043.txt\").read().decode(\"utf-8\")\n print d\n arr = np.genfromtxt(StringIO(d), delimiter=\" \")\n z1 = np.polyfit(arr[:, 0], arr[:, 1], 5)\n print z1\n\n\nif __name__ == '__main__':\n demo_polyfit0()\n demo_polyfit1()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from tkinter.ttk import *
from tkinter import *
import tkinter.ttk as ttk
from tkinter import messagebox
import sqlite3
root = Tk()  # main application window
root.title('Register-Form')
root.geometry("600x450+-2+86")  # width x height + x-offset + y-offset
root.minsize(120, 1)
def delete():
    """Delete the account whose ID is in Entry1 from the student table.

    Requires a non-empty ID field and a user confirmation dialog before
    deleting; clears all form fields afterwards.
    """
    if Entry1.get() == '':
        messagebox.showerror('Register-Form', 'ID Is compolsary for delete')
    else:
        ms = messagebox.askokcancel('Delete Result', 'Would you like to delete this account?')
        if ms:
            conn = sqlite3.connect('userinfo.db')
            with conn:
                c = conn.cursor()
                # Security fix: the original concatenated Entry1.get() into
                # the SQL text, allowing SQL injection. A parameterized
                # query is used instead; `with conn` commits on exit, so the
                # explicit execute('commit') is no longer needed.
                c.execute("delete from student where id=?", (Entry1.get(),))
            Entry1.delete(0, END)
            Entry2.delete(0, END)
            Entry3.delete(0, END)
            Entry4.delete(0, END)
            messagebox.showwarning('Delete Status', 'Deleted Succesfully')
            conn.close()
def sign_in():
    # Close the registration window, then import the `main` module, which
    # runs its top-level code — presumably this opens the sign-in screen;
    # confirm against main.py.
    root.destroy()
    import main
def insert_info():
    """Validate the form fields and insert a new account row into Student.

    Shows an error dialog when the fields are empty or the two password
    entries differ; on success clears the form, and on a duplicate ID shows
    an error and refocuses the ID field.
    """
    idp = Entry1.get()
    un = Entry2.get()
    password = Entry3.get()
    if idp == '' and password == '' and un == '':
        messagebox.showerror('Submit Status', 'All fields are requierd')
    elif Entry3.get() != Entry4.get():
        messagebox.showerror('register error', 'please confirm password')
        Entry4.delete(0, END)
        Entry4.focus()
    else:
        try:
            id1 = Entry1.get()
            uname = Entry2.get()
            password1 = Entry3.get()
            conn = sqlite3.connect('userinfo.db')
            with conn:
                c = conn.cursor()
                # Bug fix: the original CREATE TABLE declared columns
                # (ID, Email, Password1, Password2) while the INSERT used
                # (ID, Email, Password), so every insert raised
                # sqlite3.OperationalError. The schema now matches the
                # INSERT, and ID is a PRIMARY KEY so a duplicate id raises
                # sqlite3.IntegrityError as the except clause expects.
                c.execute("CREATE TABLE IF NOT EXISTS Student (ID INTEGER PRIMARY KEY, Email TEXT, Password TEXT)")
                c.execute("INSERT INTO Student (ID,Email,Password) VALUES(?,?,?)", (id1, uname, password1))
            conn.commit()
            conn.close()
            messagebox.showinfo('Register Form', 'Account Created Successfully!')
            Entry1.delete(0, END)
            Entry2.delete(0, END)
            Entry3.delete(0, END)
            Entry4.delete(0, END)
        except sqlite3.IntegrityError:
            messagebox.showerror('Register Form', f'Please use another id instead of {Entry1.get()} because that id exists')
            Entry1.focus()
# --- Form labels (absolute placement via relative coordinates) ---
Label1 = ttk.Label(root)
Label1.place(relx=0.35, rely=0.156, height=21, width=44)
Label1.configure(text='''Enter ID:''')
Label2 = ttk.Label(root)
Label2.place(relx=0.35, rely=0.2, height=31, width=54)
Label2.configure(text='''UName:''')
Label3 = ttk.Label(root)
Label3.place(relx=0.333, rely=0.289, height=21, width=64)
Label3.configure(text='''Password:''')
Label4 = ttk.Label(root)
Label4.place(relx=0.267, rely=0.356, height=21, width=104)
Label4.configure(text='''Confirm Password:''')
# --- Input fields; the two password entries mask input with '*' ---
Entry1 = ttk.Entry(root)
Entry1.place(relx=0.45, rely=0.156, height=20, relwidth=0.273)
Entry2 = ttk.Entry(root)
Entry2.place(relx=0.45, rely=0.222, height=20, relwidth=0.273)
Entry3 = ttk.Entry(root, show='*')
Entry3.place(relx=0.45, rely=0.289, height=20, relwidth=0.273)
Entry4 = ttk.Entry(root, show='*')
Entry4.place(relx=0.45, rely=0.356, height=20, relwidth=0.273)
# --- Action buttons wired to the handlers defined above ---
b0 = ttk.Button(root, command=sign_in)
b0.place(relx=0.467, rely=0.578, height=84, width=87)
b0.configure(text='Sign in')
b1 = ttk.Button(root, text='Submit', command=insert_info)
b1.place(relx=0.767, rely=0.578, height=84, width=87)
B3 = ttk.Button(root, command=delete)
B3.place(relx=0.617, rely=0.578, height=84, width=87)
B3.configure(text='''Delete''')
# Enter the Tk event loop; blocks until the window is closed.
root.mainloop()
|
normal
|
{
"blob_id": "37cafe5d3d3342e5e4070b87caf0cfb5bcfdfd8d",
"index": 1613,
"step-1": "<mask token>\n\n\ndef sign_in():\n root.destroy()\n import main\n\n\n<mask token>\n",
"step-2": "<mask token>\nroot.title('Register-Form')\nroot.geometry('600x450+-2+86')\nroot.minsize(120, 1)\n\n\ndef delete():\n if Entry1.get() == '':\n messagebox.showerror('Register-Form', 'ID Is compolsary for delete')\n else:\n ms = messagebox.askokcancel('Delete Result',\n 'Would you like to delete this account?')\n if ms:\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\"delete from student where id='\" + Entry1.get() + \"'\")\n c.execute('commit')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n messagebox.showwarning('Delete Status', 'Deleted Succesfully')\n conn.close()\n\n\ndef sign_in():\n root.destroy()\n import main\n\n\ndef insert_info():\n idp = Entry1.get()\n un = Entry2.get()\n password = Entry3.get()\n if idp == '' and password == '' and un == '':\n messagebox.showerror('Submit Status', 'All fields are requierd')\n elif Entry3.get() != Entry4.get():\n messagebox.showerror('register error', 'please confirm password')\n Entry4.delete(0, END)\n Entry4.focus()\n else:\n try:\n id1 = Entry1.get()\n uname = Entry2.get()\n password1 = Entry3.get()\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\n 'CREATE TABLE IF NOT EXISTS Student (ID INTEGER, Email TEXT, Password1 TEXT, Password2 TEXT)'\n )\n c.execute('INSERT INTO Student (ID,Email,Password) VALUES(?,?,?)',\n (id1, uname, password1))\n conn.commit()\n conn.close()\n messagebox.showinfo('Register Form',\n 'Account Created Successfully!')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n except sqlite3.IntegrityError:\n messagebox.showerror('Register Form',\n f'Please use another id instead of {Entry1.get()} because that id exists'\n )\n Entry1.focus()\n\n\n<mask token>\nLabel1.place(relx=0.35, rely=0.156, height=21, width=44)\nLabel1.configure(text='Enter ID:')\n<mask token>\nLabel2.place(relx=0.35, rely=0.2, height=31, 
width=54)\nLabel2.configure(text='UName:')\n<mask token>\nLabel3.place(relx=0.333, rely=0.289, height=21, width=64)\nLabel3.configure(text='Password:')\n<mask token>\nLabel4.place(relx=0.267, rely=0.356, height=21, width=104)\nLabel4.configure(text='Confirm Password:')\n<mask token>\nEntry1.place(relx=0.45, rely=0.156, height=20, relwidth=0.273)\n<mask token>\nEntry2.place(relx=0.45, rely=0.222, height=20, relwidth=0.273)\n<mask token>\nEntry3.place(relx=0.45, rely=0.289, height=20, relwidth=0.273)\n<mask token>\nEntry4.place(relx=0.45, rely=0.356, height=20, relwidth=0.273)\n<mask token>\nb0.place(relx=0.467, rely=0.578, height=84, width=87)\nb0.configure(text='Sign in')\n<mask token>\nb1.place(relx=0.767, rely=0.578, height=84, width=87)\n<mask token>\nB3.place(relx=0.617, rely=0.578, height=84, width=87)\nB3.configure(text='Delete')\nroot.mainloop()\n",
"step-3": "<mask token>\nroot = Tk()\nroot.title('Register-Form')\nroot.geometry('600x450+-2+86')\nroot.minsize(120, 1)\n\n\ndef delete():\n if Entry1.get() == '':\n messagebox.showerror('Register-Form', 'ID Is compolsary for delete')\n else:\n ms = messagebox.askokcancel('Delete Result',\n 'Would you like to delete this account?')\n if ms:\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\"delete from student where id='\" + Entry1.get() + \"'\")\n c.execute('commit')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n messagebox.showwarning('Delete Status', 'Deleted Succesfully')\n conn.close()\n\n\ndef sign_in():\n root.destroy()\n import main\n\n\ndef insert_info():\n idp = Entry1.get()\n un = Entry2.get()\n password = Entry3.get()\n if idp == '' and password == '' and un == '':\n messagebox.showerror('Submit Status', 'All fields are requierd')\n elif Entry3.get() != Entry4.get():\n messagebox.showerror('register error', 'please confirm password')\n Entry4.delete(0, END)\n Entry4.focus()\n else:\n try:\n id1 = Entry1.get()\n uname = Entry2.get()\n password1 = Entry3.get()\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\n 'CREATE TABLE IF NOT EXISTS Student (ID INTEGER, Email TEXT, Password1 TEXT, Password2 TEXT)'\n )\n c.execute('INSERT INTO Student (ID,Email,Password) VALUES(?,?,?)',\n (id1, uname, password1))\n conn.commit()\n conn.close()\n messagebox.showinfo('Register Form',\n 'Account Created Successfully!')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n except sqlite3.IntegrityError:\n messagebox.showerror('Register Form',\n f'Please use another id instead of {Entry1.get()} because that id exists'\n )\n Entry1.focus()\n\n\nLabel1 = ttk.Label(root)\nLabel1.place(relx=0.35, rely=0.156, height=21, width=44)\nLabel1.configure(text='Enter ID:')\nLabel2 = 
ttk.Label(root)\nLabel2.place(relx=0.35, rely=0.2, height=31, width=54)\nLabel2.configure(text='UName:')\nLabel3 = ttk.Label(root)\nLabel3.place(relx=0.333, rely=0.289, height=21, width=64)\nLabel3.configure(text='Password:')\nLabel4 = ttk.Label(root)\nLabel4.place(relx=0.267, rely=0.356, height=21, width=104)\nLabel4.configure(text='Confirm Password:')\nEntry1 = ttk.Entry(root)\nEntry1.place(relx=0.45, rely=0.156, height=20, relwidth=0.273)\nEntry2 = ttk.Entry(root)\nEntry2.place(relx=0.45, rely=0.222, height=20, relwidth=0.273)\nEntry3 = ttk.Entry(root, show='*')\nEntry3.place(relx=0.45, rely=0.289, height=20, relwidth=0.273)\nEntry4 = ttk.Entry(root, show='*')\nEntry4.place(relx=0.45, rely=0.356, height=20, relwidth=0.273)\nb0 = ttk.Button(root, command=sign_in)\nb0.place(relx=0.467, rely=0.578, height=84, width=87)\nb0.configure(text='Sign in')\nb1 = ttk.Button(root, text='Submit', command=insert_info)\nb1.place(relx=0.767, rely=0.578, height=84, width=87)\nB3 = ttk.Button(root, command=delete)\nB3.place(relx=0.617, rely=0.578, height=84, width=87)\nB3.configure(text='Delete')\nroot.mainloop()\n",
"step-4": "from tkinter.ttk import *\nfrom tkinter import *\nimport tkinter.ttk as ttk\nfrom tkinter import messagebox\nimport sqlite3\nroot = Tk()\nroot.title('Register-Form')\nroot.geometry('600x450+-2+86')\nroot.minsize(120, 1)\n\n\ndef delete():\n if Entry1.get() == '':\n messagebox.showerror('Register-Form', 'ID Is compolsary for delete')\n else:\n ms = messagebox.askokcancel('Delete Result',\n 'Would you like to delete this account?')\n if ms:\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\"delete from student where id='\" + Entry1.get() + \"'\")\n c.execute('commit')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n messagebox.showwarning('Delete Status', 'Deleted Succesfully')\n conn.close()\n\n\ndef sign_in():\n root.destroy()\n import main\n\n\ndef insert_info():\n idp = Entry1.get()\n un = Entry2.get()\n password = Entry3.get()\n if idp == '' and password == '' and un == '':\n messagebox.showerror('Submit Status', 'All fields are requierd')\n elif Entry3.get() != Entry4.get():\n messagebox.showerror('register error', 'please confirm password')\n Entry4.delete(0, END)\n Entry4.focus()\n else:\n try:\n id1 = Entry1.get()\n uname = Entry2.get()\n password1 = Entry3.get()\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\n 'CREATE TABLE IF NOT EXISTS Student (ID INTEGER, Email TEXT, Password1 TEXT, Password2 TEXT)'\n )\n c.execute('INSERT INTO Student (ID,Email,Password) VALUES(?,?,?)',\n (id1, uname, password1))\n conn.commit()\n conn.close()\n messagebox.showinfo('Register Form',\n 'Account Created Successfully!')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n except sqlite3.IntegrityError:\n messagebox.showerror('Register Form',\n f'Please use another id instead of {Entry1.get()} because that id exists'\n )\n Entry1.focus()\n\n\nLabel1 = ttk.Label(root)\nLabel1.place(relx=0.35, 
rely=0.156, height=21, width=44)\nLabel1.configure(text='Enter ID:')\nLabel2 = ttk.Label(root)\nLabel2.place(relx=0.35, rely=0.2, height=31, width=54)\nLabel2.configure(text='UName:')\nLabel3 = ttk.Label(root)\nLabel3.place(relx=0.333, rely=0.289, height=21, width=64)\nLabel3.configure(text='Password:')\nLabel4 = ttk.Label(root)\nLabel4.place(relx=0.267, rely=0.356, height=21, width=104)\nLabel4.configure(text='Confirm Password:')\nEntry1 = ttk.Entry(root)\nEntry1.place(relx=0.45, rely=0.156, height=20, relwidth=0.273)\nEntry2 = ttk.Entry(root)\nEntry2.place(relx=0.45, rely=0.222, height=20, relwidth=0.273)\nEntry3 = ttk.Entry(root, show='*')\nEntry3.place(relx=0.45, rely=0.289, height=20, relwidth=0.273)\nEntry4 = ttk.Entry(root, show='*')\nEntry4.place(relx=0.45, rely=0.356, height=20, relwidth=0.273)\nb0 = ttk.Button(root, command=sign_in)\nb0.place(relx=0.467, rely=0.578, height=84, width=87)\nb0.configure(text='Sign in')\nb1 = ttk.Button(root, text='Submit', command=insert_info)\nb1.place(relx=0.767, rely=0.578, height=84, width=87)\nB3 = ttk.Button(root, command=delete)\nB3.place(relx=0.617, rely=0.578, height=84, width=87)\nB3.configure(text='Delete')\nroot.mainloop()\n",
"step-5": "from tkinter.ttk import *\nfrom tkinter import *\nimport tkinter.ttk as ttk\nfrom tkinter import messagebox\nimport sqlite3\n\nroot = Tk()\nroot.title('Register-Form')\nroot.geometry(\"600x450+-2+86\")\nroot.minsize(120, 1)\n\ndef delete():\n if(Entry1.get()==''):\n messagebox.showerror('Register-Form', 'ID Is compolsary for delete')\n else:\n ms = messagebox.askokcancel('Delete Result', 'Would you like to delete this account?')\n if (ms):\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\"delete from student where id='\"+ Entry1.get() +\"'\")\n c.execute('commit')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n messagebox.showwarning('Delete Status', 'Deleted Succesfully')\n conn.close()\n\ndef sign_in():\n root.destroy()\n import main\n\ndef insert_info():\n idp=Entry1.get()\n un=Entry2.get()\n password=Entry3.get()\n if (idp=='' and password=='' and un==''):\n messagebox.showerror('Submit Status', 'All fields are requierd')\n elif Entry3.get() != Entry4.get():\n messagebox.showerror('register error', 'please confirm password')\n Entry4.delete(0, END) \n Entry4.focus()\n else:\n try:\n id1=Entry1.get();\n uname=Entry2.get();\n password1=Entry3.get();\n\n conn = sqlite3.connect('userinfo.db')\n with conn:\n c = conn.cursor()\n c.execute(\"CREATE TABLE IF NOT EXISTS Student (ID INTEGER, Email TEXT, Password1 TEXT, Password2 TEXT)\")\n c.execute(\"INSERT INTO Student (ID,Email,Password) VALUES(?,?,?)\", (id1, uname, password1))\n conn.commit()\n conn.close()\n messagebox.showinfo('Register Form', 'Account Created Successfully!')\n Entry1.delete(0, END)\n Entry2.delete(0, END)\n Entry3.delete(0, END)\n Entry4.delete(0, END)\n except sqlite3.IntegrityError:\n messagebox.showerror('Register Form', f'Please use another id instead of {Entry1.get()} because that id exists')\n Entry1.focus()\n\nLabel1 = ttk.Label(root)\nLabel1.place(relx=0.35, rely=0.156, height=21, 
width=44)\nLabel1.configure(text='''Enter ID:''')\n\nLabel2 = ttk.Label(root)\nLabel2.place(relx=0.35, rely=0.2, height=31, width=54)\nLabel2.configure(text='''UName:''')\n\nLabel3 = ttk.Label(root)\nLabel3.place(relx=0.333, rely=0.289, height=21, width=64)\nLabel3.configure(text='''Password:''')\n\nLabel4 = ttk.Label(root)\nLabel4.place(relx=0.267, rely=0.356, height=21, width=104)\nLabel4.configure(text='''Confirm Password:''')\n\nEntry1 = ttk.Entry(root)\nEntry1.place(relx=0.45, rely=0.156, height=20, relwidth=0.273)\n\nEntry2 = ttk.Entry(root)\nEntry2.place(relx=0.45, rely=0.222, height=20, relwidth=0.273)\n\nEntry3 = ttk.Entry(root, show='*')\nEntry3.place(relx=0.45, rely=0.289, height=20, relwidth=0.273)\n\n\nEntry4 = ttk.Entry(root, show='*')\nEntry4.place(relx=0.45, rely=0.356, height=20, relwidth=0.273)\n\nb0 = ttk.Button(root, command=sign_in)\nb0.place(relx=0.467, rely=0.578, height=84, width=87)\nb0.configure(text='Sign in')\n\n\nb1 = ttk.Button(root, text='Submit', command=insert_info)\nb1.place(relx=0.767, rely=0.578, height=84, width=87)\n\n\nB3 = ttk.Button(root, command=delete)\nB3.place(relx=0.617, rely=0.578, height=84, width=87)\nB3.configure(text='''Delete''')\n\n\n\nroot.mainloop()\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
def build_shift_dict(self, shift):
    '''
    Creates a dictionary that can be used to apply a cipher to a letter.
    The dictionary maps every uppercase and lowercase letter to a
    character shifted down the alphabet by the input shift. The dictionary
    should have 52 keys of all the uppercase letters and all the lowercase
    letters only.

    shift (integer): the amount by which to shift every letter of the
    alphabet. 0 <= shift < 26

    Returns: a dictionary mapping a letter (string) to
    another letter (string).
    '''
    mapping = {}
    # Handle both cases with one loop: rotate each alphabet left by
    # `shift` and pair original letters with their rotated counterparts.
    for alphabet in (string.ascii_lowercase, string.ascii_uppercase):
        rotated = alphabet[shift:] + alphabet[:shift]
        mapping.update(zip(alphabet, rotated))
    return mapping
|
normal
|
{
"blob_id": "07d2da14d0122ad2c8407bb13b8567ca62356bef",
"index": 7515,
"step-1": "<mask token>\n",
"step-2": "def build_shift_dict(self, shift):\n \"\"\"\n Creates a dictionary that can be used to apply a cipher to a letter.\n The dictionary maps every uppercase and lowercase letter to a\n character shifted down the alphabet by the input shift. The dictionary\n should have 52 keys of all the uppercase letters and all the lowercase\n letters only.\n\n shift (integer): the amount by which to shift every letter of the\n alphabet. 0 <= shift < 26\n\n Returns: a dictionary mapping a letter (string) to\n another letter (string).\n \"\"\"\n shifted_lowercase = list(string.ascii_lowercase[shift:]) + list(string.\n ascii_lowercase[:shift])\n shifted_uppercase = list(string.ascii_uppercase[shift:]) + list(string.\n ascii_uppercase[:shift])\n d = {}\n for l in range(len(string.ascii_lowercase)):\n d[string.ascii_lowercase[l]] = shifted_lowercase[l]\n for l in range(len(string.ascii_uppercase)):\n d[string.ascii_uppercase[l]] = shifted_uppercase[l]\n return d\n",
"step-3": "def build_shift_dict(self, shift):\n '''\n Creates a dictionary that can be used to apply a cipher to a letter.\n The dictionary maps every uppercase and lowercase letter to a\n character shifted down the alphabet by the input shift. The dictionary\n should have 52 keys of all the uppercase letters and all the lowercase\n letters only.\n\n shift (integer): the amount by which to shift every letter of the\n alphabet. 0 <= shift < 26\n\n Returns: a dictionary mapping a letter (string) to\n another letter (string).\n '''\n # create a new list of letters based on the shift\n shifted_lowercase = list(string.ascii_lowercase[shift:]) + list(string.ascii_lowercase[:shift])\n shifted_uppercase = list(string.ascii_uppercase[shift:]) + list(string.ascii_uppercase[:shift])\n\n # empty dict\n d = {}\n\n # populate dict for lowercase\n for l in range(len(string.ascii_lowercase)):\n d[string.ascii_lowercase[l]] = shifted_lowercase[l]\n\n # populate dict for uppercase\n for l in range(len(string.ascii_uppercase)):\n d[string.ascii_uppercase[l]] = shifted_uppercase[l]\n\n return d",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from selenium import webdriver
from urllib.request import urlopen, Request
from subprocess import check_output
import json
#from flask import Flask
# https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds=-32.27,-34.08,-73.15,-70.29
def get_json_aviones(north, south, west, east):
    """Fetch the flightradar24 feed for a bounding box and return the parsed JSON.

    north, south, west, east: bounding-box coordinates interpolated into the
    feed URL's ``bounds`` parameter.
    Returns: the decoded JSON payload from the feed endpoint.
    """
    #driver = webdriver.Chrome('/Users/luisl/Desktop/Pega Altavoz/chromedriver')
    driver = webdriver.PhantomJS("phantomjs")
    # Bug fix: the original wrapped this call in eval() of a format string
    # (flagged in-source as "Mala práctica de programación" / bad practice);
    # a direct call is equivalent and removes the needless eval.
    url = ('https://data-live.flightradar24.com/zones/fcgi/feed.js'
           '?bounds={},{},{},{}'.format(north, south, west, east))
    driver.get(url)
    json_aviones = json.loads(driver.find_element_by_tag_name("pre").text)
    driver.close()
    return json_aviones
#######################
def get_json_buques(centerx, centery, zoom):
    """Collect vessel rows for the given map view via the GetBarcos.js scraper.

    Runs the phantomjs helper (up to 5 attempts) to obtain a list of data
    URLs, then fetches each URL and accumulates the 'data'/'rows' entries.
    Returns an empty list when all attempts fail.
    """
    links = []
    ignore = False
    for attempt in range(1, 6):
        print(centerx, centery, zoom)
        raw = check_output(["phantomjs", "GetBarcos.js", str(centerx), str(centery), str(zoom)])
        links = json.loads(raw)
        if links[0] != 0:
            break
        print("get_json_buques FAILED -------------- trying again")
        if attempt == 5:
            # Give up after five failures; the fetch loop below is skipped.
            ignore = True
            break

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'vessel-image': '00853fc25189416456442da74396a0288d02',
        'x-requested-with': 'XMLHttpRequest'}

    webpage = []
    for link in links:
        if not ignore:
            req = Request(link, headers=headers)
            webpage.extend(json.loads(urlopen(req).read().decode())['data']['rows'])
    return webpage
#######################
#app = Flask(__name__)
#
#
#@app.route('/')
# def hello_world():
# return json.dumps({'aviones': get_json_aviones(),
# 'buques': get_json_buques()})
#
#
#t = Timer(10.0, hello_world)
# t.start()
if __name__ == "__main__":
get_json_buques(-71, -33, 9)
# get_json_aviones(32.27, -34.08, -73.15, -70.29)
|
normal
|
{
"blob_id": "9ba5af7d2b6d4f61bb64a055efb15efa8e08d35c",
"index": 5379,
"step-1": "<mask token>\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\nif __name__ == '__main__':\n get_json_buques(-71, -33, 9)\n",
"step-4": "from selenium import webdriver\nfrom urllib.request import urlopen, Request\nfrom subprocess import check_output\nimport json\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\nif __name__ == '__main__':\n get_json_buques(-71, -33, 9)\n",
"step-5": "from selenium import webdriver\nfrom urllib.request import urlopen, Request\nfrom subprocess import check_output\nimport json\n#from flask import Flask\n\n\n# https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds=-32.27,-34.08,-73.15,-70.29\ndef get_json_aviones(north, south, west, east):\n\n #driver = webdriver.Chrome('/Users/luisl/Desktop/Pega Altavoz/chromedriver')\n driver = webdriver.PhantomJS(\"phantomjs\")\n\n # Mala práctica de programación\n eval(\"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\")\n json_aviones = json.loads(driver.find_element_by_tag_name(\"pre\").text)\n\n driver.close()\n\n return json_aviones\n\n#######################\n\n\ndef get_json_buques(centerx, centery, zoom):\n\n ## PRUEBA 1 - Mezclar con phantomjs\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output([\"phantomjs\", \"GetBarcos.js\", str(centerx), str(centery), str(zoom)])\n\n links = json.loads(out)\n\n if links[0] != 0:\n break\n\n else:\n print(\"get_json_buques FAILED -------------- trying again\")\n \n if count == 5:\n ignore = True\n break\n\n headers = {\n 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n\n webpage = []\n\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']['rows'])\n\n ## try:\n ## with open(\"data\", \"w\") as file:\n ## file.write(json.dumps(webpage[0]))\n ## except Exception as e:\n ## print(e)\n\n return webpage\n\n#######################\n\n\n#app = Flask(__name__)\n#\n#\n#@app.route('/')\n# def hello_world():\n# return json.dumps({'aviones': 
get_json_aviones(),\n# 'buques': get_json_buques()})\n#\n#\n#t = Timer(10.0, hello_world)\n# t.start()\n\n\nif __name__ == \"__main__\":\n\n get_json_buques(-71, -33, 9)\n # get_json_aviones(32.27, -34.08, -73.15, -70.29)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
A module for constants.
"""
# fin adding notes for keys and uncomment
KEYS = [
"CM",
"GM"
# ,
# "DM",
# "AM",
# "EM",
# "BM",
# "FSM",
# "CSM",
# "Am",
# "Em",
# "Bm",
# "FSm",
# "CSm",
# "GSm",
# "DSm",
# "ASm",
]
NOTES_FOR_KEY = {
"CM": [
21,
23,
24,
26,
28,
29,
31,
33,
35,
36,
38,
40,
41,
43,
45,
47,
48,
50,
52,
53,
55,
57,
59,
60,
62,
64,
65,
67,
69,
71,
72,
74,
76,
77,
79,
81,
83,
84,
86,
88,
89,
91,
93,
95,
96,
98,
100,
101,
103,
105,
107,
108,
],
"GM": [
21,
23,
24,
26,
28,
30,
31,
33,
35,
36,
38,
40,
42,
43,
45,
47,
48,
50,
52,
54,
55,
57,
59,
60,
62,
64,
66,
67,
69,
71,
72,
74,
76,
78,
79,
81,
83,
84,
86,
88,
90,
91,
93,
95,
96,
98,
100,
102,
103,
105,
107,
108,
],
"DM": [],
"AM": [],
"EM": [],
"BM": [],
"FSM": [],
"CSM": [],
"Am": [],
"Em": [],
"Bm": [],
"FSm": [],
"CSm": [],
"GSm": [],
"DSm": [],
"ASm": [],
}
TONIC_NOTE_FOR_KEY = {
"CM": 60,
"GM": 67,
"DM": None,
"AM": None,
"EM": None,
"BM": None,
"FSM": None,
"CSM": None,
"Am": None,
"Em": None,
"Bm": None,
"FSm": None,
"CSm": None,
"GSm": None,
"DSm": None,
"ASm": None,
}
# add more chords later
STEPS_FOR_CHORD = {"major_triad": [0, 4, 7]}
# constants for value function
# add more complex rewards
NOTE_IN_KEY_REWARD = 1
NOTE_IN_CHORDS_REWARD = 1
SUPER_CONSONANT_INTERVAL_REWARD = 3
CONSONANT_INTERVAL_REWARD = 2
SOMEWHAT_CONSONANT_INTERVAL_REWARD = 1
DISSONANT_INTERVAL_REWARD = -2
SOMEWHAT_DISSONANT_INTERVAL_REWARD = -1
CENTRICITY_FACTOR = 1 # reward is number of times note occured before * CENTRICITY_FACTOR
|
normal
|
{
"blob_id": "dd7ade05ef912f7c094883507768cc21f95f31f6",
"index": 533,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nKEYS = ['CM', 'GM']\nNOTES_FOR_KEY = {'CM': [21, 23, 24, 26, 28, 29, 31, 33, 35, 36, 38, 40, 41,\n 43, 45, 47, 48, 50, 52, 53, 55, 57, 59, 60, 62, 64, 65, 67, 69, 71, 72,\n 74, 76, 77, 79, 81, 83, 84, 86, 88, 89, 91, 93, 95, 96, 98, 100, 101, \n 103, 105, 107, 108], 'GM': [21, 23, 24, 26, 28, 30, 31, 33, 35, 36, 38,\n 40, 42, 43, 45, 47, 48, 50, 52, 54, 55, 57, 59, 60, 62, 64, 66, 67, 69,\n 71, 72, 74, 76, 78, 79, 81, 83, 84, 86, 88, 90, 91, 93, 95, 96, 98, 100,\n 102, 103, 105, 107, 108], 'DM': [], 'AM': [], 'EM': [], 'BM': [], 'FSM':\n [], 'CSM': [], 'Am': [], 'Em': [], 'Bm': [], 'FSm': [], 'CSm': [],\n 'GSm': [], 'DSm': [], 'ASm': []}\nTONIC_NOTE_FOR_KEY = {'CM': 60, 'GM': 67, 'DM': None, 'AM': None, 'EM':\n None, 'BM': None, 'FSM': None, 'CSM': None, 'Am': None, 'Em': None,\n 'Bm': None, 'FSm': None, 'CSm': None, 'GSm': None, 'DSm': None, 'ASm': None\n }\nSTEPS_FOR_CHORD = {'major_triad': [0, 4, 7]}\nNOTE_IN_KEY_REWARD = 1\nNOTE_IN_CHORDS_REWARD = 1\nSUPER_CONSONANT_INTERVAL_REWARD = 3\nCONSONANT_INTERVAL_REWARD = 2\nSOMEWHAT_CONSONANT_INTERVAL_REWARD = 1\nDISSONANT_INTERVAL_REWARD = -2\nSOMEWHAT_DISSONANT_INTERVAL_REWARD = -1\nCENTRICITY_FACTOR = 1\n",
"step-3": "\"\"\"\nA module for constants.\n\n\"\"\"\n\n# fin adding notes for keys and uncomment \nKEYS = [\n \"CM\",\n \"GM\"\n # ,\n # \"DM\",\n # \"AM\",\n # \"EM\",\n # \"BM\",\n # \"FSM\",\n # \"CSM\",\n # \"Am\",\n # \"Em\",\n # \"Bm\",\n # \"FSm\",\n # \"CSm\",\n # \"GSm\",\n # \"DSm\",\n # \"ASm\",\n]\n\nNOTES_FOR_KEY = {\n \"CM\": [\n 21,\n 23,\n 24,\n 26,\n 28,\n 29,\n 31,\n 33,\n 35,\n 36,\n 38,\n 40,\n 41,\n 43,\n 45,\n 47,\n 48,\n 50,\n 52,\n 53,\n 55,\n 57,\n 59,\n 60,\n 62,\n 64,\n 65,\n 67,\n 69,\n 71,\n 72,\n 74,\n 76,\n 77,\n 79,\n 81,\n 83,\n 84,\n 86,\n 88,\n 89,\n 91,\n 93,\n 95,\n 96,\n 98,\n 100,\n 101,\n 103,\n 105,\n 107,\n 108,\n ],\n \"GM\": [\n 21,\n 23,\n 24,\n 26,\n 28,\n 30,\n 31,\n 33,\n 35,\n 36,\n 38,\n 40,\n 42,\n 43,\n 45,\n 47,\n 48,\n 50,\n 52,\n 54,\n 55,\n 57,\n 59,\n 60,\n 62,\n 64,\n 66,\n 67,\n 69,\n 71,\n 72,\n 74,\n 76,\n 78,\n 79,\n 81,\n 83,\n 84,\n 86,\n 88,\n 90,\n 91,\n 93,\n 95,\n 96,\n 98,\n 100,\n 102,\n 103,\n 105,\n 107,\n 108,\n ],\n \"DM\": [],\n \"AM\": [],\n \"EM\": [],\n \"BM\": [],\n \"FSM\": [],\n \"CSM\": [],\n \"Am\": [],\n \"Em\": [],\n \"Bm\": [],\n \"FSm\": [],\n \"CSm\": [],\n \"GSm\": [],\n \"DSm\": [],\n \"ASm\": [],\n}\n\nTONIC_NOTE_FOR_KEY = {\n \"CM\": 60,\n \"GM\": 67,\n \"DM\": None,\n \"AM\": None,\n \"EM\": None,\n \"BM\": None,\n \"FSM\": None,\n \"CSM\": None,\n \"Am\": None,\n \"Em\": None,\n \"Bm\": None,\n \"FSm\": None,\n \"CSm\": None,\n \"GSm\": None,\n \"DSm\": None,\n \"ASm\": None,\n}\n\n# add more chords later\nSTEPS_FOR_CHORD = {\"major_triad\": [0, 4, 7]}\n\n\n\n# constants for value function\n# add more complex rewards\nNOTE_IN_KEY_REWARD = 1\nNOTE_IN_CHORDS_REWARD = 1\nSUPER_CONSONANT_INTERVAL_REWARD = 3\nCONSONANT_INTERVAL_REWARD = 2\nSOMEWHAT_CONSONANT_INTERVAL_REWARD = 1\nDISSONANT_INTERVAL_REWARD = -2\nSOMEWHAT_DISSONANT_INTERVAL_REWARD = -1\nCENTRICITY_FACTOR = 1 # reward is number of times note occured before * CENTRICITY_FACTOR",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
button6.grid(row=2, column=2, sticky=S + N + E + W)
<|reserved_special_token_0|>
button7.grid(row=3, column=0, sticky=S + N + E + W)
<|reserved_special_token_0|>
button8.grid(row=3, column=1, sticky=S + N + E + W)
<|reserved_special_token_0|>
button9.grid(row=3, column=2, sticky=S + N + E + W)
tk.mainloop()
<|reserved_special_token_1|>
button6 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,
command=lambda : checker(button6))
button6.grid(row=2, column=2, sticky=S + N + E + W)
button7 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,
command=lambda : checker(button7))
button7.grid(row=3, column=0, sticky=S + N + E + W)
button8 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,
command=lambda : checker(button8))
button8.grid(row=3, column=1, sticky=S + N + E + W)
button9 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,
command=lambda : checker(button9))
button9.grid(row=3, column=2, sticky=S + N + E + W)
tk.mainloop()
<|reserved_special_token_1|>
button6 = Button(tk,text=" ",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button6))
button6.grid(row=2, column=2,sticky = S+N+E+W)
button7 = Button(tk,text=" ",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button7))
button7.grid(row=3, column=0,sticky = S+N+E+W)
button8 = Button(tk,text=" ",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button8))
button8.grid(row=3, column=1,sticky = S+N+E+W)
button9 = Button(tk,text=" ",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button9))
button9.grid(row=3, column=2,sticky = S+N+E+W)
tk.mainloop()
|
flexible
|
{
"blob_id": "e543c7f7f1b249e53b8ebf82641ec398abf557af",
"index": 477,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nbutton6.grid(row=2, column=2, sticky=S + N + E + W)\n<mask token>\nbutton7.grid(row=3, column=0, sticky=S + N + E + W)\n<mask token>\nbutton8.grid(row=3, column=1, sticky=S + N + E + W)\n<mask token>\nbutton9.grid(row=3, column=2, sticky=S + N + E + W)\ntk.mainloop()\n",
"step-3": "button6 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,\n command=lambda : checker(button6))\nbutton6.grid(row=2, column=2, sticky=S + N + E + W)\nbutton7 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,\n command=lambda : checker(button7))\nbutton7.grid(row=3, column=0, sticky=S + N + E + W)\nbutton8 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,\n command=lambda : checker(button8))\nbutton8.grid(row=3, column=1, sticky=S + N + E + W)\nbutton9 = Button(tk, text=' ', font='Times 26 bold', heigh=4, width=8,\n command=lambda : checker(button9))\nbutton9.grid(row=3, column=2, sticky=S + N + E + W)\ntk.mainloop()\n",
"step-4": "button6 = Button(tk,text=\" \",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button6))\nbutton6.grid(row=2, column=2,sticky = S+N+E+W)\nbutton7 = Button(tk,text=\" \",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button7))\nbutton7.grid(row=3, column=0,sticky = S+N+E+W)\nbutton8 = Button(tk,text=\" \",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button8))\nbutton8.grid(row=3, column=1,sticky = S+N+E+W)\nbutton9 = Button(tk,text=\" \",font=('Times 26 bold'), heigh = 4, width = 8, command=lambda:checker(button9))\nbutton9.grid(row=3, column=2,sticky = S+N+E+W)\ntk.mainloop()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import sys
from flask import Flask, request, abort, flash, jsonify, Response
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
from flask_migrate import Migrate
import random
import unittest
from models import db, Question, Category
# set the number of pages fpr pagination
QUESTIONS_PER_PAGE = 10
# create and configure the app
app = Flask(__name__)
app.config.from_object('config')
db.init_app(app)
migrate = Migrate(app, db)
# set up cors for the application
cors = CORS(app, resources={r'/': {'origins': '*'}})
# to set Access-Control-Allow Headers and Methods
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Headers',
'Content-Type, Authorization, true')
response.headers.add('Access-Control-Allow-Methods',
'GET, PATCH,PUT,POST, DELETE, OPTIONS')
return response
# endpoint to handle GET requests for all available categories
@app.route('/categories', methods=['GET'])
def get_categories():
categories = [category.type for category in Category.query.all()]
return jsonify({'categories': categories, 'success': True})
# endpoint to handle GET requests for questions with pagination
@app.route('/questions/page/<int:page>', methods=['GET'])
def get_questions(page):
error = False
questions = []
total_questions = 0
# if question id is not an integer
if type(page) is not int:
# let them know their input is not processable
abort(422)
# ensure proper request method
if request.method == 'GET':
try:
# query for all categories
categories = [category.type for category in Category.query.all()]
if categories is None:
# let the user know that no resource was found
abort(404)
query = Question.query.paginate(page, per_page=10)
total_questions += len(Question.query.all())
if query is None:
# let the user know that no resource was found
abort(404)
if len(query.items) == 0:
# let the user know that no resource was found
error = True
results = query.items
# format data
for question in results:
_question_ = {
'id': question.id,
'question': question.question,
'answer': question.answer,
'category': question.category,
'difficulty': question.difficulty
}
questions.append(_question_)
except Exception:
# set error to true and log on the server
error = True
print('Error: {}'.format(sys.exc_info()))
finally:
if error:
# let the user know their request was not successful
abort(400)
else:
# if successful send back success response
return jsonify({
'success': True,
'questions': questions,
'total_questions': total_questions,
'categories': categories
})
else:
# send method not allowed error
abort(405)
# endpoint to delete a question from the database
@app.route('/question/<int:question_id>', methods=['DELETE'])
def delete_question(question_id):
error = False
# ensure proper request method
if request.method == 'DELETE':
# if question id is not an integer
if type(question_id) is not int:
# let them know their input is not processable
abort(422)
try:
# get user selected question from database
question = Question.query.get(question_id)
# stage question delete
db.session.delete(question)
# commit deletion to the database
db.session.commit()
except Exception:
# set error to true and log on the server
error = True
print('Error: {}'.format(sys.exc_info()))
finally:
# close database session
db.session.close()
if error:
# send bad request error
abort(400)
else:
# if no error send success object and log on server
return jsonify({
'success': True,
'method': 'Delete',
'question': question_id
})
else:
# send method not allowed error
abort(405)
# endpoint to add a question to the database
@app.route('/questions', methods=['POST'])
def add_question():
error = False
# ensure proper request method
if request.method == 'POST':
try:
# format data for database
new_question = Question(
question=request.json['question'],
answer=request.json['answer'],
category=request.json['category'],
difficulty=request.json['difficulty']
)
# stage data in database
db.session.add(new_question)
# commit data to database
db.session.commit()
except Exception:
# set error to true and log on the server
error = True
db.session.rollback()
print('Error: {}'.format(sys.exc_info()))
finally:
# close database session
db.session.close()
if error:
# send bad request error
abort(400)
else:
# if no error send success object and log on server
print('Added: {}'.format(new_question))
return jsonify({
'success': True,
'question': request.json
})
else:
# send method not allowed error
abort(405)
# endpoint to search for for questions in the database
@app.route('/questions/search', methods=['POST'])
def search_questions():
error = False
# ensure proper request method
if request.method == 'POST':
# set esrch term from user request
search_term = str(request.json['searchTerm'])
# if the user submits something other than a string of text block it
if type(search_term) is not str:
# let them know their input is not processable
abort(422)
try:
# query database using user provided search term
query_results = Question.query.filter(
Question.question.ilike('%{}%'.format(search_term))).all()
questions = []
# get categories from database
categories = [category.type for category in Category.query.all()]
# format response data
for question in query_results:
_question_ = {
'id': question.id,
'question': question.question,
'answer': question.answer,
'category': question.category,
'difficulty': question.difficulty
}
questions.append(_question_)
except Exception:
# set error to true and log on the server
error = True
print('Error: {}'.format(sys.exc_info()))
finally:
if error:
# send bad request error
abort(400)
else:
# if no error send success object
return jsonify({
'success': True,
'questions': questions,
'total_questions': len(questions),
'current_category': ''
})
else:
# send method not allowed error
abort(405)
# endpoint to get questions by a specific category
@app.route('/category/<int:category_id>/questions', methods=['GET'])
def get_questions_by_category(category_id):
error = False
# ensure proper request method
if request.method == 'GET':
# if category id is not an integer
if type(category_id) is not int:
# let them know their input is not processable
abort(422)
try:
# get questions by user selected category
query = Question.query.filter_by(category=str(category_id)).all()
questions = []
# format response data
for question in query:
_question_ = {
'id': question.id,
'question': question.question,
'answer': question.answer,
'category': question.category,
'difficulty': question.difficulty
}
questions.append(_question_)
except Exception:
# set error to true and log on the server
error = True
print('Error: {}'.format(sys.exc_info()))
finally:
if error:
# send bad request error
abort(400)
else:
# if no error send success object
return jsonify({
'success': True,
'questions': questions,
'total_questions': len(questions),
'current_category': ''
})
else:
# send method not allowed error
abort(405)
# endpoint to initiate quiz
@app.route('/questions/quiz', methods=['POST'])
def quizzes():
error = False
# ensure proper request method
if request.method == 'POST':
try:
data = request.json
# get questions from any category
if data['quiz_category']['id'] == 0:
query = Question.query.all()
# get questions from user specified caetgory
else:
query = Question.query.filter_by(
category=str(int(data['quiz_category']['id'])+1)).all()
# randomly select new non previously selected question
previous_questions = data['previous_questions']
index = random.randint(0, len(query)-1)
potential_question = query[index]
selected = False
while selected is False:
if potential_question.id in previous_questions:
# reassign index if already used
index = random.randint(0, len(query)-1)
potential_question = query[index]
else:
selected = True
# set question
_question_ = potential_question
# format data
next_question = {
'id': _question_.id,
'question': _question_.question,
'answer': _question_.answer,
'category': _question_.category,
'difficulty': _question_.difficulty
}
except Exception:
# set error and log error on the server
error = True
print('Error: {}'.format(sys.exc_info()))
finally:
if error:
# send internal server error
abort(500)
else:
# if no error send success object
return jsonify({
'success': True,
'question': next_question
})
else:
# send method not allowed error
abort(405)
# handle bad request errors
@app.errorhandler(400)
def bad_request(error):
return jsonify({
"success": False,
"error": 400,
"message": "Bad Request"
}), 400
# handle resource not found errors
@app.errorhandler(404)
def resource_not_found(error):
return jsonify({
"success": False,
"error": 404,
"message": "Resource Not Found"
}), 404
# handle resource not found errors
@app.errorhandler(405)
def method_not_allowed(error):
return jsonify({
"success": False,
"error": 405,
"message": "Method Not Allowed"
}), 405
# handle unprocessable entity errors
@app.errorhandler(422)
def unprocessable_entity(error):
return jsonify({
"success": False,
"error": 422,
"message": "Unprocessable Entity"
}), 422
# 500: an unexpected failure occurred on the server side
@app.errorhandler(500)
def internal_server_error(error):
    """Return a JSON error body for HTTP 500 responses."""
    payload = {
        "success": False,
        "error": 500,
        "message": "Internal Server Error",
    }
    return jsonify(payload), 500
# Script entry point: start the Flask development server on its default port.
if __name__ == '__main__':
    app.run()
|
normal
|
{
"blob_id": "b84a2093a51e57c448ee7b4f5a89d69dfb14b1b6",
"index": 4876,
"step-1": "<mask token>\n\n\n@app.after_request\ndef after_request(response):\n response.headers.add('Access-Control-Allow-Headers',\n 'Content-Type, Authorization, true')\n response.headers.add('Access-Control-Allow-Methods',\n 'GET, PATCH,PUT,POST, DELETE, OPTIONS')\n return response\n\n\n<mask token>\n\n\n@app.route('/questions/page/<int:page>', methods=['GET'])\ndef get_questions(page):\n error = False\n questions = []\n total_questions = 0\n if type(page) is not int:\n abort(422)\n if request.method == 'GET':\n try:\n categories = [category.type for category in Category.query.all()]\n if categories is None:\n abort(404)\n query = Question.query.paginate(page, per_page=10)\n total_questions += len(Question.query.all())\n if query is None:\n abort(404)\n if len(query.items) == 0:\n error = True\n results = query.items\n for question in results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': total_questions, 'categories':\n categories})\n else:\n abort(405)\n\n\n@app.route('/question/<int:question_id>', methods=['DELETE'])\ndef delete_question(question_id):\n error = False\n if request.method == 'DELETE':\n if type(question_id) is not int:\n abort(422)\n try:\n question = Question.query.get(question_id)\n db.session.delete(question)\n db.session.commit()\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'method': 'Delete',\n 'question': question_id})\n else:\n abort(405)\n\n\n@app.route('/questions', methods=['POST'])\ndef add_question():\n error = False\n if request.method == 'POST':\n 
try:\n new_question = Question(question=request.json['question'],\n answer=request.json['answer'], category=request.json[\n 'category'], difficulty=request.json['difficulty'])\n db.session.add(new_question)\n db.session.commit()\n except Exception:\n error = True\n db.session.rollback()\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n print('Added: {}'.format(new_question))\n return jsonify({'success': True, 'question': request.json})\n else:\n abort(405)\n\n\n<mask token>\n\n\n@app.route('/questions/quiz', methods=['POST'])\ndef quizzes():\n error = False\n if request.method == 'POST':\n try:\n data = request.json\n if data['quiz_category']['id'] == 0:\n query = Question.query.all()\n else:\n query = Question.query.filter_by(category=str(int(data[\n 'quiz_category']['id']) + 1)).all()\n previous_questions = data['previous_questions']\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n selected = False\n while selected is False:\n if potential_question.id in previous_questions:\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n else:\n selected = True\n _question_ = potential_question\n next_question = {'id': _question_.id, 'question': _question_.\n question, 'answer': _question_.answer, 'category':\n _question_.category, 'difficulty': _question_.difficulty}\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(500)\n else:\n return jsonify({'success': True, 'question': next_question})\n else:\n abort(405)\n\n\n<mask token>\n\n\n@app.errorhandler(405)\ndef method_not_allowed(error):\n return jsonify({'success': False, 'error': 405, 'message':\n 'Method Not Allowed'}), 405\n\n\n@app.errorhandler(422)\ndef unprocessable_entity(error):\n return jsonify({'success': False, 'error': 422, 'message':\n 'Unprocessable Entity'}), 422\n\n\n@app.errorhandler(500)\ndef internal_server_error(error):\n 
return jsonify({'success': False, 'error': 500, 'message':\n 'Internal Server Error'}), 500\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.after_request\ndef after_request(response):\n response.headers.add('Access-Control-Allow-Headers',\n 'Content-Type, Authorization, true')\n response.headers.add('Access-Control-Allow-Methods',\n 'GET, PATCH,PUT,POST, DELETE, OPTIONS')\n return response\n\n\n@app.route('/categories', methods=['GET'])\ndef get_categories():\n categories = [category.type for category in Category.query.all()]\n return jsonify({'categories': categories, 'success': True})\n\n\n@app.route('/questions/page/<int:page>', methods=['GET'])\ndef get_questions(page):\n error = False\n questions = []\n total_questions = 0\n if type(page) is not int:\n abort(422)\n if request.method == 'GET':\n try:\n categories = [category.type for category in Category.query.all()]\n if categories is None:\n abort(404)\n query = Question.query.paginate(page, per_page=10)\n total_questions += len(Question.query.all())\n if query is None:\n abort(404)\n if len(query.items) == 0:\n error = True\n results = query.items\n for question in results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': total_questions, 'categories':\n categories})\n else:\n abort(405)\n\n\n@app.route('/question/<int:question_id>', methods=['DELETE'])\ndef delete_question(question_id):\n error = False\n if request.method == 'DELETE':\n if type(question_id) is not int:\n abort(422)\n try:\n question = Question.query.get(question_id)\n db.session.delete(question)\n db.session.commit()\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'method': 
'Delete',\n 'question': question_id})\n else:\n abort(405)\n\n\n@app.route('/questions', methods=['POST'])\ndef add_question():\n error = False\n if request.method == 'POST':\n try:\n new_question = Question(question=request.json['question'],\n answer=request.json['answer'], category=request.json[\n 'category'], difficulty=request.json['difficulty'])\n db.session.add(new_question)\n db.session.commit()\n except Exception:\n error = True\n db.session.rollback()\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n print('Added: {}'.format(new_question))\n return jsonify({'success': True, 'question': request.json})\n else:\n abort(405)\n\n\n@app.route('/questions/search', methods=['POST'])\ndef search_questions():\n error = False\n if request.method == 'POST':\n search_term = str(request.json['searchTerm'])\n if type(search_term) is not str:\n abort(422)\n try:\n query_results = Question.query.filter(Question.question.ilike(\n '%{}%'.format(search_term))).all()\n questions = []\n categories = [category.type for category in Category.query.all()]\n for question in query_results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': len(questions), 'current_category': ''})\n else:\n abort(405)\n\n\n<mask token>\n\n\n@app.route('/questions/quiz', methods=['POST'])\ndef quizzes():\n error = False\n if request.method == 'POST':\n try:\n data = request.json\n if data['quiz_category']['id'] == 0:\n query = Question.query.all()\n else:\n query = Question.query.filter_by(category=str(int(data[\n 'quiz_category']['id']) + 1)).all()\n previous_questions = data['previous_questions']\n 
index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n selected = False\n while selected is False:\n if potential_question.id in previous_questions:\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n else:\n selected = True\n _question_ = potential_question\n next_question = {'id': _question_.id, 'question': _question_.\n question, 'answer': _question_.answer, 'category':\n _question_.category, 'difficulty': _question_.difficulty}\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(500)\n else:\n return jsonify({'success': True, 'question': next_question})\n else:\n abort(405)\n\n\n<mask token>\n\n\n@app.errorhandler(405)\ndef method_not_allowed(error):\n return jsonify({'success': False, 'error': 405, 'message':\n 'Method Not Allowed'}), 405\n\n\n@app.errorhandler(422)\ndef unprocessable_entity(error):\n return jsonify({'success': False, 'error': 422, 'message':\n 'Unprocessable Entity'}), 422\n\n\n@app.errorhandler(500)\ndef internal_server_error(error):\n return jsonify({'success': False, 'error': 500, 'message':\n 'Internal Server Error'}), 500\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@app.after_request\ndef after_request(response):\n response.headers.add('Access-Control-Allow-Headers',\n 'Content-Type, Authorization, true')\n response.headers.add('Access-Control-Allow-Methods',\n 'GET, PATCH,PUT,POST, DELETE, OPTIONS')\n return response\n\n\n@app.route('/categories', methods=['GET'])\ndef get_categories():\n categories = [category.type for category in Category.query.all()]\n return jsonify({'categories': categories, 'success': True})\n\n\n@app.route('/questions/page/<int:page>', methods=['GET'])\ndef get_questions(page):\n error = False\n questions = []\n total_questions = 0\n if type(page) is not int:\n abort(422)\n if request.method == 'GET':\n try:\n categories = [category.type for category in Category.query.all()]\n if categories is None:\n abort(404)\n query = Question.query.paginate(page, per_page=10)\n total_questions += len(Question.query.all())\n if query is None:\n abort(404)\n if len(query.items) == 0:\n error = True\n results = query.items\n for question in results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': total_questions, 'categories':\n categories})\n else:\n abort(405)\n\n\n@app.route('/question/<int:question_id>', methods=['DELETE'])\ndef delete_question(question_id):\n error = False\n if request.method == 'DELETE':\n if type(question_id) is not int:\n abort(422)\n try:\n question = Question.query.get(question_id)\n db.session.delete(question)\n db.session.commit()\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'method': 
'Delete',\n 'question': question_id})\n else:\n abort(405)\n\n\n@app.route('/questions', methods=['POST'])\ndef add_question():\n error = False\n if request.method == 'POST':\n try:\n new_question = Question(question=request.json['question'],\n answer=request.json['answer'], category=request.json[\n 'category'], difficulty=request.json['difficulty'])\n db.session.add(new_question)\n db.session.commit()\n except Exception:\n error = True\n db.session.rollback()\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n print('Added: {}'.format(new_question))\n return jsonify({'success': True, 'question': request.json})\n else:\n abort(405)\n\n\n@app.route('/questions/search', methods=['POST'])\ndef search_questions():\n error = False\n if request.method == 'POST':\n search_term = str(request.json['searchTerm'])\n if type(search_term) is not str:\n abort(422)\n try:\n query_results = Question.query.filter(Question.question.ilike(\n '%{}%'.format(search_term))).all()\n questions = []\n categories = [category.type for category in Category.query.all()]\n for question in query_results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': len(questions), 'current_category': ''})\n else:\n abort(405)\n\n\n<mask token>\n\n\n@app.route('/questions/quiz', methods=['POST'])\ndef quizzes():\n error = False\n if request.method == 'POST':\n try:\n data = request.json\n if data['quiz_category']['id'] == 0:\n query = Question.query.all()\n else:\n query = Question.query.filter_by(category=str(int(data[\n 'quiz_category']['id']) + 1)).all()\n previous_questions = data['previous_questions']\n 
index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n selected = False\n while selected is False:\n if potential_question.id in previous_questions:\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n else:\n selected = True\n _question_ = potential_question\n next_question = {'id': _question_.id, 'question': _question_.\n question, 'answer': _question_.answer, 'category':\n _question_.category, 'difficulty': _question_.difficulty}\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(500)\n else:\n return jsonify({'success': True, 'question': next_question})\n else:\n abort(405)\n\n\n@app.errorhandler(400)\ndef bad_request(error):\n return jsonify({'success': False, 'error': 400, 'message': 'Bad Request'}\n ), 400\n\n\n@app.errorhandler(404)\ndef resource_not_found(error):\n return jsonify({'success': False, 'error': 404, 'message':\n 'Resource Not Found'}), 404\n\n\n@app.errorhandler(405)\ndef method_not_allowed(error):\n return jsonify({'success': False, 'error': 405, 'message':\n 'Method Not Allowed'}), 405\n\n\n@app.errorhandler(422)\ndef unprocessable_entity(error):\n return jsonify({'success': False, 'error': 422, 'message':\n 'Unprocessable Entity'}), 422\n\n\n@app.errorhandler(500)\ndef internal_server_error(error):\n return jsonify({'success': False, 'error': 500, 'message':\n 'Internal Server Error'}), 500\n\n\n<mask token>\n",
"step-4": "<mask token>\napp.config.from_object('config')\ndb.init_app(app)\n<mask token>\n\n\n@app.after_request\ndef after_request(response):\n response.headers.add('Access-Control-Allow-Headers',\n 'Content-Type, Authorization, true')\n response.headers.add('Access-Control-Allow-Methods',\n 'GET, PATCH,PUT,POST, DELETE, OPTIONS')\n return response\n\n\n@app.route('/categories', methods=['GET'])\ndef get_categories():\n categories = [category.type for category in Category.query.all()]\n return jsonify({'categories': categories, 'success': True})\n\n\n@app.route('/questions/page/<int:page>', methods=['GET'])\ndef get_questions(page):\n error = False\n questions = []\n total_questions = 0\n if type(page) is not int:\n abort(422)\n if request.method == 'GET':\n try:\n categories = [category.type for category in Category.query.all()]\n if categories is None:\n abort(404)\n query = Question.query.paginate(page, per_page=10)\n total_questions += len(Question.query.all())\n if query is None:\n abort(404)\n if len(query.items) == 0:\n error = True\n results = query.items\n for question in results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': total_questions, 'categories':\n categories})\n else:\n abort(405)\n\n\n@app.route('/question/<int:question_id>', methods=['DELETE'])\ndef delete_question(question_id):\n error = False\n if request.method == 'DELETE':\n if type(question_id) is not int:\n abort(422)\n try:\n question = Question.query.get(question_id)\n db.session.delete(question)\n db.session.commit()\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if 
error:\n abort(400)\n else:\n return jsonify({'success': True, 'method': 'Delete',\n 'question': question_id})\n else:\n abort(405)\n\n\n@app.route('/questions', methods=['POST'])\ndef add_question():\n error = False\n if request.method == 'POST':\n try:\n new_question = Question(question=request.json['question'],\n answer=request.json['answer'], category=request.json[\n 'category'], difficulty=request.json['difficulty'])\n db.session.add(new_question)\n db.session.commit()\n except Exception:\n error = True\n db.session.rollback()\n print('Error: {}'.format(sys.exc_info()))\n finally:\n db.session.close()\n if error:\n abort(400)\n else:\n print('Added: {}'.format(new_question))\n return jsonify({'success': True, 'question': request.json})\n else:\n abort(405)\n\n\n@app.route('/questions/search', methods=['POST'])\ndef search_questions():\n error = False\n if request.method == 'POST':\n search_term = str(request.json['searchTerm'])\n if type(search_term) is not str:\n abort(422)\n try:\n query_results = Question.query.filter(Question.question.ilike(\n '%{}%'.format(search_term))).all()\n questions = []\n categories = [category.type for category in Category.query.all()]\n for question in query_results:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': len(questions), 'current_category': ''})\n else:\n abort(405)\n\n\n@app.route('/category/<int:category_id>/questions', methods=['GET'])\ndef get_questions_by_category(category_id):\n error = False\n if request.method == 'GET':\n if type(category_id) is not int:\n abort(422)\n try:\n query = Question.query.filter_by(category=str(category_id)).all()\n questions = []\n for 
question in query:\n _question_ = {'id': question.id, 'question': question.\n question, 'answer': question.answer, 'category':\n question.category, 'difficulty': question.difficulty}\n questions.append(_question_)\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(400)\n else:\n return jsonify({'success': True, 'questions': questions,\n 'total_questions': len(questions), 'current_category': ''})\n else:\n abort(405)\n\n\n@app.route('/questions/quiz', methods=['POST'])\ndef quizzes():\n error = False\n if request.method == 'POST':\n try:\n data = request.json\n if data['quiz_category']['id'] == 0:\n query = Question.query.all()\n else:\n query = Question.query.filter_by(category=str(int(data[\n 'quiz_category']['id']) + 1)).all()\n previous_questions = data['previous_questions']\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n selected = False\n while selected is False:\n if potential_question.id in previous_questions:\n index = random.randint(0, len(query) - 1)\n potential_question = query[index]\n else:\n selected = True\n _question_ = potential_question\n next_question = {'id': _question_.id, 'question': _question_.\n question, 'answer': _question_.answer, 'category':\n _question_.category, 'difficulty': _question_.difficulty}\n except Exception:\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n if error:\n abort(500)\n else:\n return jsonify({'success': True, 'question': next_question})\n else:\n abort(405)\n\n\n@app.errorhandler(400)\ndef bad_request(error):\n return jsonify({'success': False, 'error': 400, 'message': 'Bad Request'}\n ), 400\n\n\n@app.errorhandler(404)\ndef resource_not_found(error):\n return jsonify({'success': False, 'error': 404, 'message':\n 'Resource Not Found'}), 404\n\n\n@app.errorhandler(405)\ndef method_not_allowed(error):\n return jsonify({'success': False, 'error': 405, 'message':\n 'Method Not Allowed'}), 
405\n\n\n@app.errorhandler(422)\ndef unprocessable_entity(error):\n return jsonify({'success': False, 'error': 422, 'message':\n 'Unprocessable Entity'}), 422\n\n\n@app.errorhandler(500)\ndef internal_server_error(error):\n return jsonify({'success': False, 'error': 500, 'message':\n 'Internal Server Error'}), 500\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "import os\nimport sys\nfrom flask import Flask, request, abort, flash, jsonify, Response\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_cors import CORS\nfrom flask_migrate import Migrate\nimport random\nimport unittest\n\nfrom models import db, Question, Category\n\n# set the number of pages fpr pagination\nQUESTIONS_PER_PAGE = 10\n\n# create and configure the app\napp = Flask(__name__)\napp.config.from_object('config')\ndb.init_app(app)\nmigrate = Migrate(app, db)\n\n# set up cors for the application\ncors = CORS(app, resources={r'/': {'origins': '*'}})\n\n# to set Access-Control-Allow Headers and Methods\n@app.after_request\ndef after_request(response):\n response.headers.add('Access-Control-Allow-Headers',\n 'Content-Type, Authorization, true')\n response.headers.add('Access-Control-Allow-Methods',\n 'GET, PATCH,PUT,POST, DELETE, OPTIONS')\n return response\n\n# endpoint to handle GET requests for all available categories\n@app.route('/categories', methods=['GET'])\ndef get_categories():\n categories = [category.type for category in Category.query.all()]\n return jsonify({'categories': categories, 'success': True})\n\n# endpoint to handle GET requests for questions with pagination\n@app.route('/questions/page/<int:page>', methods=['GET'])\ndef get_questions(page):\n error = False\n questions = []\n total_questions = 0\n # if question id is not an integer\n if type(page) is not int:\n # let them know their input is not processable\n abort(422)\n # ensure proper request method\n if request.method == 'GET':\n try:\n # query for all categories\n categories = [category.type for category in Category.query.all()]\n if categories is None:\n # let the user know that no resource was found\n abort(404)\n\n query = Question.query.paginate(page, per_page=10)\n total_questions += len(Question.query.all())\n if query is None:\n # let the user know that no resource was found\n abort(404)\n if len(query.items) == 0:\n # let the user know that no resource was 
found\n error = True\n\n results = query.items\n # format data\n for question in results:\n _question_ = {\n 'id': question.id,\n 'question': question.question,\n 'answer': question.answer,\n 'category': question.category,\n 'difficulty': question.difficulty\n }\n questions.append(_question_)\n except Exception:\n # set error to true and log on the server\n error = True\n print('Error: {}'.format(sys.exc_info()))\n finally:\n\n if error:\n # let the user know their request was not successful\n abort(400)\n else:\n # if successful send back success response\n return jsonify({\n 'success': True,\n 'questions': questions,\n 'total_questions': total_questions,\n 'categories': categories\n })\n else:\n # send method not allowed error\n abort(405)\n\n\n# endpoint to delete a question from the database\n@app.route('/question/<int:question_id>', methods=['DELETE'])\ndef delete_question(question_id):\n error = False\n\n # ensure proper request method\n if request.method == 'DELETE':\n\n # if question id is not an integer\n if type(question_id) is not int:\n # let them know their input is not processable\n abort(422)\n\n try:\n # get user selected question from database\n question = Question.query.get(question_id)\n # stage question delete\n db.session.delete(question)\n # commit deletion to the database\n db.session.commit()\n except Exception:\n # set error to true and log on the server\n error = True\n print('Error: {}'.format(sys.exc_info()))\n\n finally:\n # close database session\n db.session.close()\n\n if error:\n # send bad request error\n abort(400)\n\n else:\n # if no error send success object and log on server\n return jsonify({\n 'success': True,\n 'method': 'Delete',\n 'question': question_id\n })\n else:\n # send method not allowed error\n abort(405)\n\n\n# endpoint to add a question to the database\n@app.route('/questions', methods=['POST'])\ndef add_question():\n error = False\n\n # ensure proper request method\n if request.method == 'POST':\n try:\n # 
format data for database\n new_question = Question(\n question=request.json['question'],\n answer=request.json['answer'],\n category=request.json['category'],\n difficulty=request.json['difficulty']\n )\n # stage data in database\n db.session.add(new_question)\n # commit data to database\n db.session.commit()\n\n except Exception:\n # set error to true and log on the server\n error = True\n db.session.rollback()\n print('Error: {}'.format(sys.exc_info()))\n\n finally:\n # close database session\n db.session.close()\n\n if error:\n # send bad request error\n abort(400)\n else:\n # if no error send success object and log on server\n print('Added: {}'.format(new_question))\n return jsonify({\n 'success': True,\n 'question': request.json\n\n })\n else:\n # send method not allowed error\n abort(405)\n\n\n# endpoint to search for for questions in the database\n@app.route('/questions/search', methods=['POST'])\ndef search_questions():\n error = False\n\n # ensure proper request method\n if request.method == 'POST':\n\n # set esrch term from user request\n search_term = str(request.json['searchTerm'])\n # if the user submits something other than a string of text block it\n if type(search_term) is not str:\n # let them know their input is not processable\n abort(422)\n\n try:\n # query database using user provided search term\n query_results = Question.query.filter(\n Question.question.ilike('%{}%'.format(search_term))).all()\n questions = []\n # get categories from database\n categories = [category.type for category in Category.query.all()]\n # format response data\n for question in query_results:\n _question_ = {\n 'id': question.id,\n 'question': question.question,\n 'answer': question.answer,\n 'category': question.category,\n 'difficulty': question.difficulty\n }\n questions.append(_question_)\n\n except Exception:\n # set error to true and log on the server\n error = True\n print('Error: {}'.format(sys.exc_info()))\n\n finally:\n if error:\n # send bad request error\n 
abort(400)\n else:\n # if no error send success object\n return jsonify({\n 'success': True,\n 'questions': questions,\n 'total_questions': len(questions),\n 'current_category': ''\n })\n else:\n # send method not allowed error\n abort(405)\n\n# endpoint to get questions by a specific category\n@app.route('/category/<int:category_id>/questions', methods=['GET'])\ndef get_questions_by_category(category_id):\n error = False\n\n # ensure proper request method\n if request.method == 'GET':\n\n # if category id is not an integer\n if type(category_id) is not int:\n # let them know their input is not processable\n abort(422)\n\n try:\n # get questions by user selected category\n query = Question.query.filter_by(category=str(category_id)).all()\n questions = []\n # format response data\n for question in query:\n _question_ = {\n 'id': question.id,\n 'question': question.question,\n 'answer': question.answer,\n 'category': question.category,\n 'difficulty': question.difficulty\n }\n questions.append(_question_)\n except Exception:\n # set error to true and log on the server\n error = True\n print('Error: {}'.format(sys.exc_info()))\n\n finally:\n if error:\n # send bad request error\n abort(400)\n else:\n # if no error send success object\n return jsonify({\n 'success': True,\n 'questions': questions,\n 'total_questions': len(questions),\n 'current_category': ''\n })\n else:\n # send method not allowed error\n abort(405)\n\n# endpoint to initiate quiz\n@app.route('/questions/quiz', methods=['POST'])\ndef quizzes():\n error = False\n\n # ensure proper request method\n if request.method == 'POST':\n\n try:\n data = request.json\n # get questions from any category\n if data['quiz_category']['id'] == 0:\n query = Question.query.all()\n # get questions from user specified caetgory\n else:\n query = Question.query.filter_by(\n category=str(int(data['quiz_category']['id'])+1)).all()\n # randomly select new non previously selected question\n previous_questions = 
data['previous_questions']\n index = random.randint(0, len(query)-1)\n potential_question = query[index]\n selected = False\n while selected is False:\n if potential_question.id in previous_questions:\n # reassign index if already used\n index = random.randint(0, len(query)-1)\n potential_question = query[index]\n else:\n selected = True\n # set question\n _question_ = potential_question\n # format data\n next_question = {\n 'id': _question_.id,\n 'question': _question_.question,\n 'answer': _question_.answer,\n 'category': _question_.category,\n 'difficulty': _question_.difficulty\n }\n except Exception:\n # set error and log error on the server\n error = True\n print('Error: {}'.format(sys.exc_info()))\n\n finally:\n\n if error:\n # send internal server error\n abort(500)\n else:\n # if no error send success object\n return jsonify({\n 'success': True,\n 'question': next_question\n })\n else:\n # send method not allowed error\n abort(405)\n\n# handle bad request errors\n@app.errorhandler(400)\ndef bad_request(error):\n return jsonify({\n \"success\": False,\n \"error\": 400,\n \"message\": \"Bad Request\"\n }), 400\n\n# handle resource not found errors\n@app.errorhandler(404)\ndef resource_not_found(error):\n return jsonify({\n \"success\": False,\n \"error\": 404,\n \"message\": \"Resource Not Found\"\n }), 404\n\n# handle resource not found errors\n@app.errorhandler(405)\ndef method_not_allowed(error):\n return jsonify({\n \"success\": False,\n \"error\": 405,\n \"message\": \"Method Not Allowed\"\n }), 405\n\n# handle unprocessable entity errors\n@app.errorhandler(422)\ndef unprocessable_entity(error):\n return jsonify({\n \"success\": False,\n \"error\": 422,\n \"message\": \"Unprocessable Entity\"\n }), 422\n\n# handle internal server errors\n@app.errorhandler(500)\ndef internal_server_error(error):\n return jsonify({\n \"success\": False,\n \"error\": 500,\n \"message\": \"Internal Server Error\"\n }), 500\n\n\n# Default port:\nif __name__ == '__main__':\n 
app.run()\n",
"step-ids": [
8,
10,
12,
14,
17
]
}
|
[
8,
10,
12,
14,
17
] |
import os
import xml.etree.ElementTree as Et
import copy
from .common import CommonRouteExchangeService
class DataRoutes(CommonRouteExchangeService):
    """Helpers for reading and editing XML route data.

    Each ``change_*`` method works on a deep copy, so the tree passed
    in by the caller is never modified.
    """

    def get_route_from_file(self, path_route):
        """Read a route from an XML file.

        :param path_route: Path to the route XML file, resolved against
            the current working directory.
        :return: ElementTree with the parsed route.
        """
        path_file = os.path.join(os.getcwd(), path_route)
        return Et.parse(path_file)

    def _set_route_attribute(self, tree_route, name, value):
        """Return a deep copy of *tree_route* with one attribute replaced.

        Finds the first element anywhere in the tree that carries the
        attribute *name* and overwrites it with *value*.

        :param tree_route: Route as an ElementTree.
        :param name: Attribute name to look up and replace.
        :param value: New attribute value (stored as given).
        :return: ElementTree (deep copy; the input tree is untouched).
        """
        tree_route_copy = copy.deepcopy(tree_route)
        root = tree_route_copy.getroot()
        # XPath predicate [@name] matches the first element that has the
        # attribute, regardless of its current value.
        root.find('.//*[@{}]'.format(name)).attrib.update({name: value})
        return tree_route_copy

    def change_uvid_in_route(self, tree_route, uvid):
        """Replace the UVID (``vesselVoyage`` attribute) in a route.

        :param tree_route: Route as an ElementTree.
        :param uvid: New UVID value.
        :return: ElementTree (deep copy of the input).
        """
        return self._set_route_attribute(tree_route, 'vesselVoyage', uvid)

    def change_status_in_route(self, tree_route, status):
        """Replace the route status in a route.

        :param tree_route: Route as an ElementTree.
        :param status: Route status 1 - ORIGINAL
                                    2 - PLANNED_FOR_VOYAGE
                                    3 - OPTIMIZED
                                    4 - CROSS_CHECKED
                                    5 - SAFETY_CHECKED
                                    6 - APPROVED
                                    7 - USED_FOR_MONITORING
                                    8 - INACTIVE
        :return: ElementTree (deep copy of the input).
        """
        return self._set_route_attribute(tree_route, 'routeStatus', str(status))

    def change_route_name_in_route(self, tree_route, route_name):
        """Replace the ``routeName`` attribute in a route.

        :param tree_route: Route as an ElementTree.
        :param route_name: New route name.
        :return: ElementTree (deep copy of the input).
        """
        return self._set_route_attribute(tree_route, 'routeName', route_name)

    def convert_route_to_str(self, tree_route):
        """Serialize a route tree to a UTF-8 encoded byte string.

        :param tree_route: Route as an ElementTree.
        :return: bytes with the XML document (including the declaration).
        """
        return Et.tostring(tree_route.getroot(), encoding='UTF-8')
|
normal
|
{
"blob_id": "63069f03d17862b8ea6aa74d0acd1370bbea0dcb",
"index": 836,
"step-1": "<mask token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <mask token>\n <mask token>\n <mask token>\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n <mask token>\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"step-2": "<mask token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n <mask token>\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n <mask token>\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"step-3": "<mask token>\n\n\nclass DataRoutes(CommonRouteExchangeService):\n \"\"\"Класс для работы с данными аршрутов\"\"\"\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"step-4": "import os\nimport xml.etree.ElementTree as Et\nimport copy\nfrom .common import CommonRouteExchangeService\n\n\nclass DataRoutes(CommonRouteExchangeService):\n \"\"\"Класс для работы с данными аршрутов\"\"\"\n\n def get_route_from_file(self, path_route):\n \"\"\"Считывание маршрута из файла\n :param path_route: Путь до маршрута в формате XML\n :return: ElementTree\n \"\"\"\n path_file = os.path.join(os.getcwd(), path_route)\n return Et.parse(path_file)\n\n def change_uvid_in_route(self, tree_route, uvid):\n \"\"\"Замена UVID в маршруте\n :param tree_route: Маршрут в формате XML\n :param uvid: UVID\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@vesselVoyage]').attrib.update({'vesselVoyage': uvid})\n return tree_route_copy\n\n def change_status_in_route(self, tree_route, status):\n \"\"\"Замена статуса маршрута в маршруте\n :param tree_route: Маршрут в формате XML\n :param status: Cтатус маршрута 1 - ORIGINAL\n 2 - PLANNED_FOR_VOYAGE\n 3 - OPTIMIZED\n 4 - CROSS_CHECKED\n 5 - SAFETY_CHECKED\n 6 - APPROVED\n 7 - USED_FOR_MONITORING\n 8 - INACTIVE\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeStatus]').attrib.update({'routeStatus': str(\n status)})\n return tree_route_copy\n\n def change_route_name_in_route(self, tree_route, route_name):\n \"\"\"Замена routeName в маршруте\n :param tree_route: Маршрут в формате XML\n :param route_name: Имя маршрута\n :return: ElementTree\n \"\"\"\n tree_route_copy = copy.deepcopy(tree_route)\n root = tree_route_copy.getroot()\n root.find('.//*[@routeName]').attrib.update({'routeName': route_name})\n return tree_route_copy\n\n def convert_route_to_str(self, tree_route):\n return Et.tostring(tree_route.getroot(), encoding='UTF-8')\n",
"step-5": null,
"step-ids": [
3,
5,
7,
8
]
}
|
[
3,
5,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def case_study_submission(request, template_name='casestudies/submit.html'):
form = SubmitCaseStudyForm(request.POST or None)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('submit_message'))
return render_to_response(template_name, {'form': form},
context_instance=RequestContext(request))
<|reserved_special_token_1|>
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import render_to_response
from django.template import RequestContext
from whydjango.casestudies.forms import SubmitCaseStudyForm


# NOTE(review): django.core.urlresolvers and render_to_response were removed
# in Django 2.0 and 3.0 respectively — this module targets a legacy Django.
def case_study_submission(request, template_name='casestudies/submit.html'):
    """Render and process the case-study submission form.

    On a valid POST the form is saved and the user is redirected to the
    'submit_message' URL; otherwise the (possibly bound) form is re-rendered
    with the given template.
    """
    form = SubmitCaseStudyForm(request.POST or None)
    if form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse('submit_message'))
    return render_to_response(template_name, {'form': form},
        context_instance=RequestContext(request))
<|reserved_special_token_1|>
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import render_to_response
from django.template import RequestContext
from whydjango.casestudies.forms import SubmitCaseStudyForm
def case_study_submission(request, template_name="casestudies/submit.html"):
form = SubmitCaseStudyForm(request.POST or None)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse("submit_message"))
return render_to_response(template_name, {
"form": form,
}, context_instance=RequestContext(request))
|
flexible
|
{
"blob_id": "fe3e104cf213b21c33a4b5c6e1a61315c4770eda",
"index": 6821,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef case_study_submission(request, template_name='casestudies/submit.html'):\n form = SubmitCaseStudyForm(request.POST or None)\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(reverse('submit_message'))\n return render_to_response(template_name, {'form': form},\n context_instance=RequestContext(request))\n",
"step-3": "from django.core.urlresolvers import reverse\nfrom django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext\nfrom whydjango.casestudies.forms import SubmitCaseStudyForm\n\n\ndef case_study_submission(request, template_name='casestudies/submit.html'):\n form = SubmitCaseStudyForm(request.POST or None)\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(reverse('submit_message'))\n return render_to_response(template_name, {'form': form},\n context_instance=RequestContext(request))\n",
"step-4": "from django.core.urlresolvers import reverse \nfrom django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext \n\n\nfrom whydjango.casestudies.forms import SubmitCaseStudyForm\n\ndef case_study_submission(request, template_name=\"casestudies/submit.html\"):\n\n form = SubmitCaseStudyForm(request.POST or None)\n\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(reverse(\"submit_message\"))\n\n return render_to_response(template_name, { \n \"form\": form,\n }, context_instance=RequestContext(request)) \n \n ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
####################################################################################
#
# Kaggle Competition: https://www.kaggle.com/c/msk-redefining-cancer-treatment
# Sponsor : Memorial Sloan Kettering Cancer Center (MSKCC)
# Author: Amrut Shintre
#
####################################################################################
#####################
# Importing Libraries
#####################
import numpy as np
import pandas as pd
import matplotlib as plt
import re
import nltk
nltk.download('stopwords')
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
import xgboost as xgb
from sklearn.model_selection import train_test_split
from sklearn import metrics
import gc
import random
####################
# Importing datasets
####################
# Training text: '||'-separated file; the python engine is required because
# the separator is treated as a regex.  The first parsed row repeats the
# header, so it is dropped and the index renumbered from 0.
train_df = pd.read_csv('training_text', sep = '\|\|', engine = 'python', names = ['ID', 'Text'],
                       header = None)
train_df = train_df.iloc[1:,:]
train_df.index = range(len(train_df))
train_var = pd.read_csv('training_variants')
# Testing Dataset: same '||' format; test text keeps its default 0..n-1 index.
test_df = pd.read_csv('test_text', sep = '\|\|', engine = 'python', names = ['ID', 'Text'],
                      header = None)
test_var = pd.read_csv('test_variants')
# --------------------------------------------TEXT ---------------------------------------------
##############
# TextCleaning
##############
def text_cleaning(text_df):
    """Clean every document in ``text_df['Text']`` into a stemmed corpus.

    For each row: strip everything but letters, lowercase, split on
    whitespace, drop English stopwords and Porter-stem the remaining words.

    :param text_df: DataFrame with a 'Text' column (assumes a 0..n-1 row
        index so positional ``i`` matches label lookup — TODO confirm for
        every caller)
    :return: list of cleaned document strings, one per input row
    """
    # Hoisted out of the loop: the original rebuilt the stopword set for
    # EVERY single word (it sat inside the comprehension condition) and
    # re-created the stemmer per document — dramatically slower with no
    # change in output.
    stop_words = set(stopwords.words('english'))
    stemmer = PorterStemmer()
    corpus = []
    for i in range(len(text_df)):
        # Keep letters only; punctuation and digits become spaces.
        text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i])
        words = text.lower().split()
        words = [stemmer.stem(word) for word in words if word not in stop_words]
        corpus.append(' '.join(words))
    return corpus
# Training Text Data
corpus_train = text_cleaning(train_df)
# Testing Text Data
corpus_test = text_cleaning(test_df)
#############################################
# Term Frequency - Inverse Document Frequency
#############################################
# Vocabulary and idf weights are fit on the training corpus only and then
# applied to the test corpus, so no test information leaks into the fit.
# NOTE(review): .toarray() densifies the sparse TF-IDF matrices — very
# memory hungry for a large vocabulary; confirm this fits in RAM.
tfidf = TfidfVectorizer()
tfidf_tr = tfidf.fit_transform(corpus_train).toarray()
tfidf_test = tfidf.transform(corpus_test).toarray()
##############################
# Singular Value Decomposition
##############################
svd = TruncatedSVD(n_components = 1000) # considering 98% variance in the Data
svd_tr = svd.fit_transform(tfidf_tr) # Fitting on cleaned training text data
svd_train = svd.transform(tfidf_test) # Transforming on cleaned testing text data
svd_tr = pd.DataFrame(svd_tr)
svd_test = pd.DataFrame(svd_train)
#explainedvar = svd.explained_variance_ratio_
#exp_var = explainedvar.cumsum()
# -------------------------------------------- VARIANTS ---------------------------------------------
####################
# Dependent Variable
####################
# Classes are 1..9 in the data; shift to 0..8 as xgboost's multiclass
# objective expects zero-based labels.
y = train_var['Class'].values
y = y-1
#################
# Merging Dataset
#################
# Stack train and test variants (and texts) so feature engineering sees one
# frame, re-key both on a fresh 0..n-1 'ID', then left-join text onto the
# variants.
df = pd.concat([train_var, test_var], axis = 0)
df = df.drop(['ID'], axis = 1)
df['ID'] = range(df.shape[0])
df.index = range(df.shape[0])
df_text = pd.concat([train_df, test_df], axis = 0)
df_text = df_text.drop('ID', axis = 1)
df_text['ID'] = range(df_text.shape[0])
df_text.index = range(df_text.shape[0])
df_all = pd.merge(df, df_text, how = 'left', on = 'ID')
################
# Missing Values
################
# Checking for missing values
# Checking for missing values: build a small summary table with one row per
# column of the training variants and that column's null count.
column_list = train_var.columns.values.tolist()
missing_values = pd.DataFrame()
missing_values['Columns'] = column_list
# Fixed: the original assigned a scalar to the whole column on every loop
# iteration, so every row ended up holding the null count of the *last*
# column only.  Collect one count per column instead.
missing_values['No. of missing values'] = [
    train_var[c].isnull().sum() for c in column_list]
# There are no missing values.
#######################
# Categorical Variables
#######################
# Extracting the columns having categorical Variables.
# Columns of dtype object ('O') hold the categorical (string) variables.
column_list = df.columns
categorical_columns = []
for i in column_list:
    if df[i].dtype == 'O':
        categorical_columns.append(i)
# Encoding the columns with categorical variables
# Label Encoding: a fresh encoder per column, plus the string length of each
# value as an extra numeric feature.
# NOTE(review): these '_le'/'_length' columns are added to df AFTER df_all
# was built by the merge above, so they never reach train/test below —
# confirm whether they were meant to be carried into the model features.
for i in categorical_columns:
    le = LabelEncoder()
    df[i + '_le'] = le.fit_transform(df[i])
    df[i + '_length'] = df[i].map(lambda x: len(str(x)))
# Feature Engineering: how many words of the Gene / Variation name appear
# verbatim in the clinical text of the same row.
df_all['Gene_Share'] = df_all.apply(lambda r: sum([1 for w in r['Gene'].split(' ') if w in r['Text'].split(' ')]), axis=1)
df_all['Variation_Share'] = df_all.apply(lambda r: sum([1 for w in r['Variation'].split(' ') if w in r['Text'].split(' ')]), axis=1)
###################
# Splitting Dataset
###################
# First len(train_var) rows of df_all are the training rows (concat order
# established during the merge step above).
train = df_all.iloc[:len(train_var), :]
test = df_all.iloc[len(train_var):,:]
test.index = range(len(test_var))
# Keep only the engineered numeric features; 'Class' is NaN for test rows.
train = train.drop(['Gene', 'Variation', 'ID', 'Text', 'Class'], axis = 1)
test = test.drop(['Gene', 'Variation', 'Text', 'ID', 'Class'], axis = 1)
# Append the 1000 SVD text components column-wise.
train_final = pd.concat([train, svd_tr], axis = 1)
test_final = pd.concat([test, svd_test], axis = 1)
#################
# Standardization
#################
# Scaler statistics come from train only and are re-applied to test.
sc = StandardScaler()
train_final = sc.fit_transform(train_final)
test_final = sc.transform(test_final)
train_final = pd.DataFrame(train_final)
test_final = pd.DataFrame(test_final)
# -------------------------------------------- MODEL ---------------------------------------------
##################
# XGBoost Matrix
##################
dtrain = xgb.DMatrix(train_final, y)
dtest = xgb.DMatrix(test_final)
##################
# Cross-Validation
##################
def docv(param, iterations, nfold):
    """Cross-validate xgboost on the global ``dtrain`` matrix.

    Prints the best mean validation mlogloss observed during CV.

    :param param: xgboost parameter dict
    :param iterations: maximum number of boosting rounds
    :param nfold: number of CV folds
    :return: number of rounds actually run (early stopping may cut it short)
    """
    history = xgb.cv(
        params=param,
        num_boost_round=iterations,
        nfold=nfold,
        dtrain=dtrain,
        seed=random.randint(1, 10000),
        early_stopping_rounds=100,
        maximize=False,
        verbose_eval=50)
    gc.collect()
    # Best (lowest) mean validation mlogloss across all rounds.
    print(min(history['test-mlogloss-mean']))
    return history.shape[0]
#########
# Testing
#########
def doTest(param, iteration):
    """Train one xgboost model on a random 80/20 split of the training data.

    Prints the hold-out mlogloss and returns the model's class-probability
    predictions for the global ``dtest`` matrix.

    :param param: xgboost parameter dict
    :param iteration: number of boosting rounds
    :return: array of test-set class probabilities
    """
    X_fit, X_holdout, y_fit, y_holdout = train_test_split(
        train_final, y, test_size=0.2, random_state=random.randint(1, 1000))
    # Watch both partitions so early stopping tracks the validation loss.
    evals = [(xgb.DMatrix(X_fit, y_fit), 'train'),
             (xgb.DMatrix(X_holdout, y_holdout), 'validation')]
    booster = xgb.train(
        params=param,
        dtrain=xgb.DMatrix(X_fit, y_fit),
        num_boost_round=iteration,
        evals=evals,
        verbose_eval=50,
        early_stopping_rounds=100)
    holdout_loss = metrics.log_loss(
        y_holdout, booster.predict(xgb.DMatrix(X_holdout)), labels=range(9))
    test_predictions = booster.predict(dtest)
    print(holdout_loss)
    return test_predictions
#########
# Bagging
#########
def Bagging(N, params, best_iter):
    """Train N models via ``doTest`` and average their class probabilities.

    :param N: number of bagging rounds (expected >= 1)
    :param params: xgboost parameter dict passed to each round
    :param best_iter: boosting rounds per model
    :return: DataFrame of averaged test-set class probabilities
    """
    for run in range(N):
        current = doTest(params, best_iter)
        # First round seeds the accumulator; later rounds add onto it.
        preds = current.copy() if run == 0 else preds + current
    return pd.DataFrame(preds / N)
###################
# Running the Model
###################
# Multiclass softprob over the 9 classes, evaluated with multiclass logloss.
params = {
    'eta': 0.02,
    'max_depth': 6,
    'objective': 'multi:softprob',
    'eval_metric': 'mlogloss',
    'silent': False,
    'seed': random.randint(1,100),
    'num_class': 9
    }
# 5-fold CV picks the round count, then 5 bagged models are averaged.
cross_vali = docv(params, 10000, 5)
predicted_class = Bagging(5, params, cross_vali)
# -------------------------------------------- SUBMISSION ---------------------------------------------
# One probability column per class, keyed by the original test IDs.
sub_file = pd.DataFrame()
sub_file['ID'] = test_var['ID'].values
Sub_File = pd.concat([sub_file, predicted_class], axis = 1)
Sub_File.columns = ['ID', 'Class1', 'Class2', 'Class3', 'Class4', 'Class5', 'Class6', 'Class7',
                    'Class8', 'Class9']
Sub_File.to_csv("submission33.csv", index = False)
# -------------------------------------------- Project Layout ---------------------------------------------
# 1) Text Cleaning
# 2) TFIDF Vectorizer and Singular Value Decomposition
# 3) Feature Engineering
# 4) Building a Model and trying out different models
# 5) Parameter Tuning
# 6) Bagged Boosting
|
normal
|
{
"blob_id": "1305991a9cd82ddeaffff1545a35ced992e6792f",
"index": 7300,
"step-1": "<mask token>\n\n\ndef text_cleaning(text_df):\n corpus = []\n for i in range(len(text_df)):\n text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i])\n text = text.lower()\n text = text.split()\n ps = PorterStemmer()\n text = [ps.stem(word) for word in text if not word in set(stopwords\n .words('english'))]\n text = ' '.join(text)\n corpus.append(text)\n return corpus\n\n\n<mask token>\n\n\ndef docv(param, iterations, nfold):\n model_CV = xgb.cv(params=param, num_boost_round=iterations, nfold=nfold,\n dtrain=dtrain, seed=random.randint(1, 10000), early_stopping_rounds\n =100, maximize=False, verbose_eval=50)\n gc.collect()\n best = min(model_CV['test-mlogloss-mean'])\n best_iter = model_CV.shape[0]\n print(best)\n return best_iter\n\n\ndef doTest(param, iteration):\n X_tr, X_val, y_tr, y_val = train_test_split(train_final, y, test_size=\n 0.2, random_state=random.randint(1, 1000))\n watchlist = [(xgb.DMatrix(X_tr, y_tr), 'train'), (xgb.DMatrix(X_val,\n y_val), 'validation')]\n model = xgb.train(params=param, dtrain=xgb.DMatrix(X_tr, y_tr),\n num_boost_round=iteration, evals=watchlist, verbose_eval=50,\n early_stopping_rounds=100)\n score = metrics.log_loss(y_val, model.predict(xgb.DMatrix(X_val)),\n labels=range(9))\n predicted_class = model.predict(dtest)\n print(score)\n return predicted_class\n\n\ndef Bagging(N, params, best_iter):\n for i in range(N):\n param = params\n p = doTest(param, best_iter)\n if i == 0:\n preds = p.copy()\n else:\n preds = preds + p\n predictions = preds / N\n predictions = pd.DataFrame(predictions)\n return predictions\n\n\n<mask token>\n",
"step-2": "<mask token>\nnltk.download('stopwords')\n<mask token>\n\n\ndef text_cleaning(text_df):\n corpus = []\n for i in range(len(text_df)):\n text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i])\n text = text.lower()\n text = text.split()\n ps = PorterStemmer()\n text = [ps.stem(word) for word in text if not word in set(stopwords\n .words('english'))]\n text = ' '.join(text)\n corpus.append(text)\n return corpus\n\n\n<mask token>\nfor i in column_list:\n missing_values['No. of missing values'] = train_var[i].isnull(\n ).values.ravel().sum()\n<mask token>\nfor i in column_list:\n if df[i].dtype == 'O':\n categorical_columns.append(i)\nfor i in categorical_columns:\n le = LabelEncoder()\n df[i + '_le'] = le.fit_transform(df[i])\n df[i + '_length'] = df[i].map(lambda x: len(str(x)))\n<mask token>\n\n\ndef docv(param, iterations, nfold):\n model_CV = xgb.cv(params=param, num_boost_round=iterations, nfold=nfold,\n dtrain=dtrain, seed=random.randint(1, 10000), early_stopping_rounds\n =100, maximize=False, verbose_eval=50)\n gc.collect()\n best = min(model_CV['test-mlogloss-mean'])\n best_iter = model_CV.shape[0]\n print(best)\n return best_iter\n\n\ndef doTest(param, iteration):\n X_tr, X_val, y_tr, y_val = train_test_split(train_final, y, test_size=\n 0.2, random_state=random.randint(1, 1000))\n watchlist = [(xgb.DMatrix(X_tr, y_tr), 'train'), (xgb.DMatrix(X_val,\n y_val), 'validation')]\n model = xgb.train(params=param, dtrain=xgb.DMatrix(X_tr, y_tr),\n num_boost_round=iteration, evals=watchlist, verbose_eval=50,\n early_stopping_rounds=100)\n score = metrics.log_loss(y_val, model.predict(xgb.DMatrix(X_val)),\n labels=range(9))\n predicted_class = model.predict(dtest)\n print(score)\n return predicted_class\n\n\ndef Bagging(N, params, best_iter):\n for i in range(N):\n param = params\n p = doTest(param, best_iter)\n if i == 0:\n preds = p.copy()\n else:\n preds = preds + p\n predictions = preds / N\n predictions = pd.DataFrame(predictions)\n return 
predictions\n\n\n<mask token>\nSub_File.to_csv('submission33.csv', index=False)\n",
"step-3": "<mask token>\nnltk.download('stopwords')\n<mask token>\ntrain_df = pd.read_csv('training_text', sep='\\\\|\\\\|', engine='python',\n names=['ID', 'Text'], header=None)\ntrain_df = train_df.iloc[1:, :]\ntrain_df.index = range(len(train_df))\ntrain_var = pd.read_csv('training_variants')\ntest_df = pd.read_csv('test_text', sep='\\\\|\\\\|', engine='python', names=[\n 'ID', 'Text'], header=None)\ntest_var = pd.read_csv('test_variants')\n\n\ndef text_cleaning(text_df):\n corpus = []\n for i in range(len(text_df)):\n text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i])\n text = text.lower()\n text = text.split()\n ps = PorterStemmer()\n text = [ps.stem(word) for word in text if not word in set(stopwords\n .words('english'))]\n text = ' '.join(text)\n corpus.append(text)\n return corpus\n\n\ncorpus_train = text_cleaning(train_df)\ncorpus_test = text_cleaning(test_df)\ntfidf = TfidfVectorizer()\ntfidf_tr = tfidf.fit_transform(corpus_train).toarray()\ntfidf_test = tfidf.transform(corpus_test).toarray()\nsvd = TruncatedSVD(n_components=1000)\nsvd_tr = svd.fit_transform(tfidf_tr)\nsvd_train = svd.transform(tfidf_test)\nsvd_tr = pd.DataFrame(svd_tr)\nsvd_test = pd.DataFrame(svd_train)\ny = train_var['Class'].values\ny = y - 1\ndf = pd.concat([train_var, test_var], axis=0)\ndf = df.drop(['ID'], axis=1)\ndf['ID'] = range(df.shape[0])\ndf.index = range(df.shape[0])\ndf_text = pd.concat([train_df, test_df], axis=0)\ndf_text = df_text.drop('ID', axis=1)\ndf_text['ID'] = range(df_text.shape[0])\ndf_text.index = range(df_text.shape[0])\ndf_all = pd.merge(df, df_text, how='left', on='ID')\ncolumn_list = train_var.columns.values.tolist()\nmissing_values = pd.DataFrame()\nmissing_values['Columns'] = column_list\nfor i in column_list:\n missing_values['No. 
of missing values'] = train_var[i].isnull(\n ).values.ravel().sum()\ncolumn_list = df.columns\ncategorical_columns = []\nfor i in column_list:\n if df[i].dtype == 'O':\n categorical_columns.append(i)\nfor i in categorical_columns:\n le = LabelEncoder()\n df[i + '_le'] = le.fit_transform(df[i])\n df[i + '_length'] = df[i].map(lambda x: len(str(x)))\ndf_all['Gene_Share'] = df_all.apply(lambda r: sum([(1) for w in r['Gene'].\n split(' ') if w in r['Text'].split(' ')]), axis=1)\ndf_all['Variation_Share'] = df_all.apply(lambda r: sum([(1) for w in r[\n 'Variation'].split(' ') if w in r['Text'].split(' ')]), axis=1)\ntrain = df_all.iloc[:len(train_var), :]\ntest = df_all.iloc[len(train_var):, :]\ntest.index = range(len(test_var))\ntrain = train.drop(['Gene', 'Variation', 'ID', 'Text', 'Class'], axis=1)\ntest = test.drop(['Gene', 'Variation', 'Text', 'ID', 'Class'], axis=1)\ntrain_final = pd.concat([train, svd_tr], axis=1)\ntest_final = pd.concat([test, svd_test], axis=1)\nsc = StandardScaler()\ntrain_final = sc.fit_transform(train_final)\ntest_final = sc.transform(test_final)\ntrain_final = pd.DataFrame(train_final)\ntest_final = pd.DataFrame(test_final)\ndtrain = xgb.DMatrix(train_final, y)\ndtest = xgb.DMatrix(test_final)\n\n\ndef docv(param, iterations, nfold):\n model_CV = xgb.cv(params=param, num_boost_round=iterations, nfold=nfold,\n dtrain=dtrain, seed=random.randint(1, 10000), early_stopping_rounds\n =100, maximize=False, verbose_eval=50)\n gc.collect()\n best = min(model_CV['test-mlogloss-mean'])\n best_iter = model_CV.shape[0]\n print(best)\n return best_iter\n\n\ndef doTest(param, iteration):\n X_tr, X_val, y_tr, y_val = train_test_split(train_final, y, test_size=\n 0.2, random_state=random.randint(1, 1000))\n watchlist = [(xgb.DMatrix(X_tr, y_tr), 'train'), (xgb.DMatrix(X_val,\n y_val), 'validation')]\n model = xgb.train(params=param, dtrain=xgb.DMatrix(X_tr, y_tr),\n num_boost_round=iteration, evals=watchlist, verbose_eval=50,\n early_stopping_rounds=100)\n 
score = metrics.log_loss(y_val, model.predict(xgb.DMatrix(X_val)),\n labels=range(9))\n predicted_class = model.predict(dtest)\n print(score)\n return predicted_class\n\n\ndef Bagging(N, params, best_iter):\n for i in range(N):\n param = params\n p = doTest(param, best_iter)\n if i == 0:\n preds = p.copy()\n else:\n preds = preds + p\n predictions = preds / N\n predictions = pd.DataFrame(predictions)\n return predictions\n\n\nparams = {'eta': 0.02, 'max_depth': 6, 'objective': 'multi:softprob',\n 'eval_metric': 'mlogloss', 'silent': False, 'seed': random.randint(1, \n 100), 'num_class': 9}\ncross_vali = docv(params, 10000, 5)\npredicted_class = Bagging(5, params, cross_vali)\nsub_file = pd.DataFrame()\nsub_file['ID'] = test_var['ID'].values\nSub_File = pd.concat([sub_file, predicted_class], axis=1)\nSub_File.columns = ['ID', 'Class1', 'Class2', 'Class3', 'Class4', 'Class5',\n 'Class6', 'Class7', 'Class8', 'Class9']\nSub_File.to_csv('submission33.csv', index=False)\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport matplotlib as plt\nimport re\nimport nltk\nnltk.download('stopwords')\nfrom nltk.corpus import stopwords\nfrom nltk.stem.porter import PorterStemmer\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.decomposition import TruncatedSVD\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.preprocessing import StandardScaler\nimport xgboost as xgb\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import metrics\nimport gc\nimport random\ntrain_df = pd.read_csv('training_text', sep='\\\\|\\\\|', engine='python',\n names=['ID', 'Text'], header=None)\ntrain_df = train_df.iloc[1:, :]\ntrain_df.index = range(len(train_df))\ntrain_var = pd.read_csv('training_variants')\ntest_df = pd.read_csv('test_text', sep='\\\\|\\\\|', engine='python', names=[\n 'ID', 'Text'], header=None)\ntest_var = pd.read_csv('test_variants')\n\n\ndef text_cleaning(text_df):\n corpus = []\n for i in range(len(text_df)):\n text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i])\n text = text.lower()\n text = text.split()\n ps = PorterStemmer()\n text = [ps.stem(word) for word in text if not word in set(stopwords\n .words('english'))]\n text = ' '.join(text)\n corpus.append(text)\n return corpus\n\n\ncorpus_train = text_cleaning(train_df)\ncorpus_test = text_cleaning(test_df)\ntfidf = TfidfVectorizer()\ntfidf_tr = tfidf.fit_transform(corpus_train).toarray()\ntfidf_test = tfidf.transform(corpus_test).toarray()\nsvd = TruncatedSVD(n_components=1000)\nsvd_tr = svd.fit_transform(tfidf_tr)\nsvd_train = svd.transform(tfidf_test)\nsvd_tr = pd.DataFrame(svd_tr)\nsvd_test = pd.DataFrame(svd_train)\ny = train_var['Class'].values\ny = y - 1\ndf = pd.concat([train_var, test_var], axis=0)\ndf = df.drop(['ID'], axis=1)\ndf['ID'] = range(df.shape[0])\ndf.index = range(df.shape[0])\ndf_text = pd.concat([train_df, test_df], axis=0)\ndf_text = df_text.drop('ID', axis=1)\ndf_text['ID'] = 
range(df_text.shape[0])\ndf_text.index = range(df_text.shape[0])\ndf_all = pd.merge(df, df_text, how='left', on='ID')\ncolumn_list = train_var.columns.values.tolist()\nmissing_values = pd.DataFrame()\nmissing_values['Columns'] = column_list\nfor i in column_list:\n missing_values['No. of missing values'] = train_var[i].isnull(\n ).values.ravel().sum()\ncolumn_list = df.columns\ncategorical_columns = []\nfor i in column_list:\n if df[i].dtype == 'O':\n categorical_columns.append(i)\nfor i in categorical_columns:\n le = LabelEncoder()\n df[i + '_le'] = le.fit_transform(df[i])\n df[i + '_length'] = df[i].map(lambda x: len(str(x)))\ndf_all['Gene_Share'] = df_all.apply(lambda r: sum([(1) for w in r['Gene'].\n split(' ') if w in r['Text'].split(' ')]), axis=1)\ndf_all['Variation_Share'] = df_all.apply(lambda r: sum([(1) for w in r[\n 'Variation'].split(' ') if w in r['Text'].split(' ')]), axis=1)\ntrain = df_all.iloc[:len(train_var), :]\ntest = df_all.iloc[len(train_var):, :]\ntest.index = range(len(test_var))\ntrain = train.drop(['Gene', 'Variation', 'ID', 'Text', 'Class'], axis=1)\ntest = test.drop(['Gene', 'Variation', 'Text', 'ID', 'Class'], axis=1)\ntrain_final = pd.concat([train, svd_tr], axis=1)\ntest_final = pd.concat([test, svd_test], axis=1)\nsc = StandardScaler()\ntrain_final = sc.fit_transform(train_final)\ntest_final = sc.transform(test_final)\ntrain_final = pd.DataFrame(train_final)\ntest_final = pd.DataFrame(test_final)\ndtrain = xgb.DMatrix(train_final, y)\ndtest = xgb.DMatrix(test_final)\n\n\ndef docv(param, iterations, nfold):\n model_CV = xgb.cv(params=param, num_boost_round=iterations, nfold=nfold,\n dtrain=dtrain, seed=random.randint(1, 10000), early_stopping_rounds\n =100, maximize=False, verbose_eval=50)\n gc.collect()\n best = min(model_CV['test-mlogloss-mean'])\n best_iter = model_CV.shape[0]\n print(best)\n return best_iter\n\n\ndef doTest(param, iteration):\n X_tr, X_val, y_tr, y_val = train_test_split(train_final, y, test_size=\n 0.2, 
random_state=random.randint(1, 1000))\n watchlist = [(xgb.DMatrix(X_tr, y_tr), 'train'), (xgb.DMatrix(X_val,\n y_val), 'validation')]\n model = xgb.train(params=param, dtrain=xgb.DMatrix(X_tr, y_tr),\n num_boost_round=iteration, evals=watchlist, verbose_eval=50,\n early_stopping_rounds=100)\n score = metrics.log_loss(y_val, model.predict(xgb.DMatrix(X_val)),\n labels=range(9))\n predicted_class = model.predict(dtest)\n print(score)\n return predicted_class\n\n\ndef Bagging(N, params, best_iter):\n for i in range(N):\n param = params\n p = doTest(param, best_iter)\n if i == 0:\n preds = p.copy()\n else:\n preds = preds + p\n predictions = preds / N\n predictions = pd.DataFrame(predictions)\n return predictions\n\n\nparams = {'eta': 0.02, 'max_depth': 6, 'objective': 'multi:softprob',\n 'eval_metric': 'mlogloss', 'silent': False, 'seed': random.randint(1, \n 100), 'num_class': 9}\ncross_vali = docv(params, 10000, 5)\npredicted_class = Bagging(5, params, cross_vali)\nsub_file = pd.DataFrame()\nsub_file['ID'] = test_var['ID'].values\nSub_File = pd.concat([sub_file, predicted_class], axis=1)\nSub_File.columns = ['ID', 'Class1', 'Class2', 'Class3', 'Class4', 'Class5',\n 'Class6', 'Class7', 'Class8', 'Class9']\nSub_File.to_csv('submission33.csv', index=False)\n",
"step-5": "####################################################################################\n#\n# Kaggle Competition: https://www.kaggle.com/c/msk-redefining-cancer-treatment\n# Sponsor : Memorial Sloan Kettering Cancer Center (MSKCC)\n# Author: Amrut Shintre\n#\n####################################################################################\n\n#####################\n# Importing Libraries\n#####################\nimport numpy as np\nimport pandas as pd\nimport matplotlib as plt\nimport re\nimport nltk\nnltk.download('stopwords')\nfrom nltk.corpus import stopwords\nfrom nltk.stem.porter import PorterStemmer\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.decomposition import TruncatedSVD\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.preprocessing import StandardScaler\nimport xgboost as xgb\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import metrics\nimport gc\nimport random\n\n####################\n# Importing datasets\n####################\n\n# Training Dataset\ntrain_df = pd.read_csv('training_text', sep = '\\|\\|', engine = 'python', names = ['ID', 'Text'],\n header = None)\ntrain_df = train_df.iloc[1:,:]\ntrain_df.index = range(len(train_df))\ntrain_var = pd.read_csv('training_variants')\n\n# Testing Dataset\ntest_df = pd.read_csv('test_text', sep = '\\|\\|', engine = 'python', names = ['ID', 'Text'],\n header = None)\ntest_var = pd.read_csv('test_variants')\n\n# --------------------------------------------TEXT ---------------------------------------------\n\n##############\n# TextCleaning\n##############\n\ndef text_cleaning(text_df):\n corpus = []\n for i in range(len(text_df)):\n text = re.sub('[^a-zA-Z]', ' ', text_df['Text'][i]) # Removing punctuation marks,\n #numbers, etc and returning only letters\n text = text.lower() # Converting all the uppercase letters to lowercase\n text = text.split() # Splitting a sentence into a list of strings containing a single word.\n ps = 
PorterStemmer() # Stemming e.g. lovely -> love\n text = [ps.stem(word) for word in text if not word in set(stopwords.words('english'))]\n text = ' '.join(text) # Joining the cleaned words\n corpus.append(text) # Appending it to the new list.\n return (corpus)\n\n# Training Text Data\ncorpus_train = text_cleaning(train_df)\n\n# Testing Text Data\ncorpus_test = text_cleaning(test_df)\n\n#############################################\n# Term Frequency - Inverse Document Frequency\n#############################################\n\ntfidf = TfidfVectorizer()\ntfidf_tr = tfidf.fit_transform(corpus_train).toarray()\ntfidf_test = tfidf.transform(corpus_test).toarray()\n\n##############################\n# Singular Value Decomposition\n##############################\n\nsvd = TruncatedSVD(n_components = 1000) # considering 98% variance in the Data\nsvd_tr = svd.fit_transform(tfidf_tr) # Fitting on cleaned training text data\nsvd_train = svd.transform(tfidf_test) # Transforming on cleaned testing text data\nsvd_tr = pd.DataFrame(svd_tr)\nsvd_test = pd.DataFrame(svd_train)\n#explainedvar = svd.explained_variance_ratio_\n#exp_var = explainedvar.cumsum()\n\n# -------------------------------------------- VARIANTS ---------------------------------------------\n\n####################\n# Dependent Variable\n####################\n\ny = train_var['Class'].values\ny = y-1\n\n#################\n# Merging Dataset\n#################\n\n# Merging the dataset for data preparation and feature engineering\n\ndf = pd.concat([train_var, test_var], axis = 0)\ndf = df.drop(['ID'], axis = 1)\ndf['ID'] = range(df.shape[0])\ndf.index = range(df.shape[0])\ndf_text = pd.concat([train_df, test_df], axis = 0)\ndf_text = df_text.drop('ID', axis = 1)\ndf_text['ID'] = range(df_text.shape[0])\ndf_text.index = range(df_text.shape[0])\ndf_all = pd.merge(df, df_text, how = 'left', on = 'ID')\n\n\n################\n# Missing Values\n################\n\n# Checking for missing values\n\ncolumn_list = 
train_var.columns.values.tolist()\nmissing_values = pd.DataFrame()\nmissing_values['Columns'] = column_list\nfor i in column_list:\n missing_values['No. of missing values'] = train_var[i].isnull().values.ravel().sum()\n\n# There are no missing values.\n\n#######################\n# Categorical Variables\n#######################\n\n# Extracting the columns having categorical Variables.\n\ncolumn_list = df.columns\ncategorical_columns = []\nfor i in column_list:\n if df[i].dtype == 'O':\n categorical_columns.append(i)\n\n# Encoding the columns with categorical variables\n\n# Label Encoding\n\nfor i in categorical_columns:\n le = LabelEncoder()\n df[i + '_le'] = le.fit_transform(df[i])\n df[i + '_length'] = df[i].map(lambda x: len(str(x)))\n\n# Feature Engineering\n\ndf_all['Gene_Share'] = df_all.apply(lambda r: sum([1 for w in r['Gene'].split(' ') if w in r['Text'].split(' ')]), axis=1)\ndf_all['Variation_Share'] = df_all.apply(lambda r: sum([1 for w in r['Variation'].split(' ') if w in r['Text'].split(' ')]), axis=1)\n \n###################\n# Splitting Dataset\n################### \n\ntrain = df_all.iloc[:len(train_var), :]\ntest = df_all.iloc[len(train_var):,:]\ntest.index = range(len(test_var))\ntrain = train.drop(['Gene', 'Variation', 'ID', 'Text', 'Class'], axis = 1)\ntest = test.drop(['Gene', 'Variation', 'Text', 'ID', 'Class'], axis = 1)\n\ntrain_final = pd.concat([train, svd_tr], axis = 1)\ntest_final = pd.concat([test, svd_test], axis = 1)\n\n#################\n# Standardization\n#################\n\nsc = StandardScaler()\ntrain_final = sc.fit_transform(train_final)\ntest_final = sc.transform(test_final)\ntrain_final = pd.DataFrame(train_final)\ntest_final = pd.DataFrame(test_final) \n\n# -------------------------------------------- MODEL ---------------------------------------------\n\n##################\n# XGBoost Matrix \n##################\n\ndtrain = xgb.DMatrix(train_final, y)\ndtest = xgb.DMatrix(test_final)\n\n##################\n# Cross-Validation 
\n##################\n\ndef docv(param, iterations, nfold):\n model_CV = xgb.cv(\n params = param,\n num_boost_round = iterations,\n nfold = nfold,\n dtrain = dtrain,\n seed = random.randint(1, 10000),\n early_stopping_rounds = 100,\n maximize = False,\n verbose_eval = 50)\n gc.collect()\n best = min(model_CV['test-mlogloss-mean'])\n best_iter = model_CV.shape[0]\n print (best)\n return (best_iter)\n\n#########\n# Testing \n#########\n\ndef doTest(param, iteration):\n X_tr, X_val, y_tr, y_val = train_test_split(train_final, y, test_size = 0.2, random_state = random.randint(1,1000))\n watchlist = [(xgb.DMatrix(X_tr, y_tr), 'train'), (xgb.DMatrix(X_val, y_val), 'validation')]\n model = xgb.train(\n params = param,\n dtrain = xgb.DMatrix(X_tr, y_tr),\n num_boost_round = iteration,\n evals = watchlist,\n verbose_eval = 50,\n early_stopping_rounds = 100)\n score = metrics.log_loss(y_val, model.predict(xgb.DMatrix(X_val)), labels = range(9))\n predicted_class = model.predict(dtest)\n print (score)\n return (predicted_class)\n\n#########\n# Bagging\n#########\n\ndef Bagging(N, params, best_iter):\n for i in range(N):\n param = params\n p = doTest(param, best_iter)\n if i == 0:\n preds = p.copy()\n else:\n preds = preds + p\n predictions = preds/N\n predictions = pd.DataFrame(predictions)\n return (predictions)\n\n###################\n# Running the Model\n###################\n\nparams = {\n 'eta': 0.02,\n 'max_depth': 6,\n 'objective': 'multi:softprob',\n 'eval_metric': 'mlogloss',\n 'silent': False,\n 'seed': random.randint(1,100),\n 'num_class': 9\n }\n\ncross_vali = docv(params, 10000, 5)\n\npredicted_class = Bagging(5, params, cross_vali)\n\n\n# -------------------------------------------- SUBMISSION ---------------------------------------------\n\nsub_file = pd.DataFrame()\nsub_file['ID'] = test_var['ID'].values\nSub_File = pd.concat([sub_file, predicted_class], axis = 1)\nSub_File.columns = ['ID', 'Class1', 'Class2', 'Class3', 'Class4', 'Class5', 'Class6', 'Class7', 
\n 'Class8', 'Class9']\nSub_File.to_csv(\"submission33.csv\", index = False)\n\n# -------------------------------------------- Project Layout ---------------------------------------------\n\n# 1) Text Cleaning\n# 2) TFIDF Vectorizer and Singular Value Decomposition\n# 3) Feature Engineering\n# 4) Building a Model and trying out different models\n# 5) Parameter Tuning\n# 6) Bagged Boosting ",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from .ast import *
# noinspection PyPep8Naming
def addToClass(cls):
    """Return a decorator that grafts the decorated function onto *cls*.

    The function is installed on ``cls`` under its own ``__name__`` and
    handed back unchanged, which lets many same-named ``printTree``
    definitions each target a different AST node class.
    """
    def bind(fn):
        setattr(cls, fn.__name__, fn)
        return fn
    return bind
def print_intended(to_print, intend):
    """Print *to_print* preceded by *intend* copies of ``"| "`` (tree depth marker)."""
    prefix = "| " * intend
    print(prefix + to_print)
# noinspection PyPep8Naming,PyUnresolvedReferences
class TreePrinter:
    """Namespace whose methods are grafted onto the AST node classes.

    The class itself is never instantiated.  Each ``printTree`` below is
    attached to a different node class from ``.ast`` by the
    ``addToClass`` decorator at class-body execution time; defining them
    together merely keeps them grouped.  Every method prints its node at
    depth *indent* via ``print_intended`` and recurses into children one
    level deeper.
    """
    # General
    @addToClass(Node)
    def printTree(self, indent=0):
        # Fallback on the Node base class: reaching it means a concrete
        # node type never received its own printTree.
        raise Exception("printTree not defined in class " + self.__class__.__name__)
    @addToClass(Instruction)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
    @addToClass(Expression)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
    # Instructions
    @addToClass(Block)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        if self.instructions is not None:
            self.instructions.printTree(indent + 1)
    @addToClass(Assignment)
    def printTree(self, indent=0):
        # The node label is the operator itself (e.g. '='), with the
        # target and value printed one level below it.
        print_intended(self.operator, indent)
        self.left.printTree(indent + 1)
        self.right.printTree(indent + 1)
    @addToClass(For)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.variable.printTree(indent + 1)
        self.range.printTree(indent + 1)
        self.instruction.printTree(indent + 1)
    @addToClass(While)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.condition.printTree(indent + 1)
        self.instruction.printTree(indent + 1)
    @addToClass(If)
    def printTree(self, indent=0):
        # Layout: condition, a 'then' marker plus branch, and an
        # optional 'else' marker plus branch.
        print_intended(self.type, indent)
        self.condition.printTree(indent + 1)
        print_intended('then', indent)
        self.if_block.printTree(indent + 1)
        if self.else_block is not None:
            print_intended('else', indent)
            self.else_block.printTree(indent + 1)
    @addToClass(Print)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.args.printTree(indent + 1)
    @addToClass(Return)
    def printTree(self, indent=0):
        # A bare `return` has no args, so the child subtree is optional.
        print_intended(self.type, indent)
        if self.args is not None:
            self.args.printTree(indent + 1)
    @addToClass(ArrayElement)
    def printTree(self, indent=0):
        # Indexing expression: first the array, then the index list.
        print_intended("get_element", indent)
        self.array.printTree(indent + 1)
        self.ids.printTree(indent + 1)
    # Expressions
    @addToClass(Value)
    def printTree(self, indent=0):
        print_intended(str(self.value), indent)
    @addToClass(Array)
    def printTree(self, indent=0):
        # An array literal with no element list prints a distinct
        # 'empty_array' leaf instead of an 'array' node with children.
        if self.list is not None:
            print_intended('array', indent)
            self.list.printTree(indent + 1)
        else:
            print_intended('empty_array', indent)
    @addToClass(BinaryExpression)
    def printTree(self, indent=0):
        print_intended(self.operator, indent)
        self.left.printTree(indent + 1)
        self.right.printTree(indent + 1)
    @addToClass(MatrixFunction)
    def printTree(self, indent=0):
        # self.function is the matrix-builder's name — presumably
        # zeros/ones/eye-style; confirm against the .ast definitions.
        print_intended(self.function, indent)
        self.parameter.printTree(indent + 1)
    @addToClass(UnaryMinus)
    def printTree(self, indent=0):
        print_intended('-', indent)
        self.value.printTree(indent + 1)
    @addToClass(Transpose)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.value.printTree(indent + 1)
    # Other
    @addToClass(Program)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.instructions_opt.printTree(indent + 1)
    @addToClass(Identifier)
    def printTree(self, indent=0):
        print_intended(self.name, indent)
    @addToClass(Range)
    def printTree(self, indent=0):
        print_intended(self.type, indent)
        self.start_value.printTree(indent + 1)
        self.end_value.printTree(indent + 1)
    @addToClass(List)
    def printTree(self, indent=0):
        # A bare sequence of siblings: children print at the *same*
        # depth — the list adds no wrapper label of its own.
        for element in self.elements:
            element.printTree(indent)
|
normal
|
{
"blob_id": "1084478226777b9259274e053984ac34d461198d",
"index": 42,
"step-1": "<mask token>\n\n\nclass TreePrinter:\n\n @addToClass(Node)\n def printTree(self, indent=0):\n raise Exception('printTree not defined in class ' + self.__class__.\n __name__)\n\n @addToClass(Instruction)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n <mask token>\n\n @addToClass(Block)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.instructions is not None:\n self.instructions.printTree(indent + 1)\n\n @addToClass(Assignment)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(For)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.variable.printTree(indent + 1)\n self.range.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(While)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n <mask token>\n\n @addToClass(Print)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.args.printTree(indent + 1)\n\n @addToClass(Return)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.args is not None:\n self.args.printTree(indent + 1)\n\n @addToClass(ArrayElement)\n def printTree(self, indent=0):\n print_intended('get_element', indent)\n self.array.printTree(indent + 1)\n self.ids.printTree(indent + 1)\n\n @addToClass(Value)\n def printTree(self, indent=0):\n print_intended(str(self.value), indent)\n\n @addToClass(Array)\n def printTree(self, indent=0):\n if self.list is not None:\n print_intended('array', indent)\n self.list.printTree(indent + 1)\n else:\n print_intended('empty_array', indent)\n <mask token>\n\n @addToClass(MatrixFunction)\n def printTree(self, indent=0):\n print_intended(self.function, indent)\n self.parameter.printTree(indent + 1)\n\n @addToClass(UnaryMinus)\n def printTree(self, 
indent=0):\n print_intended('-', indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Transpose)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Program)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.instructions_opt.printTree(indent + 1)\n <mask token>\n\n @addToClass(Range)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.start_value.printTree(indent + 1)\n self.end_value.printTree(indent + 1)\n\n @addToClass(List)\n def printTree(self, indent=0):\n for element in self.elements:\n element.printTree(indent)\n",
"step-2": "<mask token>\n\n\nclass TreePrinter:\n\n @addToClass(Node)\n def printTree(self, indent=0):\n raise Exception('printTree not defined in class ' + self.__class__.\n __name__)\n\n @addToClass(Instruction)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Expression)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Block)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.instructions is not None:\n self.instructions.printTree(indent + 1)\n\n @addToClass(Assignment)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(For)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.variable.printTree(indent + 1)\n self.range.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(While)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(If)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n print_intended('then', indent)\n self.if_block.printTree(indent + 1)\n if self.else_block is not None:\n print_intended('else', indent)\n self.else_block.printTree(indent + 1)\n\n @addToClass(Print)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.args.printTree(indent + 1)\n\n @addToClass(Return)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.args is not None:\n self.args.printTree(indent + 1)\n\n @addToClass(ArrayElement)\n def printTree(self, indent=0):\n print_intended('get_element', indent)\n self.array.printTree(indent + 1)\n self.ids.printTree(indent + 1)\n\n @addToClass(Value)\n def printTree(self, indent=0):\n print_intended(str(self.value), indent)\n\n @addToClass(Array)\n def printTree(self, 
indent=0):\n if self.list is not None:\n print_intended('array', indent)\n self.list.printTree(indent + 1)\n else:\n print_intended('empty_array', indent)\n <mask token>\n\n @addToClass(MatrixFunction)\n def printTree(self, indent=0):\n print_intended(self.function, indent)\n self.parameter.printTree(indent + 1)\n\n @addToClass(UnaryMinus)\n def printTree(self, indent=0):\n print_intended('-', indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Transpose)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Program)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.instructions_opt.printTree(indent + 1)\n\n @addToClass(Identifier)\n def printTree(self, indent=0):\n print_intended(self.name, indent)\n\n @addToClass(Range)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.start_value.printTree(indent + 1)\n self.end_value.printTree(indent + 1)\n\n @addToClass(List)\n def printTree(self, indent=0):\n for element in self.elements:\n element.printTree(indent)\n",
"step-3": "<mask token>\n\n\nclass TreePrinter:\n\n @addToClass(Node)\n def printTree(self, indent=0):\n raise Exception('printTree not defined in class ' + self.__class__.\n __name__)\n\n @addToClass(Instruction)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Expression)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Block)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.instructions is not None:\n self.instructions.printTree(indent + 1)\n\n @addToClass(Assignment)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(For)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.variable.printTree(indent + 1)\n self.range.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(While)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(If)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n print_intended('then', indent)\n self.if_block.printTree(indent + 1)\n if self.else_block is not None:\n print_intended('else', indent)\n self.else_block.printTree(indent + 1)\n\n @addToClass(Print)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.args.printTree(indent + 1)\n\n @addToClass(Return)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.args is not None:\n self.args.printTree(indent + 1)\n\n @addToClass(ArrayElement)\n def printTree(self, indent=0):\n print_intended('get_element', indent)\n self.array.printTree(indent + 1)\n self.ids.printTree(indent + 1)\n\n @addToClass(Value)\n def printTree(self, indent=0):\n print_intended(str(self.value), indent)\n\n @addToClass(Array)\n def printTree(self, 
indent=0):\n if self.list is not None:\n print_intended('array', indent)\n self.list.printTree(indent + 1)\n else:\n print_intended('empty_array', indent)\n\n @addToClass(BinaryExpression)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(MatrixFunction)\n def printTree(self, indent=0):\n print_intended(self.function, indent)\n self.parameter.printTree(indent + 1)\n\n @addToClass(UnaryMinus)\n def printTree(self, indent=0):\n print_intended('-', indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Transpose)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Program)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.instructions_opt.printTree(indent + 1)\n\n @addToClass(Identifier)\n def printTree(self, indent=0):\n print_intended(self.name, indent)\n\n @addToClass(Range)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.start_value.printTree(indent + 1)\n self.end_value.printTree(indent + 1)\n\n @addToClass(List)\n def printTree(self, indent=0):\n for element in self.elements:\n element.printTree(indent)\n",
"step-4": "<mask token>\n\n\ndef addToClass(cls):\n\n def decorator(func):\n setattr(cls, func.__name__, func)\n return func\n return decorator\n\n\ndef print_intended(to_print, intend):\n print(intend * '| ' + to_print)\n\n\nclass TreePrinter:\n\n @addToClass(Node)\n def printTree(self, indent=0):\n raise Exception('printTree not defined in class ' + self.__class__.\n __name__)\n\n @addToClass(Instruction)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Expression)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Block)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.instructions is not None:\n self.instructions.printTree(indent + 1)\n\n @addToClass(Assignment)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(For)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.variable.printTree(indent + 1)\n self.range.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(While)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(If)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n print_intended('then', indent)\n self.if_block.printTree(indent + 1)\n if self.else_block is not None:\n print_intended('else', indent)\n self.else_block.printTree(indent + 1)\n\n @addToClass(Print)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.args.printTree(indent + 1)\n\n @addToClass(Return)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.args is not None:\n self.args.printTree(indent + 1)\n\n @addToClass(ArrayElement)\n def printTree(self, indent=0):\n print_intended('get_element', indent)\n 
self.array.printTree(indent + 1)\n self.ids.printTree(indent + 1)\n\n @addToClass(Value)\n def printTree(self, indent=0):\n print_intended(str(self.value), indent)\n\n @addToClass(Array)\n def printTree(self, indent=0):\n if self.list is not None:\n print_intended('array', indent)\n self.list.printTree(indent + 1)\n else:\n print_intended('empty_array', indent)\n\n @addToClass(BinaryExpression)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(MatrixFunction)\n def printTree(self, indent=0):\n print_intended(self.function, indent)\n self.parameter.printTree(indent + 1)\n\n @addToClass(UnaryMinus)\n def printTree(self, indent=0):\n print_intended('-', indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Transpose)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Program)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.instructions_opt.printTree(indent + 1)\n\n @addToClass(Identifier)\n def printTree(self, indent=0):\n print_intended(self.name, indent)\n\n @addToClass(Range)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.start_value.printTree(indent + 1)\n self.end_value.printTree(indent + 1)\n\n @addToClass(List)\n def printTree(self, indent=0):\n for element in self.elements:\n element.printTree(indent)\n",
"step-5": "from .ast import *\n\n\n# noinspection PyPep8Naming\ndef addToClass(cls):\n def decorator(func):\n setattr(cls, func.__name__, func)\n return func\n\n return decorator\n\n\ndef print_intended(to_print, intend):\n print(intend * \"| \" + to_print)\n\n\n# noinspection PyPep8Naming,PyUnresolvedReferences\nclass TreePrinter:\n\n # General\n @addToClass(Node)\n def printTree(self, indent=0):\n raise Exception(\"printTree not defined in class \" + self.__class__.__name__)\n\n @addToClass(Instruction)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n @addToClass(Expression)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n\n # Instructions\n @addToClass(Block)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.instructions is not None:\n self.instructions.printTree(indent + 1)\n\n @addToClass(Assignment)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(For)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.variable.printTree(indent + 1)\n self.range.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(While)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n self.instruction.printTree(indent + 1)\n\n @addToClass(If)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.condition.printTree(indent + 1)\n print_intended('then', indent)\n self.if_block.printTree(indent + 1)\n if self.else_block is not None:\n print_intended('else', indent)\n self.else_block.printTree(indent + 1)\n\n @addToClass(Print)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.args.printTree(indent + 1)\n\n @addToClass(Return)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n if self.args is not None:\n self.args.printTree(indent + 
1)\n\n @addToClass(ArrayElement)\n def printTree(self, indent=0):\n print_intended(\"get_element\", indent)\n self.array.printTree(indent + 1)\n self.ids.printTree(indent + 1)\n\n # Expressions\n @addToClass(Value)\n def printTree(self, indent=0):\n print_intended(str(self.value), indent)\n\n @addToClass(Array)\n def printTree(self, indent=0):\n if self.list is not None:\n print_intended('array', indent)\n self.list.printTree(indent + 1)\n else:\n print_intended('empty_array', indent)\n\n @addToClass(BinaryExpression)\n def printTree(self, indent=0):\n print_intended(self.operator, indent)\n self.left.printTree(indent + 1)\n self.right.printTree(indent + 1)\n\n @addToClass(MatrixFunction)\n def printTree(self, indent=0):\n print_intended(self.function, indent)\n self.parameter.printTree(indent + 1)\n\n @addToClass(UnaryMinus)\n def printTree(self, indent=0):\n print_intended('-', indent)\n self.value.printTree(indent + 1)\n\n @addToClass(Transpose)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.value.printTree(indent + 1)\n\n # Other\n @addToClass(Program)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.instructions_opt.printTree(indent + 1)\n\n @addToClass(Identifier)\n def printTree(self, indent=0):\n print_intended(self.name, indent)\n\n @addToClass(Range)\n def printTree(self, indent=0):\n print_intended(self.type, indent)\n self.start_value.printTree(indent + 1)\n self.end_value.printTree(indent + 1)\n\n @addToClass(List)\n def printTree(self, indent=0):\n for element in self.elements:\n element.printTree(indent)\n",
"step-ids": [
18,
21,
22,
24,
26
]
}
|
[
18,
21,
22,
24,
26
] |
# Days of the week, Monday-first, matching the expected user input.
weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
    'Saturday', 'Sunday']
# Input format: "<Day> <n>", e.g. "Monday 4".
i = input('Enter a day of the week and number of days: ').split()
e = int(i[-1])
starting_point = weekdays.index(i[0])
# Wrap around the week with modulo.  The original
#   a = e + starting_point - len(weekdays)
# only worked for offsets below two weeks: for e.g. "Monday 20" it
# produced index 13 and raised IndexError.  Modulo handles any
# non-negative number of days.
a = (starting_point + e) % len(weekdays)
print(weekdays[a])
|
normal
|
{
"blob_id": "5f7d05c642339ce0ab02a65ca41f9ee89c2faf57",
"index": 4240,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(weekdays[a])\n",
"step-3": "weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',\n 'Saturday', 'Sunday']\ni = input('Enter a day of the week and number of days: ').split()\ne = int(i[-1])\nstarting_point = weekdays.index(i[0])\na = e + starting_point - len(weekdays)\nprint(weekdays[a])\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pygame
from pygame import Rect, Color
from pymunk import Body, Poly
from config import WIDTH, HEIGHT
class Ground:
    """Static floor strip spanning the bottom of the window.

    Owns both the pygame ``Rect`` used for drawing and the static pymunk
    body/shape pair used for collisions; the pair is registered with the
    given *space* on construction.
    """

    def __init__(self, space):
        """Build geometry, drawing state and physics, then add to *space*."""
        # Geometry: full window width minus a 10 px margin on each side,
        # pinned to the bottom edge of the window.
        margin = 10
        thickness = 25
        self.w = WIDTH - 2 * margin
        self.h = thickness
        self.x = margin
        self.y = HEIGHT - thickness
        # Drawing state.
        self.rect = Rect(self.x, self.y, self.w, self.h)
        self.color = Color(100, 6, 107)
        # Physics: a static box, horizontally centred on the strip.
        # NOTE(review): the body's y is the rect's *top*, not its centre
        # — confirm this matches how the rest of the physics is anchored.
        body = Body(body_type=Body.STATIC)
        body.position = (self.x + self.w / 2, self.y)
        shape = Poly.create_box(body, (self.w, self.h))
        shape.elasticity = 0
        shape.mass = 1
        shape.friction = 0
        self.rigidbody = body
        self.hitbox = shape
        space.add(body, shape)

    def update(self, dt):
        """The ground never moves; nothing to simulate per frame."""
        return

    def draw(self, window):
        """Render the ground rectangle onto *window*."""
        pygame.draw.rect(window, self.color, self.rect)
        return
|
normal
|
{
"blob_id": "32fc0db68c32c2e644f9c1c2318fbeff41a0543d",
"index": 5703,
"step-1": "<mask token>\n\n\nclass Ground:\n <mask token>\n <mask token>\n\n def draw(self, window):\n pygame.draw.rect(window, self.color, self.rect)\n return\n",
"step-2": "<mask token>\n\n\nclass Ground:\n\n def __init__(self, space):\n self.w = WIDTH - 20\n self.h = 25\n self.x = 10\n self.y = HEIGHT - self.h\n self.rect = Rect(self.x, self.y, self.w, self.h)\n self.color = Color(100, 6, 107)\n self.rigidbody = Body(body_type=Body.STATIC)\n self.rigidbody.position = self.x + self.w / 2, self.y\n self.hitbox = Poly.create_box(self.rigidbody, (self.w, self.h))\n self.hitbox.elasticity = 0\n self.hitbox.mass = 1\n self.hitbox.friction = 0\n space.add(self.rigidbody, self.hitbox)\n <mask token>\n\n def draw(self, window):\n pygame.draw.rect(window, self.color, self.rect)\n return\n",
"step-3": "<mask token>\n\n\nclass Ground:\n\n def __init__(self, space):\n self.w = WIDTH - 20\n self.h = 25\n self.x = 10\n self.y = HEIGHT - self.h\n self.rect = Rect(self.x, self.y, self.w, self.h)\n self.color = Color(100, 6, 107)\n self.rigidbody = Body(body_type=Body.STATIC)\n self.rigidbody.position = self.x + self.w / 2, self.y\n self.hitbox = Poly.create_box(self.rigidbody, (self.w, self.h))\n self.hitbox.elasticity = 0\n self.hitbox.mass = 1\n self.hitbox.friction = 0\n space.add(self.rigidbody, self.hitbox)\n\n def update(self, dt):\n return\n\n def draw(self, window):\n pygame.draw.rect(window, self.color, self.rect)\n return\n",
"step-4": "import pygame\nfrom pygame import Rect, Color\nfrom pymunk import Body, Poly\nfrom config import WIDTH, HEIGHT\n\n\nclass Ground:\n\n def __init__(self, space):\n self.w = WIDTH - 20\n self.h = 25\n self.x = 10\n self.y = HEIGHT - self.h\n self.rect = Rect(self.x, self.y, self.w, self.h)\n self.color = Color(100, 6, 107)\n self.rigidbody = Body(body_type=Body.STATIC)\n self.rigidbody.position = self.x + self.w / 2, self.y\n self.hitbox = Poly.create_box(self.rigidbody, (self.w, self.h))\n self.hitbox.elasticity = 0\n self.hitbox.mass = 1\n self.hitbox.friction = 0\n space.add(self.rigidbody, self.hitbox)\n\n def update(self, dt):\n return\n\n def draw(self, window):\n pygame.draw.rect(window, self.color, self.rect)\n return\n",
"step-5": "import pygame\nfrom pygame import Rect, Color\n\nfrom pymunk import Body, Poly\n\nfrom config import WIDTH, HEIGHT\n\nclass Ground:\n\n def __init__ (self, space):\n \n # size\n self.w = WIDTH - 20\n self.h = 25\n\n # position\n self.x = 10\n self.y = HEIGHT - self.h\n\n # pygame rectangle\n self.rect = Rect (self.x, self.y, self.w, self.h)\n self.color = Color (100, 6, 107)\n\n # physics\n self.rigidbody = Body (body_type=Body.STATIC)\n self.rigidbody.position = self.x + self.w / 2, self.y\n\n self.hitbox = Poly.create_box (self.rigidbody, (self.w, self.h))\n self.hitbox.elasticity = 0\n self.hitbox.mass = 1\n self.hitbox.friction = 0\n\n space.add (self.rigidbody, self.hitbox)\n\n\n def update (self, dt):\n return\n\n\n\n def draw (self, window):\n \n pygame.draw.rect (window, self.color, self.rect)\n\n return",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# ch14_26.py
fn = 'out14_26.txt'
x = 100
with open(fn, 'w') as file_Obj:
file_Obj.write(x) # 直接輸出數值x產生錯誤
|
normal
|
{
"blob_id": "e4f07355300003943d2fc09f80746a1201de7e37",
"index": 1678,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(fn, 'w') as file_Obj:\n file_Obj.write(x)\n",
"step-3": "fn = 'out14_26.txt'\nx = 100\nwith open(fn, 'w') as file_Obj:\n file_Obj.write(x)\n",
"step-4": "# ch14_26.py\r\nfn = 'out14_26.txt'\r\nx = 100\r\n\r\nwith open(fn, 'w') as file_Obj:\r\n file_Obj.write(x) # 直接輸出數值x產生錯誤\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
def mostCompetitive(self, nums, k):
if len(nums) < k:
print('length mismatch @ init')
return False
ans = list(nums[0:k])
tmp = list(nums[0:k])
i = k
while i < len(nums):
del tmp[0]
tmp.append(nums[i])
if self.isMoreCompetitive(tmp, ans):
ans = list(tmp)
i += 1
shouldContinue = True
idx = 0
foundAnswer, updateAns = self.refined(nums, 0, [], ans)
if foundAnswer == True:
return updateAns
return ans
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
def mostCompetitive(self, nums, k):
if len(nums) < k:
print('length mismatch @ init')
return False
ans = list(nums[0:k])
tmp = list(nums[0:k])
i = k
while i < len(nums):
del tmp[0]
tmp.append(nums[i])
if self.isMoreCompetitive(tmp, ans):
ans = list(tmp)
i += 1
shouldContinue = True
idx = 0
foundAnswer, updateAns = self.refined(nums, 0, [], ans)
if foundAnswer == True:
return updateAns
return ans
if __name__ == '__main__':
s = Solution()
print(s.mostCompetitive([3, 5, 2, 6], 2))
print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))
print(s.mostCompetitive([84, 10, 71, 23, 66, 61, 62, 64, 34, 41, 80, 25,
91, 43, 4, 75, 65, 13, 37, 41, 46, 90, 55, 8, 85, 61, 95, 71], 24))
print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))
[11, 52, 57, 91, 47, 95, 86, 46, 87, 47, 70, 56, 54, 61, 89, 44, 3, 73,
1, 7, 87, 48, 17, 25, 49, 54, 6, 72, 97, 62, 16, 11, 47, 34, 68, 58,
14, 36, 46, 65, 2, 15]
18
<|reserved_special_token_1|>
'''
Given []int, most mostCompetitive subsequence is
a sublist of nums.
So we calculate a score, score is ∀ x ∈ nums, score += x_n - x_n-1
You can remove as many elements are you need to.
What is the mostCompetitive subsequence that you can come up with?
[1,3,5]
[1,3,4] ← More competitive
[1,2,5] ← More competitive
[1,3,4]
This is true b/c we evaluate on the first point where the two differ.
1) We care about creating lists that contain as small of numbers as
possible. The numbers don't need to be in order, they just need to be
small.
We care about all numbers, s.t. we can create a subsequence of k or more
behind them.
Get all possible sub-sequences, with length k or more. If more than k,
iterate through how we can remove the largest elements.
We should also keep track of the smallest number that corresponds to a valid
sequence?
I'm leaning towards a brute force method.
1) Find all sequences of length k. Store the most competitive.
So we should write a function that compares two sequences to see which is more
competitive.
Do one run, with subsequence == k.
Then try to beat that run.
Keep track of what the 'winning' subsequence is, and
iterate through possible values.
So two iterations.
[2,4,3,3,5,4,9,6] | k = 4
( )
ans = 2,4,3,3
[2,4,3,3,5,4,9,6] | k = 4
( )
2,4,3,3
^
idx = 0
Once we have 'beaten' it, out of the remaining
elements, remove the max element until length of
sublist is workable.
[2, 3, 3, ]
1) Write isMoreCompetitive
2) First pass → get most competitive with sliding window len = k
3) Second + pass. If we make a change/'win', re-run again. If re-run and
no change, we are done.
'''
'''
To Review:
def mostCompetitive(self, nums, k):
to_remove = len(nums) - k
stack = []
for x in nums:
while stack and x < stack[-1] and to_remove:
to_remove -= 1
stack.pop()
stack.append(x)
for _ in range(to_remove):
stack.pop()
return stack
'''
class Solution:
# is a more competitive than b?
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return "Error"
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans)-len(a) <= len(nums)-i :
boolA, respA = self.refined(nums, i+1, a+[nums[i]], ans)
boolB, respB = self.refined(nums, i+1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
def mostCompetitive(self, nums, k):
if len(nums) < k :
print("length mismatch @ init")
return False
ans = list(nums[0:k])
tmp = list(nums[0:k])
i = k
# Initial pass
while i < len(nums):
# print(tmp)
del tmp[0]
# print(tmp)
tmp.append(nums[i])
# print(tmp)
if self.isMoreCompetitive(tmp, ans):
ans = list(tmp)
i += 1
# print("ans: {}, tmp:{}".format(ans, tmp))
# print("")
# Pass 2
shouldContinue = True
idx = 0
foundAnswer, updateAns = self.refined(nums, 0, [], ans)
if foundAnswer == True:
return updateAns
return ans
if __name__ == '__main__':
s = Solution()
print(s.mostCompetitive([3,5,2,6], 2))
print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))
print(s.mostCompetitive([84,10,71,23,66,61,62,64,34,41,80,25,91,43,4,75,65,13,37,41,46,90,55,8,85,61,95,71], 24))
print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))
[11,52,57,91,47,95,86,46,87,47,70,56,54,61,89,44,3,73,1,7,87,48,17,25,49,54,6,72,97,62,16,11,47,34,68,58,14,36,46,65,2,15]
18
|
flexible
|
{
"blob_id": "f8b04f374e1c55d4985be793939f0ff9393c29e0",
"index": 2571,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def refined(self, nums, i, a, ans):\n if i >= len(nums):\n if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:\n return False, None\n elif len(a) != len(ans):\n return False, None\n else:\n return True, a\n elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:\n boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)\n boolB, respB = self.refined(nums, i + 1, a, ans)\n if boolA == True and boolB == True:\n if self.isMoreCompetitive(respA, respB):\n return True, respA\n else:\n return True, respB\n elif boolA == True:\n return boolA, respA\n elif boolB == True:\n return True, respB\n else:\n return False, None\n else:\n return False, None\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def isMoreCompetitive(self, a, b):\n if len(a) != len(b):\n print(\"Error, len()'s do not match'\")\n return 'Error'\n for i in range(len(a)):\n if a[i] == b[i]:\n continue\n elif a[i] < b[i]:\n return True\n else:\n return False\n return False\n\n def refined(self, nums, i, a, ans):\n if i >= len(nums):\n if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:\n return False, None\n elif len(a) != len(ans):\n return False, None\n else:\n return True, a\n elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:\n boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)\n boolB, respB = self.refined(nums, i + 1, a, ans)\n if boolA == True and boolB == True:\n if self.isMoreCompetitive(respA, respB):\n return True, respA\n else:\n return True, respB\n elif boolA == True:\n return boolA, respA\n elif boolB == True:\n return True, respB\n else:\n return False, None\n else:\n return False, None\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def isMoreCompetitive(self, a, b):\n if len(a) != len(b):\n print(\"Error, len()'s do not match'\")\n return 'Error'\n for i in range(len(a)):\n if a[i] == b[i]:\n continue\n elif a[i] < b[i]:\n return True\n else:\n return False\n return False\n\n def refined(self, nums, i, a, ans):\n if i >= len(nums):\n if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:\n return False, None\n elif len(a) != len(ans):\n return False, None\n else:\n return True, a\n elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:\n boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)\n boolB, respB = self.refined(nums, i + 1, a, ans)\n if boolA == True and boolB == True:\n if self.isMoreCompetitive(respA, respB):\n return True, respA\n else:\n return True, respB\n elif boolA == True:\n return boolA, respA\n elif boolB == True:\n return True, respB\n else:\n return False, None\n else:\n return False, None\n\n def mostCompetitive(self, nums, k):\n if len(nums) < k:\n print('length mismatch @ init')\n return False\n ans = list(nums[0:k])\n tmp = list(nums[0:k])\n i = k\n while i < len(nums):\n del tmp[0]\n tmp.append(nums[i])\n if self.isMoreCompetitive(tmp, ans):\n ans = list(tmp)\n i += 1\n shouldContinue = True\n idx = 0\n foundAnswer, updateAns = self.refined(nums, 0, [], ans)\n if foundAnswer == True:\n return updateAns\n return ans\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def isMoreCompetitive(self, a, b):\n if len(a) != len(b):\n print(\"Error, len()'s do not match'\")\n return 'Error'\n for i in range(len(a)):\n if a[i] == b[i]:\n continue\n elif a[i] < b[i]:\n return True\n else:\n return False\n return False\n\n def refined(self, nums, i, a, ans):\n if i >= len(nums):\n if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:\n return False, None\n elif len(a) != len(ans):\n return False, None\n else:\n return True, a\n elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:\n boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)\n boolB, respB = self.refined(nums, i + 1, a, ans)\n if boolA == True and boolB == True:\n if self.isMoreCompetitive(respA, respB):\n return True, respA\n else:\n return True, respB\n elif boolA == True:\n return boolA, respA\n elif boolB == True:\n return True, respB\n else:\n return False, None\n else:\n return False, None\n\n def mostCompetitive(self, nums, k):\n if len(nums) < k:\n print('length mismatch @ init')\n return False\n ans = list(nums[0:k])\n tmp = list(nums[0:k])\n i = k\n while i < len(nums):\n del tmp[0]\n tmp.append(nums[i])\n if self.isMoreCompetitive(tmp, ans):\n ans = list(tmp)\n i += 1\n shouldContinue = True\n idx = 0\n foundAnswer, updateAns = self.refined(nums, 0, [], ans)\n if foundAnswer == True:\n return updateAns\n return ans\n\n\nif __name__ == '__main__':\n s = Solution()\n print(s.mostCompetitive([3, 5, 2, 6], 2))\n print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))\n print(s.mostCompetitive([84, 10, 71, 23, 66, 61, 62, 64, 34, 41, 80, 25,\n 91, 43, 4, 75, 65, 13, 37, 41, 46, 90, 55, 8, 85, 61, 95, 71], 24))\n print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))\n [11, 52, 57, 91, 47, 95, 86, 46, 87, 47, 70, 56, 54, 61, 89, 44, 3, 73,\n 1, 7, 87, 48, 17, 25, 49, 54, 6, 72, 97, 62, 16, 11, 47, 34, 68, 58,\n 14, 36, 46, 65, 2, 15]\n18\n",
"step-5": "'''\nGiven []int, most mostCompetitive subsequence is\na sublist of nums.\n\nSo we calculate a score, score is ∀ x ∈ nums, score += x_n - x_n-1\n\nYou can remove as many elements are you need to.\n\nWhat is the mostCompetitive subsequence that you can come up with?\n\n[1,3,5]\n[1,3,4] ← More competitive\n\n[1,2,5] ← More competitive\n[1,3,4]\n\nThis is true b/c we evaluate on the first point where the two differ.\n\n1) We care about creating lists that contain as small of numbers as\npossible. The numbers don't need to be in order, they just need to be\nsmall.\n\nWe care about all numbers, s.t. we can create a subsequence of k or more\nbehind them.\n\nGet all possible sub-sequences, with length k or more. If more than k,\niterate through how we can remove the largest elements.\n\nWe should also keep track of the smallest number that corresponds to a valid\nsequence?\n\nI'm leaning towards a brute force method.\n\n1) Find all sequences of length k. Store the most competitive.\n\n\nSo we should write a function that compares two sequences to see which is more\ncompetitive.\n\nDo one run, with subsequence == k.\nThen try to beat that run.\n\nKeep track of what the 'winning' subsequence is, and\niterate through possible values.\n\nSo two iterations.\n\n[2,4,3,3,5,4,9,6] | k = 4\n ( )\n\nans = 2,4,3,3\n\n\n\n[2,4,3,3,5,4,9,6] | k = 4\n( )\n\n2,4,3,3\n ^\n\nidx = 0\n\nOnce we have 'beaten' it, out of the remaining\nelements, remove the max element until length of\nsublist is workable.\n\n\n[2, 3, 3, ]\n\n1) Write isMoreCompetitive\n2) First pass → get most competitive with sliding window len = k\n3) Second + pass. If we make a change/'win', re-run again. 
If re-run and\n no change, we are done.\n\n'''\n\n'''\nTo Review:\n\ndef mostCompetitive(self, nums, k):\n to_remove = len(nums) - k\n stack = []\n\n for x in nums:\n while stack and x < stack[-1] and to_remove:\n to_remove -= 1\n stack.pop()\n stack.append(x)\n\n for _ in range(to_remove):\n stack.pop()\n\n return stack\n'''\n\n\n\nclass Solution:\n\n # is a more competitive than b?\n def isMoreCompetitive(self, a, b):\n if len(a) != len(b):\n print(\"Error, len()'s do not match'\")\n return \"Error\"\n\n for i in range(len(a)):\n if a[i] == b[i]:\n continue\n elif a[i] < b[i]:\n return True\n else:\n return False\n\n return False\n\n def refined(self, nums, i, a, ans):\n if i >= len(nums):\n if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:\n return False, None\n\n elif len(a) != len(ans):\n return False, None\n\n else:\n return True, a\n\n elif i < len(nums) and len(ans)-len(a) <= len(nums)-i :\n boolA, respA = self.refined(nums, i+1, a+[nums[i]], ans)\n boolB, respB = self.refined(nums, i+1, a, ans)\n\n if boolA == True and boolB == True:\n if self.isMoreCompetitive(respA, respB):\n return True, respA\n else:\n return True, respB\n\n elif boolA == True:\n return boolA, respA\n\n elif boolB == True:\n return True, respB\n\n else:\n return False, None\n\n else:\n return False, None\n\n\n\n def mostCompetitive(self, nums, k):\n\n if len(nums) < k :\n print(\"length mismatch @ init\")\n return False\n\n ans = list(nums[0:k])\n tmp = list(nums[0:k])\n i = k\n\n # Initial pass\n while i < len(nums):\n # print(tmp)\n del tmp[0]\n # print(tmp)\n tmp.append(nums[i])\n # print(tmp)\n if self.isMoreCompetitive(tmp, ans):\n ans = list(tmp)\n i += 1\n # print(\"ans: {}, tmp:{}\".format(ans, tmp))\n # print(\"\")\n\n # Pass 2\n shouldContinue = True\n idx = 0\n\n foundAnswer, updateAns = self.refined(nums, 0, [], ans)\n\n if foundAnswer == True:\n return updateAns\n\n return ans\n\n\n\n\nif __name__ == '__main__':\n s = Solution()\n\n 
print(s.mostCompetitive([3,5,2,6], 2))\n print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))\n print(s.mostCompetitive([84,10,71,23,66,61,62,64,34,41,80,25,91,43,4,75,65,13,37,41,46,90,55,8,85,61,95,71], 24))\n print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))\n\n\n [11,52,57,91,47,95,86,46,87,47,70,56,54,61,89,44,3,73,1,7,87,48,17,25,49,54,6,72,97,62,16,11,47,34,68,58,14,36,46,65,2,15]\n18\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class my_image_csv_dataset(Dataset):
def __init__(self, data_dir, data, transforms_=None, obj=False,
minorities=None, diffs=None, bal_tfms=None):
self.data_dir = data_dir
self.data = data
self.transforms_ = transforms_
self.tfms = None
self.obj = obj
self.minorities = minorities
self.diffs = diffs
self.bal_tfms = bal_tfms
assert transforms_ is not None, print('Please pass some transforms.')
def __len__(self):
return len(self.data)
def __getitem__(self, index):
img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])
img = Image.open(img_path)
img = img.convert('RGB')
img = torchvision.transforms.functional.to_grayscale(img,
num_output_channels=3)
y = self.data.iloc[index, 1]
if self.minorities and self.bal_tfms:
if y in self.minorities:
if hasattr(self.bal_tfms, 'transforms'):
for tr in self.bal_tfms.transforms:
tr.p = self.diffs[y]
l = [self.bal_tfms]
l.extend(self.transforms_)
self.tfms = transforms.Compose(l)
else:
for t in self.bal_tfms:
t.p = self.diffs[y]
self.transforms_[1:1] = self.bal_tfms
self.tfms = transforms.Compose(self.transforms_)
else:
self.tfms = transforms.Compose(self.transforms_)
else:
self.tfms = transforms.Compose(self.transforms_)
x = self.tfms(img)
if self.obj:
s = x.size()[1]
if isinstance(s, tuple):
s = s[0]
row_scale = s / img.size[0]
col_scale = s / img.size[1]
y = rescale_bbox(y, row_scale, col_scale)
y.squeeze_()
y2 = self.data.iloc[index, 2]
y = y, y2
return x, y
class my_image_folder(DatasetFolder):
def __init__(self, root, transform=None, target_transform=None, loader=
default_loader, minorities=None, diffs=None, bal_tfms=None,
tta_tfms=None):
super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,
transform=transform, target_transform=target_transform)
self.imgs = self.samples
self.minorities = minorities
self.diffs = diffs
self.bal_tfms = bal_tfms
self.tta_tfms = tta_tfms
self.tfms = None
def __getitem__(self, index):
path, target = self.samples[index]
sample = self.loader(path)
if self.transform:
if self.minorities and self.bal_tfms:
if target in self.minorities:
if hasattr(self.bal_tfms, 'transforms'):
for tr in self.bal_tfms.transforms:
tr.p = self.diffs[target]
l = [self.bal_tfms]
l.extend(self.transform)
self.tfms = transforms.Compose(l)
else:
for t in self.bal_tfms:
t.p = self.diffs[target]
self.tfms = transforms.Compose(self.bal_tfms + self
.transform)
else:
self.tfms = transforms.Compose(self.transform)
elif self.tta_tfms:
self.tfms = self.tta_tfms
else:
self.tfms = transforms.Compose(self.transform)
sample = self.tfms(sample)
if self.target_transform:
target = self.target_transform(target)
return sample, target
<|reserved_special_token_0|>
class DataProcessor:
def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=
False, tr_name='train', val_name='val', test_name='test', extension
=None, setup_data=True):
print('+------------------------------------+')
print('| Dream AI |')
print('+------------------------------------+')
print()
self.device = torch.device('cuda:0' if torch.cuda.is_available() else
'cpu')
(self.data_path, self.train_csv, self.val_csv, self.reg, self.
tr_name, self.val_name, self.test_name, self.extension) = (
data_path, train_csv, val_csv, reg, tr_name, val_name,
test_name, extension)
self.obj = False
self.multi_label = False
if setup_data:
self.set_up_data()
def set_up_data(self, split_size=0.15):
data_path, train_csv, val_csv, tr_name, val_name, test_name = (self
.data_path, self.train_csv, self.val_csv, self.tr_name, self.
val_name, self.test_name)
if not data_path:
data_path = os.getcwd() + '/'
tr_path = os.path.join(data_path, tr_name)
val_path = os.path.join(data_path, val_name)
test_path = os.path.join(data_path, test_name)
if os.path.exists(os.path.join(data_path, tr_name + '.csv')):
train_csv = tr_name + '.csv'
if not train_csv:
print('no')
train_csv, val_csv, test_csv = self.data_from_paths_to_csv(
data_path, tr_path, val_path, test_path)
train_csv_path = os.path.join(data_path, train_csv)
train_df = pd.read_csv(train_csv_path)
if 'Unnamed: 0' in train_df.columns:
train_df = train_df.drop('Unnamed: 0', 1)
if len(train_df.columns) > 2:
self.obj = True
img_names = [str(x) for x in list(train_df.iloc[:, 0])]
if self.extension:
img_names = add_extension(img_names, self.extension)
if val_csv:
val_csv_path = os.path.join(data_path, val_csv)
val_df = pd.read_csv(val_csv_path)
val_targets = list(map(str, list(val_df.iloc[:, 1])))
if test_csv:
test_csv_path = os.path.join(data_path, test_csv)
test_df = pd.read_csv(test_csv_path)
test_targets = list(map(str, list(test_df.iloc[:, 1])))
targets = list(map(str, list(train_df.iloc[:, 1])))
lengths = [len(t) for t in [s.split() for s in targets]]
self.target_lengths = lengths
split_targets = [t.split() for t in targets]
if self.obj:
print('\nObject Detection\n')
int_targets = [list(map(float, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
obj_targets = list(map(str, list(train_df.iloc[:, 2])))
obj_split_targets = [t.split() for t in obj_targets]
try:
obj_split_targets = [list(map(int, x)) for x in
obj_split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(obj_split_targets, True)
c_names = list(onehot_classes)
class_idx = [[c_names.index(i) for i in c] for c in
obj_split_targets]
zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)
for i, t in enumerate(zero_idx):
t[len(t) - len(class_idx[i]):] = class_idx[i]
zero_idx[i] = t
train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.
LongTensor) for z in zero_idx]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
elif self.reg:
print('\nRegression\n')
int_targets = [list(map(int, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
self.data_dir, self.num_classes, self.class_names = data_path, max(
lengths), np.unique(zero_targets, axis=1)
elif lengths[1:] != lengths[:-1]:
self.multi_label = True
print('\nMulti-label Classification\n')
try:
split_targets = [list(map(int, x)) for x in split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(split_targets, self.
multi_label)
train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.
FloatTensor) for x in dai_onehot]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
else:
print('\nSingle-label Classification\n')
unique_targets = list(np.unique(targets))
target_ids = [unique_targets.index(x) for x in targets]
train_df.iloc[:, 1] = target_ids
if val_csv:
target_ids = [unique_targets.index(x) for x in val_targets]
val_df.iloc[:, 1] = target_ids
if test_csv:
target_ids = [unique_targets.index(x) for x in test_targets]
test_df.iloc[:, 1] = target_ids
self.data_dir, self.num_classes, self.class_names = data_path, len(
unique_targets), unique_targets
if not val_csv:
train_df, val_df = split_df(train_df, split_size)
if not test_csv:
val_df, test_df = split_df(val_df, split_size)
tr_images = [str(x) for x in list(train_df.iloc[:, 0])]
val_images = [str(x) for x in list(val_df.iloc[:, 0])]
test_images = [str(x) for x in list(test_df.iloc[:, 0])]
if self.extension:
tr_images = add_extension(tr_images, self.extension)
val_images = add_extension(val_images, self.extension)
test_images = add_extension(test_images, self.extension)
train_df.iloc[:, 0] = tr_images
val_df.iloc[:, 0] = val_images
test_df.iloc[:, 0] = test_images
train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)
val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)
test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)
self.minorities, self.class_diffs = None, None
if not self.obj or not self.multi_label:
self.minorities, self.class_diffs = get_minorities(train_df)
self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,
self.test_name: test_df}
data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,
'num_classes': self.num_classes, 'class_names': self.
class_names, 'minorities': self.minorities, 'class_diffs': self
.class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}
self.data_dict = data_dict
return data_dict
def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,
test_path=None):
train_df = csv_from_path(tr_path, tr_path)
train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),
index=False)
ret = self.tr_name + '.csv', None
if val_path is not None:
val_exists = os.path.exists(val_path)
if val_exists:
val_df = csv_from_path(val_path, tr_path)
val_df.to_csv(os.path.join(data_path, self.val_name +
'.csv'), index=False)
ret = self.tr_name + '.csv', self.val_name + '.csv'
if test_path is not None:
test_exists = os.path.exists(test_path)
if test_exists:
test_df = csv_from_path(test_path, tr_path)
test_df.to_csv(os.path.join(data_path, self.test_name +
'.csv'), index=False)
ret = (self.tr_name + '.csv', self.val_name + '.csv', self.
test_name + '.csv')
return ret
    def get_data(self, data_dict=None, s=(224, 224), dataset=
        my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=
        None, tta=False, num_workers=4, stats_percentage=0.6):
        """Build datasets, dataloaders and dataset sizes for all splits.

        data_dict: metadata produced by set_up_data (defaults to
            self.data_dict).
        s: target image size; dataset: dataset class to instantiate.
        balance: apply minority-class balancing transforms (train split).
        tfms: extra transforms inserted into the train pipeline.
        bal_tfms: balancing transforms (defaults to a horizontal flip).
        tta: build five-crop test-time-augmentation pipelines.
        num_workers: dataloader worker count.
        stats_percentage: fraction of train data used for mean/std stats.
        Returns (image_datasets, dataloaders, dataset_sizes).
        """
        self.image_size = s
        if not data_dict:
            data_dict = self.data_dict
        data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (
            data_dict['data_dfs'], data_dict['data_dir'], data_dict[
            'minorities'], data_dict['class_diffs'], data_dict['obj'],
            data_dict['multi_label'])
        # Balancing is only defined for single-label classification.
        if obj or multi_label:
            balance = False
        if tta:
            # Five-crop TTA pipeline per split: crop, to-tensor, normalize.
            # NOTE(review): tta_tfms is built here but never used afterwards
            # in this method — confirm intended.
            tta_tfms = {self.tr_name: transforms.Compose([transforms.
                FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack
                ([transforms.ToTensor()(crop) for crop in crops])),
                transforms.Lambda(lambda crops: torch.stack([transforms.
                Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(
                crop) for crop in crops]))]), self.val_name: transforms.
                Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda
                crops: torch.stack([transforms.ToTensor()(crop) for crop in
                crops])), transforms.Lambda(lambda crops: torch.stack([
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
                0.225])(crop) for crop in crops]))]), self.test_name:
                transforms.Compose([transforms.FiveCrop(s[0]), transforms.
                Lambda(lambda crops: torch.stack([transforms.ToTensor()(
                crop) for crop in crops])), transforms.Lambda(lambda crops:
                torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [
                0.229, 0.224, 0.225])(crop) for crop in crops]))])}
        else:
            tta_tfms = None
        if not bal_tfms:
            bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],
                self.val_name: None, self.test_name: None}
        else:
            bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.
                test_name: None}
        # NOTE(review): both branches are identical — an obj-specific resize
        # may have been intended here.
        if obj:
            resize_transform = transforms.Resize(s)
        else:
            resize_transform = transforms.Resize(s)
        # ImageNet mean/std as placeholders; replaced with computed stats
        # further below.
        if not tfms:
            tfms = [resize_transform, transforms.ToTensor(), transforms.
                Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
        else:
            # User transforms are spliced in after the resize step.
            tfms_temp = [resize_transform, transforms.ToTensor(),
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
                0.225])]
            tfms_temp[1:1] = tfms
            tfms = tfms_temp
        print(tfms)
        data_transforms = {self.tr_name: tfms, self.val_name: [transforms.
            Resize(s), transforms.ToTensor(), transforms.Normalize([0.485,
            0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [
            transforms.Resize(s), transforms.ToTensor(), transforms.
            Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}
        # Compute channel mean/std from a subset of the training images and
        # patch them into each split's trailing Normalize transform.
        temp_tfms = [resize_transform, transforms.ToTensor()]
        temp_dataset = dataset(os.path.join(data_dir, self.tr_name),
            data_dfs[self.tr_name], temp_tfms)
        self.img_mean, self.img_std = get_img_stats(temp_dataset,
            stats_percentage)
        data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][
            -1].std = self.img_mean, self.img_std
        data_transforms[self.val_name][-1].mean, data_transforms[self.val_name
            ][-1].std = self.img_mean, self.img_std
        data_transforms[self.test_name][-1].mean, data_transforms[self.
            test_name][-1].std = self.img_mean, self.img_std
        # NOTE(review): every split loads images from the train folder
        # (data_dir/tr_name) — confirm intended.
        if balance:
            image_datasets = {x: dataset(os.path.join(data_dir, self.
                tr_name), data_dfs[x], data_transforms[x], obj, minorities,
                class_diffs, bal_tfms[x]) for x in [self.tr_name, self.
                val_name, self.test_name]}
        else:
            image_datasets = {x: dataset(os.path.join(data_dir, self.
                tr_name), data_dfs[x], data_transforms[x], obj) for x in [
                self.tr_name, self.val_name, self.test_name]}
        dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],
            batch_size=bs, shuffle=True, num_workers=num_workers) for x in
            [self.tr_name, self.val_name, self.test_name]}
        dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,
            self.val_name, self.test_name]}
        self.image_datasets, self.dataloaders, self.dataset_sizes = (
            image_datasets, dataloaders, dataset_sizes)
        return image_datasets, dataloaders, dataset_sizes
def imshow(self, inp, title=None):
"""Imshow for Tensor."""
inp = self.denorm_img(inp)
plt.imshow(inp)
if title:
plt.title(title)
plt.pause(0.001)
def denorm_img(self, inp, calculate=False):
inp = inp.numpy().transpose((1, 2, 0))
if calculate:
mean = np.mean(inp)
std = np.std(inp)
else:
mean = self.img_mean.numpy()
std = self.img_std.numpy()
inp = std * inp + mean
inp = np.clip(inp, 0, 1)
return inp
def show_data(self, folder_name='train', size=(64, 64), bs=5):
self.get_data(size, bs)
batch = next(iter(self.dataloaders[folder_name]))
inputs, classes = batch[0], batch[1]
out = torchvision.utils.make_grid(inputs)
if self.reg:
print(classes)
self.imshow(out, title=[x for x in classes])
elif self.multi_label:
self.imshow(out, title=[self.class_names[np.nonzero(x.type(
torch.LongTensor))] for x in classes])
else:
self.imshow(out, title=[self.class_names[x] for x in classes])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class my_image_csv_dataset(Dataset):
    """Dataset backed by a dataframe of (image name, target[, obj target]).

    Images are loaded from *data_dir*, converted to 3-channel grayscale and
    transformed. When *minorities*/*bal_tfms* are given, minority-class
    samples get extra balancing transforms whose probability is looked up in
    *diffs*. When *obj* is True the target is treated as bounding boxes and
    rescaled to the transformed image size.
    """

    def __init__(self, data_dir, data, transforms_=None, obj=False,
        minorities=None, diffs=None, bal_tfms=None):
        self.data_dir = data_dir
        self.data = data
        self.transforms_ = transforms_
        self.tfms = None
        self.obj = obj
        self.minorities = minorities
        self.diffs = diffs
        self.bal_tfms = bal_tfms
        # Fix: assert with a real message string; the original used
        # `assert x, print(...)`, whose message evaluated to None.
        assert transforms_ is not None, 'Please pass some transforms.'

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])
        img = Image.open(img_path)
        img = img.convert('RGB')
        img = torchvision.transforms.functional.to_grayscale(img,
            num_output_channels=3)
        y = self.data.iloc[index, 1]
        if self.minorities and self.bal_tfms:
            if y in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # bal_tfms is a Compose: tune its transform probabilities
                    # and prepend it to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[y]
                    tfms_list = [self.bal_tfms]
                    tfms_list.extend(self.transforms_)
                    self.tfms = transforms.Compose(tfms_list)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[y]
                    # Fix: build the balanced pipeline on a copy instead of
                    # mutating self.transforms_ in place, which previously
                    # re-inserted bal_tfms on every __getitem__ call.
                    tfms_list = list(self.transforms_)
                    tfms_list[1:1] = self.bal_tfms
                    self.tfms = transforms.Compose(tfms_list)
            else:
                self.tfms = transforms.Compose(self.transforms_)
        else:
            self.tfms = transforms.Compose(self.transforms_)
        x = self.tfms(img)
        if self.obj:
            # Rescale bbox targets from original image size to the
            # transformed (square) size.
            s = x.size()[1]
            if isinstance(s, tuple):
                s = s[0]
            row_scale = s / img.size[0]
            col_scale = s / img.size[1]
            y = rescale_bbox(y, row_scale, col_scale)
            y.squeeze_()
            y2 = self.data.iloc[index, 2]
            y = y, y2
        return x, y
class my_image_folder(DatasetFolder):
    """ImageFolder variant supporting minority-class balancing transforms
    and optional test-time-augmentation transforms."""

    def __init__(self, root, transform=None, target_transform=None, loader=
        default_loader, minorities=None, diffs=None, bal_tfms=None,
        tta_tfms=None):
        super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,
            transform=transform, target_transform=target_transform)
        self.imgs = self.samples
        self.minorities = minorities
        self.diffs = diffs
        self.bal_tfms = bal_tfms
        self.tta_tfms = tta_tfms
        self.tfms = None

    def __getitem__(self, index):
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform:
            balancing = self.minorities and self.bal_tfms
            if balancing and target in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # Composed balancing transforms: set probabilities and
                    # prepend the whole Compose to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[target]
                    pipeline = [self.bal_tfms]
                    pipeline.extend(self.transform)
                    self.tfms = transforms.Compose(pipeline)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[target]
                    self.tfms = transforms.Compose(self.bal_tfms + self
                        .transform)
            elif balancing:
                # Balancing configured, but this target is not a minority.
                self.tfms = transforms.Compose(self.transform)
            elif self.tta_tfms:
                self.tfms = self.tta_tfms
            else:
                self.tfms = transforms.Compose(self.transform)
            sample = self.tfms(sample)
        if self.target_transform:
            target = self.target_transform(target)
        return sample, target
<|reserved_special_token_0|>
def rescale_bbox(bb, row_scale, col_scale):
    """Scale bounding-box coordinates in place and return them as one row.

    *bb* holds flat (r1, c1, r2, c2) quadruples. Following the original
    convention, r-coordinates are multiplied by *col_scale* and
    c-coordinates by *row_scale*; values are rounded to whole numbers.
    """
    boxes = bb.reshape((-1, 4))
    factors = np.array([col_scale, row_scale, col_scale, row_scale])
    boxes[:] = np.round(boxes * factors)
    return boxes.reshape((1, -1))
<|reserved_special_token_0|>
class DataProcessor:
    """End-to-end image-data preparation.

    Builds (or loads) train/val/test csv index files, infers the task type
    (object detection, regression, multi-label or single-label
    classification) from the target column, encodes targets, and produces
    PyTorch datasets/dataloaders with normalization statistics computed
    from a sample of the training images.
    """

    def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=
        False, tr_name='train', val_name='val', test_name='test', extension
        =None, setup_data=True):
        """Store configuration and optionally run set_up_data() right away.

        data_path: root folder containing image folders / csv files.
        train_csv/val_csv: csv filenames relative to data_path (optional).
        reg: treat the task as regression.
        tr_name/val_name/test_name: base names of the split folders/csvs.
        extension: file extension appended to bare image names.
        """
        print('+------------------------------------+')
        print('| Dream AI |')
        print('+------------------------------------+')
        print()
        # Prefer the first CUDA device when available.
        self.device = torch.device('cuda:0' if torch.cuda.is_available() else
            'cpu')
        (self.data_path, self.train_csv, self.val_csv, self.reg, self.
            tr_name, self.val_name, self.test_name, self.extension) = (
            data_path, train_csv, val_csv, reg, tr_name, val_name,
            test_name, extension)
        # Task-type flags; set_up_data() may flip these.
        self.obj = False
        self.multi_label = False
        if setup_data:
            self.set_up_data()

    def set_up_data(self, split_size=0.15):
        """Load/create the split csvs, infer the task type, encode targets,
        and cache the dataframes plus metadata in self.data_dict.

        split_size: fraction held out when val/test splits must be created
        by splitting the training dataframe. Returns the data_dict.
        """
        data_path, train_csv, val_csv, tr_name, val_name, test_name = (self
            .data_path, self.train_csv, self.val_csv, self.tr_name, self.
            val_name, self.test_name)
        if not data_path:
            data_path = os.getcwd() + '/'
        tr_path = os.path.join(data_path, tr_name)
        val_path = os.path.join(data_path, val_name)
        test_path = os.path.join(data_path, test_name)
        # Prefer an existing '<tr_name>.csv' over the constructor argument.
        if os.path.exists(os.path.join(data_path, tr_name + '.csv')):
            train_csv = tr_name + '.csv'
        if not train_csv:
            print('no')
            train_csv, val_csv, test_csv = self.data_from_paths_to_csv(
                data_path, tr_path, val_path, test_path)
        # NOTE(review): when train_csv already exists, test_csv is never
        # assigned and the `if test_csv:` below raises NameError — confirm.
        train_csv_path = os.path.join(data_path, train_csv)
        train_df = pd.read_csv(train_csv_path)
        if 'Unnamed: 0' in train_df.columns:
            train_df = train_df.drop('Unnamed: 0', 1)
        # A third column means an extra object-detection label column.
        if len(train_df.columns) > 2:
            self.obj = True
        img_names = [str(x) for x in list(train_df.iloc[:, 0])]
        if self.extension:
            img_names = add_extension(img_names, self.extension)
        if val_csv:
            val_csv_path = os.path.join(data_path, val_csv)
            val_df = pd.read_csv(val_csv_path)
            val_targets = list(map(str, list(val_df.iloc[:, 1])))
        if test_csv:
            test_csv_path = os.path.join(data_path, test_csv)
            test_df = pd.read_csv(test_csv_path)
            test_targets = list(map(str, list(test_df.iloc[:, 1])))
        targets = list(map(str, list(train_df.iloc[:, 1])))
        # Token count per target row drives the task-type inference below.
        lengths = [len(t) for t in [s.split() for s in targets]]
        self.target_lengths = lengths
        split_targets = [t.split() for t in targets]
        if self.obj:
            print('\nObject Detection\n')
            # Right-align each bbox list inside a fixed-width zero array.
            int_targets = [list(map(float, x)) for x in split_targets]
            zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
            for i, t in enumerate(zero_targets):
                t[len(t) - len(int_targets[i]):] = int_targets[i]
                zero_targets[i] = t
            train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
                FloatTensor) for z in zero_targets]
            obj_targets = list(map(str, list(train_df.iloc[:, 2])))
            obj_split_targets = [t.split() for t in obj_targets]
            try:
                obj_split_targets = [list(map(int, x)) for x in
                    obj_split_targets]
            except:
                pass
            # One-hot over object class labels; class indices padded the
            # same way as the bboxes (4 coords per box).
            dai_onehot, onehot_classes = one_hot(obj_split_targets, True)
            c_names = list(onehot_classes)
            class_idx = [[c_names.index(i) for i in c] for c in
                obj_split_targets]
            zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)
            for i, t in enumerate(zero_idx):
                t[len(t) - len(class_idx[i]):] = class_idx[i]
                zero_idx[i] = t
            train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.
                LongTensor) for z in zero_idx]
            self.data_dir, self.num_classes, self.class_names = data_path, len(
                onehot_classes), onehot_classes
        elif self.reg:
            print('\nRegression\n')
            int_targets = [list(map(int, x)) for x in split_targets]
            zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
            for i, t in enumerate(zero_targets):
                t[len(t) - len(int_targets[i]):] = int_targets[i]
                zero_targets[i] = t
            train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
                FloatTensor) for z in zero_targets]
            self.data_dir, self.num_classes, self.class_names = data_path, max(
                lengths), np.unique(zero_targets, axis=1)
        elif lengths[1:] != lengths[:-1]:
            # Unequal token counts across rows => multi-label targets.
            self.multi_label = True
            print('\nMulti-label Classification\n')
            try:
                split_targets = [list(map(int, x)) for x in split_targets]
            except:
                pass
            dai_onehot, onehot_classes = one_hot(split_targets, self.
                multi_label)
            train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.
                FloatTensor) for x in dai_onehot]
            self.data_dir, self.num_classes, self.class_names = data_path, len(
                onehot_classes), onehot_classes
        else:
            print('\nSingle-label Classification\n')
            # Map string labels to stable integer ids (sorted unique order).
            unique_targets = list(np.unique(targets))
            target_ids = [unique_targets.index(x) for x in targets]
            train_df.iloc[:, 1] = target_ids
            if val_csv:
                target_ids = [unique_targets.index(x) for x in val_targets]
                val_df.iloc[:, 1] = target_ids
            if test_csv:
                target_ids = [unique_targets.index(x) for x in test_targets]
                test_df.iloc[:, 1] = target_ids
            self.data_dir, self.num_classes, self.class_names = data_path, len(
                unique_targets), unique_targets
        # Create missing splits by carving them out of the training data.
        if not val_csv:
            train_df, val_df = split_df(train_df, split_size)
        if not test_csv:
            val_df, test_df = split_df(val_df, split_size)
        tr_images = [str(x) for x in list(train_df.iloc[:, 0])]
        val_images = [str(x) for x in list(val_df.iloc[:, 0])]
        test_images = [str(x) for x in list(test_df.iloc[:, 0])]
        if self.extension:
            tr_images = add_extension(tr_images, self.extension)
            val_images = add_extension(val_images, self.extension)
            test_images = add_extension(test_images, self.extension)
        train_df.iloc[:, 0] = tr_images
        val_df.iloc[:, 0] = val_images
        test_df.iloc[:, 0] = test_images
        # NOTE(review): filenames are hard-coded here instead of using
        # tr_name/val_name/test_name — confirm intended.
        train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)
        val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)
        test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)
        self.minorities, self.class_diffs = None, None
        # NOTE(review): `or` makes this true unless BOTH obj and multi_label
        # are set; `and` may have been intended — confirm.
        if not self.obj or not self.multi_label:
            self.minorities, self.class_diffs = get_minorities(train_df)
        self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,
            self.test_name: test_df}
        data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,
            'num_classes': self.num_classes, 'class_names': self.
            class_names, 'minorities': self.minorities, 'class_diffs': self
            .class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}
        self.data_dict = data_dict
        return data_dict

    def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,
        test_path=None):
        """Index the images under each split folder into csv files.

        Returns the csv filenames that were written.
        NOTE(review): the return value is a 2-tuple unless a test folder
        exists, but the caller unpacks three values — confirm.
        """
        train_df = csv_from_path(tr_path, tr_path)
        train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),
            index=False)
        ret = self.tr_name + '.csv', None
        if val_path is not None:
            val_exists = os.path.exists(val_path)
            if val_exists:
                val_df = csv_from_path(val_path, tr_path)
                val_df.to_csv(os.path.join(data_path, self.val_name +
                    '.csv'), index=False)
                ret = self.tr_name + '.csv', self.val_name + '.csv'
        if test_path is not None:
            test_exists = os.path.exists(test_path)
            if test_exists:
                test_df = csv_from_path(test_path, tr_path)
                test_df.to_csv(os.path.join(data_path, self.test_name +
                    '.csv'), index=False)
                ret = (self.tr_name + '.csv', self.val_name + '.csv', self.
                    test_name + '.csv')
        return ret

    def get_data(self, data_dict=None, s=(224, 224), dataset=
        my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=
        None, tta=False, num_workers=4, stats_percentage=0.6):
        """Build datasets, dataloaders and dataset sizes for all splits.

        data_dict: metadata from set_up_data (defaults to self.data_dict).
        s: target image size; dataset: dataset class to instantiate.
        balance: apply minority-class balancing transforms (train split).
        tfms/bal_tfms: extra transforms; tta: build five-crop TTA pipelines.
        stats_percentage: fraction of train data used for mean/std stats.
        Returns (image_datasets, dataloaders, dataset_sizes).
        """
        self.image_size = s
        if not data_dict:
            data_dict = self.data_dict
        data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (
            data_dict['data_dfs'], data_dict['data_dir'], data_dict[
            'minorities'], data_dict['class_diffs'], data_dict['obj'],
            data_dict['multi_label'])
        # Balancing is only defined for single-label classification.
        if obj or multi_label:
            balance = False
        if tta:
            # Five-crop TTA pipeline per split: crop, to-tensor, normalize.
            # NOTE(review): tta_tfms is built but never used afterwards.
            tta_tfms = {self.tr_name: transforms.Compose([transforms.
                FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack
                ([transforms.ToTensor()(crop) for crop in crops])),
                transforms.Lambda(lambda crops: torch.stack([transforms.
                Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(
                crop) for crop in crops]))]), self.val_name: transforms.
                Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda
                crops: torch.stack([transforms.ToTensor()(crop) for crop in
                crops])), transforms.Lambda(lambda crops: torch.stack([
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
                0.225])(crop) for crop in crops]))]), self.test_name:
                transforms.Compose([transforms.FiveCrop(s[0]), transforms.
                Lambda(lambda crops: torch.stack([transforms.ToTensor()(
                crop) for crop in crops])), transforms.Lambda(lambda crops:
                torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [
                0.229, 0.224, 0.225])(crop) for crop in crops]))])}
        else:
            tta_tfms = None
        if not bal_tfms:
            bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],
                self.val_name: None, self.test_name: None}
        else:
            bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.
                test_name: None}
        # NOTE(review): both branches are identical — an obj-specific resize
        # may have been intended here.
        if obj:
            resize_transform = transforms.Resize(s)
        else:
            resize_transform = transforms.Resize(s)
        # ImageNet mean/std placeholders; replaced with computed stats below.
        if not tfms:
            tfms = [resize_transform, transforms.ToTensor(), transforms.
                Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
        else:
            # User transforms are spliced in after the resize step.
            tfms_temp = [resize_transform, transforms.ToTensor(),
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
                0.225])]
            tfms_temp[1:1] = tfms
            tfms = tfms_temp
        print(tfms)
        data_transforms = {self.tr_name: tfms, self.val_name: [transforms.
            Resize(s), transforms.ToTensor(), transforms.Normalize([0.485,
            0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [
            transforms.Resize(s), transforms.ToTensor(), transforms.
            Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}
        # Compute channel mean/std from a subset of the training images and
        # patch them into each split's trailing Normalize transform.
        temp_tfms = [resize_transform, transforms.ToTensor()]
        temp_dataset = dataset(os.path.join(data_dir, self.tr_name),
            data_dfs[self.tr_name], temp_tfms)
        self.img_mean, self.img_std = get_img_stats(temp_dataset,
            stats_percentage)
        data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][
            -1].std = self.img_mean, self.img_std
        data_transforms[self.val_name][-1].mean, data_transforms[self.val_name
            ][-1].std = self.img_mean, self.img_std
        data_transforms[self.test_name][-1].mean, data_transforms[self.
            test_name][-1].std = self.img_mean, self.img_std
        # NOTE(review): every split loads images from the train folder
        # (data_dir/tr_name) — confirm intended.
        if balance:
            image_datasets = {x: dataset(os.path.join(data_dir, self.
                tr_name), data_dfs[x], data_transforms[x], obj, minorities,
                class_diffs, bal_tfms[x]) for x in [self.tr_name, self.
                val_name, self.test_name]}
        else:
            image_datasets = {x: dataset(os.path.join(data_dir, self.
                tr_name), data_dfs[x], data_transforms[x], obj) for x in [
                self.tr_name, self.val_name, self.test_name]}
        dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],
            batch_size=bs, shuffle=True, num_workers=num_workers) for x in
            [self.tr_name, self.val_name, self.test_name]}
        dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,
            self.val_name, self.test_name]}
        self.image_datasets, self.dataloaders, self.dataset_sizes = (
            image_datasets, dataloaders, dataset_sizes)
        return image_datasets, dataloaders, dataset_sizes

    def imshow(self, inp, title=None):
        """Imshow for Tensor: denormalize and display with matplotlib."""
        inp = self.denorm_img(inp)
        plt.imshow(inp)
        if title:
            plt.title(title)
        plt.pause(0.001)

    def denorm_img(self, inp, calculate=False):
        """Convert a normalized (C, H, W) tensor back to an HWC array in
        [0, 1]; with calculate=True the stats come from the input itself."""
        inp = inp.numpy().transpose((1, 2, 0))
        if calculate:
            mean = np.mean(inp)
            std = np.std(inp)
        else:
            mean = self.img_mean.numpy()
            std = self.img_std.numpy()
        inp = std * inp + mean
        inp = np.clip(inp, 0, 1)
        return inp

    def show_data(self, folder_name='train', size=(64, 64), bs=5):
        """Display one batch from *folder_name* as an image grid.

        NOTE(review): get_data(size, bs) passes *size* positionally into
        get_data's data_dict parameter — likely should be
        self.get_data(s=size, bs=bs); confirm.
        """
        self.get_data(size, bs)
        batch = next(iter(self.dataloaders[folder_name]))
        inputs, classes = batch[0], batch[1]
        out = torchvision.utils.make_grid(inputs)
        if self.reg:
            print(classes)
            self.imshow(out, title=[x for x in classes])
        elif self.multi_label:
            self.imshow(out, title=[self.class_names[np.nonzero(x.type(
                torch.LongTensor))] for x in classes])
        else:
            self.imshow(out, title=[self.class_names[x] for x in classes])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class my_image_csv_dataset(Dataset):
    """Dataset backed by a dataframe of (image name, target[, obj target]).

    Images are loaded from *data_dir*, converted to 3-channel grayscale and
    transformed. When *minorities*/*bal_tfms* are given, minority-class
    samples get extra balancing transforms whose probability is looked up in
    *diffs*. When *obj* is True the target is treated as bounding boxes and
    rescaled to the transformed image size.
    """

    def __init__(self, data_dir, data, transforms_=None, obj=False,
        minorities=None, diffs=None, bal_tfms=None):
        self.data_dir = data_dir
        self.data = data
        self.transforms_ = transforms_
        self.tfms = None
        self.obj = obj
        self.minorities = minorities
        self.diffs = diffs
        self.bal_tfms = bal_tfms
        # Fix: assert with a real message string; the original used
        # `assert x, print(...)`, whose message evaluated to None.
        assert transforms_ is not None, 'Please pass some transforms.'

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])
        img = Image.open(img_path)
        img = img.convert('RGB')
        img = torchvision.transforms.functional.to_grayscale(img,
            num_output_channels=3)
        y = self.data.iloc[index, 1]
        if self.minorities and self.bal_tfms:
            if y in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # bal_tfms is a Compose: tune its transform probabilities
                    # and prepend it to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[y]
                    tfms_list = [self.bal_tfms]
                    tfms_list.extend(self.transforms_)
                    self.tfms = transforms.Compose(tfms_list)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[y]
                    # Fix: build the balanced pipeline on a copy instead of
                    # mutating self.transforms_ in place, which previously
                    # re-inserted bal_tfms on every __getitem__ call.
                    tfms_list = list(self.transforms_)
                    tfms_list[1:1] = self.bal_tfms
                    self.tfms = transforms.Compose(tfms_list)
            else:
                self.tfms = transforms.Compose(self.transforms_)
        else:
            self.tfms = transforms.Compose(self.transforms_)
        x = self.tfms(img)
        if self.obj:
            # Rescale bbox targets from original image size to the
            # transformed (square) size.
            s = x.size()[1]
            if isinstance(s, tuple):
                s = s[0]
            row_scale = s / img.size[0]
            col_scale = s / img.size[1]
            y = rescale_bbox(y, row_scale, col_scale)
            y.squeeze_()
            y2 = self.data.iloc[index, 2]
            y = y, y2
        return x, y
class my_image_folder(DatasetFolder):
    """ImageFolder variant supporting minority-class balancing transforms
    and optional test-time-augmentation transforms."""

    def __init__(self, root, transform=None, target_transform=None, loader=
        default_loader, minorities=None, diffs=None, bal_tfms=None,
        tta_tfms=None):
        super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,
            transform=transform, target_transform=target_transform)
        self.imgs = self.samples
        self.minorities = minorities
        self.diffs = diffs
        self.bal_tfms = bal_tfms
        self.tta_tfms = tta_tfms
        self.tfms = None

    def __getitem__(self, index):
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform:
            balancing = self.minorities and self.bal_tfms
            if balancing and target in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # Composed balancing transforms: set probabilities and
                    # prepend the whole Compose to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[target]
                    pipeline = [self.bal_tfms]
                    pipeline.extend(self.transform)
                    self.tfms = transforms.Compose(pipeline)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[target]
                    self.tfms = transforms.Compose(self.bal_tfms + self
                        .transform)
            elif balancing:
                # Balancing configured, but this target is not a minority.
                self.tfms = transforms.Compose(self.transform)
            elif self.tta_tfms:
                self.tfms = self.tta_tfms
            else:
                self.tfms = transforms.Compose(self.transform)
            sample = self.tfms(sample)
        if self.target_transform:
            target = self.target_transform(target)
        return sample, target
<|reserved_special_token_0|>
def get_index(arr, a):
    """Return the index of the first row of *arr* equal elementwise to *a*,
    or False when no row matches (note: False == 0, so callers must use
    an identity check rather than truthiness)."""
    for idx, row in enumerate(arr):
        if sum(row == a) == len(a):
            return idx
    return False
def rescale_bbox(bb, row_scale, col_scale):
    """Scale bounding-box coordinates in place and return them as one row.

    *bb* holds flat (r1, c1, r2, c2) quadruples. Following the original
    convention, r-coordinates are multiplied by *col_scale* and
    c-coordinates by *row_scale*; values are rounded to whole numbers.
    """
    boxes = bb.reshape((-1, 4))
    factors = np.array([col_scale, row_scale, col_scale, row_scale])
    boxes[:] = np.round(boxes * factors)
    return boxes.reshape((1, -1))
def get_img_stats(dataset, sz):
    """Estimate per-channel mean and std from a fraction *sz* of *dataset*.

    Iterates (image, label) pairs, collects up to int(len * sz) + 1 images,
    stacks them and reduces over all pixels per channel. Returns
    (mean, std) tensors of shape (3,).
    """
    limit = int(len(dataset) * sz)
    collected = []
    for count, (img, _) in enumerate(dataset):
        if count > limit:
            break
        collected.append(img)
    pixels = torch.stack(collected, dim=3).view(3, -1)
    return pixels.mean(dim=1), pixels.std(dim=1)
<|reserved_special_token_0|>
def save_obj(obj, path):
    """Pickle *obj* to *path* using the highest available protocol."""
    with open(path, 'wb') as out:
        pickle.dump(obj, out, pickle.HIGHEST_PROTOCOL)
<|reserved_special_token_0|>
class DataProcessor:
def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=
False, tr_name='train', val_name='val', test_name='test', extension
=None, setup_data=True):
print('+------------------------------------+')
print('| Dream AI |')
print('+------------------------------------+')
print()
self.device = torch.device('cuda:0' if torch.cuda.is_available() else
'cpu')
(self.data_path, self.train_csv, self.val_csv, self.reg, self.
tr_name, self.val_name, self.test_name, self.extension) = (
data_path, train_csv, val_csv, reg, tr_name, val_name,
test_name, extension)
self.obj = False
self.multi_label = False
if setup_data:
self.set_up_data()
def set_up_data(self, split_size=0.15):
data_path, train_csv, val_csv, tr_name, val_name, test_name = (self
.data_path, self.train_csv, self.val_csv, self.tr_name, self.
val_name, self.test_name)
if not data_path:
data_path = os.getcwd() + '/'
tr_path = os.path.join(data_path, tr_name)
val_path = os.path.join(data_path, val_name)
test_path = os.path.join(data_path, test_name)
if os.path.exists(os.path.join(data_path, tr_name + '.csv')):
train_csv = tr_name + '.csv'
if not train_csv:
print('no')
train_csv, val_csv, test_csv = self.data_from_paths_to_csv(
data_path, tr_path, val_path, test_path)
train_csv_path = os.path.join(data_path, train_csv)
train_df = pd.read_csv(train_csv_path)
if 'Unnamed: 0' in train_df.columns:
train_df = train_df.drop('Unnamed: 0', 1)
if len(train_df.columns) > 2:
self.obj = True
img_names = [str(x) for x in list(train_df.iloc[:, 0])]
if self.extension:
img_names = add_extension(img_names, self.extension)
if val_csv:
val_csv_path = os.path.join(data_path, val_csv)
val_df = pd.read_csv(val_csv_path)
val_targets = list(map(str, list(val_df.iloc[:, 1])))
if test_csv:
test_csv_path = os.path.join(data_path, test_csv)
test_df = pd.read_csv(test_csv_path)
test_targets = list(map(str, list(test_df.iloc[:, 1])))
targets = list(map(str, list(train_df.iloc[:, 1])))
lengths = [len(t) for t in [s.split() for s in targets]]
self.target_lengths = lengths
split_targets = [t.split() for t in targets]
if self.obj:
print('\nObject Detection\n')
int_targets = [list(map(float, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
obj_targets = list(map(str, list(train_df.iloc[:, 2])))
obj_split_targets = [t.split() for t in obj_targets]
try:
obj_split_targets = [list(map(int, x)) for x in
obj_split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(obj_split_targets, True)
c_names = list(onehot_classes)
class_idx = [[c_names.index(i) for i in c] for c in
obj_split_targets]
zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)
for i, t in enumerate(zero_idx):
t[len(t) - len(class_idx[i]):] = class_idx[i]
zero_idx[i] = t
train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.
LongTensor) for z in zero_idx]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
elif self.reg:
print('\nRegression\n')
int_targets = [list(map(int, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
self.data_dir, self.num_classes, self.class_names = data_path, max(
lengths), np.unique(zero_targets, axis=1)
elif lengths[1:] != lengths[:-1]:
self.multi_label = True
print('\nMulti-label Classification\n')
try:
split_targets = [list(map(int, x)) for x in split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(split_targets, self.
multi_label)
train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.
FloatTensor) for x in dai_onehot]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
else:
print('\nSingle-label Classification\n')
unique_targets = list(np.unique(targets))
target_ids = [unique_targets.index(x) for x in targets]
train_df.iloc[:, 1] = target_ids
if val_csv:
target_ids = [unique_targets.index(x) for x in val_targets]
val_df.iloc[:, 1] = target_ids
if test_csv:
target_ids = [unique_targets.index(x) for x in test_targets]
test_df.iloc[:, 1] = target_ids
self.data_dir, self.num_classes, self.class_names = data_path, len(
unique_targets), unique_targets
if not val_csv:
train_df, val_df = split_df(train_df, split_size)
if not test_csv:
val_df, test_df = split_df(val_df, split_size)
tr_images = [str(x) for x in list(train_df.iloc[:, 0])]
val_images = [str(x) for x in list(val_df.iloc[:, 0])]
test_images = [str(x) for x in list(test_df.iloc[:, 0])]
if self.extension:
tr_images = add_extension(tr_images, self.extension)
val_images = add_extension(val_images, self.extension)
test_images = add_extension(test_images, self.extension)
train_df.iloc[:, 0] = tr_images
val_df.iloc[:, 0] = val_images
test_df.iloc[:, 0] = test_images
train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)
val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)
test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)
self.minorities, self.class_diffs = None, None
if not self.obj or not self.multi_label:
self.minorities, self.class_diffs = get_minorities(train_df)
self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,
self.test_name: test_df}
data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,
'num_classes': self.num_classes, 'class_names': self.
class_names, 'minorities': self.minorities, 'class_diffs': self
.class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}
self.data_dict = data_dict
return data_dict
def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,
test_path=None):
train_df = csv_from_path(tr_path, tr_path)
train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),
index=False)
ret = self.tr_name + '.csv', None
if val_path is not None:
val_exists = os.path.exists(val_path)
if val_exists:
val_df = csv_from_path(val_path, tr_path)
val_df.to_csv(os.path.join(data_path, self.val_name +
'.csv'), index=False)
ret = self.tr_name + '.csv', self.val_name + '.csv'
if test_path is not None:
test_exists = os.path.exists(test_path)
if test_exists:
test_df = csv_from_path(test_path, tr_path)
test_df.to_csv(os.path.join(data_path, self.test_name +
'.csv'), index=False)
ret = (self.tr_name + '.csv', self.val_name + '.csv', self.
test_name + '.csv')
return ret
def get_data(self, data_dict=None, s=(224, 224), dataset=
my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=
None, tta=False, num_workers=4, stats_percentage=0.6):
self.image_size = s
if not data_dict:
data_dict = self.data_dict
data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (
data_dict['data_dfs'], data_dict['data_dir'], data_dict[
'minorities'], data_dict['class_diffs'], data_dict['obj'],
data_dict['multi_label'])
if obj or multi_label:
balance = False
if tta:
tta_tfms = {self.tr_name: transforms.Compose([transforms.
FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack
([transforms.ToTensor()(crop) for crop in crops])),
transforms.Lambda(lambda crops: torch.stack([transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(
crop) for crop in crops]))]), self.val_name: transforms.
Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda
crops: torch.stack([transforms.ToTensor()(crop) for crop in
crops])), transforms.Lambda(lambda crops: torch.stack([
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
0.225])(crop) for crop in crops]))]), self.test_name:
transforms.Compose([transforms.FiveCrop(s[0]), transforms.
Lambda(lambda crops: torch.stack([transforms.ToTensor()(
crop) for crop in crops])), transforms.Lambda(lambda crops:
torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [
0.229, 0.224, 0.225])(crop) for crop in crops]))])}
else:
tta_tfms = None
if not bal_tfms:
bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],
self.val_name: None, self.test_name: None}
else:
bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.
test_name: None}
if obj:
resize_transform = transforms.Resize(s)
else:
resize_transform = transforms.Resize(s)
if not tfms:
tfms = [resize_transform, transforms.ToTensor(), transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
else:
tfms_temp = [resize_transform, transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
0.225])]
tfms_temp[1:1] = tfms
tfms = tfms_temp
print(tfms)
data_transforms = {self.tr_name: tfms, self.val_name: [transforms.
Resize(s), transforms.ToTensor(), transforms.Normalize([0.485,
0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [
transforms.Resize(s), transforms.ToTensor(), transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}
temp_tfms = [resize_transform, transforms.ToTensor()]
temp_dataset = dataset(os.path.join(data_dir, self.tr_name),
data_dfs[self.tr_name], temp_tfms)
self.img_mean, self.img_std = get_img_stats(temp_dataset,
stats_percentage)
data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][
-1].std = self.img_mean, self.img_std
data_transforms[self.val_name][-1].mean, data_transforms[self.val_name
][-1].std = self.img_mean, self.img_std
data_transforms[self.test_name][-1].mean, data_transforms[self.
test_name][-1].std = self.img_mean, self.img_std
if balance:
image_datasets = {x: dataset(os.path.join(data_dir, self.
tr_name), data_dfs[x], data_transforms[x], obj, minorities,
class_diffs, bal_tfms[x]) for x in [self.tr_name, self.
val_name, self.test_name]}
else:
image_datasets = {x: dataset(os.path.join(data_dir, self.
tr_name), data_dfs[x], data_transforms[x], obj) for x in [
self.tr_name, self.val_name, self.test_name]}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],
batch_size=bs, shuffle=True, num_workers=num_workers) for x in
[self.tr_name, self.val_name, self.test_name]}
dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,
self.val_name, self.test_name]}
self.image_datasets, self.dataloaders, self.dataset_sizes = (
image_datasets, dataloaders, dataset_sizes)
return image_datasets, dataloaders, dataset_sizes
    def imshow(self, inp, title=None):
        """Imshow for Tensor.

        Denormalizes *inp* via denorm_img (which also converts the CHW
        tensor to an HWC numpy array) and displays it with matplotlib.
        """
        inp = self.denorm_img(inp)
        plt.imshow(inp)
        if title:
            plt.title(title)
        # pause() gives the matplotlib GUI a chance to actually draw.
        plt.pause(0.001)
    def denorm_img(self, inp, calculate=False):
        # Undo input normalization: CHW tensor -> HWC numpy array in [0, 1].
        inp = inp.numpy().transpose((1, 2, 0))
        if calculate:
            # Use this image's own statistics when dataset-wide mean/std
            # are not wanted/available.
            mean = np.mean(inp)
            std = np.std(inp)
        else:
            # Dataset statistics computed by get_img_stats inside get_data.
            mean = self.img_mean.numpy()
            std = self.img_std.numpy()
        inp = std * inp + mean
        inp = np.clip(inp, 0, 1)
        return inp
def show_data(self, folder_name='train', size=(64, 64), bs=5):
self.get_data(size, bs)
batch = next(iter(self.dataloaders[folder_name]))
inputs, classes = batch[0], batch[1]
out = torchvision.utils.make_grid(inputs)
if self.reg:
print(classes)
self.imshow(out, title=[x for x in classes])
elif self.multi_label:
self.imshow(out, title=[self.class_names[np.nonzero(x.type(
torch.LongTensor))] for x in classes])
else:
self.imshow(out, title=[self.class_names[x] for x in classes])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class my_image_csv_dataset(Dataset):
    """Image dataset driven by a dataframe of (filename, target[, class ids]).

    Optionally applies extra "balancing" transforms to samples of
    under-represented classes (probability taken from *diffs*) and, in
    object-detection mode, rescales bounding boxes to the output size.
    """

    def __init__(self, data_dir, data, transforms_=None, obj=False,
                 minorities=None, diffs=None, bal_tfms=None):
        self.data_dir = data_dir
        self.data = data
        self.transforms_ = transforms_
        self.tfms = None
        self.obj = obj
        self.minorities = minorities  # labels considered under-represented
        self.diffs = diffs  # label -> augmentation probability
        self.bal_tfms = bal_tfms
        # BUG FIX: the assert message used print(...), which printed
        # unconditionally (even on success) and passed None as the message.
        assert transforms_ is not None, 'Please pass some transforms.'

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])
        img = Image.open(img_path)
        img = img.convert('RGB')
        # NOTE(review): converting to RGB then to 3-channel grayscale looks
        # deliberate (grayscale training data) -- confirm with callers.
        img = torchvision.transforms.functional.to_grayscale(img,
            num_output_channels=3)
        y = self.data.iloc[index, 1]
        if self.minorities and self.bal_tfms:
            if y in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # bal_tfms is a Compose: tune each transform's p, then
                    # prepend the whole Compose to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[y]
                    l = [self.bal_tfms]
                    l.extend(self.transforms_)
                    self.tfms = transforms.Compose(l)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[y]
                    # BUG FIX: the original mutated self.transforms_ in
                    # place (self.transforms_[1:1] = self.bal_tfms), so the
                    # balancing transforms were re-inserted on every access
                    # of a minority sample and the pipeline grew without
                    # bound. Build a fresh list instead.
                    tfms_with_bal = list(self.transforms_)
                    tfms_with_bal[1:1] = self.bal_tfms
                    self.tfms = transforms.Compose(tfms_with_bal)
            else:
                self.tfms = transforms.Compose(self.transforms_)
        else:
            self.tfms = transforms.Compose(self.transforms_)
        x = self.tfms(img)
        if self.obj:
            # Rescale bounding boxes from original image size to the
            # transformed size. x is CHW, so size()[1] is the new height.
            s = x.size()[1]
            if isinstance(s, tuple):
                s = s[0]
            row_scale = s / img.size[0]
            col_scale = s / img.size[1]
            y = rescale_bbox(y, row_scale, col_scale)
            y.squeeze_()
            y2 = self.data.iloc[index, 2]
            y = y, y2
        return x, y
class my_image_folder(DatasetFolder):
    """ImageFolder variant with optional class-balancing and TTA transforms.

    Balancing transforms get their probability ``p`` set per-sample from
    ``diffs`` so rarer classes are augmented more aggressively.
    """
    def __init__(self, root, transform=None, target_transform=None, loader=
        default_loader, minorities=None, diffs=None, bal_tfms=None,
        tta_tfms=None):
        super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,
            transform=transform, target_transform=target_transform)
        self.imgs = self.samples
        self.minorities = minorities  # labels considered under-represented
        self.diffs = diffs  # label -> augmentation probability
        self.bal_tfms = bal_tfms
        self.tta_tfms = tta_tfms
        self.tfms = None
    def __getitem__(self, index):
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform:
            if self.minorities and self.bal_tfms:
                if target in self.minorities:
                    if hasattr(self.bal_tfms, 'transforms'):
                        # bal_tfms is a Compose: tune each transform's p,
                        # then prepend the whole Compose.
                        for tr in self.bal_tfms.transforms:
                            tr.p = self.diffs[target]
                        l = [self.bal_tfms]
                        l.extend(self.transform)
                        self.tfms = transforms.Compose(l)
                    else:
                        # bal_tfms is a plain list of transforms.
                        for t in self.bal_tfms:
                            t.p = self.diffs[target]
                        self.tfms = transforms.Compose(self.bal_tfms + self
                            .transform)
                else:
                    self.tfms = transforms.Compose(self.transform)
            elif self.tta_tfms:
                # Test-time-augmentation pipeline (used when no balancing
                # applies to this sample).
                self.tfms = self.tta_tfms
            else:
                self.tfms = transforms.Compose(self.transform)
            sample = self.tfms(sample)
        if self.target_transform:
            target = self.target_transform(target)
        return sample, target
<|reserved_special_token_0|>
def listdir_fullpath(d):
    """Return every entry of directory *d* joined with the directory path."""
    entries = os.listdir(d)
    return [os.path.join(d, entry) for entry in entries]
<|reserved_special_token_0|>
def csv_from_path(path, img_dest):
    """Build a shuffled (Img, Label) dataframe from a folder-per-class tree.

    WARNING: this MOVES (os.rename) every image out of its class folder
    into *img_dest*, renaming it to '<split name>_<original name>'.
    """
    path = Path(path)
    img_dest = Path(img_dest)
    labels_paths = list(path.iterdir())
    tr_images = []
    tr_labels = []
    for l in labels_paths:
        if l.is_dir():
            # Each sub-directory name is the class label.
            for i in list(l.iterdir()):
                if i.suffix in IMG_EXTENSIONS:
                    name = i.name
                    label = l.name
                    new_name = '{}_{}'.format(path.name, name)
                    new_path = img_dest / new_name
                    os.rename(i, new_path)
                    tr_images.append(new_name)
                    tr_labels.append(label)
    tr_img_label = {'Img': tr_images, 'Label': tr_labels}
    csv = pd.DataFrame(tr_img_label, columns=['Img', 'Label'])
    # Shuffle the rows and drop the old index.
    csv = csv.sample(frac=1).reset_index(drop=True)
    return csv
def add_extension(a, e):
    """Append extension *e* to every item of *a*, returning a new list."""
    result = []
    for name in a:
        result.append(name + e)
    return result
def one_hot(targets, multi=False):
    """One-hot (or multi-hot, when *multi*) encode *targets*.

    Returns (encoded matrix, classes_) from the fitted binarizer.
    """
    binerizer = MultiLabelBinarizer() if multi else LabelBinarizer()
    encoded = binerizer.fit_transform(targets)
    return encoded, binerizer.classes_
def get_index(arr, a):
    """Return the index of the first row of *arr* equal element-wise to *a*.

    Returns False when no row matches.
    """
    for idx, row in enumerate(arr):
        if sum(row == a) == len(a):
            return idx
    return False
def rescale_bbox(bb, row_scale, col_scale):
    # Scale flat bounding-box coordinates (r1, c1, r2, c2, ...) in place
    # and return them as a single row vector of shape (1, 4*n).
    bb = bb.reshape((-1, 4))
    for b in bb:
        r1, c1, r2, c2 = b
        # NOTE(review): rows are multiplied by col_scale and columns by
        # row_scale; the scales are computed from PIL's (width, height) in
        # the dataset, so the swap appears intentional -- confirm.
        b[0] = int(np.round(r1 * col_scale))
        b[1] = int(np.round(c1 * row_scale))
        b[2] = int(np.round(r2 * col_scale))
        b[3] = int(np.round(c2 * row_scale))
    bb = bb.reshape((1, -1))
    return bb
def get_img_stats(dataset, sz):
    """Estimate per-channel mean/std from a fraction *sz* of *dataset*.

    Iterates (img, label) pairs, stacks the sampled CHW tensors and
    reduces over every pixel of every sampled image.
    """
    limit = int(len(dataset) * sz)
    sampled = []
    for count, (img, _) in enumerate(dataset):
        if count > limit:
            break
        sampled.append(img)
    stacked = torch.stack(sampled, dim=3).view(3, -1)
    return stacked.mean(dim=1), stacked.std(dim=1)
def split_df(train_df, test_size=0.15):
    """Split *train_df* into (train, val) frames with fresh indexes.

    Tries a stratified split on the label column (column 1); when the data
    cannot be stratified (e.g. a class with a single sample), falls back
    to a plain random split.
    """
    try:
        train_df, val_df = train_test_split(train_df, test_size=test_size,
            random_state=2, stratify=train_df.iloc[:, 1])
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Stratification failures surface as ValueError
    # (unstratifiable labels) or TypeError (unusable label values).
    except (ValueError, TypeError):
        train_df, val_df = train_test_split(train_df, test_size=test_size,
            random_state=2)
    train_df = train_df.reset_index(drop=True)
    val_df = val_df.reset_index(drop=True)
    return train_df, val_df
def save_obj(obj, path):
    """Pickle *obj* to *path* using the highest available protocol."""
    with open(path, 'wb') as handle:
        pickle.dump(obj, handle, pickle.HIGHEST_PROTOCOL)
def load_obj(path):
    """Unpickle and return the object stored at *path*."""
    with open(path, 'rb') as handle:
        return pickle.load(handle)
class DataProcessor:
    def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=
        False, tr_name='train', val_name='val', test_name='test', extension
        =None, setup_data=True):
        """Store configuration, pick the compute device, and optionally
        run set_up_data() immediately.

        Args:
            data_path: dataset root folder (defaults to cwd in set_up_data).
            train_csv/val_csv: optional csv index file names in data_path.
            reg: treat the task as regression.
            tr_name/val_name/test_name: split folder / csv base names.
            extension: file extension to append to image names, if any.
            setup_data: when True, call set_up_data() now.
        """
        print('+------------------------------------+')
        print('|             Dream AI               |')
        print('+------------------------------------+')
        print()
        self.device = torch.device('cuda:0' if torch.cuda.is_available() else
            'cpu')
        (self.data_path, self.train_csv, self.val_csv, self.reg, self.
            tr_name, self.val_name, self.test_name, self.extension) = (
            data_path, train_csv, val_csv, reg, tr_name, val_name,
            test_name, extension)
        # Task-type flags, refined later by set_up_data().
        self.obj = False
        self.multi_label = False
        if setup_data:
            self.set_up_data()
def set_up_data(self, split_size=0.15):
data_path, train_csv, val_csv, tr_name, val_name, test_name = (self
.data_path, self.train_csv, self.val_csv, self.tr_name, self.
val_name, self.test_name)
if not data_path:
data_path = os.getcwd() + '/'
tr_path = os.path.join(data_path, tr_name)
val_path = os.path.join(data_path, val_name)
test_path = os.path.join(data_path, test_name)
if os.path.exists(os.path.join(data_path, tr_name + '.csv')):
train_csv = tr_name + '.csv'
if not train_csv:
print('no')
train_csv, val_csv, test_csv = self.data_from_paths_to_csv(
data_path, tr_path, val_path, test_path)
train_csv_path = os.path.join(data_path, train_csv)
train_df = pd.read_csv(train_csv_path)
if 'Unnamed: 0' in train_df.columns:
train_df = train_df.drop('Unnamed: 0', 1)
if len(train_df.columns) > 2:
self.obj = True
img_names = [str(x) for x in list(train_df.iloc[:, 0])]
if self.extension:
img_names = add_extension(img_names, self.extension)
if val_csv:
val_csv_path = os.path.join(data_path, val_csv)
val_df = pd.read_csv(val_csv_path)
val_targets = list(map(str, list(val_df.iloc[:, 1])))
if test_csv:
test_csv_path = os.path.join(data_path, test_csv)
test_df = pd.read_csv(test_csv_path)
test_targets = list(map(str, list(test_df.iloc[:, 1])))
targets = list(map(str, list(train_df.iloc[:, 1])))
lengths = [len(t) for t in [s.split() for s in targets]]
self.target_lengths = lengths
split_targets = [t.split() for t in targets]
if self.obj:
print('\nObject Detection\n')
int_targets = [list(map(float, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
obj_targets = list(map(str, list(train_df.iloc[:, 2])))
obj_split_targets = [t.split() for t in obj_targets]
try:
obj_split_targets = [list(map(int, x)) for x in
obj_split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(obj_split_targets, True)
c_names = list(onehot_classes)
class_idx = [[c_names.index(i) for i in c] for c in
obj_split_targets]
zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)
for i, t in enumerate(zero_idx):
t[len(t) - len(class_idx[i]):] = class_idx[i]
zero_idx[i] = t
train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.
LongTensor) for z in zero_idx]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
elif self.reg:
print('\nRegression\n')
int_targets = [list(map(int, x)) for x in split_targets]
zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)
for i, t in enumerate(zero_targets):
t[len(t) - len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.
FloatTensor) for z in zero_targets]
self.data_dir, self.num_classes, self.class_names = data_path, max(
lengths), np.unique(zero_targets, axis=1)
elif lengths[1:] != lengths[:-1]:
self.multi_label = True
print('\nMulti-label Classification\n')
try:
split_targets = [list(map(int, x)) for x in split_targets]
except:
pass
dai_onehot, onehot_classes = one_hot(split_targets, self.
multi_label)
train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.
FloatTensor) for x in dai_onehot]
self.data_dir, self.num_classes, self.class_names = data_path, len(
onehot_classes), onehot_classes
else:
print('\nSingle-label Classification\n')
unique_targets = list(np.unique(targets))
target_ids = [unique_targets.index(x) for x in targets]
train_df.iloc[:, 1] = target_ids
if val_csv:
target_ids = [unique_targets.index(x) for x in val_targets]
val_df.iloc[:, 1] = target_ids
if test_csv:
target_ids = [unique_targets.index(x) for x in test_targets]
test_df.iloc[:, 1] = target_ids
self.data_dir, self.num_classes, self.class_names = data_path, len(
unique_targets), unique_targets
if not val_csv:
train_df, val_df = split_df(train_df, split_size)
if not test_csv:
val_df, test_df = split_df(val_df, split_size)
tr_images = [str(x) for x in list(train_df.iloc[:, 0])]
val_images = [str(x) for x in list(val_df.iloc[:, 0])]
test_images = [str(x) for x in list(test_df.iloc[:, 0])]
if self.extension:
tr_images = add_extension(tr_images, self.extension)
val_images = add_extension(val_images, self.extension)
test_images = add_extension(test_images, self.extension)
train_df.iloc[:, 0] = tr_images
val_df.iloc[:, 0] = val_images
test_df.iloc[:, 0] = test_images
train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)
val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)
test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)
self.minorities, self.class_diffs = None, None
if not self.obj or not self.multi_label:
self.minorities, self.class_diffs = get_minorities(train_df)
self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,
self.test_name: test_df}
data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,
'num_classes': self.num_classes, 'class_names': self.
class_names, 'minorities': self.minorities, 'class_diffs': self
.class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}
self.data_dict = data_dict
return data_dict
def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,
test_path=None):
train_df = csv_from_path(tr_path, tr_path)
train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),
index=False)
ret = self.tr_name + '.csv', None
if val_path is not None:
val_exists = os.path.exists(val_path)
if val_exists:
val_df = csv_from_path(val_path, tr_path)
val_df.to_csv(os.path.join(data_path, self.val_name +
'.csv'), index=False)
ret = self.tr_name + '.csv', self.val_name + '.csv'
if test_path is not None:
test_exists = os.path.exists(test_path)
if test_exists:
test_df = csv_from_path(test_path, tr_path)
test_df.to_csv(os.path.join(data_path, self.test_name +
'.csv'), index=False)
ret = (self.tr_name + '.csv', self.val_name + '.csv', self.
test_name + '.csv')
return ret
def get_data(self, data_dict=None, s=(224, 224), dataset=
my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=
None, tta=False, num_workers=4, stats_percentage=0.6):
self.image_size = s
if not data_dict:
data_dict = self.data_dict
data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (
data_dict['data_dfs'], data_dict['data_dir'], data_dict[
'minorities'], data_dict['class_diffs'], data_dict['obj'],
data_dict['multi_label'])
if obj or multi_label:
balance = False
if tta:
tta_tfms = {self.tr_name: transforms.Compose([transforms.
FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack
([transforms.ToTensor()(crop) for crop in crops])),
transforms.Lambda(lambda crops: torch.stack([transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(
crop) for crop in crops]))]), self.val_name: transforms.
Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda
crops: torch.stack([transforms.ToTensor()(crop) for crop in
crops])), transforms.Lambda(lambda crops: torch.stack([
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
0.225])(crop) for crop in crops]))]), self.test_name:
transforms.Compose([transforms.FiveCrop(s[0]), transforms.
Lambda(lambda crops: torch.stack([transforms.ToTensor()(
crop) for crop in crops])), transforms.Lambda(lambda crops:
torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [
0.229, 0.224, 0.225])(crop) for crop in crops]))])}
else:
tta_tfms = None
if not bal_tfms:
bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],
self.val_name: None, self.test_name: None}
else:
bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.
test_name: None}
if obj:
resize_transform = transforms.Resize(s)
else:
resize_transform = transforms.Resize(s)
if not tfms:
tfms = [resize_transform, transforms.ToTensor(), transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
else:
tfms_temp = [resize_transform, transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,
0.225])]
tfms_temp[1:1] = tfms
tfms = tfms_temp
print(tfms)
data_transforms = {self.tr_name: tfms, self.val_name: [transforms.
Resize(s), transforms.ToTensor(), transforms.Normalize([0.485,
0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [
transforms.Resize(s), transforms.ToTensor(), transforms.
Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}
temp_tfms = [resize_transform, transforms.ToTensor()]
temp_dataset = dataset(os.path.join(data_dir, self.tr_name),
data_dfs[self.tr_name], temp_tfms)
self.img_mean, self.img_std = get_img_stats(temp_dataset,
stats_percentage)
data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][
-1].std = self.img_mean, self.img_std
data_transforms[self.val_name][-1].mean, data_transforms[self.val_name
][-1].std = self.img_mean, self.img_std
data_transforms[self.test_name][-1].mean, data_transforms[self.
test_name][-1].std = self.img_mean, self.img_std
if balance:
image_datasets = {x: dataset(os.path.join(data_dir, self.
tr_name), data_dfs[x], data_transforms[x], obj, minorities,
class_diffs, bal_tfms[x]) for x in [self.tr_name, self.
val_name, self.test_name]}
else:
image_datasets = {x: dataset(os.path.join(data_dir, self.
tr_name), data_dfs[x], data_transforms[x], obj) for x in [
self.tr_name, self.val_name, self.test_name]}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],
batch_size=bs, shuffle=True, num_workers=num_workers) for x in
[self.tr_name, self.val_name, self.test_name]}
dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,
self.val_name, self.test_name]}
self.image_datasets, self.dataloaders, self.dataset_sizes = (
image_datasets, dataloaders, dataset_sizes)
return image_datasets, dataloaders, dataset_sizes
    def imshow(self, inp, title=None):
        """Imshow for Tensor."""
        # Undo normalization before display; denorm_img also converts the
        # CHW tensor into an HWC numpy array.
        inp = self.denorm_img(inp)
        plt.imshow(inp)
        if title:
            plt.title(title)
        # pause() gives the matplotlib GUI a chance to draw the figure.
        plt.pause(0.001)
def denorm_img(self, inp, calculate=False):
inp = inp.numpy().transpose((1, 2, 0))
if calculate:
mean = np.mean(inp)
std = np.std(inp)
else:
mean = self.img_mean.numpy()
std = self.img_std.numpy()
inp = std * inp + mean
inp = np.clip(inp, 0, 1)
return inp
def show_data(self, folder_name='train', size=(64, 64), bs=5):
self.get_data(size, bs)
batch = next(iter(self.dataloaders[folder_name]))
inputs, classes = batch[0], batch[1]
out = torchvision.utils.make_grid(inputs)
if self.reg:
print(classes)
self.imshow(out, title=[x for x in classes])
elif self.multi_label:
self.imshow(out, title=[self.class_names[np.nonzero(x.type(
torch.LongTensor))] for x in classes])
else:
self.imshow(out, title=[self.class_names[x] for x in classes])
<|reserved_special_token_1|>
from dai_imports import*
from obj_utils import*
import utils
class my_image_csv_dataset(Dataset):
    """Image dataset driven by a dataframe of (filename, target[, class ids]).

    Optionally applies extra "balancing" transforms to samples of
    under-represented classes (probability taken from *diffs*) and, in
    object-detection mode, rescales bounding boxes to the output size.
    """

    def __init__(self, data_dir, data, transforms_ = None, obj = False,
                minorities = None, diffs = None, bal_tfms = None):
        self.data_dir = data_dir
        self.data = data
        self.transforms_ = transforms_
        self.tfms = None
        self.obj = obj
        self.minorities = minorities  # labels considered under-represented
        self.diffs = diffs  # label -> augmentation probability
        self.bal_tfms = bal_tfms
        # BUG FIX: the assert message used print(...), which printed
        # unconditionally (even on success) and passed None as the message.
        assert transforms_ is not None, 'Please pass some transforms.'

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])
        img = Image.open(img_path)
        img = img.convert('RGB')
        # NOTE(review): converting to RGB then to 3-channel grayscale looks
        # deliberate (grayscale training data) -- confirm with callers.
        img = torchvision.transforms.functional.to_grayscale(img, num_output_channels=3)
        y = self.data.iloc[index, 1]
        if self.minorities and self.bal_tfms:
            if y in self.minorities:
                if hasattr(self.bal_tfms, 'transforms'):
                    # bal_tfms is a Compose: tune each transform's p, then
                    # prepend the whole Compose to the base pipeline.
                    for tr in self.bal_tfms.transforms:
                        tr.p = self.diffs[y]
                    l = [self.bal_tfms]
                    l.extend(self.transforms_)
                    self.tfms = transforms.Compose(l)
                else:
                    for t in self.bal_tfms:
                        t.p = self.diffs[y]
                    # BUG FIX: the original mutated self.transforms_ in
                    # place (self.transforms_[1:1] = self.bal_tfms), so the
                    # balancing transforms were re-inserted on every access
                    # of a minority sample and the pipeline grew without
                    # bound. Build a fresh list instead.
                    tfms_with_bal = list(self.transforms_)
                    tfms_with_bal[1:1] = self.bal_tfms
                    self.tfms = transforms.Compose(tfms_with_bal)
            else:
                self.tfms = transforms.Compose(self.transforms_)
        else:
            self.tfms = transforms.Compose(self.transforms_)
        x = self.tfms(img)
        if self.obj:
            # Rescale bounding boxes from the original image size to the
            # transformed size. x is CHW, so size()[1] is the new height.
            s = x.size()[1]
            if isinstance(s, tuple):
                s = s[0]
            row_scale = s / img.size[0]
            col_scale = s / img.size[1]
            y = rescale_bbox(y, row_scale, col_scale)
            y.squeeze_()
            y2 = self.data.iloc[index, 2]
            y = (y, y2)
        return (x, y)
class my_image_folder(DatasetFolder):
    """ImageFolder variant with optional class-balancing and TTA transforms.

    Balancing transforms get their probability ``p`` set per-sample from
    ``diffs`` so rarer classes are augmented more aggressively.
    """
    def __init__(self, root, transform=None, target_transform=None,
                loader=default_loader, minorities=None, diffs = None, bal_tfms=None, tta_tfms = None):
        super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,
                                          transform=transform,
                                          target_transform=target_transform)
        self.imgs = self.samples
        self.minorities = minorities  # labels considered under-represented
        self.diffs = diffs  # label -> augmentation probability
        self.bal_tfms = bal_tfms
        self.tta_tfms = tta_tfms
        self.tfms = None
    def __getitem__(self,index):
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform:
            if self.minorities and self.bal_tfms:
                if target in self.minorities:
                    if hasattr(self.bal_tfms,'transforms'):
                        # bal_tfms is a Compose: tune each transform's p,
                        # then prepend the whole Compose.
                        for tr in self.bal_tfms.transforms:
                            tr.p = self.diffs[target]
                        l = [self.bal_tfms]
                        l.extend(self.transform)
                        self.tfms = transforms.Compose(l)
                    else:
                        # bal_tfms is a plain list of transforms.
                        for t in self.bal_tfms:
                            t.p = self.diffs[target]
                        self.tfms = transforms.Compose(self.bal_tfms + self.transform )
                else:
                    self.tfms = transforms.Compose(self.transform)
            elif self.tta_tfms:
                # Test-time-augmentation pipeline (used when no balancing
                # applies to this sample).
                self.tfms = self.tta_tfms
            else:
                self.tfms = transforms.Compose(self.transform)
            sample = self.tfms(sample)
        if self.target_transform:
            target = self.target_transform(target)
        return sample, target
def extract_data(dt):
    """Split an iterable of (x, y) pairs into two parallel lists."""
    xs, ys = [], []
    for sample, label in dt:
        xs.append(sample)
        ys.append(label)
    return xs, ys
def listdir_fullpath(d):
    """Return the entries of directory *d* as full paths (os.listdir order)."""
    paths = []
    for name in os.listdir(d):
        paths.append(os.path.join(d, name))
    return paths
def get_minorities(df, thresh=0.8):
    """Identify under-represented labels in column 1 of *df*.

    Returns (minorities, diffs): labels whose count falls below
    thresh * max_count, and a map label -> 1 - count / max_count.
    """
    counts = df.iloc[:, 1].value_counts()
    labels = counts.keys()
    max_count = counts.iloc[0]
    diffs = {label: 1 - (n / max_count) for label, n in zip(labels, counts)}
    minorities = [labels[i] for i, n in enumerate(counts)
                  if n < thresh * max_count]
    return minorities, diffs
def csv_from_path(path, img_dest):
    """Build a shuffled (Img, Label) dataframe from a folder-per-class tree.

    WARNING: this MOVES (os.rename) every image out of its class folder
    into *img_dest*, renaming it to '<split name>_<original name>'.
    """
    path = Path(path)
    img_dest = Path(img_dest)
    labels_paths = list(path.iterdir())
    tr_images = []
    tr_labels = []
    for l in labels_paths:
        if l.is_dir():
            # Each sub-directory name is the class label.
            for i in list(l.iterdir()):
                if i.suffix in IMG_EXTENSIONS:
                    name = i.name
                    label = l.name
                    new_name = '{}_{}'.format(path.name,name)
                    new_path = img_dest/new_name
                    os.rename(i,new_path)
                    tr_images.append(new_name)
                    tr_labels.append(label)
    tr_img_label = {'Img':tr_images, 'Label': tr_labels}
    csv = pd.DataFrame(tr_img_label,columns=['Img','Label'])
    # Shuffle the rows and drop the old index.
    csv = csv.sample(frac=1).reset_index(drop=True)
    return csv
def add_extension(a, e):
    """Append extension *e* to every item of *a*, returning a new list."""
    extended = []
    for name in a:
        extended.append(name + e)
    return extended
def one_hot(targets, multi=False):
    """One-hot (or multi-hot, when *multi*) encode *targets*.

    Returns (encoded matrix, classes_) from the fitted binarizer.
    """
    binerizer = MultiLabelBinarizer() if multi else LabelBinarizer()
    encoded = binerizer.fit_transform(targets)
    return encoded, binerizer.classes_
def get_index(arr, a):
    """Return the index of the first row of *arr* equal element-wise to *a*.

    Returns False when no row matches.
    """
    for idx, row in enumerate(arr):
        if sum(row == a) == len(a):
            return idx
    return False
def rescale_bbox(bb,row_scale,col_scale):
    # Scale flat bounding-box coordinates (r1, c1, r2, c2, ...) in place
    # and return them as a single row vector of shape (1, 4*n).
    bb = bb.reshape((-1,4))
    for b in bb:
        r1,c1,r2,c2 = b
        # NOTE(review): rows are multiplied by col_scale and columns by
        # row_scale; the scales are computed from PIL's (width, height) in
        # the dataset, so the swap appears intentional -- confirm.
        b[0] = int(np.round(r1*col_scale))
        b[1] = int(np.round(c1*row_scale))
        b[2] = int(np.round(r2*col_scale))
        b[3] = int(np.round(c2*row_scale))
    bb = bb.reshape((1,-1))
    return bb
def get_img_stats(dataset, sz):
    """Estimate per-channel mean/std from a fraction *sz* of *dataset*.

    Iterates (img, label) pairs, stacks the sampled CHW tensors and
    reduces over every pixel of every sampled image.
    """
    limit = int(len(dataset) * sz)
    sampled = []
    for count, (img, _) in enumerate(dataset):
        if count > limit:
            break
        sampled.append(img)
    stacked = torch.stack(sampled, dim=3).view(3, -1)
    return stacked.mean(dim=1), stacked.std(dim=1)
def split_df(train_df, test_size=0.15):
    """Split *train_df* into (train, val) frames with fresh indexes.

    Tries a stratified split on the label column (column 1); when the data
    cannot be stratified (e.g. a class with a single sample), falls back
    to a plain random split.
    """
    try:
        train_df, val_df = train_test_split(train_df, test_size=test_size,
            random_state=2, stratify=train_df.iloc[:, 1])
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Stratification failures surface as ValueError
    # (unstratifiable labels) or TypeError (unusable label values).
    except (ValueError, TypeError):
        train_df, val_df = train_test_split(train_df, test_size=test_size,
            random_state=2)
    train_df = train_df.reset_index(drop=True)
    val_df = val_df.reset_index(drop=True)
    return train_df, val_df
def save_obj(obj, path):
    """Pickle *obj* to *path* using the highest available protocol."""
    with open(path, 'wb') as handle:
        pickle.dump(obj, handle, pickle.HIGHEST_PROTOCOL)
def load_obj(path):
    """Unpickle and return the object stored at *path*."""
    with open(path, 'rb') as handle:
        return pickle.load(handle)
class DataProcessor:
    def __init__(self, data_path = None, train_csv = None, val_csv = None, reg = False,
                tr_name = 'train', val_name = 'val', test_name = 'test', extension = None, setup_data = True):
        """Store configuration, pick the compute device, and optionally
        run set_up_data() immediately.

        Args:
            data_path: dataset root folder (defaults to cwd in set_up_data).
            train_csv/val_csv: optional csv index file names in data_path.
            reg: treat the task as regression.
            tr_name/val_name/test_name: split folder / csv base names.
            extension: file extension to append to image names, if any.
            setup_data: when True, call set_up_data() now.
        """
        print('+------------------------------------+')
        print('|             Dream AI               |')
        print('+------------------------------------+')
        print()
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        self.data_path,self.train_csv,self.val_csv,self.reg,self.tr_name,self.val_name,self.test_name,self.extension = (data_path,train_csv,
                val_csv,reg,tr_name,val_name,test_name,extension)
        # Task-type flags, refined later by set_up_data().
        self.obj = False
        self.multi_label = False
        if setup_data:
            self.set_up_data()
def set_up_data(self,split_size = 0.15):
data_path,train_csv,val_csv,tr_name,val_name,test_name = (self.data_path,self.train_csv,self.val_csv,self.tr_name,self.val_name,self.test_name)
# check if paths given and also set paths
if not data_path:
data_path = os.getcwd() + '/'
tr_path = os.path.join(data_path,tr_name)
val_path = os.path.join(data_path,val_name)
test_path = os.path.join(data_path,test_name)
if os.path.exists(os.path.join(data_path,tr_name+'.csv')):
train_csv = tr_name+'.csv'
# if os.path.exists(os.path.join(data_path,val_name+'.csv')):
# val_csv = val_name+'.csv'
# if os.path.exists(os.path.join(data_path,test_name+'.csv')):
# test_csv = test_name+'.csv'
# paths to csv
if not train_csv:
print('no')
train_csv,val_csv,test_csv = self.data_from_paths_to_csv(data_path,tr_path,val_path,test_path)
train_csv_path = os.path.join(data_path,train_csv)
train_df = pd.read_csv(train_csv_path)
if 'Unnamed: 0' in train_df.columns:
train_df = train_df.drop('Unnamed: 0', 1)
if len(train_df.columns) > 2:
self.obj = True
img_names = [str(x) for x in list(train_df.iloc[:,0])]
if self.extension:
img_names = add_extension(img_names,self.extension)
if val_csv:
val_csv_path = os.path.join(data_path,val_csv)
val_df = pd.read_csv(val_csv_path)
val_targets = list(map(str,list(val_df.iloc[:,1])))
if test_csv:
test_csv_path = os.path.join(data_path,test_csv)
test_df = pd.read_csv(test_csv_path)
test_targets = list(map(str,list(test_df.iloc[:,1])))
targets = list(map(str,list(train_df.iloc[:,1])))
lengths = [len(t) for t in [s.split() for s in targets]]
self.target_lengths = lengths
split_targets = [t.split() for t in targets]
if self.obj:
print('\nObject Detection\n')
# bounding boxes
int_targets = [list(map(float,x)) for x in split_targets]
zero_targets = np.zeros((len(targets),max(lengths)),dtype=int)
for i,t in enumerate(zero_targets):
t[len(t)-len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:,1] = [torch.from_numpy(z).type(torch.FloatTensor) for z in zero_targets]
# one-hot classes
obj_targets = list(map(str,list(train_df.iloc[:,2])))
obj_split_targets = [t.split() for t in obj_targets]
try:
obj_split_targets = [list(map(int,x)) for x in obj_split_targets]
except:
pass
dai_onehot,onehot_classes = one_hot(obj_split_targets,True)
# train_df['one_hot'] = [torch.from_numpy(x).type(torch.FloatTensor) for x in dai_onehot]
# class indexes
c_names = list(onehot_classes)
class_idx = [[c_names.index(i) for i in c] for c in obj_split_targets]
zero_idx = np.zeros((len(targets),max(lengths)//4),dtype=int)
# print(zero_idx.shape)
for i,t in enumerate(zero_idx):
# temp_l = len(class_idx[i])
# if temp_l > 90:
# print(i,temp_l)
t[len(t)-len(class_idx[i]):] = class_idx[i]
zero_idx[i] = t
train_df.iloc[:,2] = [torch.from_numpy(z).type(torch.LongTensor) for z in zero_idx]
self.data_dir,self.num_classes,self.class_names = data_path,len(onehot_classes),onehot_classes
# self.set_up_object_detection([4,2,1],[0.7, 1., 1.3],[(1.,1.), (1.,0.5), (0.5,1.)])
elif self.reg:
print('\nRegression\n')
int_targets = [list(map(int,x)) for x in split_targets]
zero_targets = np.zeros((len(targets),max(lengths)),dtype=int)
for i,t in enumerate(zero_targets):
t[len(t)-len(int_targets[i]):] = int_targets[i]
zero_targets[i] = t
train_df.iloc[:,1] = [torch.from_numpy(z).type(torch.FloatTensor) for z in zero_targets]
self.data_dir,self.num_classes,self.class_names = data_path, max(lengths),np.unique(zero_targets,axis=1)
elif lengths[1:] != lengths[:-1]:
self.multi_label = True
print('\nMulti-label Classification\n')
try:
split_targets = [list(map(int,x)) for x in split_targets]
except:
pass
dai_onehot,onehot_classes = one_hot(split_targets,self.multi_label)
train_df.iloc[:,1] = [torch.from_numpy(x).type(torch.FloatTensor) for x in dai_onehot]
self.data_dir,self.num_classes,self.class_names = data_path,len(onehot_classes),onehot_classes
else:
print('\nSingle-label Classification\n')
unique_targets = list(np.unique(targets))
target_ids = [unique_targets.index(x) for x in targets]
train_df.iloc[:,1] = target_ids
if val_csv:
target_ids = [unique_targets.index(x) for x in val_targets]
val_df.iloc[:,1] = target_ids
if test_csv:
target_ids = [unique_targets.index(x) for x in test_targets]
test_df.iloc[:,1] = target_ids
self.data_dir,self.num_classes,self.class_names = data_path,len(unique_targets),unique_targets
# self.models_path = os.path.join(self.data_dir, 'models')
# os.makedirs(self.models_path,exist_ok=True)
if not val_csv:
train_df,val_df = split_df(train_df,split_size)
if not test_csv:
val_df,test_df = split_df(val_df,split_size)
tr_images = [str(x) for x in list(train_df.iloc[:,0])]
val_images = [str(x) for x in list(val_df.iloc[:,0])]
test_images = [str(x) for x in list(test_df.iloc[:,0])]
if self.extension:
tr_images = add_extension(tr_images,self.extension)
val_images = add_extension(val_images,self.extension)
test_images = add_extension(test_images,self.extension)
train_df.iloc[:,0] = tr_images
val_df.iloc[:,0] = val_images
test_df.iloc[:,0] = test_images
train_df.to_csv(os.path.join(data_path,'train.csv'),index=False)
val_df.to_csv(os.path.join(data_path,'val.csv'),index=False)
test_df.to_csv(os.path.join(data_path,'test.csv'),index=False)
self.minorities,self.class_diffs = None,None
if (not self.obj) or (not self.multi_label):
self.minorities,self.class_diffs = get_minorities(train_df)
self.data_dfs = {self.tr_name:train_df, self.val_name:val_df, self.test_name:test_df}
data_dict = {'data_dfs':self.data_dfs,'data_dir':self.data_dir,'num_classes':self.num_classes,'class_names':self.class_names,
'minorities':self.minorities,'class_diffs':self.class_diffs,'obj':self.obj,'multi_label':self.multi_label}
# save_obj(data_dict,os.path.join(self.data_dir,'data_dict.pkl'))
self.data_dict = data_dict
return data_dict
def data_from_paths_to_csv(self,data_path,tr_path,val_path = None,test_path = None):
train_df = csv_from_path(tr_path,tr_path)
train_df.to_csv(os.path.join(data_path,self.tr_name+'.csv'),index=False)
ret = (self.tr_name+'.csv',None)
if val_path is not None:
val_exists = os.path.exists(val_path)
if val_exists:
val_df = csv_from_path(val_path,tr_path)
val_df.to_csv(os.path.join(data_path,self.val_name+'.csv'),index=False)
ret = (self.tr_name+'.csv',self.val_name+'.csv')
if test_path is not None:
test_exists = os.path.exists(test_path)
if test_exists:
test_df = csv_from_path(test_path,tr_path)
test_df.to_csv(os.path.join(data_path,self.test_name+'.csv'),index=False)
ret = (self.tr_name+'.csv',self.val_name+'.csv',self.test_name+'.csv')
return ret
def get_data(self, data_dict = None, s = (224,224), dataset = my_image_csv_dataset, bs = 32, balance = False, tfms = None,
bal_tfms = None, tta = False, num_workers = 4, stats_percentage = 0.6):
self.image_size = s
if not data_dict:
data_dict = self.data_dict
data_dfs,data_dir,minorities,class_diffs,obj,multi_label = (data_dict['data_dfs'],data_dict['data_dir'],data_dict['minorities'],
data_dict['class_diffs'],data_dict['obj'],data_dict['multi_label'])
if obj or multi_label:
balance = False
if tta:
tta_tfms = {self.tr_name: transforms.Compose(
[
# transforms.TenCrop(s),
transforms.FiveCrop(s[0]),
transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),
transforms.Lambda(lambda crops:torch.stack(
[transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))
]),
self.val_name: transforms.Compose(
[
# transforms.TenCrop(s),
transforms.FiveCrop(s[0]),
transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),
transforms.Lambda(lambda crops:torch.stack(
[transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))
]),
self.test_name: transforms.Compose(
[
# transforms.TenCrop(s),
transforms.FiveCrop(s[0]),
transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),
transforms.Lambda(lambda crops:torch.stack(
[transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))
])}
# tta_tfms = {self.tr_name: transforms.Compose([
# transforms.Resize(s),
# transforms.ToTensor(),
# transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
# ]),
# self.val_name: transforms.Compose([
# transforms.Resize(s),
# transforms.ToTensor(),
# transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
# ]) }
else:
tta_tfms = None
if not bal_tfms:
bal_tfms = { self.tr_name: [transforms.RandomHorizontalFlip()],
self.val_name: None,
self.test_name: None
}
else:
bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.test_name: None}
if obj:
resize_transform = transforms.Resize(s)
else:
# resize_transform = transforms.RandomResizedCrop(s[0])
resize_transform = transforms.Resize(s)
if not tfms:
tfms = [
resize_transform,
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
]
else:
tfms_temp = [
resize_transform,
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
]
tfms_temp[1:1] = tfms
tfms = tfms_temp
print(tfms)
data_transforms = {
self.tr_name: tfms,
self.val_name: [
# transforms.Resize(s[0]+50),
# transforms.CenterCrop(s[0]),
transforms.Resize(s),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
],
self.test_name: [
# transforms.Resize(s[0]+50),
# transforms.CenterCrop(s[0]),
transforms.Resize(s),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
]
}
temp_tfms = [resize_transform, transforms.ToTensor()]
temp_dataset = dataset(os.path.join(data_dir,self.tr_name),data_dfs[self.tr_name],temp_tfms)
self.img_mean,self.img_std = get_img_stats(temp_dataset,stats_percentage)
data_transforms[self.tr_name][-1].mean,data_transforms[self.tr_name][-1].std = self.img_mean,self.img_std
data_transforms[self.val_name][-1].mean,data_transforms[self.val_name][-1].std = self.img_mean,self.img_std
data_transforms[self.test_name][-1].mean,data_transforms[self.test_name][-1].std = self.img_mean,self.img_std
if balance:
image_datasets = {x: dataset(os.path.join(data_dir,self.tr_name),data_dfs[x],
data_transforms[x],obj,minorities,class_diffs,bal_tfms[x])
for x in [self.tr_name, self.val_name, self.test_name]}
else:
image_datasets = {x: dataset(os.path.join(data_dir,self.tr_name),data_dfs[x],
data_transforms[x],obj)
for x in [self.tr_name, self.val_name, self.test_name]}
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=bs,
shuffle=True, num_workers=num_workers)
for x in [self.tr_name, self.val_name, self.test_name]}
dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name, self.val_name, self.test_name]}
self.image_datasets,self.dataloaders,self.dataset_sizes = (image_datasets,dataloaders,
dataset_sizes)
return image_datasets,dataloaders,dataset_sizes
def imshow(self,inp, title=None):
"""Imshow for Tensor."""
inp = self.denorm_img(inp)
plt.imshow(inp)
if title:
plt.title(title)
plt.pause(0.001)
def denorm_img(self,inp,calculate = False):
inp = inp.numpy().transpose((1, 2, 0))
if calculate:
mean = np.mean(inp)
std = np.std(inp)
else:
mean = self.img_mean.numpy()
std = self.img_std.numpy()
inp = std * inp + mean
inp = np.clip(inp, 0, 1)
return inp
def show_data(self,folder_name = 'train', size = (64,64), bs = 5):
self.get_data(size,bs)
batch = next(iter(self.dataloaders[folder_name]))
inputs, classes = batch[0],batch[1]
out = torchvision.utils.make_grid(inputs)
if self.reg:
print(classes)
self.imshow(out, title=[x for x in classes])
elif self.multi_label:
self.imshow(out, title=[self.class_names[np.nonzero(x.type(torch.LongTensor))] for x in classes])
else:
self.imshow(out, title=[self.class_names[x] for x in classes])
# def set_up_object_detection(self,anc_grids,anc_zooms,anc_ratios,num_colr = 12):
# # print('Would you like to give your own values for anchor_grids, anchor_zooms,and anchor_ratios? The default values are: {}, {} and {}'
# # .format(anc_grids,anc_zooms,anc_ratios))
# # print('If so, you may call the function "set_up_object_detection" with your own paramteres.')
# cmap = get_cmap(num_colr)
# self.colr_list = [cmap(float(x)) for x in range(num_colr)]
# self.num_colr = num_colr
# self.create_anchors(anc_grids,anc_zooms,anc_ratios)
# self.custom_head = SSD_MultiHead(self.k,self.num_classes,0.45,-4.)
# self.loss_f = FocalLoss(self.num_classes)
# def create_anchors(self,anc_grids,anc_zooms,anc_ratios):
# anchor_scales = [(anz*i,anz*j) for anz in anc_zooms for (i,j) in anc_ratios]
# k = len(anchor_scales)
# anc_offsets = [1/(o*2) for o in anc_grids]
# anc_x = np.concatenate([np.repeat(np.linspace(ao, 1-ao, ag), ag)
# for ao,ag in zip(anc_offsets,anc_grids)])
# anc_y = np.concatenate([np.tile(np.linspace(ao, 1-ao, ag), ag)
# for ao,ag in zip(anc_offsets,anc_grids)])
# anc_ctrs = np.repeat(np.stack([anc_x,anc_y], axis=1), k, axis=0)
# anc_sizes = np.concatenate([np.array([[o/ag,p/ag] for i in range(ag*ag) for o,p in anchor_scales])
# for ag in anc_grids])
# grid_sizes = torch.tensor(np.concatenate([np.array(
# [ 1/ag for i in range(ag*ag) for o,p in anchor_scales])
# for ag in anc_grids])).float().unsqueeze(1).to(self.device)
# anchors = torch.tensor(np.concatenate([anc_ctrs, anc_sizes], axis=1)).float().to(self.device)
# anchor_cnr = hw2corners(anchors[:,:2], anchors[:,2:])
# self.anchors,self.anchor_cnr,self.grid_sizes,self.k = anchors,anchor_cnr,grid_sizes,k
|
flexible
|
{
"blob_id": "5b8c95354f8b27eff8226ace52ab9e97f98ae217",
"index": 80,
"step-1": "<mask token>\n\n\nclass my_image_csv_dataset(Dataset):\n\n def __init__(self, data_dir, data, transforms_=None, obj=False,\n minorities=None, diffs=None, bal_tfms=None):\n self.data_dir = data_dir\n self.data = data\n self.transforms_ = transforms_\n self.tfms = None\n self.obj = obj\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n assert transforms_ is not None, print('Please pass some transforms.')\n\n def __len__(self):\n return len(self.data)\n\n def __getitem__(self, index):\n img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])\n img = Image.open(img_path)\n img = img.convert('RGB')\n img = torchvision.transforms.functional.to_grayscale(img,\n num_output_channels=3)\n y = self.data.iloc[index, 1]\n if self.minorities and self.bal_tfms:\n if y in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[y]\n l = [self.bal_tfms]\n l.extend(self.transforms_)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[y]\n self.transforms_[1:1] = self.bal_tfms\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n x = self.tfms(img)\n if self.obj:\n s = x.size()[1]\n if isinstance(s, tuple):\n s = s[0]\n row_scale = s / img.size[0]\n col_scale = s / img.size[1]\n y = rescale_bbox(y, row_scale, col_scale)\n y.squeeze_()\n y2 = self.data.iloc[index, 2]\n y = y, y2\n return x, y\n\n\nclass my_image_folder(DatasetFolder):\n\n def __init__(self, root, transform=None, target_transform=None, loader=\n default_loader, minorities=None, diffs=None, bal_tfms=None,\n tta_tfms=None):\n super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,\n transform=transform, target_transform=target_transform)\n self.imgs = self.samples\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n 
self.tta_tfms = tta_tfms\n self.tfms = None\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n sample = self.loader(path)\n if self.transform:\n if self.minorities and self.bal_tfms:\n if target in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[target]\n l = [self.bal_tfms]\n l.extend(self.transform)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[target]\n self.tfms = transforms.Compose(self.bal_tfms + self\n .transform)\n else:\n self.tfms = transforms.Compose(self.transform)\n elif self.tta_tfms:\n self.tfms = self.tta_tfms\n else:\n self.tfms = transforms.Compose(self.transform)\n sample = self.tfms(sample)\n if self.target_transform:\n target = self.target_transform(target)\n return sample, target\n\n\n<mask token>\n\n\nclass DataProcessor:\n\n def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=\n False, tr_name='train', val_name='val', test_name='test', extension\n =None, setup_data=True):\n print('+------------------------------------+')\n print('| Dream AI |')\n print('+------------------------------------+')\n print()\n self.device = torch.device('cuda:0' if torch.cuda.is_available() else\n 'cpu')\n (self.data_path, self.train_csv, self.val_csv, self.reg, self.\n tr_name, self.val_name, self.test_name, self.extension) = (\n data_path, train_csv, val_csv, reg, tr_name, val_name,\n test_name, extension)\n self.obj = False\n self.multi_label = False\n if setup_data:\n self.set_up_data()\n\n def set_up_data(self, split_size=0.15):\n data_path, train_csv, val_csv, tr_name, val_name, test_name = (self\n .data_path, self.train_csv, self.val_csv, self.tr_name, self.\n val_name, self.test_name)\n if not data_path:\n data_path = os.getcwd() + '/'\n tr_path = os.path.join(data_path, tr_name)\n val_path = os.path.join(data_path, val_name)\n test_path = os.path.join(data_path, test_name)\n if 
os.path.exists(os.path.join(data_path, tr_name + '.csv')):\n train_csv = tr_name + '.csv'\n if not train_csv:\n print('no')\n train_csv, val_csv, test_csv = self.data_from_paths_to_csv(\n data_path, tr_path, val_path, test_path)\n train_csv_path = os.path.join(data_path, train_csv)\n train_df = pd.read_csv(train_csv_path)\n if 'Unnamed: 0' in train_df.columns:\n train_df = train_df.drop('Unnamed: 0', 1)\n if len(train_df.columns) > 2:\n self.obj = True\n img_names = [str(x) for x in list(train_df.iloc[:, 0])]\n if self.extension:\n img_names = add_extension(img_names, self.extension)\n if val_csv:\n val_csv_path = os.path.join(data_path, val_csv)\n val_df = pd.read_csv(val_csv_path)\n val_targets = list(map(str, list(val_df.iloc[:, 1])))\n if test_csv:\n test_csv_path = os.path.join(data_path, test_csv)\n test_df = pd.read_csv(test_csv_path)\n test_targets = list(map(str, list(test_df.iloc[:, 1])))\n targets = list(map(str, list(train_df.iloc[:, 1])))\n lengths = [len(t) for t in [s.split() for s in targets]]\n self.target_lengths = lengths\n split_targets = [t.split() for t in targets]\n if self.obj:\n print('\\nObject Detection\\n')\n int_targets = [list(map(float, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n obj_targets = list(map(str, list(train_df.iloc[:, 2])))\n obj_split_targets = [t.split() for t in obj_targets]\n try:\n obj_split_targets = [list(map(int, x)) for x in\n obj_split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(obj_split_targets, True)\n c_names = list(onehot_classes)\n class_idx = [[c_names.index(i) for i in c] for c in\n obj_split_targets]\n zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)\n for i, t in enumerate(zero_idx):\n t[len(t) - 
len(class_idx[i]):] = class_idx[i]\n zero_idx[i] = t\n train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.\n LongTensor) for z in zero_idx]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n elif self.reg:\n print('\\nRegression\\n')\n int_targets = [list(map(int, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n self.data_dir, self.num_classes, self.class_names = data_path, max(\n lengths), np.unique(zero_targets, axis=1)\n elif lengths[1:] != lengths[:-1]:\n self.multi_label = True\n print('\\nMulti-label Classification\\n')\n try:\n split_targets = [list(map(int, x)) for x in split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(split_targets, self.\n multi_label)\n train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.\n FloatTensor) for x in dai_onehot]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n else:\n print('\\nSingle-label Classification\\n')\n unique_targets = list(np.unique(targets))\n target_ids = [unique_targets.index(x) for x in targets]\n train_df.iloc[:, 1] = target_ids\n if val_csv:\n target_ids = [unique_targets.index(x) for x in val_targets]\n val_df.iloc[:, 1] = target_ids\n if test_csv:\n target_ids = [unique_targets.index(x) for x in test_targets]\n test_df.iloc[:, 1] = target_ids\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n unique_targets), unique_targets\n if not val_csv:\n train_df, val_df = split_df(train_df, split_size)\n if not test_csv:\n val_df, test_df = split_df(val_df, split_size)\n tr_images = [str(x) for x in list(train_df.iloc[:, 0])]\n val_images = [str(x) for x in list(val_df.iloc[:, 0])]\n test_images = [str(x) for 
x in list(test_df.iloc[:, 0])]\n if self.extension:\n tr_images = add_extension(tr_images, self.extension)\n val_images = add_extension(val_images, self.extension)\n test_images = add_extension(test_images, self.extension)\n train_df.iloc[:, 0] = tr_images\n val_df.iloc[:, 0] = val_images\n test_df.iloc[:, 0] = test_images\n train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)\n val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)\n test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)\n self.minorities, self.class_diffs = None, None\n if not self.obj or not self.multi_label:\n self.minorities, self.class_diffs = get_minorities(train_df)\n self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,\n self.test_name: test_df}\n data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,\n 'num_classes': self.num_classes, 'class_names': self.\n class_names, 'minorities': self.minorities, 'class_diffs': self\n .class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}\n self.data_dict = data_dict\n return data_dict\n\n def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,\n test_path=None):\n train_df = csv_from_path(tr_path, tr_path)\n train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),\n index=False)\n ret = self.tr_name + '.csv', None\n if val_path is not None:\n val_exists = os.path.exists(val_path)\n if val_exists:\n val_df = csv_from_path(val_path, tr_path)\n val_df.to_csv(os.path.join(data_path, self.val_name +\n '.csv'), index=False)\n ret = self.tr_name + '.csv', self.val_name + '.csv'\n if test_path is not None:\n test_exists = os.path.exists(test_path)\n if test_exists:\n test_df = csv_from_path(test_path, tr_path)\n test_df.to_csv(os.path.join(data_path, self.test_name +\n '.csv'), index=False)\n ret = (self.tr_name + '.csv', self.val_name + '.csv', self.\n test_name + '.csv')\n return ret\n\n def get_data(self, data_dict=None, s=(224, 224), dataset=\n my_image_csv_dataset, bs=32, 
balance=False, tfms=None, bal_tfms=\n None, tta=False, num_workers=4, stats_percentage=0.6):\n self.image_size = s\n if not data_dict:\n data_dict = self.data_dict\n data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (\n data_dict['data_dfs'], data_dict['data_dir'], data_dict[\n 'minorities'], data_dict['class_diffs'], data_dict['obj'],\n data_dict['multi_label'])\n if obj or multi_label:\n balance = False\n if tta:\n tta_tfms = {self.tr_name: transforms.Compose([transforms.\n FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack\n ([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops: torch.stack([transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(\n crop) for crop in crops]))]), self.val_name: transforms.\n Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda\n crops: torch.stack([transforms.ToTensor()(crop) for crop in\n crops])), transforms.Lambda(lambda crops: torch.stack([\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])(crop) for crop in crops]))]), self.test_name:\n transforms.Compose([transforms.FiveCrop(s[0]), transforms.\n Lambda(lambda crops: torch.stack([transforms.ToTensor()(\n crop) for crop in crops])), transforms.Lambda(lambda crops:\n torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [\n 0.229, 0.224, 0.225])(crop) for crop in crops]))])}\n else:\n tta_tfms = None\n if not bal_tfms:\n bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],\n self.val_name: None, self.test_name: None}\n else:\n bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.\n test_name: None}\n if obj:\n resize_transform = transforms.Resize(s)\n else:\n resize_transform = transforms.Resize(s)\n if not tfms:\n tfms = [resize_transform, transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]\n else:\n tfms_temp = [resize_transform, transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 
0.225])]\n tfms_temp[1:1] = tfms\n tfms = tfms_temp\n print(tfms)\n data_transforms = {self.tr_name: tfms, self.val_name: [transforms.\n Resize(s), transforms.ToTensor(), transforms.Normalize([0.485, \n 0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [\n transforms.Resize(s), transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}\n temp_tfms = [resize_transform, transforms.ToTensor()]\n temp_dataset = dataset(os.path.join(data_dir, self.tr_name),\n data_dfs[self.tr_name], temp_tfms)\n self.img_mean, self.img_std = get_img_stats(temp_dataset,\n stats_percentage)\n data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][\n -1].std = self.img_mean, self.img_std\n data_transforms[self.val_name][-1].mean, data_transforms[self.val_name\n ][-1].std = self.img_mean, self.img_std\n data_transforms[self.test_name][-1].mean, data_transforms[self.\n test_name][-1].std = self.img_mean, self.img_std\n if balance:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj, minorities,\n class_diffs, bal_tfms[x]) for x in [self.tr_name, self.\n val_name, self.test_name]}\n else:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj) for x in [\n self.tr_name, self.val_name, self.test_name]}\n dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],\n batch_size=bs, shuffle=True, num_workers=num_workers) for x in\n [self.tr_name, self.val_name, self.test_name]}\n dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,\n self.val_name, self.test_name]}\n self.image_datasets, self.dataloaders, self.dataset_sizes = (\n image_datasets, dataloaders, dataset_sizes)\n return image_datasets, dataloaders, dataset_sizes\n\n def imshow(self, inp, title=None):\n \"\"\"Imshow for Tensor.\"\"\"\n inp = self.denorm_img(inp)\n plt.imshow(inp)\n if title:\n plt.title(title)\n plt.pause(0.001)\n\n def 
denorm_img(self, inp, calculate=False):\n inp = inp.numpy().transpose((1, 2, 0))\n if calculate:\n mean = np.mean(inp)\n std = np.std(inp)\n else:\n mean = self.img_mean.numpy()\n std = self.img_std.numpy()\n inp = std * inp + mean\n inp = np.clip(inp, 0, 1)\n return inp\n\n def show_data(self, folder_name='train', size=(64, 64), bs=5):\n self.get_data(size, bs)\n batch = next(iter(self.dataloaders[folder_name]))\n inputs, classes = batch[0], batch[1]\n out = torchvision.utils.make_grid(inputs)\n if self.reg:\n print(classes)\n self.imshow(out, title=[x for x in classes])\n elif self.multi_label:\n self.imshow(out, title=[self.class_names[np.nonzero(x.type(\n torch.LongTensor))] for x in classes])\n else:\n self.imshow(out, title=[self.class_names[x] for x in classes])\n",
"step-2": "<mask token>\n\n\nclass my_image_csv_dataset(Dataset):\n\n def __init__(self, data_dir, data, transforms_=None, obj=False,\n minorities=None, diffs=None, bal_tfms=None):\n self.data_dir = data_dir\n self.data = data\n self.transforms_ = transforms_\n self.tfms = None\n self.obj = obj\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n assert transforms_ is not None, print('Please pass some transforms.')\n\n def __len__(self):\n return len(self.data)\n\n def __getitem__(self, index):\n img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])\n img = Image.open(img_path)\n img = img.convert('RGB')\n img = torchvision.transforms.functional.to_grayscale(img,\n num_output_channels=3)\n y = self.data.iloc[index, 1]\n if self.minorities and self.bal_tfms:\n if y in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[y]\n l = [self.bal_tfms]\n l.extend(self.transforms_)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[y]\n self.transforms_[1:1] = self.bal_tfms\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n x = self.tfms(img)\n if self.obj:\n s = x.size()[1]\n if isinstance(s, tuple):\n s = s[0]\n row_scale = s / img.size[0]\n col_scale = s / img.size[1]\n y = rescale_bbox(y, row_scale, col_scale)\n y.squeeze_()\n y2 = self.data.iloc[index, 2]\n y = y, y2\n return x, y\n\n\nclass my_image_folder(DatasetFolder):\n\n def __init__(self, root, transform=None, target_transform=None, loader=\n default_loader, minorities=None, diffs=None, bal_tfms=None,\n tta_tfms=None):\n super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,\n transform=transform, target_transform=target_transform)\n self.imgs = self.samples\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n 
self.tta_tfms = tta_tfms\n self.tfms = None\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n sample = self.loader(path)\n if self.transform:\n if self.minorities and self.bal_tfms:\n if target in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[target]\n l = [self.bal_tfms]\n l.extend(self.transform)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[target]\n self.tfms = transforms.Compose(self.bal_tfms + self\n .transform)\n else:\n self.tfms = transforms.Compose(self.transform)\n elif self.tta_tfms:\n self.tfms = self.tta_tfms\n else:\n self.tfms = transforms.Compose(self.transform)\n sample = self.tfms(sample)\n if self.target_transform:\n target = self.target_transform(target)\n return sample, target\n\n\n<mask token>\n\n\ndef rescale_bbox(bb, row_scale, col_scale):\n bb = bb.reshape((-1, 4))\n for b in bb:\n r1, c1, r2, c2 = b\n b[0] = int(np.round(r1 * col_scale))\n b[1] = int(np.round(c1 * row_scale))\n b[2] = int(np.round(r2 * col_scale))\n b[3] = int(np.round(c2 * row_scale))\n bb = bb.reshape((1, -1))\n return bb\n\n\n<mask token>\n\n\nclass DataProcessor:\n\n def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=\n False, tr_name='train', val_name='val', test_name='test', extension\n =None, setup_data=True):\n print('+------------------------------------+')\n print('| Dream AI |')\n print('+------------------------------------+')\n print()\n self.device = torch.device('cuda:0' if torch.cuda.is_available() else\n 'cpu')\n (self.data_path, self.train_csv, self.val_csv, self.reg, self.\n tr_name, self.val_name, self.test_name, self.extension) = (\n data_path, train_csv, val_csv, reg, tr_name, val_name,\n test_name, extension)\n self.obj = False\n self.multi_label = False\n if setup_data:\n self.set_up_data()\n\n def set_up_data(self, split_size=0.15):\n data_path, train_csv, val_csv, tr_name, val_name, test_name 
= (self\n .data_path, self.train_csv, self.val_csv, self.tr_name, self.\n val_name, self.test_name)\n if not data_path:\n data_path = os.getcwd() + '/'\n tr_path = os.path.join(data_path, tr_name)\n val_path = os.path.join(data_path, val_name)\n test_path = os.path.join(data_path, test_name)\n if os.path.exists(os.path.join(data_path, tr_name + '.csv')):\n train_csv = tr_name + '.csv'\n if not train_csv:\n print('no')\n train_csv, val_csv, test_csv = self.data_from_paths_to_csv(\n data_path, tr_path, val_path, test_path)\n train_csv_path = os.path.join(data_path, train_csv)\n train_df = pd.read_csv(train_csv_path)\n if 'Unnamed: 0' in train_df.columns:\n train_df = train_df.drop('Unnamed: 0', 1)\n if len(train_df.columns) > 2:\n self.obj = True\n img_names = [str(x) for x in list(train_df.iloc[:, 0])]\n if self.extension:\n img_names = add_extension(img_names, self.extension)\n if val_csv:\n val_csv_path = os.path.join(data_path, val_csv)\n val_df = pd.read_csv(val_csv_path)\n val_targets = list(map(str, list(val_df.iloc[:, 1])))\n if test_csv:\n test_csv_path = os.path.join(data_path, test_csv)\n test_df = pd.read_csv(test_csv_path)\n test_targets = list(map(str, list(test_df.iloc[:, 1])))\n targets = list(map(str, list(train_df.iloc[:, 1])))\n lengths = [len(t) for t in [s.split() for s in targets]]\n self.target_lengths = lengths\n split_targets = [t.split() for t in targets]\n if self.obj:\n print('\\nObject Detection\\n')\n int_targets = [list(map(float, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n obj_targets = list(map(str, list(train_df.iloc[:, 2])))\n obj_split_targets = [t.split() for t in obj_targets]\n try:\n obj_split_targets = [list(map(int, x)) for x in\n obj_split_targets]\n except:\n pass\n 
dai_onehot, onehot_classes = one_hot(obj_split_targets, True)\n c_names = list(onehot_classes)\n class_idx = [[c_names.index(i) for i in c] for c in\n obj_split_targets]\n zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)\n for i, t in enumerate(zero_idx):\n t[len(t) - len(class_idx[i]):] = class_idx[i]\n zero_idx[i] = t\n train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.\n LongTensor) for z in zero_idx]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n elif self.reg:\n print('\\nRegression\\n')\n int_targets = [list(map(int, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n self.data_dir, self.num_classes, self.class_names = data_path, max(\n lengths), np.unique(zero_targets, axis=1)\n elif lengths[1:] != lengths[:-1]:\n self.multi_label = True\n print('\\nMulti-label Classification\\n')\n try:\n split_targets = [list(map(int, x)) for x in split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(split_targets, self.\n multi_label)\n train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.\n FloatTensor) for x in dai_onehot]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n else:\n print('\\nSingle-label Classification\\n')\n unique_targets = list(np.unique(targets))\n target_ids = [unique_targets.index(x) for x in targets]\n train_df.iloc[:, 1] = target_ids\n if val_csv:\n target_ids = [unique_targets.index(x) for x in val_targets]\n val_df.iloc[:, 1] = target_ids\n if test_csv:\n target_ids = [unique_targets.index(x) for x in test_targets]\n test_df.iloc[:, 1] = target_ids\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n unique_targets), 
unique_targets\n if not val_csv:\n train_df, val_df = split_df(train_df, split_size)\n if not test_csv:\n val_df, test_df = split_df(val_df, split_size)\n tr_images = [str(x) for x in list(train_df.iloc[:, 0])]\n val_images = [str(x) for x in list(val_df.iloc[:, 0])]\n test_images = [str(x) for x in list(test_df.iloc[:, 0])]\n if self.extension:\n tr_images = add_extension(tr_images, self.extension)\n val_images = add_extension(val_images, self.extension)\n test_images = add_extension(test_images, self.extension)\n train_df.iloc[:, 0] = tr_images\n val_df.iloc[:, 0] = val_images\n test_df.iloc[:, 0] = test_images\n train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)\n val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)\n test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)\n self.minorities, self.class_diffs = None, None\n if not self.obj or not self.multi_label:\n self.minorities, self.class_diffs = get_minorities(train_df)\n self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,\n self.test_name: test_df}\n data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,\n 'num_classes': self.num_classes, 'class_names': self.\n class_names, 'minorities': self.minorities, 'class_diffs': self\n .class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}\n self.data_dict = data_dict\n return data_dict\n\n def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,\n test_path=None):\n train_df = csv_from_path(tr_path, tr_path)\n train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),\n index=False)\n ret = self.tr_name + '.csv', None\n if val_path is not None:\n val_exists = os.path.exists(val_path)\n if val_exists:\n val_df = csv_from_path(val_path, tr_path)\n val_df.to_csv(os.path.join(data_path, self.val_name +\n '.csv'), index=False)\n ret = self.tr_name + '.csv', self.val_name + '.csv'\n if test_path is not None:\n test_exists = os.path.exists(test_path)\n if test_exists:\n test_df = 
csv_from_path(test_path, tr_path)\n test_df.to_csv(os.path.join(data_path, self.test_name +\n '.csv'), index=False)\n ret = (self.tr_name + '.csv', self.val_name + '.csv', self.\n test_name + '.csv')\n return ret\n\n def get_data(self, data_dict=None, s=(224, 224), dataset=\n my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=\n None, tta=False, num_workers=4, stats_percentage=0.6):\n self.image_size = s\n if not data_dict:\n data_dict = self.data_dict\n data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (\n data_dict['data_dfs'], data_dict['data_dir'], data_dict[\n 'minorities'], data_dict['class_diffs'], data_dict['obj'],\n data_dict['multi_label'])\n if obj or multi_label:\n balance = False\n if tta:\n tta_tfms = {self.tr_name: transforms.Compose([transforms.\n FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack\n ([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops: torch.stack([transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(\n crop) for crop in crops]))]), self.val_name: transforms.\n Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda\n crops: torch.stack([transforms.ToTensor()(crop) for crop in\n crops])), transforms.Lambda(lambda crops: torch.stack([\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])(crop) for crop in crops]))]), self.test_name:\n transforms.Compose([transforms.FiveCrop(s[0]), transforms.\n Lambda(lambda crops: torch.stack([transforms.ToTensor()(\n crop) for crop in crops])), transforms.Lambda(lambda crops:\n torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [\n 0.229, 0.224, 0.225])(crop) for crop in crops]))])}\n else:\n tta_tfms = None\n if not bal_tfms:\n bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],\n self.val_name: None, self.test_name: None}\n else:\n bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.\n test_name: None}\n if obj:\n resize_transform = transforms.Resize(s)\n else:\n 
resize_transform = transforms.Resize(s)\n if not tfms:\n tfms = [resize_transform, transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]\n else:\n tfms_temp = [resize_transform, transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])]\n tfms_temp[1:1] = tfms\n tfms = tfms_temp\n print(tfms)\n data_transforms = {self.tr_name: tfms, self.val_name: [transforms.\n Resize(s), transforms.ToTensor(), transforms.Normalize([0.485, \n 0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [\n transforms.Resize(s), transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}\n temp_tfms = [resize_transform, transforms.ToTensor()]\n temp_dataset = dataset(os.path.join(data_dir, self.tr_name),\n data_dfs[self.tr_name], temp_tfms)\n self.img_mean, self.img_std = get_img_stats(temp_dataset,\n stats_percentage)\n data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][\n -1].std = self.img_mean, self.img_std\n data_transforms[self.val_name][-1].mean, data_transforms[self.val_name\n ][-1].std = self.img_mean, self.img_std\n data_transforms[self.test_name][-1].mean, data_transforms[self.\n test_name][-1].std = self.img_mean, self.img_std\n if balance:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj, minorities,\n class_diffs, bal_tfms[x]) for x in [self.tr_name, self.\n val_name, self.test_name]}\n else:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj) for x in [\n self.tr_name, self.val_name, self.test_name]}\n dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],\n batch_size=bs, shuffle=True, num_workers=num_workers) for x in\n [self.tr_name, self.val_name, self.test_name]}\n dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,\n self.val_name, self.test_name]}\n self.image_datasets, self.dataloaders, 
self.dataset_sizes = (\n image_datasets, dataloaders, dataset_sizes)\n return image_datasets, dataloaders, dataset_sizes\n\n def imshow(self, inp, title=None):\n \"\"\"Imshow for Tensor.\"\"\"\n inp = self.denorm_img(inp)\n plt.imshow(inp)\n if title:\n plt.title(title)\n plt.pause(0.001)\n\n def denorm_img(self, inp, calculate=False):\n inp = inp.numpy().transpose((1, 2, 0))\n if calculate:\n mean = np.mean(inp)\n std = np.std(inp)\n else:\n mean = self.img_mean.numpy()\n std = self.img_std.numpy()\n inp = std * inp + mean\n inp = np.clip(inp, 0, 1)\n return inp\n\n def show_data(self, folder_name='train', size=(64, 64), bs=5):\n self.get_data(size, bs)\n batch = next(iter(self.dataloaders[folder_name]))\n inputs, classes = batch[0], batch[1]\n out = torchvision.utils.make_grid(inputs)\n if self.reg:\n print(classes)\n self.imshow(out, title=[x for x in classes])\n elif self.multi_label:\n self.imshow(out, title=[self.class_names[np.nonzero(x.type(\n torch.LongTensor))] for x in classes])\n else:\n self.imshow(out, title=[self.class_names[x] for x in classes])\n",
"step-3": "<mask token>\n\n\nclass my_image_csv_dataset(Dataset):\n\n def __init__(self, data_dir, data, transforms_=None, obj=False,\n minorities=None, diffs=None, bal_tfms=None):\n self.data_dir = data_dir\n self.data = data\n self.transforms_ = transforms_\n self.tfms = None\n self.obj = obj\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n assert transforms_ is not None, print('Please pass some transforms.')\n\n def __len__(self):\n return len(self.data)\n\n def __getitem__(self, index):\n img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])\n img = Image.open(img_path)\n img = img.convert('RGB')\n img = torchvision.transforms.functional.to_grayscale(img,\n num_output_channels=3)\n y = self.data.iloc[index, 1]\n if self.minorities and self.bal_tfms:\n if y in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[y]\n l = [self.bal_tfms]\n l.extend(self.transforms_)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[y]\n self.transforms_[1:1] = self.bal_tfms\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n x = self.tfms(img)\n if self.obj:\n s = x.size()[1]\n if isinstance(s, tuple):\n s = s[0]\n row_scale = s / img.size[0]\n col_scale = s / img.size[1]\n y = rescale_bbox(y, row_scale, col_scale)\n y.squeeze_()\n y2 = self.data.iloc[index, 2]\n y = y, y2\n return x, y\n\n\nclass my_image_folder(DatasetFolder):\n\n def __init__(self, root, transform=None, target_transform=None, loader=\n default_loader, minorities=None, diffs=None, bal_tfms=None,\n tta_tfms=None):\n super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,\n transform=transform, target_transform=target_transform)\n self.imgs = self.samples\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n 
self.tta_tfms = tta_tfms\n self.tfms = None\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n sample = self.loader(path)\n if self.transform:\n if self.minorities and self.bal_tfms:\n if target in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[target]\n l = [self.bal_tfms]\n l.extend(self.transform)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[target]\n self.tfms = transforms.Compose(self.bal_tfms + self\n .transform)\n else:\n self.tfms = transforms.Compose(self.transform)\n elif self.tta_tfms:\n self.tfms = self.tta_tfms\n else:\n self.tfms = transforms.Compose(self.transform)\n sample = self.tfms(sample)\n if self.target_transform:\n target = self.target_transform(target)\n return sample, target\n\n\n<mask token>\n\n\ndef get_index(arr, a):\n for i in range(len(arr)):\n if sum(arr[i] == a) == len(a):\n return i\n return False\n\n\ndef rescale_bbox(bb, row_scale, col_scale):\n bb = bb.reshape((-1, 4))\n for b in bb:\n r1, c1, r2, c2 = b\n b[0] = int(np.round(r1 * col_scale))\n b[1] = int(np.round(c1 * row_scale))\n b[2] = int(np.round(r2 * col_scale))\n b[3] = int(np.round(c2 * row_scale))\n bb = bb.reshape((1, -1))\n return bb\n\n\ndef get_img_stats(dataset, sz):\n size = int(len(dataset) * sz)\n i = 0\n imgs = []\n for img, _ in dataset:\n if i > size:\n break\n imgs.append(img)\n i += 1\n imgs_ = torch.stack(imgs, dim=3)\n imgs_ = imgs_.view(3, -1)\n imgs_mean = imgs_.mean(dim=1)\n imgs_std = imgs_.std(dim=1)\n return imgs_mean, imgs_std\n\n\n<mask token>\n\n\ndef save_obj(obj, path):\n with open(path, 'wb') as f:\n pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)\n\n\n<mask token>\n\n\nclass DataProcessor:\n\n def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=\n False, tr_name='train', val_name='val', test_name='test', extension\n =None, setup_data=True):\n print('+------------------------------------+')\n 
print('| Dream AI |')\n print('+------------------------------------+')\n print()\n self.device = torch.device('cuda:0' if torch.cuda.is_available() else\n 'cpu')\n (self.data_path, self.train_csv, self.val_csv, self.reg, self.\n tr_name, self.val_name, self.test_name, self.extension) = (\n data_path, train_csv, val_csv, reg, tr_name, val_name,\n test_name, extension)\n self.obj = False\n self.multi_label = False\n if setup_data:\n self.set_up_data()\n\n def set_up_data(self, split_size=0.15):\n data_path, train_csv, val_csv, tr_name, val_name, test_name = (self\n .data_path, self.train_csv, self.val_csv, self.tr_name, self.\n val_name, self.test_name)\n if not data_path:\n data_path = os.getcwd() + '/'\n tr_path = os.path.join(data_path, tr_name)\n val_path = os.path.join(data_path, val_name)\n test_path = os.path.join(data_path, test_name)\n if os.path.exists(os.path.join(data_path, tr_name + '.csv')):\n train_csv = tr_name + '.csv'\n if not train_csv:\n print('no')\n train_csv, val_csv, test_csv = self.data_from_paths_to_csv(\n data_path, tr_path, val_path, test_path)\n train_csv_path = os.path.join(data_path, train_csv)\n train_df = pd.read_csv(train_csv_path)\n if 'Unnamed: 0' in train_df.columns:\n train_df = train_df.drop('Unnamed: 0', 1)\n if len(train_df.columns) > 2:\n self.obj = True\n img_names = [str(x) for x in list(train_df.iloc[:, 0])]\n if self.extension:\n img_names = add_extension(img_names, self.extension)\n if val_csv:\n val_csv_path = os.path.join(data_path, val_csv)\n val_df = pd.read_csv(val_csv_path)\n val_targets = list(map(str, list(val_df.iloc[:, 1])))\n if test_csv:\n test_csv_path = os.path.join(data_path, test_csv)\n test_df = pd.read_csv(test_csv_path)\n test_targets = list(map(str, list(test_df.iloc[:, 1])))\n targets = list(map(str, list(train_df.iloc[:, 1])))\n lengths = [len(t) for t in [s.split() for s in targets]]\n self.target_lengths = lengths\n split_targets = [t.split() for t in targets]\n if self.obj:\n print('\\nObject 
Detection\\n')\n int_targets = [list(map(float, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n obj_targets = list(map(str, list(train_df.iloc[:, 2])))\n obj_split_targets = [t.split() for t in obj_targets]\n try:\n obj_split_targets = [list(map(int, x)) for x in\n obj_split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(obj_split_targets, True)\n c_names = list(onehot_classes)\n class_idx = [[c_names.index(i) for i in c] for c in\n obj_split_targets]\n zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)\n for i, t in enumerate(zero_idx):\n t[len(t) - len(class_idx[i]):] = class_idx[i]\n zero_idx[i] = t\n train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.\n LongTensor) for z in zero_idx]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n elif self.reg:\n print('\\nRegression\\n')\n int_targets = [list(map(int, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n self.data_dir, self.num_classes, self.class_names = data_path, max(\n lengths), np.unique(zero_targets, axis=1)\n elif lengths[1:] != lengths[:-1]:\n self.multi_label = True\n print('\\nMulti-label Classification\\n')\n try:\n split_targets = [list(map(int, x)) for x in split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(split_targets, self.\n multi_label)\n train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.\n FloatTensor) for x in dai_onehot]\n self.data_dir, self.num_classes, self.class_names = 
data_path, len(\n onehot_classes), onehot_classes\n else:\n print('\\nSingle-label Classification\\n')\n unique_targets = list(np.unique(targets))\n target_ids = [unique_targets.index(x) for x in targets]\n train_df.iloc[:, 1] = target_ids\n if val_csv:\n target_ids = [unique_targets.index(x) for x in val_targets]\n val_df.iloc[:, 1] = target_ids\n if test_csv:\n target_ids = [unique_targets.index(x) for x in test_targets]\n test_df.iloc[:, 1] = target_ids\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n unique_targets), unique_targets\n if not val_csv:\n train_df, val_df = split_df(train_df, split_size)\n if not test_csv:\n val_df, test_df = split_df(val_df, split_size)\n tr_images = [str(x) for x in list(train_df.iloc[:, 0])]\n val_images = [str(x) for x in list(val_df.iloc[:, 0])]\n test_images = [str(x) for x in list(test_df.iloc[:, 0])]\n if self.extension:\n tr_images = add_extension(tr_images, self.extension)\n val_images = add_extension(val_images, self.extension)\n test_images = add_extension(test_images, self.extension)\n train_df.iloc[:, 0] = tr_images\n val_df.iloc[:, 0] = val_images\n test_df.iloc[:, 0] = test_images\n train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)\n val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)\n test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)\n self.minorities, self.class_diffs = None, None\n if not self.obj or not self.multi_label:\n self.minorities, self.class_diffs = get_minorities(train_df)\n self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,\n self.test_name: test_df}\n data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,\n 'num_classes': self.num_classes, 'class_names': self.\n class_names, 'minorities': self.minorities, 'class_diffs': self\n .class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}\n self.data_dict = data_dict\n return data_dict\n\n def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,\n 
test_path=None):\n train_df = csv_from_path(tr_path, tr_path)\n train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),\n index=False)\n ret = self.tr_name + '.csv', None\n if val_path is not None:\n val_exists = os.path.exists(val_path)\n if val_exists:\n val_df = csv_from_path(val_path, tr_path)\n val_df.to_csv(os.path.join(data_path, self.val_name +\n '.csv'), index=False)\n ret = self.tr_name + '.csv', self.val_name + '.csv'\n if test_path is not None:\n test_exists = os.path.exists(test_path)\n if test_exists:\n test_df = csv_from_path(test_path, tr_path)\n test_df.to_csv(os.path.join(data_path, self.test_name +\n '.csv'), index=False)\n ret = (self.tr_name + '.csv', self.val_name + '.csv', self.\n test_name + '.csv')\n return ret\n\n def get_data(self, data_dict=None, s=(224, 224), dataset=\n my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=\n None, tta=False, num_workers=4, stats_percentage=0.6):\n self.image_size = s\n if not data_dict:\n data_dict = self.data_dict\n data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (\n data_dict['data_dfs'], data_dict['data_dir'], data_dict[\n 'minorities'], data_dict['class_diffs'], data_dict['obj'],\n data_dict['multi_label'])\n if obj or multi_label:\n balance = False\n if tta:\n tta_tfms = {self.tr_name: transforms.Compose([transforms.\n FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack\n ([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops: torch.stack([transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(\n crop) for crop in crops]))]), self.val_name: transforms.\n Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda\n crops: torch.stack([transforms.ToTensor()(crop) for crop in\n crops])), transforms.Lambda(lambda crops: torch.stack([\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])(crop) for crop in crops]))]), self.test_name:\n transforms.Compose([transforms.FiveCrop(s[0]), transforms.\n 
Lambda(lambda crops: torch.stack([transforms.ToTensor()(\n crop) for crop in crops])), transforms.Lambda(lambda crops:\n torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [\n 0.229, 0.224, 0.225])(crop) for crop in crops]))])}\n else:\n tta_tfms = None\n if not bal_tfms:\n bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],\n self.val_name: None, self.test_name: None}\n else:\n bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.\n test_name: None}\n if obj:\n resize_transform = transforms.Resize(s)\n else:\n resize_transform = transforms.Resize(s)\n if not tfms:\n tfms = [resize_transform, transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]\n else:\n tfms_temp = [resize_transform, transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])]\n tfms_temp[1:1] = tfms\n tfms = tfms_temp\n print(tfms)\n data_transforms = {self.tr_name: tfms, self.val_name: [transforms.\n Resize(s), transforms.ToTensor(), transforms.Normalize([0.485, \n 0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [\n transforms.Resize(s), transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}\n temp_tfms = [resize_transform, transforms.ToTensor()]\n temp_dataset = dataset(os.path.join(data_dir, self.tr_name),\n data_dfs[self.tr_name], temp_tfms)\n self.img_mean, self.img_std = get_img_stats(temp_dataset,\n stats_percentage)\n data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][\n -1].std = self.img_mean, self.img_std\n data_transforms[self.val_name][-1].mean, data_transforms[self.val_name\n ][-1].std = self.img_mean, self.img_std\n data_transforms[self.test_name][-1].mean, data_transforms[self.\n test_name][-1].std = self.img_mean, self.img_std\n if balance:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj, minorities,\n class_diffs, bal_tfms[x]) for x in [self.tr_name, 
self.\n val_name, self.test_name]}\n else:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj) for x in [\n self.tr_name, self.val_name, self.test_name]}\n dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],\n batch_size=bs, shuffle=True, num_workers=num_workers) for x in\n [self.tr_name, self.val_name, self.test_name]}\n dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,\n self.val_name, self.test_name]}\n self.image_datasets, self.dataloaders, self.dataset_sizes = (\n image_datasets, dataloaders, dataset_sizes)\n return image_datasets, dataloaders, dataset_sizes\n\n def imshow(self, inp, title=None):\n \"\"\"Imshow for Tensor.\"\"\"\n inp = self.denorm_img(inp)\n plt.imshow(inp)\n if title:\n plt.title(title)\n plt.pause(0.001)\n\n def denorm_img(self, inp, calculate=False):\n inp = inp.numpy().transpose((1, 2, 0))\n if calculate:\n mean = np.mean(inp)\n std = np.std(inp)\n else:\n mean = self.img_mean.numpy()\n std = self.img_std.numpy()\n inp = std * inp + mean\n inp = np.clip(inp, 0, 1)\n return inp\n\n def show_data(self, folder_name='train', size=(64, 64), bs=5):\n self.get_data(size, bs)\n batch = next(iter(self.dataloaders[folder_name]))\n inputs, classes = batch[0], batch[1]\n out = torchvision.utils.make_grid(inputs)\n if self.reg:\n print(classes)\n self.imshow(out, title=[x for x in classes])\n elif self.multi_label:\n self.imshow(out, title=[self.class_names[np.nonzero(x.type(\n torch.LongTensor))] for x in classes])\n else:\n self.imshow(out, title=[self.class_names[x] for x in classes])\n",
"step-4": "<mask token>\n\n\nclass my_image_csv_dataset(Dataset):\n\n def __init__(self, data_dir, data, transforms_=None, obj=False,\n minorities=None, diffs=None, bal_tfms=None):\n self.data_dir = data_dir\n self.data = data\n self.transforms_ = transforms_\n self.tfms = None\n self.obj = obj\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n assert transforms_ is not None, print('Please pass some transforms.')\n\n def __len__(self):\n return len(self.data)\n\n def __getitem__(self, index):\n img_path = os.path.join(self.data_dir, self.data.iloc[index, 0])\n img = Image.open(img_path)\n img = img.convert('RGB')\n img = torchvision.transforms.functional.to_grayscale(img,\n num_output_channels=3)\n y = self.data.iloc[index, 1]\n if self.minorities and self.bal_tfms:\n if y in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[y]\n l = [self.bal_tfms]\n l.extend(self.transforms_)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[y]\n self.transforms_[1:1] = self.bal_tfms\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n x = self.tfms(img)\n if self.obj:\n s = x.size()[1]\n if isinstance(s, tuple):\n s = s[0]\n row_scale = s / img.size[0]\n col_scale = s / img.size[1]\n y = rescale_bbox(y, row_scale, col_scale)\n y.squeeze_()\n y2 = self.data.iloc[index, 2]\n y = y, y2\n return x, y\n\n\nclass my_image_folder(DatasetFolder):\n\n def __init__(self, root, transform=None, target_transform=None, loader=\n default_loader, minorities=None, diffs=None, bal_tfms=None,\n tta_tfms=None):\n super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,\n transform=transform, target_transform=target_transform)\n self.imgs = self.samples\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n 
self.tta_tfms = tta_tfms\n self.tfms = None\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n sample = self.loader(path)\n if self.transform:\n if self.minorities and self.bal_tfms:\n if target in self.minorities:\n if hasattr(self.bal_tfms, 'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[target]\n l = [self.bal_tfms]\n l.extend(self.transform)\n self.tfms = transforms.Compose(l)\n else:\n for t in self.bal_tfms:\n t.p = self.diffs[target]\n self.tfms = transforms.Compose(self.bal_tfms + self\n .transform)\n else:\n self.tfms = transforms.Compose(self.transform)\n elif self.tta_tfms:\n self.tfms = self.tta_tfms\n else:\n self.tfms = transforms.Compose(self.transform)\n sample = self.tfms(sample)\n if self.target_transform:\n target = self.target_transform(target)\n return sample, target\n\n\n<mask token>\n\n\ndef listdir_fullpath(d):\n return [os.path.join(d, f) for f in os.listdir(d)]\n\n\n<mask token>\n\n\ndef csv_from_path(path, img_dest):\n path = Path(path)\n img_dest = Path(img_dest)\n labels_paths = list(path.iterdir())\n tr_images = []\n tr_labels = []\n for l in labels_paths:\n if l.is_dir():\n for i in list(l.iterdir()):\n if i.suffix in IMG_EXTENSIONS:\n name = i.name\n label = l.name\n new_name = '{}_{}'.format(path.name, name)\n new_path = img_dest / new_name\n os.rename(i, new_path)\n tr_images.append(new_name)\n tr_labels.append(label)\n tr_img_label = {'Img': tr_images, 'Label': tr_labels}\n csv = pd.DataFrame(tr_img_label, columns=['Img', 'Label'])\n csv = csv.sample(frac=1).reset_index(drop=True)\n return csv\n\n\ndef add_extension(a, e):\n a = [(x + e) for x in a]\n return a\n\n\ndef one_hot(targets, multi=False):\n if multi:\n binerizer = MultiLabelBinarizer()\n dai_1hot = binerizer.fit_transform(targets)\n else:\n binerizer = LabelBinarizer()\n dai_1hot = binerizer.fit_transform(targets)\n return dai_1hot, binerizer.classes_\n\n\ndef get_index(arr, a):\n for i in range(len(arr)):\n if sum(arr[i] 
== a) == len(a):\n return i\n return False\n\n\ndef rescale_bbox(bb, row_scale, col_scale):\n bb = bb.reshape((-1, 4))\n for b in bb:\n r1, c1, r2, c2 = b\n b[0] = int(np.round(r1 * col_scale))\n b[1] = int(np.round(c1 * row_scale))\n b[2] = int(np.round(r2 * col_scale))\n b[3] = int(np.round(c2 * row_scale))\n bb = bb.reshape((1, -1))\n return bb\n\n\ndef get_img_stats(dataset, sz):\n size = int(len(dataset) * sz)\n i = 0\n imgs = []\n for img, _ in dataset:\n if i > size:\n break\n imgs.append(img)\n i += 1\n imgs_ = torch.stack(imgs, dim=3)\n imgs_ = imgs_.view(3, -1)\n imgs_mean = imgs_.mean(dim=1)\n imgs_std = imgs_.std(dim=1)\n return imgs_mean, imgs_std\n\n\ndef split_df(train_df, test_size=0.15):\n try:\n train_df, val_df = train_test_split(train_df, test_size=test_size,\n random_state=2, stratify=train_df.iloc[:, 1])\n except:\n train_df, val_df = train_test_split(train_df, test_size=test_size,\n random_state=2)\n train_df = train_df.reset_index(drop=True)\n val_df = val_df.reset_index(drop=True)\n return train_df, val_df\n\n\ndef save_obj(obj, path):\n with open(path, 'wb') as f:\n pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)\n\n\ndef load_obj(path):\n with open(path, 'rb') as f:\n return pickle.load(f)\n\n\nclass DataProcessor:\n\n def __init__(self, data_path=None, train_csv=None, val_csv=None, reg=\n False, tr_name='train', val_name='val', test_name='test', extension\n =None, setup_data=True):\n print('+------------------------------------+')\n print('| Dream AI |')\n print('+------------------------------------+')\n print()\n self.device = torch.device('cuda:0' if torch.cuda.is_available() else\n 'cpu')\n (self.data_path, self.train_csv, self.val_csv, self.reg, self.\n tr_name, self.val_name, self.test_name, self.extension) = (\n data_path, train_csv, val_csv, reg, tr_name, val_name,\n test_name, extension)\n self.obj = False\n self.multi_label = False\n if setup_data:\n self.set_up_data()\n\n def set_up_data(self, split_size=0.15):\n data_path, 
train_csv, val_csv, tr_name, val_name, test_name = (self\n .data_path, self.train_csv, self.val_csv, self.tr_name, self.\n val_name, self.test_name)\n if not data_path:\n data_path = os.getcwd() + '/'\n tr_path = os.path.join(data_path, tr_name)\n val_path = os.path.join(data_path, val_name)\n test_path = os.path.join(data_path, test_name)\n if os.path.exists(os.path.join(data_path, tr_name + '.csv')):\n train_csv = tr_name + '.csv'\n if not train_csv:\n print('no')\n train_csv, val_csv, test_csv = self.data_from_paths_to_csv(\n data_path, tr_path, val_path, test_path)\n train_csv_path = os.path.join(data_path, train_csv)\n train_df = pd.read_csv(train_csv_path)\n if 'Unnamed: 0' in train_df.columns:\n train_df = train_df.drop('Unnamed: 0', 1)\n if len(train_df.columns) > 2:\n self.obj = True\n img_names = [str(x) for x in list(train_df.iloc[:, 0])]\n if self.extension:\n img_names = add_extension(img_names, self.extension)\n if val_csv:\n val_csv_path = os.path.join(data_path, val_csv)\n val_df = pd.read_csv(val_csv_path)\n val_targets = list(map(str, list(val_df.iloc[:, 1])))\n if test_csv:\n test_csv_path = os.path.join(data_path, test_csv)\n test_df = pd.read_csv(test_csv_path)\n test_targets = list(map(str, list(test_df.iloc[:, 1])))\n targets = list(map(str, list(train_df.iloc[:, 1])))\n lengths = [len(t) for t in [s.split() for s in targets]]\n self.target_lengths = lengths\n split_targets = [t.split() for t in targets]\n if self.obj:\n print('\\nObject Detection\\n')\n int_targets = [list(map(float, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n obj_targets = list(map(str, list(train_df.iloc[:, 2])))\n obj_split_targets = [t.split() for t in obj_targets]\n try:\n obj_split_targets = [list(map(int, x)) 
for x in\n obj_split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(obj_split_targets, True)\n c_names = list(onehot_classes)\n class_idx = [[c_names.index(i) for i in c] for c in\n obj_split_targets]\n zero_idx = np.zeros((len(targets), max(lengths) // 4), dtype=int)\n for i, t in enumerate(zero_idx):\n t[len(t) - len(class_idx[i]):] = class_idx[i]\n zero_idx[i] = t\n train_df.iloc[:, 2] = [torch.from_numpy(z).type(torch.\n LongTensor) for z in zero_idx]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n elif self.reg:\n print('\\nRegression\\n')\n int_targets = [list(map(int, x)) for x in split_targets]\n zero_targets = np.zeros((len(targets), max(lengths)), dtype=int)\n for i, t in enumerate(zero_targets):\n t[len(t) - len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:, 1] = [torch.from_numpy(z).type(torch.\n FloatTensor) for z in zero_targets]\n self.data_dir, self.num_classes, self.class_names = data_path, max(\n lengths), np.unique(zero_targets, axis=1)\n elif lengths[1:] != lengths[:-1]:\n self.multi_label = True\n print('\\nMulti-label Classification\\n')\n try:\n split_targets = [list(map(int, x)) for x in split_targets]\n except:\n pass\n dai_onehot, onehot_classes = one_hot(split_targets, self.\n multi_label)\n train_df.iloc[:, 1] = [torch.from_numpy(x).type(torch.\n FloatTensor) for x in dai_onehot]\n self.data_dir, self.num_classes, self.class_names = data_path, len(\n onehot_classes), onehot_classes\n else:\n print('\\nSingle-label Classification\\n')\n unique_targets = list(np.unique(targets))\n target_ids = [unique_targets.index(x) for x in targets]\n train_df.iloc[:, 1] = target_ids\n if val_csv:\n target_ids = [unique_targets.index(x) for x in val_targets]\n val_df.iloc[:, 1] = target_ids\n if test_csv:\n target_ids = [unique_targets.index(x) for x in test_targets]\n test_df.iloc[:, 1] = target_ids\n self.data_dir, self.num_classes, 
self.class_names = data_path, len(\n unique_targets), unique_targets\n if not val_csv:\n train_df, val_df = split_df(train_df, split_size)\n if not test_csv:\n val_df, test_df = split_df(val_df, split_size)\n tr_images = [str(x) for x in list(train_df.iloc[:, 0])]\n val_images = [str(x) for x in list(val_df.iloc[:, 0])]\n test_images = [str(x) for x in list(test_df.iloc[:, 0])]\n if self.extension:\n tr_images = add_extension(tr_images, self.extension)\n val_images = add_extension(val_images, self.extension)\n test_images = add_extension(test_images, self.extension)\n train_df.iloc[:, 0] = tr_images\n val_df.iloc[:, 0] = val_images\n test_df.iloc[:, 0] = test_images\n train_df.to_csv(os.path.join(data_path, 'train.csv'), index=False)\n val_df.to_csv(os.path.join(data_path, 'val.csv'), index=False)\n test_df.to_csv(os.path.join(data_path, 'test.csv'), index=False)\n self.minorities, self.class_diffs = None, None\n if not self.obj or not self.multi_label:\n self.minorities, self.class_diffs = get_minorities(train_df)\n self.data_dfs = {self.tr_name: train_df, self.val_name: val_df,\n self.test_name: test_df}\n data_dict = {'data_dfs': self.data_dfs, 'data_dir': self.data_dir,\n 'num_classes': self.num_classes, 'class_names': self.\n class_names, 'minorities': self.minorities, 'class_diffs': self\n .class_diffs, 'obj': self.obj, 'multi_label': self.multi_label}\n self.data_dict = data_dict\n return data_dict\n\n def data_from_paths_to_csv(self, data_path, tr_path, val_path=None,\n test_path=None):\n train_df = csv_from_path(tr_path, tr_path)\n train_df.to_csv(os.path.join(data_path, self.tr_name + '.csv'),\n index=False)\n ret = self.tr_name + '.csv', None\n if val_path is not None:\n val_exists = os.path.exists(val_path)\n if val_exists:\n val_df = csv_from_path(val_path, tr_path)\n val_df.to_csv(os.path.join(data_path, self.val_name +\n '.csv'), index=False)\n ret = self.tr_name + '.csv', self.val_name + '.csv'\n if test_path is not None:\n test_exists = 
os.path.exists(test_path)\n if test_exists:\n test_df = csv_from_path(test_path, tr_path)\n test_df.to_csv(os.path.join(data_path, self.test_name +\n '.csv'), index=False)\n ret = (self.tr_name + '.csv', self.val_name + '.csv', self.\n test_name + '.csv')\n return ret\n\n def get_data(self, data_dict=None, s=(224, 224), dataset=\n my_image_csv_dataset, bs=32, balance=False, tfms=None, bal_tfms=\n None, tta=False, num_workers=4, stats_percentage=0.6):\n self.image_size = s\n if not data_dict:\n data_dict = self.data_dict\n data_dfs, data_dir, minorities, class_diffs, obj, multi_label = (\n data_dict['data_dfs'], data_dict['data_dir'], data_dict[\n 'minorities'], data_dict['class_diffs'], data_dict['obj'],\n data_dict['multi_label'])\n if obj or multi_label:\n balance = False\n if tta:\n tta_tfms = {self.tr_name: transforms.Compose([transforms.\n FiveCrop(s[0]), transforms.Lambda(lambda crops: torch.stack\n ([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops: torch.stack([transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(\n crop) for crop in crops]))]), self.val_name: transforms.\n Compose([transforms.FiveCrop(s[0]), transforms.Lambda(lambda\n crops: torch.stack([transforms.ToTensor()(crop) for crop in\n crops])), transforms.Lambda(lambda crops: torch.stack([\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])(crop) for crop in crops]))]), self.test_name:\n transforms.Compose([transforms.FiveCrop(s[0]), transforms.\n Lambda(lambda crops: torch.stack([transforms.ToTensor()(\n crop) for crop in crops])), transforms.Lambda(lambda crops:\n torch.stack([transforms.Normalize([0.485, 0.456, 0.406], [\n 0.229, 0.224, 0.225])(crop) for crop in crops]))])}\n else:\n tta_tfms = None\n if not bal_tfms:\n bal_tfms = {self.tr_name: [transforms.RandomHorizontalFlip()],\n self.val_name: None, self.test_name: None}\n else:\n bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.\n test_name: None}\n if 
obj:\n resize_transform = transforms.Resize(s)\n else:\n resize_transform = transforms.Resize(s)\n if not tfms:\n tfms = [resize_transform, transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]\n else:\n tfms_temp = [resize_transform, transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, \n 0.225])]\n tfms_temp[1:1] = tfms\n tfms = tfms_temp\n print(tfms)\n data_transforms = {self.tr_name: tfms, self.val_name: [transforms.\n Resize(s), transforms.ToTensor(), transforms.Normalize([0.485, \n 0.456, 0.406], [0.229, 0.224, 0.225])], self.test_name: [\n transforms.Resize(s), transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]}\n temp_tfms = [resize_transform, transforms.ToTensor()]\n temp_dataset = dataset(os.path.join(data_dir, self.tr_name),\n data_dfs[self.tr_name], temp_tfms)\n self.img_mean, self.img_std = get_img_stats(temp_dataset,\n stats_percentage)\n data_transforms[self.tr_name][-1].mean, data_transforms[self.tr_name][\n -1].std = self.img_mean, self.img_std\n data_transforms[self.val_name][-1].mean, data_transforms[self.val_name\n ][-1].std = self.img_mean, self.img_std\n data_transforms[self.test_name][-1].mean, data_transforms[self.\n test_name][-1].std = self.img_mean, self.img_std\n if balance:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj, minorities,\n class_diffs, bal_tfms[x]) for x in [self.tr_name, self.\n val_name, self.test_name]}\n else:\n image_datasets = {x: dataset(os.path.join(data_dir, self.\n tr_name), data_dfs[x], data_transforms[x], obj) for x in [\n self.tr_name, self.val_name, self.test_name]}\n dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x],\n batch_size=bs, shuffle=True, num_workers=num_workers) for x in\n [self.tr_name, self.val_name, self.test_name]}\n dataset_sizes = {x: len(image_datasets[x]) for x in [self.tr_name,\n self.val_name, 
self.test_name]}\n self.image_datasets, self.dataloaders, self.dataset_sizes = (\n image_datasets, dataloaders, dataset_sizes)\n return image_datasets, dataloaders, dataset_sizes\n\n def imshow(self, inp, title=None):\n \"\"\"Imshow for Tensor.\"\"\"\n inp = self.denorm_img(inp)\n plt.imshow(inp)\n if title:\n plt.title(title)\n plt.pause(0.001)\n\n def denorm_img(self, inp, calculate=False):\n inp = inp.numpy().transpose((1, 2, 0))\n if calculate:\n mean = np.mean(inp)\n std = np.std(inp)\n else:\n mean = self.img_mean.numpy()\n std = self.img_std.numpy()\n inp = std * inp + mean\n inp = np.clip(inp, 0, 1)\n return inp\n\n def show_data(self, folder_name='train', size=(64, 64), bs=5):\n self.get_data(size, bs)\n batch = next(iter(self.dataloaders[folder_name]))\n inputs, classes = batch[0], batch[1]\n out = torchvision.utils.make_grid(inputs)\n if self.reg:\n print(classes)\n self.imshow(out, title=[x for x in classes])\n elif self.multi_label:\n self.imshow(out, title=[self.class_names[np.nonzero(x.type(\n torch.LongTensor))] for x in classes])\n else:\n self.imshow(out, title=[self.class_names[x] for x in classes])\n",
"step-5": "from dai_imports import*\nfrom obj_utils import*\nimport utils\n\nclass my_image_csv_dataset(Dataset):\n \n def __init__(self, data_dir, data, transforms_ = None, obj = False,\n minorities = None, diffs = None, bal_tfms = None):\n \n self.data_dir = data_dir\n self.data = data\n self.transforms_ = transforms_\n self.tfms = None\n self.obj = obj\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n assert transforms_ is not None, print('Please pass some transforms.')\n \n def __len__(self):\n return len(self.data)\n \n def __getitem__(self, index):\n img_path = os.path.join(self.data_dir,self.data.iloc[index, 0])\n img = Image.open(img_path)\n img = img.convert('RGB')\n\n img = torchvision.transforms.functional.to_grayscale(img,num_output_channels=3)\n\n y = self.data.iloc[index, 1] \n if self.minorities and self.bal_tfms:\n if y in self.minorities:\n if hasattr(self.bal_tfms,'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[y]\n l = [self.bal_tfms]\n l.extend(self.transforms_)\n self.tfms = transforms.Compose(l) \n else: \n for t in self.bal_tfms:\n t.p = self.diffs[y]\n self.transforms_[1:1] = self.bal_tfms \n self.tfms = transforms.Compose(self.transforms_)\n # print(self.tfms)\n else:\n self.tfms = transforms.Compose(self.transforms_)\n else: \n self.tfms = transforms.Compose(self.transforms_) \n x = self.tfms(img)\n if self.obj:\n s = x.size()[1]\n if isinstance(s,tuple):\n s = s[0]\n row_scale = s/img.size[0]\n col_scale = s/img.size[1]\n y = rescale_bbox(y,row_scale,col_scale)\n y.squeeze_()\n y2 = self.data.iloc[index, 2]\n y = (y,y2)\n return (x,y)\n\n\nclass my_image_folder(DatasetFolder):\n \n def __init__(self, root, transform=None, target_transform=None,\n loader=default_loader, minorities=None, diffs = None, bal_tfms=None, tta_tfms = None):\n \n super(my_image_folder, self).__init__(root, loader, IMG_EXTENSIONS,\n transform=transform,\n target_transform=target_transform)\n self.imgs = 
self.samples\n self.minorities = minorities\n self.diffs = diffs\n self.bal_tfms = bal_tfms\n self.tta_tfms = tta_tfms\n self.tfms = None\n\n def __getitem__(self,index):\n \n path, target = self.samples[index] \n sample = self.loader(path)\n if self.transform:\n if self.minorities and self.bal_tfms:\n if target in self.minorities:\n if hasattr(self.bal_tfms,'transforms'):\n for tr in self.bal_tfms.transforms:\n tr.p = self.diffs[target]\n l = [self.bal_tfms]\n l.extend(self.transform)\n self.tfms = transforms.Compose(l) \n else: \n for t in self.bal_tfms:\n t.p = self.diffs[target]\n self.tfms = transforms.Compose(self.bal_tfms + self.transform )\n else:\n self.tfms = transforms.Compose(self.transform)\n elif self.tta_tfms:\n self.tfms = self.tta_tfms\n else: \n self.tfms = transforms.Compose(self.transform)\n sample = self.tfms(sample)\n if self.target_transform:\n target = self.target_transform(target)\n return sample, target\n\ndef extract_data(dt):\n\n x = []\n y = []\n for a,b in dt:\n x.append(a)\n y.append(b)\n return x,y\n\ndef listdir_fullpath(d):\n return [os.path.join(d, f) for f in os.listdir(d)] \n\ndef get_minorities(df,thresh=0.8):\n\n c = df.iloc[:,1].value_counts()\n lc = list(c)\n max_count = lc[0]\n diffs = [1-(x/max_count) for x in lc]\n diffs = dict((k,v) for k,v in zip(c.keys(),diffs))\n minorities = [c.keys()[x] for x,y in enumerate(lc) if y < (thresh*max_count)]\n return minorities,diffs\n\ndef csv_from_path(path, img_dest):\n\n path = Path(path)\n img_dest = Path(img_dest)\n labels_paths = list(path.iterdir())\n tr_images = []\n tr_labels = []\n for l in labels_paths:\n if l.is_dir():\n for i in list(l.iterdir()):\n if i.suffix in IMG_EXTENSIONS:\n name = i.name\n label = l.name\n new_name = '{}_{}'.format(path.name,name)\n new_path = img_dest/new_name\n# print(new_path)\n os.rename(i,new_path)\n tr_images.append(new_name)\n tr_labels.append(label) \n # os.rmdir(l)\n tr_img_label = {'Img':tr_images, 'Label': tr_labels}\n csv = 
pd.DataFrame(tr_img_label,columns=['Img','Label'])\n csv = csv.sample(frac=1).reset_index(drop=True)\n return csv\n\ndef add_extension(a,e):\n a = [x+e for x in a]\n return a\n\ndef one_hot(targets, multi = False):\n if multi:\n binerizer = MultiLabelBinarizer()\n dai_1hot = binerizer.fit_transform(targets)\n else:\n binerizer = LabelBinarizer()\n dai_1hot = binerizer.fit_transform(targets)\n return dai_1hot,binerizer.classes_\n\ndef get_index(arr,a):\n for i in range(len(arr)):\n if sum(arr[i] == a) == len(a):\n return i\n return False\n\ndef rescale_bbox(bb,row_scale,col_scale):\n bb = bb.reshape((-1,4))\n for b in bb:\n r1,c1,r2,c2 = b\n b[0] = int(np.round(r1*col_scale))\n b[1] = int(np.round(c1*row_scale))\n b[2] = int(np.round(r2*col_scale))\n b[3] = int(np.round(c2*row_scale))\n\n # bb = torch.tensor([bb_hw(b) for b in bb.reshape(-1,4)])\n # for b in bb:\n # r1,c1,r2,c2 = b\n # b[0] = int(np.round(r1*row_scale))\n # b[1] = int(np.round(c1*col_scale))\n # b[2] = int(np.round(r2*row_scale))\n # b[3] = int(np.round(c2*col_scale))\n # if(sum(b)) == 1:\n # b[0],b[1],b[2],b[3] = 0,0,0,0\n\n bb = bb.reshape((1,-1)) \n return bb\n\ndef get_img_stats(dataset,sz):\n\n size = int(len(dataset)*sz)\n i = 0\n imgs = []\n for img,_ in dataset:\n # print(img.size())\n if i > size:\n break\n imgs.append(img)\n i+=1\n imgs_ = torch.stack(imgs,dim=3)\n imgs_ = imgs_.view(3,-1)\n imgs_mean = imgs_.mean(dim=1)\n imgs_std = imgs_.std(dim=1)\n return imgs_mean,imgs_std\n\ndef split_df(train_df,test_size = 0.15):\n try: \n train_df,val_df = train_test_split(train_df,test_size = test_size,random_state = 2,stratify = train_df.iloc[:,1])\n except:\n train_df,val_df = train_test_split(train_df,test_size = test_size,random_state = 2)\n train_df = train_df.reset_index(drop = True)\n val_df = val_df.reset_index(drop = True)\n return train_df,val_df \n\ndef save_obj(obj, path):\n with open(path, 'wb') as f:\n pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)\n\ndef load_obj(path):\n with 
open(path, 'rb') as f:\n return pickle.load(f)\n\nclass DataProcessor:\n \n def __init__(self, data_path = None, train_csv = None, val_csv = None, reg = False,\n tr_name = 'train', val_name = 'val', test_name = 'test', extension = None, setup_data = True):\n \n print('+------------------------------------+')\n print('| Dream AI |')\n print('+------------------------------------+')\n print()\n \n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n \n self.data_path,self.train_csv,self.val_csv,self.reg,self.tr_name,self.val_name,self.test_name,self.extension = (data_path,train_csv,\n val_csv,reg,tr_name,val_name,test_name,extension)\n \n self.obj = False\n self.multi_label = False\n \n if setup_data:\n self.set_up_data()\n \n def set_up_data(self,split_size = 0.15):\n\n data_path,train_csv,val_csv,tr_name,val_name,test_name = (self.data_path,self.train_csv,self.val_csv,self.tr_name,self.val_name,self.test_name)\n\n # check if paths given and also set paths\n \n if not data_path:\n data_path = os.getcwd() + '/'\n tr_path = os.path.join(data_path,tr_name)\n val_path = os.path.join(data_path,val_name)\n test_path = os.path.join(data_path,test_name)\n\n if os.path.exists(os.path.join(data_path,tr_name+'.csv')):\n train_csv = tr_name+'.csv'\n # if os.path.exists(os.path.join(data_path,val_name+'.csv')):\n # val_csv = val_name+'.csv'\n # if os.path.exists(os.path.join(data_path,test_name+'.csv')):\n # test_csv = test_name+'.csv' \n\n # paths to csv\n\n if not train_csv:\n print('no')\n train_csv,val_csv,test_csv = self.data_from_paths_to_csv(data_path,tr_path,val_path,test_path)\n\n train_csv_path = os.path.join(data_path,train_csv)\n train_df = pd.read_csv(train_csv_path)\n if 'Unnamed: 0' in train_df.columns:\n train_df = train_df.drop('Unnamed: 0', 1)\n if len(train_df.columns) > 2:\n self.obj = True \n img_names = [str(x) for x in list(train_df.iloc[:,0])]\n if self.extension:\n img_names = add_extension(img_names,self.extension)\n if 
val_csv:\n val_csv_path = os.path.join(data_path,val_csv)\n val_df = pd.read_csv(val_csv_path)\n val_targets = list(map(str,list(val_df.iloc[:,1])))\n if test_csv:\n test_csv_path = os.path.join(data_path,test_csv)\n test_df = pd.read_csv(test_csv_path)\n test_targets = list(map(str,list(test_df.iloc[:,1]))) \n targets = list(map(str,list(train_df.iloc[:,1])))\n lengths = [len(t) for t in [s.split() for s in targets]]\n self.target_lengths = lengths\n split_targets = [t.split() for t in targets]\n if self.obj:\n print('\\nObject Detection\\n')\n\n # bounding boxes\n\n int_targets = [list(map(float,x)) for x in split_targets]\n zero_targets = np.zeros((len(targets),max(lengths)),dtype=int)\n for i,t in enumerate(zero_targets):\n t[len(t)-len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:,1] = [torch.from_numpy(z).type(torch.FloatTensor) for z in zero_targets]\n\n # one-hot classes\n\n obj_targets = list(map(str,list(train_df.iloc[:,2])))\n obj_split_targets = [t.split() for t in obj_targets]\n try:\n obj_split_targets = [list(map(int,x)) for x in obj_split_targets]\n except:\n pass\n dai_onehot,onehot_classes = one_hot(obj_split_targets,True)\n # train_df['one_hot'] = [torch.from_numpy(x).type(torch.FloatTensor) for x in dai_onehot]\n\n # class indexes\n\n c_names = list(onehot_classes)\n class_idx = [[c_names.index(i) for i in c] for c in obj_split_targets]\n zero_idx = np.zeros((len(targets),max(lengths)//4),dtype=int)\n # print(zero_idx.shape)\n for i,t in enumerate(zero_idx):\n # temp_l = len(class_idx[i])\n # if temp_l > 90:\n # print(i,temp_l)\n t[len(t)-len(class_idx[i]):] = class_idx[i]\n zero_idx[i] = t\n train_df.iloc[:,2] = [torch.from_numpy(z).type(torch.LongTensor) for z in zero_idx]\n self.data_dir,self.num_classes,self.class_names = data_path,len(onehot_classes),onehot_classes\n # self.set_up_object_detection([4,2,1],[0.7, 1., 1.3],[(1.,1.), (1.,0.5), (0.5,1.)])\n\n elif self.reg:\n print('\\nRegression\\n')\n int_targets = 
[list(map(int,x)) for x in split_targets]\n zero_targets = np.zeros((len(targets),max(lengths)),dtype=int)\n for i,t in enumerate(zero_targets):\n t[len(t)-len(int_targets[i]):] = int_targets[i]\n zero_targets[i] = t\n train_df.iloc[:,1] = [torch.from_numpy(z).type(torch.FloatTensor) for z in zero_targets]\n self.data_dir,self.num_classes,self.class_names = data_path, max(lengths),np.unique(zero_targets,axis=1)\n elif lengths[1:] != lengths[:-1]:\n self.multi_label = True\n print('\\nMulti-label Classification\\n')\n try:\n split_targets = [list(map(int,x)) for x in split_targets]\n except:\n pass\n dai_onehot,onehot_classes = one_hot(split_targets,self.multi_label)\n train_df.iloc[:,1] = [torch.from_numpy(x).type(torch.FloatTensor) for x in dai_onehot]\n self.data_dir,self.num_classes,self.class_names = data_path,len(onehot_classes),onehot_classes\n else:\n print('\\nSingle-label Classification\\n')\n unique_targets = list(np.unique(targets))\n target_ids = [unique_targets.index(x) for x in targets]\n train_df.iloc[:,1] = target_ids\n if val_csv:\n target_ids = [unique_targets.index(x) for x in val_targets]\n val_df.iloc[:,1] = target_ids\n if test_csv:\n target_ids = [unique_targets.index(x) for x in test_targets]\n test_df.iloc[:,1] = target_ids \n self.data_dir,self.num_classes,self.class_names = data_path,len(unique_targets),unique_targets\n\n # self.models_path = os.path.join(self.data_dir, 'models')\n # os.makedirs(self.models_path,exist_ok=True)\n\n if not val_csv:\n train_df,val_df = split_df(train_df,split_size)\n if not test_csv: \n val_df,test_df = split_df(val_df,split_size)\n tr_images = [str(x) for x in list(train_df.iloc[:,0])]\n val_images = [str(x) for x in list(val_df.iloc[:,0])]\n test_images = [str(x) for x in list(test_df.iloc[:,0])]\n if self.extension:\n tr_images = add_extension(tr_images,self.extension)\n val_images = add_extension(val_images,self.extension)\n test_images = add_extension(test_images,self.extension)\n train_df.iloc[:,0] = 
tr_images\n val_df.iloc[:,0] = val_images\n test_df.iloc[:,0] = test_images\n train_df.to_csv(os.path.join(data_path,'train.csv'),index=False)\n val_df.to_csv(os.path.join(data_path,'val.csv'),index=False)\n test_df.to_csv(os.path.join(data_path,'test.csv'),index=False)\n self.minorities,self.class_diffs = None,None\n if (not self.obj) or (not self.multi_label):\n self.minorities,self.class_diffs = get_minorities(train_df)\n self.data_dfs = {self.tr_name:train_df, self.val_name:val_df, self.test_name:test_df}\n data_dict = {'data_dfs':self.data_dfs,'data_dir':self.data_dir,'num_classes':self.num_classes,'class_names':self.class_names,\n 'minorities':self.minorities,'class_diffs':self.class_diffs,'obj':self.obj,'multi_label':self.multi_label}\n # save_obj(data_dict,os.path.join(self.data_dir,'data_dict.pkl'))\n self.data_dict = data_dict\n return data_dict\n\n def data_from_paths_to_csv(self,data_path,tr_path,val_path = None,test_path = None):\n \n train_df = csv_from_path(tr_path,tr_path)\n train_df.to_csv(os.path.join(data_path,self.tr_name+'.csv'),index=False)\n ret = (self.tr_name+'.csv',None)\n if val_path is not None:\n val_exists = os.path.exists(val_path)\n if val_exists:\n val_df = csv_from_path(val_path,tr_path)\n val_df.to_csv(os.path.join(data_path,self.val_name+'.csv'),index=False)\n ret = (self.tr_name+'.csv',self.val_name+'.csv')\n if test_path is not None:\n test_exists = os.path.exists(test_path)\n if test_exists:\n test_df = csv_from_path(test_path,tr_path)\n test_df.to_csv(os.path.join(data_path,self.test_name+'.csv'),index=False)\n ret = (self.tr_name+'.csv',self.val_name+'.csv',self.test_name+'.csv') \n return ret\n \n def get_data(self, data_dict = None, s = (224,224), dataset = my_image_csv_dataset, bs = 32, balance = False, tfms = None,\n bal_tfms = None, tta = False, num_workers = 4, stats_percentage = 0.6):\n \n self.image_size = s\n if not data_dict:\n data_dict = self.data_dict\n data_dfs,data_dir,minorities,class_diffs,obj,multi_label = 
(data_dict['data_dfs'],data_dict['data_dir'],data_dict['minorities'],\n data_dict['class_diffs'],data_dict['obj'],data_dict['multi_label'])\n if obj or multi_label:\n balance = False \n if tta:\n tta_tfms = {self.tr_name: transforms.Compose( \n [\n# transforms.TenCrop(s),\n transforms.FiveCrop(s[0]), \n transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops:torch.stack(\n [transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))\n \n ]),\n self.val_name: transforms.Compose(\n [\n# transforms.TenCrop(s),\n transforms.FiveCrop(s[0]),\n transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops:torch.stack(\n [transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))\n ]),\n self.test_name: transforms.Compose(\n [\n# transforms.TenCrop(s),\n transforms.FiveCrop(s[0]),\n transforms.Lambda(lambda crops:torch.stack([transforms.ToTensor()(crop) for crop in crops])),\n transforms.Lambda(lambda crops:torch.stack(\n [transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])(crop) for crop in crops]))\n ])}\n# tta_tfms = {self.tr_name: transforms.Compose([\n# transforms.Resize(s),\n# transforms.ToTensor(),\n# transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n# ]),\n# self.val_name: transforms.Compose([\n# transforms.Resize(s), \n# transforms.ToTensor(),\n# transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n# ]) }\n \n else:\n tta_tfms = None\n \n if not bal_tfms:\n bal_tfms = { self.tr_name: [transforms.RandomHorizontalFlip()],\n \n self.val_name: None,\n self.test_name: None \n }\n else:\n bal_tfms = {self.tr_name: bal_tfms, self.val_name: None, self.test_name: None}\n if obj:\n resize_transform = transforms.Resize(s)\n else:\n # resize_transform = transforms.RandomResizedCrop(s[0])\n resize_transform = 
transforms.Resize(s)\n if not tfms:\n tfms = [\n resize_transform,\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n ]\n else:\n \n tfms_temp = [\n resize_transform,\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n ]\n tfms_temp[1:1] = tfms\n tfms = tfms_temp\n print(tfms)\n \n data_transforms = {\n self.tr_name: tfms,\n self.val_name: [\n # transforms.Resize(s[0]+50),\n # transforms.CenterCrop(s[0]),\n transforms.Resize(s),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n ],\n self.test_name: [\n # transforms.Resize(s[0]+50),\n # transforms.CenterCrop(s[0]),\n transforms.Resize(s),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n ]\n }\n\n temp_tfms = [resize_transform, transforms.ToTensor()]\n temp_dataset = dataset(os.path.join(data_dir,self.tr_name),data_dfs[self.tr_name],temp_tfms)\n self.img_mean,self.img_std = get_img_stats(temp_dataset,stats_percentage)\n data_transforms[self.tr_name][-1].mean,data_transforms[self.tr_name][-1].std = self.img_mean,self.img_std\n data_transforms[self.val_name][-1].mean,data_transforms[self.val_name][-1].std = self.img_mean,self.img_std\n data_transforms[self.test_name][-1].mean,data_transforms[self.test_name][-1].std = self.img_mean,self.img_std\n\n if balance:\n image_datasets = {x: dataset(os.path.join(data_dir,self.tr_name),data_dfs[x],\n data_transforms[x],obj,minorities,class_diffs,bal_tfms[x])\n for x in [self.tr_name, self.val_name, self.test_name]} \n else:\n image_datasets = {x: dataset(os.path.join(data_dir,self.tr_name),data_dfs[x],\n data_transforms[x],obj)\n for x in [self.tr_name, self.val_name, self.test_name]}\n \n dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=bs,\n shuffle=True, num_workers=num_workers)\n for x in [self.tr_name, self.val_name, self.test_name]}\n dataset_sizes = {x: 
len(image_datasets[x]) for x in [self.tr_name, self.val_name, self.test_name]}\n \n self.image_datasets,self.dataloaders,self.dataset_sizes = (image_datasets,dataloaders,\n dataset_sizes)\n \n return image_datasets,dataloaders,dataset_sizes\n\n def imshow(self,inp, title=None):\n \n \"\"\"Imshow for Tensor.\"\"\"\n inp = self.denorm_img(inp)\n plt.imshow(inp)\n if title:\n plt.title(title)\n plt.pause(0.001)\n\n def denorm_img(self,inp,calculate = False):\n\n inp = inp.numpy().transpose((1, 2, 0))\n if calculate:\n mean = np.mean(inp)\n std = np.std(inp)\n else: \n mean = self.img_mean.numpy()\n std = self.img_std.numpy()\n inp = std * inp + mean\n inp = np.clip(inp, 0, 1)\n return inp \n \n def show_data(self,folder_name = 'train', size = (64,64), bs = 5):\n \n self.get_data(size,bs)\n batch = next(iter(self.dataloaders[folder_name]))\n inputs, classes = batch[0],batch[1]\n out = torchvision.utils.make_grid(inputs)\n if self.reg:\n print(classes)\n self.imshow(out, title=[x for x in classes]) \n elif self.multi_label:\n self.imshow(out, title=[self.class_names[np.nonzero(x.type(torch.LongTensor))] for x in classes]) \n else: \n self.imshow(out, title=[self.class_names[x] for x in classes])\n\n # def set_up_object_detection(self,anc_grids,anc_zooms,anc_ratios,num_colr = 12):\n\n # # print('Would you like to give your own values for anchor_grids, anchor_zooms,and anchor_ratios? 
The default values are: {}, {} and {}'\n # # .format(anc_grids,anc_zooms,anc_ratios))\n # # print('If so, you may call the function \"set_up_object_detection\" with your own paramteres.')\n\n # cmap = get_cmap(num_colr)\n # self.colr_list = [cmap(float(x)) for x in range(num_colr)]\n # self.num_colr = num_colr\n # self.create_anchors(anc_grids,anc_zooms,anc_ratios)\n # self.custom_head = SSD_MultiHead(self.k,self.num_classes,0.45,-4.)\n # self.loss_f = FocalLoss(self.num_classes)\n\n # def create_anchors(self,anc_grids,anc_zooms,anc_ratios):\n \n # anchor_scales = [(anz*i,anz*j) for anz in anc_zooms for (i,j) in anc_ratios]\n # k = len(anchor_scales)\n # anc_offsets = [1/(o*2) for o in anc_grids]\n # anc_x = np.concatenate([np.repeat(np.linspace(ao, 1-ao, ag), ag)\n # for ao,ag in zip(anc_offsets,anc_grids)])\n # anc_y = np.concatenate([np.tile(np.linspace(ao, 1-ao, ag), ag)\n # for ao,ag in zip(anc_offsets,anc_grids)])\n # anc_ctrs = np.repeat(np.stack([anc_x,anc_y], axis=1), k, axis=0)\n # anc_sizes = np.concatenate([np.array([[o/ag,p/ag] for i in range(ag*ag) for o,p in anchor_scales])\n # for ag in anc_grids])\n # grid_sizes = torch.tensor(np.concatenate([np.array(\n # [ 1/ag for i in range(ag*ag) for o,p in anchor_scales])\n # for ag in anc_grids])).float().unsqueeze(1).to(self.device)\n # anchors = torch.tensor(np.concatenate([anc_ctrs, anc_sizes], axis=1)).float().to(self.device)\n # anchor_cnr = hw2corners(anchors[:,:2], anchors[:,2:])\n # self.anchors,self.anchor_cnr,self.grid_sizes,self.k = anchors,anchor_cnr,grid_sizes,k \n\n\n\n\n\n\n\n\n",
"step-ids": [
15,
16,
19,
25,
29
]
}
|
[
15,
16,
19,
25,
29
] |
<|reserved_special_token_0|>
class France(BigFileSpider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def start_requests(self):
url = (
'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'
)
yield scrapy.Request(url, meta={'file_name': 'page-1.json'},
callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=
components(-2))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class France(BigFileSpider):
<|reserved_special_token_0|>
name = 'france'
data_type = 'release_package'
def start_requests(self):
url = (
'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'
)
yield scrapy.Request(url, meta={'file_name': 'page-1.json'},
callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=
components(-2))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class France(BigFileSpider):
"""
Domain
France
Swagger API documentation
https://doc.data.gouv.fr/api/reference/
"""
name = 'france'
data_type = 'release_package'
def start_requests(self):
url = (
'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'
)
yield scrapy.Request(url, meta={'file_name': 'page-1.json'},
callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=
components(-2))
<|reserved_special_token_1|>
import scrapy
from kingfisher_scrapy.base_spiders import BigFileSpider
from kingfisher_scrapy.util import components, handle_http_error
class France(BigFileSpider):
"""
Domain
France
Swagger API documentation
https://doc.data.gouv.fr/api/reference/
"""
name = 'france'
data_type = 'release_package'
def start_requests(self):
url = (
'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'
)
yield scrapy.Request(url, meta={'file_name': 'page-1.json'},
callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=
components(-2))
<|reserved_special_token_1|>
import scrapy
from kingfisher_scrapy.base_spiders import BigFileSpider
from kingfisher_scrapy.util import components, handle_http_error
class France(BigFileSpider):
"""
Domain
France
Swagger API documentation
https://doc.data.gouv.fr/api/reference/
"""
name = 'france'
# SimpleSpider
data_type = 'release_package'
def start_requests(self):
# A CKAN API JSON response.
# Ministère de l'économie, des finances et de la relance
# https://www.data.gouv.fr/fr/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/
url = 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers' \
'-consolides/'
yield scrapy.Request(url, meta={'file_name': 'page-1.json'}, callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=components(-2))
|
flexible
|
{
"blob_id": "369bffa21b5b8c0ca1d93da3aa30a38e2f4c82cc",
"index": 9451,
"step-1": "<mask token>\n\n\nclass France(BigFileSpider):\n <mask token>\n <mask token>\n <mask token>\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-2": "<mask token>\n\n\nclass France(BigFileSpider):\n <mask token>\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-3": "<mask token>\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-4": "import scrapy\nfrom kingfisher_scrapy.base_spiders import BigFileSpider\nfrom kingfisher_scrapy.util import components, handle_http_error\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-5": "import scrapy\n\nfrom kingfisher_scrapy.base_spiders import BigFileSpider\nfrom kingfisher_scrapy.util import components, handle_http_error\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n\n # SimpleSpider\n data_type = 'release_package'\n\n def start_requests(self):\n # A CKAN API JSON response.\n # Ministère de l'économie, des finances et de la relance\n # https://www.data.gouv.fr/fr/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/\n url = 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers' \\\n '-consolides/'\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'}, callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=components(-2))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pymysql.install_as_MySQLdb()
<|reserved_special_token_1|>
import pymysql
pymysql.install_as_MySQLdb()
<|reserved_special_token_1|>
import pymysql
pymysql.install_as_MySQLdb()
# from keras.models import load_model
# from keras.models import Model
# from ai import settings
#
# print('load model ...')
# model = load_model(settings.MODEL_PATH)
# model = Model(inputs=model.input, outputs=model.get_layer('dnsthree').output)
# print('load done.')
|
flexible
|
{
"blob_id": "b7d3af29e024b0b2cf5d2c054290f799eae7fed1",
"index": 4476,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npymysql.install_as_MySQLdb()\n",
"step-3": "import pymysql\npymysql.install_as_MySQLdb()\n",
"step-4": "import pymysql\n\npymysql.install_as_MySQLdb()\n\n# from keras.models import load_model\n# from keras.models import Model\n# from ai import settings\n#\n# print('load model ...')\n# model = load_model(settings.MODEL_PATH)\n# model = Model(inputs=model.input, outputs=model.get_layer('dnsthree').output)\n# print('load done.')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for j in range(0, len(df1)):
print(j)
user = []
proto = []
purity = []
card_name = []
card_effect = []
god = []
rarity = []
mana = []
type = []
set = []
print(df1['address'][j])
url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j
] + '&perPage=150000'
print(url1)
response = requests.request('GET', url1)
data = response.json()
number_cards = data['total']
if number_cards != 0:
for i in range(0, number_cards):
user.append(data['records'][i]['user'])
proto.append(data['records'][i]['proto'])
url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])
purity.append(data['records'][i]['purity'])
dict = {'user': user, 'proto_number': proto, 'purity': purity}
df = pd.DataFrame(dict)
path = 'C:\\Users\\...'
df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
col_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']
df1 = pd.read_csv('profiles.csv', usecols=col_list1)
for j in range(0, len(df1)):
print(j)
user = []
proto = []
purity = []
card_name = []
card_effect = []
god = []
rarity = []
mana = []
type = []
set = []
print(df1['address'][j])
url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j
] + '&perPage=150000'
print(url1)
response = requests.request('GET', url1)
data = response.json()
number_cards = data['total']
if number_cards != 0:
for i in range(0, number_cards):
user.append(data['records'][i]['user'])
proto.append(data['records'][i]['proto'])
url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])
purity.append(data['records'][i]['purity'])
dict = {'user': user, 'proto_number': proto, 'purity': purity}
df = pd.DataFrame(dict)
path = 'C:\\Users\\...'
df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)
<|reserved_special_token_1|>
import requests
import time
import csv
import os
import pandas as pd
col_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']
df1 = pd.read_csv('profiles.csv', usecols=col_list1)
for j in range(0, len(df1)):
print(j)
user = []
proto = []
purity = []
card_name = []
card_effect = []
god = []
rarity = []
mana = []
type = []
set = []
print(df1['address'][j])
url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j
] + '&perPage=150000'
print(url1)
response = requests.request('GET', url1)
data = response.json()
number_cards = data['total']
if number_cards != 0:
for i in range(0, number_cards):
user.append(data['records'][i]['user'])
proto.append(data['records'][i]['proto'])
url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])
purity.append(data['records'][i]['purity'])
dict = {'user': user, 'proto_number': proto, 'purity': purity}
df = pd.DataFrame(dict)
path = 'C:\\Users\\...'
df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)
<|reserved_special_token_1|>
import requests
import time
import csv
import os
import pandas as pd
col_list1 = ["cardtype","username_opensea", "address", "username_game"]
df1 = pd.read_csv("profiles.csv", usecols=col_list1)
#
for j in range(0,len(df1) ): #usernames in opensea
print(j)
user=[]
proto=[]
purity=[]
card_name=[]
card_effect=[]
god=[]
rarity=[]
mana=[]
type=[]
set=[]
print(df1['address'][j])
url1 = "https://api.godsunchained.com/v0/card?user="+df1['address'][j]+"&perPage=150000"
print (url1)
response = requests.request("GET", url1)
data = response.json()
number_cards=data['total']
if number_cards!=0:
for i in range(0, number_cards):
user.append(data['records'][i]['user'])
proto.append(data['records'][i]['proto'])
url2 = "https://api.godsunchained.com/v0/proto/" + str(proto[i])
purity.append(data['records'][i]['purity'])
# response2 = requests.request("GET", url2)
# data2 = response2.json()
# if data2['name']!=None:
# card_name.append(data2['name'])
# card_effect.append(data2['effect'])
# god.append(data2['god'])
# rarity.append(data2['rarity'])
# mana.append(data2['god'])
# type.append(data2['type'])
# set.append(data2['set'])
# else:
# card_name.append(None)
# card_effect.append(None)
# god.append(None)
# rarity.append(None)
# mana.append(None)
# type.append(None)
# set.append(None)
dict={
'user': user,
'proto_number': proto,
# 'card_name':card_name,
'purity': purity,
# 'card_effect': card_effect,
# 'god':god,
# 'rarity':rarity,
# 'mana': mana,
# 'type': type,
# 'set': set
}
df = pd.DataFrame(dict)
path = 'C:\\Users\\...'
df.to_csv(os.path.join(path, str(user[0]) + ".csv"), index=False)
|
flexible
|
{
"blob_id": "93909ab98f1141940e64e079e09834ae5ad3995f",
"index": 6537,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n",
"step-3": "<mask token>\ncol_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']\ndf1 = pd.read_csv('profiles.csv', usecols=col_list1)\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n",
"step-4": "import requests\nimport time\nimport csv\nimport os\nimport pandas as pd\ncol_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']\ndf1 = pd.read_csv('profiles.csv', usecols=col_list1)\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n",
"step-5": "import requests\r\nimport time\r\nimport csv\r\nimport os\r\nimport pandas as pd\r\n\r\ncol_list1 = [\"cardtype\",\"username_opensea\", \"address\", \"username_game\"]\r\ndf1 = pd.read_csv(\"profiles.csv\", usecols=col_list1)\r\n\r\n\r\n\r\n#\r\nfor j in range(0,len(df1) ): #usernames in opensea\r\n print(j)\r\n user=[]\r\n proto=[]\r\n purity=[]\r\n card_name=[]\r\n card_effect=[]\r\n god=[]\r\n rarity=[]\r\n mana=[]\r\n type=[]\r\n set=[]\r\n\r\n print(df1['address'][j])\r\n\r\n url1 = \"https://api.godsunchained.com/v0/card?user=\"+df1['address'][j]+\"&perPage=150000\"\r\n print (url1)\r\n response = requests.request(\"GET\", url1)\r\n data = response.json()\r\n\r\n\r\n number_cards=data['total']\r\n if number_cards!=0:\r\n for i in range(0, number_cards):\r\n user.append(data['records'][i]['user'])\r\n proto.append(data['records'][i]['proto'])\r\n url2 = \"https://api.godsunchained.com/v0/proto/\" + str(proto[i])\r\n \r\n purity.append(data['records'][i]['purity'])\r\n \r\n # response2 = requests.request(\"GET\", url2)\r\n # data2 = response2.json()\r\n \r\n # if data2['name']!=None:\r\n # card_name.append(data2['name'])\r\n # card_effect.append(data2['effect'])\r\n # god.append(data2['god'])\r\n # rarity.append(data2['rarity'])\r\n # mana.append(data2['god'])\r\n # type.append(data2['type'])\r\n # set.append(data2['set'])\r\n # else:\r\n # card_name.append(None)\r\n # card_effect.append(None)\r\n # god.append(None)\r\n # rarity.append(None)\r\n # mana.append(None)\r\n # type.append(None)\r\n # set.append(None)\r\n \r\n \r\n dict={\r\n 'user': user,\r\n 'proto_number': proto,\r\n # 'card_name':card_name,\r\n 'purity': purity,\r\n # 'card_effect': card_effect,\r\n # 'god':god,\r\n # 'rarity':rarity,\r\n # 'mana': mana,\r\n # 'type': type,\r\n # 'set': set\r\n }\r\n \r\n df = pd.DataFrame(dict)\r\n \r\n path = 'C:\\\\Users\\\\...'\r\n df.to_csv(os.path.join(path, str(user[0]) + \".csv\"), index=False)\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy
from scipy.optimize import OptimizeResult
from logging import getLogger
logger = getLogger(__name__)
def minimize_neldermead(func, x0, args=(), callback=None,
maxiter=None, maxfev=None, disp=False,
return_all=False, initial_simplex=None,
xatol=1e-4, fatol=1e-4, **unknown_options):
"""
Minimization of scalar function of one or more variables using the
Nelder-Mead algorithm.
Options
-------
disp : bool
Set to True to print convergence messages.
maxiter, maxfev : int
Maximum allowed number of iterations and function evaluations.
Will default to ``N*200``, where ``N`` is the number of
variables, if neither `maxiter` or `maxfev` is set. If both
`maxiter` and `maxfev` are set, minimization will stop at the
first reached.
initial_simplex : array_like of shape (N + 1, N)
Initial simplex. If given, overrides `x0`.
``initial_simplex[j,:]`` should contain the coordinates of
the j-th vertex of the ``N+1`` vertices in the simplex, where
``N`` is the dimension.
xatol : float, optional
Absolute error in xopt between iterations that is acceptable for
convergence.
fatol : number, optional
Absolute error in func(xopt) between iterations that is acceptable for
convergence.
"""
maxfun = maxfev
retall = return_all
rho = 1
chi = 2
psi = 0.5
sigma = 0.5
nonzdelt = 0.05
zdelt = 0.00025
if initial_simplex is None:
N = len(x0)
sim = numpy.zeros((N + 1, N), dtype=x0.dtype)
sim[0] = x0
for k in range(N):
y = numpy.array(x0, copy=True)
if y[k] != 0:
y[k] = (1 + nonzdelt) * y[k]
else:
y[k] = zdelt
sim[k + 1] = y
maxiter = 10
maxfun = 10
one2np1 = list(range(1, N + 1))
fsim = numpy.zeros((N + 1,), float)
for k in range(N + 1):
fsim[k] = func(sim[k])
ind = numpy.argsort(fsim)
fsim = numpy.take(fsim, ind, 0)
# sort so sim[0,:] has the lowest function value
sim = numpy.take(sim, ind, 0)
raise Exception()
print('aaaaffaaaaaa')
iterations = 1
while iterations < maxiter:
if (numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))) <= xatol and
numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol):
break
logger.debug('itr: %s' % iterations)
print('aaaaaaaaaa')
xbar = numpy.add.reduce(sim[:-1], 0) / N
xr = (1 + rho) * xbar - rho * sim[-1]
fxr = func(xr)
doshrink = 0
if fxr < fsim[0]:
xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]
fxe = func(xe)
if fxe < fxr:
sim[-1] = xe
fsim[-1] = fxe
else:
sim[-1] = xr
fsim[-1] = fxr
else: # fsim[0] <= fxr
if fxr < fsim[-2]:
sim[-1] = xr
fsim[-1] = fxr
else: # fxr >= fsim[-2]
# Perform contraction
if fxr < fsim[-1]:
xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]
fxc = func(xc)
if fxc <= fxr:
sim[-1] = xc
fsim[-1] = fxc
else:
doshrink = 1
else:
# Perform an inside contraction
xcc = (1 - psi) * xbar + psi * sim[-1]
fxcc = func(xcc)
if fxcc < fsim[-1]:
sim[-1] = xcc
fsim[-1] = fxcc
else:
doshrink = 1
if doshrink:
for j in one2np1:
sim[j] = sim[0] + sigma * (sim[j] - sim[0])
fsim[j] = func(sim[j])
ind = numpy.argsort(fsim)
sim = numpy.take(sim, ind, 0)
fsim = numpy.take(fsim, ind, 0)
if callback is not None:
callback(sim[0])
iterations += 1
x = sim[0]
fval = numpy.min(fsim)
warnflag = 0
result = OptimizeResult(fun=fval, nit=iterations, nfev=0,
status=warnflag, success=(warnflag == 0),
message=None, x=x, final_simplex=(sim, fsim))
return result
|
normal
|
{
"blob_id": "35921b081e8e8c4da2b16afc20b27b636e9a6676",
"index": 4761,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef minimize_neldermead(func, x0, args=(), callback=None, maxiter=None,\n maxfev=None, disp=False, return_all=False, initial_simplex=None, xatol=\n 0.0001, fatol=0.0001, **unknown_options):\n \"\"\"\n Minimization of scalar function of one or more variables using the\n Nelder-Mead algorithm.\n Options\n -------\n disp : bool\n Set to True to print convergence messages.\n maxiter, maxfev : int\n Maximum allowed number of iterations and function evaluations.\n Will default to ``N*200``, where ``N`` is the number of\n variables, if neither `maxiter` or `maxfev` is set. If both\n `maxiter` and `maxfev` are set, minimization will stop at the\n first reached.\n initial_simplex : array_like of shape (N + 1, N)\n Initial simplex. If given, overrides `x0`.\n ``initial_simplex[j,:]`` should contain the coordinates of\n the j-th vertex of the ``N+1`` vertices in the simplex, where\n ``N`` is the dimension.\n xatol : float, optional\n Absolute error in xopt between iterations that is acceptable for\n convergence.\n fatol : number, optional\n Absolute error in func(xopt) between iterations that is acceptable for\n convergence.\n \"\"\"\n maxfun = maxfev\n retall = return_all\n rho = 1\n chi = 2\n psi = 0.5\n sigma = 0.5\n nonzdelt = 0.05\n zdelt = 0.00025\n if initial_simplex is None:\n N = len(x0)\n sim = numpy.zeros((N + 1, N), dtype=x0.dtype)\n sim[0] = x0\n for k in range(N):\n y = numpy.array(x0, copy=True)\n if y[k] != 0:\n y[k] = (1 + nonzdelt) * y[k]\n else:\n y[k] = zdelt\n sim[k + 1] = y\n maxiter = 10\n maxfun = 10\n one2np1 = list(range(1, N + 1))\n fsim = numpy.zeros((N + 1,), float)\n for k in range(N + 1):\n fsim[k] = func(sim[k])\n ind = numpy.argsort(fsim)\n fsim = numpy.take(fsim, ind, 0)\n sim = numpy.take(sim, ind, 0)\n raise Exception()\n print('aaaaffaaaaaa')\n iterations = 1\n while iterations < maxiter:\n if numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))\n ) <= xatol and numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= 
fatol:\n break\n logger.debug('itr: %s' % iterations)\n print('aaaaaaaaaa')\n xbar = numpy.add.reduce(sim[:-1], 0) / N\n xr = (1 + rho) * xbar - rho * sim[-1]\n fxr = func(xr)\n doshrink = 0\n if fxr < fsim[0]:\n xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]\n fxe = func(xe)\n if fxe < fxr:\n sim[-1] = xe\n fsim[-1] = fxe\n else:\n sim[-1] = xr\n fsim[-1] = fxr\n elif fxr < fsim[-2]:\n sim[-1] = xr\n fsim[-1] = fxr\n else:\n if fxr < fsim[-1]:\n xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]\n fxc = func(xc)\n if fxc <= fxr:\n sim[-1] = xc\n fsim[-1] = fxc\n else:\n doshrink = 1\n else:\n xcc = (1 - psi) * xbar + psi * sim[-1]\n fxcc = func(xcc)\n if fxcc < fsim[-1]:\n sim[-1] = xcc\n fsim[-1] = fxcc\n else:\n doshrink = 1\n if doshrink:\n for j in one2np1:\n sim[j] = sim[0] + sigma * (sim[j] - sim[0])\n fsim[j] = func(sim[j])\n ind = numpy.argsort(fsim)\n sim = numpy.take(sim, ind, 0)\n fsim = numpy.take(fsim, ind, 0)\n if callback is not None:\n callback(sim[0])\n iterations += 1\n x = sim[0]\n fval = numpy.min(fsim)\n warnflag = 0\n result = OptimizeResult(fun=fval, nit=iterations, nfev=0, status=\n warnflag, success=warnflag == 0, message=None, x=x, final_simplex=(\n sim, fsim))\n return result\n",
"step-3": "<mask token>\nlogger = getLogger(__name__)\n\n\ndef minimize_neldermead(func, x0, args=(), callback=None, maxiter=None,\n maxfev=None, disp=False, return_all=False, initial_simplex=None, xatol=\n 0.0001, fatol=0.0001, **unknown_options):\n \"\"\"\n Minimization of scalar function of one or more variables using the\n Nelder-Mead algorithm.\n Options\n -------\n disp : bool\n Set to True to print convergence messages.\n maxiter, maxfev : int\n Maximum allowed number of iterations and function evaluations.\n Will default to ``N*200``, where ``N`` is the number of\n variables, if neither `maxiter` or `maxfev` is set. If both\n `maxiter` and `maxfev` are set, minimization will stop at the\n first reached.\n initial_simplex : array_like of shape (N + 1, N)\n Initial simplex. If given, overrides `x0`.\n ``initial_simplex[j,:]`` should contain the coordinates of\n the j-th vertex of the ``N+1`` vertices in the simplex, where\n ``N`` is the dimension.\n xatol : float, optional\n Absolute error in xopt between iterations that is acceptable for\n convergence.\n fatol : number, optional\n Absolute error in func(xopt) between iterations that is acceptable for\n convergence.\n \"\"\"\n maxfun = maxfev\n retall = return_all\n rho = 1\n chi = 2\n psi = 0.5\n sigma = 0.5\n nonzdelt = 0.05\n zdelt = 0.00025\n if initial_simplex is None:\n N = len(x0)\n sim = numpy.zeros((N + 1, N), dtype=x0.dtype)\n sim[0] = x0\n for k in range(N):\n y = numpy.array(x0, copy=True)\n if y[k] != 0:\n y[k] = (1 + nonzdelt) * y[k]\n else:\n y[k] = zdelt\n sim[k + 1] = y\n maxiter = 10\n maxfun = 10\n one2np1 = list(range(1, N + 1))\n fsim = numpy.zeros((N + 1,), float)\n for k in range(N + 1):\n fsim[k] = func(sim[k])\n ind = numpy.argsort(fsim)\n fsim = numpy.take(fsim, ind, 0)\n sim = numpy.take(sim, ind, 0)\n raise Exception()\n print('aaaaffaaaaaa')\n iterations = 1\n while iterations < maxiter:\n if numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))\n ) <= xatol and 
numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol:\n break\n logger.debug('itr: %s' % iterations)\n print('aaaaaaaaaa')\n xbar = numpy.add.reduce(sim[:-1], 0) / N\n xr = (1 + rho) * xbar - rho * sim[-1]\n fxr = func(xr)\n doshrink = 0\n if fxr < fsim[0]:\n xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]\n fxe = func(xe)\n if fxe < fxr:\n sim[-1] = xe\n fsim[-1] = fxe\n else:\n sim[-1] = xr\n fsim[-1] = fxr\n elif fxr < fsim[-2]:\n sim[-1] = xr\n fsim[-1] = fxr\n else:\n if fxr < fsim[-1]:\n xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]\n fxc = func(xc)\n if fxc <= fxr:\n sim[-1] = xc\n fsim[-1] = fxc\n else:\n doshrink = 1\n else:\n xcc = (1 - psi) * xbar + psi * sim[-1]\n fxcc = func(xcc)\n if fxcc < fsim[-1]:\n sim[-1] = xcc\n fsim[-1] = fxcc\n else:\n doshrink = 1\n if doshrink:\n for j in one2np1:\n sim[j] = sim[0] + sigma * (sim[j] - sim[0])\n fsim[j] = func(sim[j])\n ind = numpy.argsort(fsim)\n sim = numpy.take(sim, ind, 0)\n fsim = numpy.take(fsim, ind, 0)\n if callback is not None:\n callback(sim[0])\n iterations += 1\n x = sim[0]\n fval = numpy.min(fsim)\n warnflag = 0\n result = OptimizeResult(fun=fval, nit=iterations, nfev=0, status=\n warnflag, success=warnflag == 0, message=None, x=x, final_simplex=(\n sim, fsim))\n return result\n",
"step-4": "import numpy\nfrom scipy.optimize import OptimizeResult\nfrom logging import getLogger\nlogger = getLogger(__name__)\n\n\ndef minimize_neldermead(func, x0, args=(), callback=None, maxiter=None,\n maxfev=None, disp=False, return_all=False, initial_simplex=None, xatol=\n 0.0001, fatol=0.0001, **unknown_options):\n \"\"\"\n Minimization of scalar function of one or more variables using the\n Nelder-Mead algorithm.\n Options\n -------\n disp : bool\n Set to True to print convergence messages.\n maxiter, maxfev : int\n Maximum allowed number of iterations and function evaluations.\n Will default to ``N*200``, where ``N`` is the number of\n variables, if neither `maxiter` or `maxfev` is set. If both\n `maxiter` and `maxfev` are set, minimization will stop at the\n first reached.\n initial_simplex : array_like of shape (N + 1, N)\n Initial simplex. If given, overrides `x0`.\n ``initial_simplex[j,:]`` should contain the coordinates of\n the j-th vertex of the ``N+1`` vertices in the simplex, where\n ``N`` is the dimension.\n xatol : float, optional\n Absolute error in xopt between iterations that is acceptable for\n convergence.\n fatol : number, optional\n Absolute error in func(xopt) between iterations that is acceptable for\n convergence.\n \"\"\"\n maxfun = maxfev\n retall = return_all\n rho = 1\n chi = 2\n psi = 0.5\n sigma = 0.5\n nonzdelt = 0.05\n zdelt = 0.00025\n if initial_simplex is None:\n N = len(x0)\n sim = numpy.zeros((N + 1, N), dtype=x0.dtype)\n sim[0] = x0\n for k in range(N):\n y = numpy.array(x0, copy=True)\n if y[k] != 0:\n y[k] = (1 + nonzdelt) * y[k]\n else:\n y[k] = zdelt\n sim[k + 1] = y\n maxiter = 10\n maxfun = 10\n one2np1 = list(range(1, N + 1))\n fsim = numpy.zeros((N + 1,), float)\n for k in range(N + 1):\n fsim[k] = func(sim[k])\n ind = numpy.argsort(fsim)\n fsim = numpy.take(fsim, ind, 0)\n sim = numpy.take(sim, ind, 0)\n raise Exception()\n print('aaaaffaaaaaa')\n iterations = 1\n while iterations < maxiter:\n if 
numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))\n ) <= xatol and numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol:\n break\n logger.debug('itr: %s' % iterations)\n print('aaaaaaaaaa')\n xbar = numpy.add.reduce(sim[:-1], 0) / N\n xr = (1 + rho) * xbar - rho * sim[-1]\n fxr = func(xr)\n doshrink = 0\n if fxr < fsim[0]:\n xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]\n fxe = func(xe)\n if fxe < fxr:\n sim[-1] = xe\n fsim[-1] = fxe\n else:\n sim[-1] = xr\n fsim[-1] = fxr\n elif fxr < fsim[-2]:\n sim[-1] = xr\n fsim[-1] = fxr\n else:\n if fxr < fsim[-1]:\n xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]\n fxc = func(xc)\n if fxc <= fxr:\n sim[-1] = xc\n fsim[-1] = fxc\n else:\n doshrink = 1\n else:\n xcc = (1 - psi) * xbar + psi * sim[-1]\n fxcc = func(xcc)\n if fxcc < fsim[-1]:\n sim[-1] = xcc\n fsim[-1] = fxcc\n else:\n doshrink = 1\n if doshrink:\n for j in one2np1:\n sim[j] = sim[0] + sigma * (sim[j] - sim[0])\n fsim[j] = func(sim[j])\n ind = numpy.argsort(fsim)\n sim = numpy.take(sim, ind, 0)\n fsim = numpy.take(fsim, ind, 0)\n if callback is not None:\n callback(sim[0])\n iterations += 1\n x = sim[0]\n fval = numpy.min(fsim)\n warnflag = 0\n result = OptimizeResult(fun=fval, nit=iterations, nfev=0, status=\n warnflag, success=warnflag == 0, message=None, x=x, final_simplex=(\n sim, fsim))\n return result\n",
"step-5": "import numpy\nfrom scipy.optimize import OptimizeResult\n\nfrom logging import getLogger\n\nlogger = getLogger(__name__)\n\n\ndef minimize_neldermead(func, x0, args=(), callback=None,\n maxiter=None, maxfev=None, disp=False,\n return_all=False, initial_simplex=None,\n xatol=1e-4, fatol=1e-4, **unknown_options):\n \"\"\"\n Minimization of scalar function of one or more variables using the\n Nelder-Mead algorithm.\n Options\n -------\n disp : bool\n Set to True to print convergence messages.\n maxiter, maxfev : int\n Maximum allowed number of iterations and function evaluations.\n Will default to ``N*200``, where ``N`` is the number of\n variables, if neither `maxiter` or `maxfev` is set. If both\n `maxiter` and `maxfev` are set, minimization will stop at the\n first reached.\n initial_simplex : array_like of shape (N + 1, N)\n Initial simplex. If given, overrides `x0`.\n ``initial_simplex[j,:]`` should contain the coordinates of\n the j-th vertex of the ``N+1`` vertices in the simplex, where\n ``N`` is the dimension.\n xatol : float, optional\n Absolute error in xopt between iterations that is acceptable for\n convergence.\n fatol : number, optional\n Absolute error in func(xopt) between iterations that is acceptable for\n convergence.\n \"\"\"\n maxfun = maxfev\n retall = return_all\n\n rho = 1\n chi = 2\n psi = 0.5\n sigma = 0.5\n nonzdelt = 0.05\n zdelt = 0.00025\n\n if initial_simplex is None:\n N = len(x0)\n\n sim = numpy.zeros((N + 1, N), dtype=x0.dtype)\n sim[0] = x0\n for k in range(N):\n y = numpy.array(x0, copy=True)\n if y[k] != 0:\n y[k] = (1 + nonzdelt) * y[k]\n else:\n y[k] = zdelt\n sim[k + 1] = y\n\n maxiter = 10\n maxfun = 10\n\n one2np1 = list(range(1, N + 1))\n fsim = numpy.zeros((N + 1,), float)\n\n for k in range(N + 1):\n fsim[k] = func(sim[k])\n\n ind = numpy.argsort(fsim)\n fsim = numpy.take(fsim, ind, 0)\n # sort so sim[0,:] has the lowest function value\n sim = numpy.take(sim, ind, 0)\n raise Exception()\n 
print('aaaaffaaaaaa')\n\n iterations = 1\n\n while iterations < maxiter:\n if (numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))) <= xatol and\n numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol):\n break\n logger.debug('itr: %s' % iterations)\n print('aaaaaaaaaa')\n xbar = numpy.add.reduce(sim[:-1], 0) / N\n xr = (1 + rho) * xbar - rho * sim[-1]\n fxr = func(xr)\n doshrink = 0\n\n if fxr < fsim[0]:\n xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]\n fxe = func(xe)\n\n if fxe < fxr:\n sim[-1] = xe\n fsim[-1] = fxe\n else:\n sim[-1] = xr\n fsim[-1] = fxr\n else: # fsim[0] <= fxr\n if fxr < fsim[-2]:\n sim[-1] = xr\n fsim[-1] = fxr\n else: # fxr >= fsim[-2]\n # Perform contraction\n if fxr < fsim[-1]:\n xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]\n fxc = func(xc)\n\n if fxc <= fxr:\n sim[-1] = xc\n fsim[-1] = fxc\n else:\n doshrink = 1\n else:\n # Perform an inside contraction\n xcc = (1 - psi) * xbar + psi * sim[-1]\n fxcc = func(xcc)\n\n if fxcc < fsim[-1]:\n sim[-1] = xcc\n fsim[-1] = fxcc\n else:\n doshrink = 1\n\n if doshrink:\n for j in one2np1:\n sim[j] = sim[0] + sigma * (sim[j] - sim[0])\n fsim[j] = func(sim[j])\n\n ind = numpy.argsort(fsim)\n sim = numpy.take(sim, ind, 0)\n fsim = numpy.take(fsim, ind, 0)\n if callback is not None:\n callback(sim[0])\n iterations += 1\n\n x = sim[0]\n fval = numpy.min(fsim)\n warnflag = 0\n\n result = OptimizeResult(fun=fval, nit=iterations, nfev=0,\n status=warnflag, success=(warnflag == 0),\n message=None, x=x, final_simplex=(sim, fsim))\n return result\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.