code
stringlengths
13
6.09M
order_type
stringclasses
2 values
original_example
dict
step_ids
listlengths
1
5
<|reserved_special_token_0|> class Fine(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Meta: db_table = 'fines' verbose_name_plural = 'Fines' verbose_name = 'Fine' <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Fine(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Meta: db_table = 'fines' verbose_name_plural = 'Fines' verbose_name = 'Fine' def __str__(self): return str(self.amount) <|reserved_special_token_1|> <|reserved_special_token_0|> class Fine(models.Model): amount = models.DecimalField(max_digits=8, decimal_places=2, null=True, default=0) student = models.OneToOneField(Student, on_delete=models.DO_NOTHING) timestamp = models.DateField(auto_now_add=True) updated_at = models.DateField(auto_now=True) class Meta: db_table = 'fines' verbose_name_plural = 'Fines' verbose_name = 'Fine' def __str__(self): return str(self.amount) <|reserved_special_token_1|> from django.contrib.auth.decorators import permission_required from django.db import models from students.models import Student class Fine(models.Model): amount = models.DecimalField(max_digits=8, decimal_places=2, null=True, default=0) student = models.OneToOneField(Student, on_delete=models.DO_NOTHING) timestamp = models.DateField(auto_now_add=True) updated_at = models.DateField(auto_now=True) class Meta: db_table = 'fines' verbose_name_plural = 'Fines' verbose_name = 'Fine' def __str__(self): return str(self.amount) <|reserved_special_token_1|> from django.contrib.auth.decorators import permission_required from django.db import models from students.models import Student # Create your models here. 
class Fine(models.Model): amount = models.DecimalField(max_digits=8, decimal_places=2, null=True, default=0) student = models.OneToOneField(Student, on_delete=models.DO_NOTHING) timestamp = models.DateField(auto_now_add=True) updated_at = models.DateField(auto_now=True) class Meta: db_table = 'fines' verbose_name_plural = 'Fines' verbose_name = 'Fine' def __str__(self): return str(self.amount)
flexible
{ "blob_id": "22b697790516e1160ac501a58ad93ef5b579414a", "index": 7109, "step-1": "<mask token>\n\n\nclass Fine(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'fines'\n verbose_name_plural = 'Fines'\n verbose_name = 'Fine'\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Fine(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'fines'\n verbose_name_plural = 'Fines'\n verbose_name = 'Fine'\n\n def __str__(self):\n return str(self.amount)\n", "step-3": "<mask token>\n\n\nclass Fine(models.Model):\n amount = models.DecimalField(max_digits=8, decimal_places=2, null=True,\n default=0)\n student = models.OneToOneField(Student, on_delete=models.DO_NOTHING)\n timestamp = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n\n class Meta:\n db_table = 'fines'\n verbose_name_plural = 'Fines'\n verbose_name = 'Fine'\n\n def __str__(self):\n return str(self.amount)\n", "step-4": "from django.contrib.auth.decorators import permission_required\nfrom django.db import models\nfrom students.models import Student\n\n\nclass Fine(models.Model):\n amount = models.DecimalField(max_digits=8, decimal_places=2, null=True,\n default=0)\n student = models.OneToOneField(Student, on_delete=models.DO_NOTHING)\n timestamp = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n\n class Meta:\n db_table = 'fines'\n verbose_name_plural = 'Fines'\n verbose_name = 'Fine'\n\n def __str__(self):\n return str(self.amount)\n", "step-5": "from django.contrib.auth.decorators import permission_required\nfrom django.db import models\nfrom students.models import Student\n\n\n# Create your models here.\n\nclass Fine(models.Model):\n amount = models.DecimalField(max_digits=8, decimal_places=2, null=True, default=0)\n student = models.OneToOneField(Student, on_delete=models.DO_NOTHING)\n timestamp = 
models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n class Meta:\n db_table = 'fines'\n verbose_name_plural = 'Fines'\n verbose_name = 'Fine'\n\n def __str__(self):\n return str(self.amount)\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import asyncio def callback(): print('callback invoked') def stopper(loop): print('stopper invoked') loop.stop() event_loop = asyncio.get_event_loop() try: print('registering callbacks') # the callbacks are invoked in the order they are scheduled event_loop.call_soon(callback) event_loop.call_soon(stopper, event_loop) print('entering event loop') event_loop.run_forever() finally: print('closing event loop') event_loop.close()
normal
{ "blob_id": "3b96cc4ef538a06251958495e36fe5dbdf80c13d", "index": 4952, "step-1": "<mask token>\n\n\ndef callback():\n print('callback invoked')\n\n\ndef stopper(loop):\n print('stopper invoked')\n loop.stop()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef callback():\n print('callback invoked')\n\n\ndef stopper(loop):\n print('stopper invoked')\n loop.stop()\n\n\n<mask token>\ntry:\n print('registering callbacks')\n event_loop.call_soon(callback)\n event_loop.call_soon(stopper, event_loop)\n print('entering event loop')\n event_loop.run_forever()\nfinally:\n print('closing event loop')\n event_loop.close()\n", "step-3": "<mask token>\n\n\ndef callback():\n print('callback invoked')\n\n\ndef stopper(loop):\n print('stopper invoked')\n loop.stop()\n\n\nevent_loop = asyncio.get_event_loop()\ntry:\n print('registering callbacks')\n event_loop.call_soon(callback)\n event_loop.call_soon(stopper, event_loop)\n print('entering event loop')\n event_loop.run_forever()\nfinally:\n print('closing event loop')\n event_loop.close()\n", "step-4": "import asyncio\n\n\ndef callback():\n print('callback invoked')\n\n\ndef stopper(loop):\n print('stopper invoked')\n loop.stop()\n\n\nevent_loop = asyncio.get_event_loop()\ntry:\n print('registering callbacks')\n event_loop.call_soon(callback)\n event_loop.call_soon(stopper, event_loop)\n print('entering event loop')\n event_loop.run_forever()\nfinally:\n print('closing event loop')\n event_loop.close()\n", "step-5": "import asyncio\n\ndef callback():\n print('callback invoked')\n\ndef stopper(loop):\n print('stopper invoked')\n loop.stop()\n\nevent_loop = asyncio.get_event_loop()\ntry:\n print('registering callbacks')\n # the callbacks are invoked in the order they are scheduled\n event_loop.call_soon(callback)\n event_loop.call_soon(stopper, event_loop)\n print('entering event loop')\n event_loop.run_forever()\nfinally:\n print('closing event loop')\n event_loop.close()\n\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
#!/usr/bin/python3 print("content-type: text/html") print() import subprocess import cgi form=cgi.FieldStorage() osname=form.getvalue("x") command="sudo docker stop {}".format(osname) output=subprocess.getstatusoutput(command) status=output[0] info=output[1] if status==0: print("{} OS is stopped succesfully....".format(osname)) else: print("some error: {}".format(info))
normal
{ "blob_id": "1d2dae7f1d937bdd9a6044b23f8f1897e61dac23", "index": 6330, "step-1": "<mask token>\n", "step-2": "print('content-type: text/html')\nprint()\n<mask token>\nif status == 0:\n print('{} OS is stopped succesfully....'.format(osname))\nelse:\n print('some error: {}'.format(info))\n", "step-3": "print('content-type: text/html')\nprint()\n<mask token>\nform = cgi.FieldStorage()\nosname = form.getvalue('x')\ncommand = 'sudo docker stop {}'.format(osname)\noutput = subprocess.getstatusoutput(command)\nstatus = output[0]\ninfo = output[1]\nif status == 0:\n print('{} OS is stopped succesfully....'.format(osname))\nelse:\n print('some error: {}'.format(info))\n", "step-4": "print('content-type: text/html')\nprint()\nimport subprocess\nimport cgi\nform = cgi.FieldStorage()\nosname = form.getvalue('x')\ncommand = 'sudo docker stop {}'.format(osname)\noutput = subprocess.getstatusoutput(command)\nstatus = output[0]\ninfo = output[1]\nif status == 0:\n print('{} OS is stopped succesfully....'.format(osname))\nelse:\n print('some error: {}'.format(info))\n", "step-5": "#!/usr/bin/python3\nprint(\"content-type: text/html\")\nprint()\nimport subprocess\nimport cgi\nform=cgi.FieldStorage()\nosname=form.getvalue(\"x\")\ncommand=\"sudo docker stop {}\".format(osname)\noutput=subprocess.getstatusoutput(command)\nstatus=output[0]\ninfo=output[1]\nif status==0:\n print(\"{} OS is stopped succesfully....\".format(osname))\nelse:\n print(\"some error: {}\".format(info))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> try: copyfile(serial_filename(), temp_filename) serial_output_code.serial_output_code() with open(serial_filename(), 'rb') as f: qmc_out = pickle.load(f) with open(temp_filename, 'rb') as f: old_out = pickle.load(f) finally: copyfile(temp_filename, serial_filename()) remove(temp_filename) assert qmc_out[0] == old_out[0] print(len(qmc_out)) print(len(old_out)) assert len(qmc_out) == len(old_out) + 1 for ii in range(1, len(old_out)): assert len(old_out[ii]) == len(qmc_out[ii]) for jj in range(len(qmc_out[1])): assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj])) <|reserved_special_token_1|> <|reserved_special_token_0|> temp_filename = 'temp.pickle' try: copyfile(serial_filename(), temp_filename) serial_output_code.serial_output_code() with open(serial_filename(), 'rb') as f: qmc_out = pickle.load(f) with open(temp_filename, 'rb') as f: old_out = pickle.load(f) finally: copyfile(temp_filename, serial_filename()) remove(temp_filename) assert qmc_out[0] == old_out[0] print(len(qmc_out)) print(len(old_out)) assert len(qmc_out) == len(old_out) + 1 for ii in range(1, len(old_out)): assert len(old_out[ii]) == len(qmc_out[ii]) for jj in range(len(qmc_out[1])): assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj])) <|reserved_special_token_1|> import pickle from generation_code import serial_filename import serial_output_code import numpy as np from shutil import copyfile from os import remove temp_filename = 'temp.pickle' try: copyfile(serial_filename(), temp_filename) serial_output_code.serial_output_code() with open(serial_filename(), 'rb') as f: qmc_out = pickle.load(f) with open(temp_filename, 'rb') as f: old_out = pickle.load(f) finally: copyfile(temp_filename, serial_filename()) remove(temp_filename) assert qmc_out[0] == old_out[0] print(len(qmc_out)) print(len(old_out)) assert len(qmc_out) == len(old_out) + 1 for ii in range(1, len(old_out)): assert len(old_out[ii]) == 
len(qmc_out[ii]) for jj in range(len(qmc_out[1])): assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj])) <|reserved_special_token_1|> import pickle from generation_code import serial_filename import serial_output_code import numpy as np from shutil import copyfile from os import remove # This file is only temporary, mostly to be used when updating the # reference output from a regression test, to ensure that, in all # aspects that are in common with the previosu regression test, the new # solution is the same. # It is largely the same as test_serial_code.py temp_filename = 'temp.pickle' try: # Copy reference output to temporary location copyfile(serial_filename(),temp_filename) # Run serial code serial_output_code.serial_output_code() with open(serial_filename(),'rb') as f: qmc_out = pickle.load(f) with open(temp_filename,'rb') as f: old_out = pickle.load(f) finally: # Copy reference output back copyfile(temp_filename,serial_filename()) # Remove temporary file remove(temp_filename) assert qmc_out[0] == old_out[0] # should be a float print(len(qmc_out)) print(len(old_out)) assert len(qmc_out) == (len(old_out) + 1) # Because we've added in a new output for ii in range(1,len(old_out)): assert(len(old_out[ii])==len(qmc_out[ii])) for jj in range(len(qmc_out[1])): # For some reason, the sizes of these variables (in # bytes) aren't always the same. I've no idea why. # Hence, this assertion is commented out. #assert getsizeof(qmc_out[ii][jj]) == getsizeof(old_out[ii][jj]) #assert np.all(np.isclose(qmc_out[ii][jj],old_out[ii][jj])) assert np.all(np.isclose(qmc_out[ii][jj],old_out[ii][jj]))
flexible
{ "blob_id": "6acb253189798c22d47feb3d61ac68a1851d22ba", "index": 1619, "step-1": "<mask token>\n", "step-2": "<mask token>\ntry:\n copyfile(serial_filename(), temp_filename)\n serial_output_code.serial_output_code()\n with open(serial_filename(), 'rb') as f:\n qmc_out = pickle.load(f)\n with open(temp_filename, 'rb') as f:\n old_out = pickle.load(f)\nfinally:\n copyfile(temp_filename, serial_filename())\n remove(temp_filename)\nassert qmc_out[0] == old_out[0]\nprint(len(qmc_out))\nprint(len(old_out))\nassert len(qmc_out) == len(old_out) + 1\nfor ii in range(1, len(old_out)):\n assert len(old_out[ii]) == len(qmc_out[ii])\n for jj in range(len(qmc_out[1])):\n assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj]))\n", "step-3": "<mask token>\ntemp_filename = 'temp.pickle'\ntry:\n copyfile(serial_filename(), temp_filename)\n serial_output_code.serial_output_code()\n with open(serial_filename(), 'rb') as f:\n qmc_out = pickle.load(f)\n with open(temp_filename, 'rb') as f:\n old_out = pickle.load(f)\nfinally:\n copyfile(temp_filename, serial_filename())\n remove(temp_filename)\nassert qmc_out[0] == old_out[0]\nprint(len(qmc_out))\nprint(len(old_out))\nassert len(qmc_out) == len(old_out) + 1\nfor ii in range(1, len(old_out)):\n assert len(old_out[ii]) == len(qmc_out[ii])\n for jj in range(len(qmc_out[1])):\n assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj]))\n", "step-4": "import pickle\nfrom generation_code import serial_filename\nimport serial_output_code\nimport numpy as np\nfrom shutil import copyfile\nfrom os import remove\ntemp_filename = 'temp.pickle'\ntry:\n copyfile(serial_filename(), temp_filename)\n serial_output_code.serial_output_code()\n with open(serial_filename(), 'rb') as f:\n qmc_out = pickle.load(f)\n with open(temp_filename, 'rb') as f:\n old_out = pickle.load(f)\nfinally:\n copyfile(temp_filename, serial_filename())\n remove(temp_filename)\nassert qmc_out[0] == old_out[0]\nprint(len(qmc_out))\nprint(len(old_out))\nassert len(qmc_out) == 
len(old_out) + 1\nfor ii in range(1, len(old_out)):\n assert len(old_out[ii]) == len(qmc_out[ii])\n for jj in range(len(qmc_out[1])):\n assert np.all(np.isclose(qmc_out[ii][jj], old_out[ii][jj]))\n", "step-5": "import pickle\nfrom generation_code import serial_filename\nimport serial_output_code\nimport numpy as np\nfrom shutil import copyfile\nfrom os import remove\n\n# This file is only temporary, mostly to be used when updating the\n# reference output from a regression test, to ensure that, in all\n# aspects that are in common with the previosu regression test, the new\n# solution is the same.\n\n# It is largely the same as test_serial_code.py\n\ntemp_filename = 'temp.pickle'\n\ntry:\n # Copy reference output to temporary location\n copyfile(serial_filename(),temp_filename)\n\n # Run serial code\n serial_output_code.serial_output_code()\n\n with open(serial_filename(),'rb') as f:\n qmc_out = pickle.load(f)\n\n with open(temp_filename,'rb') as f:\n old_out = pickle.load(f)\nfinally:\n # Copy reference output back\n copyfile(temp_filename,serial_filename())\n\n # Remove temporary file\n remove(temp_filename)\n\n\nassert qmc_out[0] == old_out[0] # should be a float\n\nprint(len(qmc_out))\n\nprint(len(old_out))\n\nassert len(qmc_out) == (len(old_out) + 1) # Because we've added in a new output\n\nfor ii in range(1,len(old_out)):\n assert(len(old_out[ii])==len(qmc_out[ii]))\n for jj in range(len(qmc_out[1])):\n # For some reason, the sizes of these variables (in\n # bytes) aren't always the same. I've no idea why.\n # Hence, this assertion is commented out.\n #assert getsizeof(qmc_out[ii][jj]) == getsizeof(old_out[ii][jj]) \n #assert np.all(np.isclose(qmc_out[ii][jj],old_out[ii][jj]))\n assert np.all(np.isclose(qmc_out[ii][jj],old_out[ii][jj]))\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> @ddt class QuickSearchTest(BaseTestCase): <|reserved_special_token_0|> @data(*testingdata) @unpack def test_QuickSearch(self, search_value, expected_result, notes): homepage = HomePage(self.driver) search_results = homepage.search.searchFor(search_value) self.assertTrue(expected_result in search_results.get_results()) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> @ddt class QuickSearchTest(BaseTestCase): testingdata = get_data('testdata/QuickSearchTestData.xlsx') @data(*testingdata) @unpack def test_QuickSearch(self, search_value, expected_result, notes): homepage = HomePage(self.driver) search_results = homepage.search.searchFor(search_value) self.assertTrue(expected_result in search_results.get_results()) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> sys.path.append('..') <|reserved_special_token_0|> @ddt class QuickSearchTest(BaseTestCase): testingdata = get_data('testdata/QuickSearchTestData.xlsx') @data(*testingdata) @unpack def test_QuickSearch(self, search_value, expected_result, notes): homepage = HomePage(self.driver) search_results = homepage.search.searchFor(search_value) self.assertTrue(expected_result in search_results.get_results()) if __name__ == '__main__': unittest.main(verbosity=2) <|reserved_special_token_1|> from ddt import ddt, data, unpack import sys sys.path.append('..') from pages.homepage import HomePage from base.basetestcase import BaseTestCase from helpers.filedatahelper import get_data @ddt class QuickSearchTest(BaseTestCase): testingdata = get_data('testdata/QuickSearchTestData.xlsx') @data(*testingdata) @unpack def test_QuickSearch(self, search_value, expected_result, notes): homepage = HomePage(self.driver) search_results = homepage.search.searchFor(search_value) self.assertTrue(expected_result in search_results.get_results()) if __name__ == '__main__': unittest.main(verbosity=2) <|reserved_special_token_1|> from ddt import 
ddt, data, unpack import sys sys.path.append("..") from pages.homepage import HomePage from base.basetestcase import BaseTestCase from helpers.filedatahelper import get_data @ddt class QuickSearchTest(BaseTestCase): testingdata = get_data('testdata/QuickSearchTestData.xlsx') @data(*testingdata) @unpack def test_QuickSearch(self, search_value, expected_result, notes): homepage = HomePage(self.driver) search_results = homepage.search.searchFor(search_value) self.assertTrue(expected_result in search_results.get_results()) if __name__ == '__main__': unittest.main(verbosity=2)
flexible
{ "blob_id": "4ba0f7e947830018695c8c9e68a96426f49b4b5b", "index": 3326, "step-1": "<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n <mask token>\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\n<mask token>\n", "step-3": "<mask token>\nsys.path.append('..')\n<mask token>\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\nif __name__ == '__main__':\n unittest.main(verbosity=2)\n", "step-4": "from ddt import ddt, data, unpack\nimport sys\nsys.path.append('..')\nfrom pages.homepage import HomePage\nfrom base.basetestcase import BaseTestCase\nfrom helpers.filedatahelper import get_data\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\n\nif __name__ == '__main__':\n 
unittest.main(verbosity=2)\n", "step-5": "from ddt import ddt, data, unpack\nimport sys\nsys.path.append(\"..\")\nfrom pages.homepage import HomePage\nfrom base.basetestcase import BaseTestCase\nfrom helpers.filedatahelper import get_data\n\n\n@ddt\nclass QuickSearchTest(BaseTestCase):\n testingdata = get_data('testdata/QuickSearchTestData.xlsx')\n @data(*testingdata)\n @unpack\n def test_QuickSearch(self, search_value, expected_result, notes):\n homepage = HomePage(self.driver)\n search_results = homepage.search.searchFor(search_value)\n self.assertTrue(expected_result in search_results.get_results())\n\nif __name__ == '__main__':\n unittest.main(verbosity=2)\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
import random import time import unittest from math import radians from maciErrType import CannotGetComponentEx from DewarPositioner.positioner import Positioner, NotAllowedError from DewarPositioner.cdbconf import CDBConf from Acspy.Clients.SimpleClient import PySimpleClient from DewarPositionerMockers.mock_components import MockDevice, MockSource class PositionerOffsetTest(unittest.TestCase): def setUp(self): self.cdbconf = CDBConf() self.p = Positioner(self.cdbconf) self.source = MockSource() try: client = PySimpleClient() self.device = client.getComponent('RECEIVERS/SRTKBandDerotator') self.using_mock = False print '\nWARNING -> using the real component' except CannotGetComponentEx: print '\nINFO -> component not available: we will use a mock device' self.device = MockDevice() self.using_mock = True def tearDown(self): self.p.park() time.sleep(0.2) def _test_set_get(self): """Verify the set and get methods""" # Not allowed when the system is not yet configured self.assertRaises(NotAllowedError, self.p.setOffset, 2) self.p.setup(siteInfo={}, source=None, device=self.device) self.p.setOffset(2) self.assertEqual(self.p.getOffset(), 2) self.assertEqual(self.p.getPosition(), 2) self.p.clearOffset() self.assertEqual(self.p.getOffset(), 0) def _test_set_new_pos(self): """Vefify the setOffset set a new position.""" self.p.setup(siteInfo={}, source=None, device=self.device) time.sleep(0.3) if self.using_mock else time.sleep(3) act_position = self.device.getActPosition() offset = 0.5 self.p.setOffset(offset) time.sleep(0.3) if self.using_mock else time.sleep(3) self.assertAlmostEqual( self.p.getPosition(), act_position + offset, places=1 ) self.assertAlmostEqual( act_position + offset, self.device.getActPosition(), places=1 ) def _test_out_of_range(self): """Cause a rewind in case the offset is out of range""" self.cdbconf.setup('KKG') self.cdbconf.setConfiguration('CUSTOM_OPT') az, el, latitude = [radians(50)] * 3 site_info = {'latitude': latitude} 
self.p.setup(site_info, self.source, self.device) self.p.setRewindingMode('AUTO') offset = 20 max_limit = self.device.getMaxLimit() min_limit = self.device.getMinLimit() Pis = max_limit - offset/2 time.sleep(0.2) if self.using_mock else time.sleep(3) self.p.setPosition(Pis) time.sleep(0.2) # Wait a bit for the setup max_rewinding_steps = (max_limit - min_limit) // self.device.getStep() expected = Pis - max_rewinding_steps*self.device.getStep() + offset self.source.setAzimuth(az) self.source.setElevation(el) self.p.startUpdating('MNG_TRACK', 'ANT_NORTH', az, el, None, None) time.sleep(0.2) if self.using_mock else time.sleep(3) self.p.setOffset(offset) time.sleep(0.2) if self.using_mock else time.sleep(3) self.assertEqual(self.device.getActPosition(), expected) if __name__ == '__main__': unittest.main()
normal
{ "blob_id": "654adc9b77bbad6ba36dd42125e69e1a4ad1312d", "index": 9296, "step-1": "import random\nimport time\nimport unittest\nfrom math import radians\nfrom maciErrType import CannotGetComponentEx\nfrom DewarPositioner.positioner import Positioner, NotAllowedError\nfrom DewarPositioner.cdbconf import CDBConf\nfrom Acspy.Clients.SimpleClient import PySimpleClient\nfrom DewarPositionerMockers.mock_components import MockDevice, MockSource\n\n\nclass PositionerOffsetTest(unittest.TestCase):\n\n def setUp(self):\n self.cdbconf = CDBConf()\n self.p = Positioner(self.cdbconf)\n self.source = MockSource()\n try:\n client = PySimpleClient()\n self.device = client.getComponent('RECEIVERS/SRTKBandDerotator')\n self.using_mock = False\n print '\\nWARNING -> using the real component'\n except CannotGetComponentEx:\n print '\\nINFO -> component not available: we will use a mock device'\n self.device = MockDevice()\n self.using_mock = True\n\n def tearDown(self):\n self.p.park()\n time.sleep(0.2)\n\n def _test_set_get(self):\n \"\"\"Verify the set and get methods\"\"\"\n # Not allowed when the system is not yet configured\n self.assertRaises(NotAllowedError, self.p.setOffset, 2)\n self.p.setup(siteInfo={}, source=None, device=self.device)\n self.p.setOffset(2)\n self.assertEqual(self.p.getOffset(), 2)\n self.assertEqual(self.p.getPosition(), 2)\n self.p.clearOffset()\n self.assertEqual(self.p.getOffset(), 0)\n\n def _test_set_new_pos(self):\n \"\"\"Vefify the setOffset set a new position.\"\"\"\n self.p.setup(siteInfo={}, source=None, device=self.device)\n time.sleep(0.3) if self.using_mock else time.sleep(3)\n act_position = self.device.getActPosition()\n offset = 0.5\n self.p.setOffset(offset)\n time.sleep(0.3) if self.using_mock else time.sleep(3)\n self.assertAlmostEqual(\n self.p.getPosition(), \n act_position + offset, \n places=1\n )\n self.assertAlmostEqual(\n act_position + offset, \n self.device.getActPosition(), \n places=1\n )\n\n def _test_out_of_range(self):\n 
\"\"\"Cause a rewind in case the offset is out of range\"\"\"\n self.cdbconf.setup('KKG')\n self.cdbconf.setConfiguration('CUSTOM_OPT')\n az, el, latitude = [radians(50)] * 3\n site_info = {'latitude': latitude}\n self.p.setup(site_info, self.source, self.device)\n self.p.setRewindingMode('AUTO')\n offset = 20\n max_limit = self.device.getMaxLimit() \n min_limit = self.device.getMinLimit()\n Pis = max_limit - offset/2\n time.sleep(0.2) if self.using_mock else time.sleep(3)\n self.p.setPosition(Pis)\n time.sleep(0.2) # Wait a bit for the setup\n max_rewinding_steps = (max_limit - min_limit) // self.device.getStep()\n expected = Pis - max_rewinding_steps*self.device.getStep() + offset\n self.source.setAzimuth(az)\n self.source.setElevation(el)\n self.p.startUpdating('MNG_TRACK', 'ANT_NORTH', az, el, None, None)\n time.sleep(0.2) if self.using_mock else time.sleep(3)\n self.p.setOffset(offset)\n time.sleep(0.2) if self.using_mock else time.sleep(3)\n self.assertEqual(self.device.getActPosition(), expected)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#!/usr/bin/python import json, sys, getopt, re # Usage: ./get_code.py -i <inputfile> def main(argv): inputfile = argv[0] with open(inputfile) as json_data: d=json.load(json_data) json_data.close() code_array = d["hits"]["hits"] output_json = [] for element in code_array: gistid = element["_id"] e = element["_source"] code = e["code"].encode('ascii', 'ignore') author = e["userId"] code = get_js_only(code) if(code != None): filename = 'data/' + author + '_' + gistid + '.html' outfile = open(filename, 'w') outfile.write(code) simple_e = {} simple_e["uid"] = author + '_' + gistid simple_e["created_at"] = e["created_at"] simple_e["updated_at"] = e["updated_at"] simple_e["api"] = e["api"] simple_e["readme"] = e["readme"] simple_e["description"] = e["description"] simple_e["code"] = code # e["code"] output_json.append(simple_e) print len(output_json) with open('nodes.json', 'w') as datafile: json.dump(output_json, datafile) def get_js_only(code): re.DOTALL re.MULTILINE match = re.search('<script>.*</script>', code, re.DOTALL) if(match != None): return match.group(0) else: # print "\n\n-------------------------------------------------------------" # print code return None if __name__ == "__main__": main(sys.argv[1:])
normal
{ "blob_id": "9594cda360847d2878aa2bd9c9c85fe50562b6ab", "index": 5685, "step-1": "#!/usr/bin/python\n\nimport json, sys, getopt, re\n\n# Usage: ./get_code.py -i <inputfile>\n\ndef main(argv): \n inputfile = argv[0]\n \n with open(inputfile) as json_data: \n d=json.load(json_data)\n json_data.close()\n code_array = d[\"hits\"][\"hits\"]\n \n output_json = []\n \n for element in code_array:\n gistid = element[\"_id\"]\n e = element[\"_source\"]\n code = e[\"code\"].encode('ascii', 'ignore')\n author = e[\"userId\"]\n \n code = get_js_only(code)\n if(code != None): \n filename = 'data/' + author + '_' + gistid + '.html'\n outfile = open(filename, 'w')\n outfile.write(code)\n simple_e = {}\n simple_e[\"uid\"] = author + '_' + gistid\n simple_e[\"created_at\"] = e[\"created_at\"]\n simple_e[\"updated_at\"] = e[\"updated_at\"]\n simple_e[\"api\"] = e[\"api\"]\n simple_e[\"readme\"] = e[\"readme\"]\n simple_e[\"description\"] = e[\"description\"]\n simple_e[\"code\"] = code # e[\"code\"]\n output_json.append(simple_e)\n \n \n print len(output_json)\n with open('nodes.json', 'w') as datafile:\n json.dump(output_json, datafile) \n \ndef get_js_only(code):\n re.DOTALL\n re.MULTILINE\n match = re.search('<script>.*</script>', code, re.DOTALL)\n if(match != None):\n return match.group(0)\n else:\n # print \"\\n\\n-------------------------------------------------------------\"\n # print code\n return None\n \nif __name__ == \"__main__\":\n main(sys.argv[1:])", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in list: print(i) sum = sum + i print('sum =', sum) <|reserved_special_token_1|> list = [10, 20, 30, 40, 50] sum = 0 for i in list: print(i) sum = sum + i print('sum =', sum) <|reserved_special_token_1|> # A program to display and find the sum of a list of numbers using for loop list=[10,20,30,40,50] sum=0; for i in list: print(i) sum=sum+i print('sum =',sum)
flexible
{ "blob_id": "88e34ee5cd5af7d3b04321c4aa4fc815f926add1", "index": 7110, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in list:\n print(i)\n sum = sum + i\nprint('sum =', sum)\n", "step-3": "list = [10, 20, 30, 40, 50]\nsum = 0\nfor i in list:\n print(i)\n sum = sum + i\nprint('sum =', sum)\n", "step-4": "# A program to display and find the sum of a list of numbers using for loop\r\n\r\nlist=[10,20,30,40,50]\r\nsum=0;\r\n\r\nfor i in list:\r\n\tprint(i)\r\n\tsum=sum+i\r\nprint('sum =',sum)\t", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# Generated by Django 2.1.1 on 2018-09-24 04:59 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('backend', '0001_initial'), ] operations = [ migrations.CreateModel( name='Aro', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nombre', models.CharField(max_length=255, unique=True)), ], ), migrations.AddField( model_name='bicicleta', name='modelo', field=models.CharField(default=1, max_length=255), preserve_default=False, ), migrations.AddField( model_name='bicicleta', name='numero_serie', field=models.CharField(default=1, max_length=255), preserve_default=False, ), migrations.AddField( model_name='bicicleta', name='aro', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='backend.Aro'), preserve_default=False, ), ]
normal
{ "blob_id": "8dff22249abbae9e30ba1ad423457270e0cd9b20", "index": 7027, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('backend', '0001_initial')]\n operations = [migrations.CreateModel(name='Aro', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('nombre', models.CharField(max_length=255,\n unique=True))]), migrations.AddField(model_name='bicicleta', name=\n 'modelo', field=models.CharField(default=1, max_length=255),\n preserve_default=False), migrations.AddField(model_name='bicicleta',\n name='numero_serie', field=models.CharField(default=1, max_length=\n 255), preserve_default=False), migrations.AddField(model_name=\n 'bicicleta', name='aro', field=models.ForeignKey(default=1,\n on_delete=django.db.models.deletion.CASCADE, to='backend.Aro'),\n preserve_default=False)]\n", "step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('backend', '0001_initial')]\n operations = [migrations.CreateModel(name='Aro', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('nombre', models.CharField(max_length=255,\n unique=True))]), migrations.AddField(model_name='bicicleta', name=\n 'modelo', field=models.CharField(default=1, max_length=255),\n preserve_default=False), migrations.AddField(model_name='bicicleta',\n name='numero_serie', field=models.CharField(default=1, max_length=\n 255), preserve_default=False), migrations.AddField(model_name=\n 'bicicleta', name='aro', field=models.ForeignKey(default=1,\n on_delete=django.db.models.deletion.CASCADE, to='backend.Aro'),\n preserve_default=False)]\n", "step-5": "# Generated by Django 2.1.1 on 2018-09-24 04:59\n\nfrom django.db import migrations, 
models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('backend', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Aro',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('nombre', models.CharField(max_length=255, unique=True)),\n ],\n ),\n migrations.AddField(\n model_name='bicicleta',\n name='modelo',\n field=models.CharField(default=1, max_length=255),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='bicicleta',\n name='numero_serie',\n field=models.CharField(default=1, max_length=255),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='bicicleta',\n name='aro',\n field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='backend.Aro'),\n preserve_default=False,\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in range(6): hexagon.forward(100) hexagon.left(60) <|reserved_special_token_1|> <|reserved_special_token_0|> hexagon = turtle.Turtle() for i in range(6): hexagon.forward(100) hexagon.left(60) <|reserved_special_token_1|> import turtle hexagon = turtle.Turtle() for i in range(6): hexagon.forward(100) hexagon.left(60)
flexible
{ "blob_id": "f6401eca2dc0ea86a934e859c35fa2d6c85a61b3", "index": 8695, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in range(6):\n hexagon.forward(100)\n hexagon.left(60)\n", "step-3": "<mask token>\nhexagon = turtle.Turtle()\nfor i in range(6):\n hexagon.forward(100)\n hexagon.left(60)\n", "step-4": "import turtle\nhexagon = turtle.Turtle()\nfor i in range(6):\n hexagon.forward(100)\n hexagon.left(60)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#!/usr/bin/env python #============================================================================================= # MODULE DOCSTRING #============================================================================================= """ evaluate-gbvi.py Evaluate the GBVI model on hydration free energies of small molecules for multiple iterations of the Markov chain. """ #============================================================================================= # GLOBAL IMPORTS #============================================================================================= import sys,string from openeye.oechem import * from optparse import OptionParser # For parsing of command line arguments import os import math import numpy import simtk.openmm as openmm import simtk.unit as units import openeye.oechem import openeye.oequacpac import openeye.oeiupac from openeye.oechem import * from openeye.oequacpac import * from openeye.oeszybki import * from openeye.oeiupac import * import time import pymc #============================================================================================= # Load OpenMM plugins. #============================================================================================= print "Loading OpenMM plugins..." openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib')) openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib', 'plugins')) #============================================================================================= # Atom Typer #============================================================================================= class AtomTyper(object): """ Atom typer Based on 'Patty', by Pat Walters. """ class TypingException(Exception): """ Atom typing exception. 
""" def __init__(self, molecule, atom): self.molecule = molecule self.atom = atom def __str__(self): return "Atom not assigned: %6d %8s" % (self.atom.GetIdx(), OEGetAtomicSymbol(self.atom.GetAtomicNum())) def __init__(self, infileName, tagname): self.pattyTag = OEGetTag(tagname) self.smartsList = [] ifs = open(infileName) lines = ifs.readlines() for line in lines: # Strip trailing comments index = line.find('%') if index != -1: line = line[0:index] # Split into tokens. toks = string.split(line) if len(toks) == 2: smarts,type = toks pat = OESubSearch() pat.Init(smarts) pat.SetMaxMatches(0) self.smartsList.append([pat,type,smarts]) def dump(self): for pat,type,smarts in self.smartsList: print pat,type,smarts def assignTypes(self,mol): # Assign null types. for atom in mol.GetAtoms(): atom.SetStringData(self.pattyTag, "") # Assign atom types using rules. OEAssignAromaticFlags(mol) for pat,type,smarts in self.smartsList: for matchbase in pat.Match(mol): for matchpair in matchbase.GetAtoms(): matchpair.target.SetStringData(self.pattyTag,type) # Check if any atoms remain unassigned. for atom in mol.GetAtoms(): if atom.GetStringData(self.pattyTag)=="": raise AtomTyper.TypingException(mol, atom) def debugTypes(self,mol): for atom in mol.GetAtoms(): print "%6d %8s %8s" % (atom.GetIdx(),OEGetAtomicSymbol(atom.GetAtomicNum()),atom.GetStringData(self.pattyTag)) def getTypeList(self,mol): typeList = [] for atom in mol.GetAtoms(): typeList.append(atom.GetStringData(self.pattyTag)) return typeList #============================================================================================= # Utility routines #============================================================================================= def read_gbvi_parameters(filename): """ Read a GBVI parameter set from a file. 
ARGUMENTS filename (string) - the filename to read parameters from RETURNS parameters (dict) - parameters[(atomtype,parameter_name)] contains the dimensionless parameter """ parameters = dict() infile = open(filename, 'r') for line in infile: # Strip trailing comments index = line.find('%') if index != -1: line = line[0:index] # Parse parameters elements = line.split() if len(elements) == 3: [atomtype, radius, gamma] = elements parameters['%s_%s' % (atomtype,'radius')] = float(radius) parameters['%s_%s' % (atomtype,'gamma')] = float(gamma) return parameters #============================================================================================= # Computation of hydration free energies #============================================================================================= def function(x): (molecule, parameters) = x return compute_hydration_energy(molecule, parameters) def compute_hydration_energies_parallel(molecules, parameters): import multiprocessing # Create processor pool. nprocs = 8 pool = multiprocessing.Pool(processes=nprocs) x = list() for molecule in molecules: x.append( (molecule, parameters) ) # Distribute calculation. results = pool.map(function, x) return results def compute_hydration_energies(molecules, parameters): """ Compute solvation energies of all specified molecules using given parameter set. ARGUMENTS molecules (list of OEMol) - molecules with atom types parameters (dict) - parameters for atom types RETURNS energies (dict) - energies[molecule] is the computed solvation energy of given molecule """ energies = dict() # energies[index] is the computed solvation energy of molecules[index] platform = openmm.Platform.getPlatformByName("Reference") for molecule in molecules: # Create OpenMM System. system = openmm.System() for atom in molecule.GetAtoms(): mass = OEGetDefaultMass(atom.GetAtomicNum()) system.addParticle(mass * units.amu) # Add nonbonded term. 
# nonbonded_force = openmm.NonbondedSoftcoreForce() # nonbonded_force.setNonbondedMethod(openmm.NonbondedForce.NoCutoff) # for atom in molecule.GetAtoms(): # charge = 0.0 * units.elementary_charge # sigma = 1.0 * units.angstrom # epsilon = 0.0 * units.kilocalories_per_mole # nonbonded_force.addParticle(charge, sigma, epsilon) # system.addForce(nonbonded_force) # Add GBVI term # gbvi_force = openmm.GBVISoftcoreForce() gbvi_force = openmm.GBVIForce() gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff gbvi_force.setSoluteDielectric(1) gbvi_force.setSolventDielectric(78) # Use scaling method. # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline) # gbvi_force.setQuinticLowerLimitFactor(0.75) # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers) # Build indexable list of atoms. atoms = [atom for atom in molecule.GetAtoms()] # Assign GB/VI parameters. for atom in molecule.GetAtoms(): atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype charge = atom.GetPartialCharge() * units.elementary_charge radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole # gamma *= -1.0 # DEBUG lambda_ = 1.0 # fully interacting # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce # Add bonds. for bond in molecule.GetBonds(): # Get atom indices. iatom = bond.GetBgnIdx() jatom = bond.GetEndIdx() # Get bond length. (xi, yi, zi) = molecule.GetCoords(atoms[iatom]) (xj, yj, zj) = molecule.GetCoords(atoms[jatom]) distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms # Identify bonded atoms to GBVI. gbvi_force.addBond(iatom, jatom, distance) # Add the force to the system. system.addForce(gbvi_force) # Build coordinate array. 
natoms = len(atoms) coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms) for (index,atom) in enumerate(atoms): (x,y,z) = molecule.GetCoords(atom) coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms) # Create OpenMM Context. timestep = 1.0 * units.femtosecond # arbitrary integrator = openmm.VerletIntegrator(timestep) context = openmm.Context(system, integrator, platform) # Set the coordinates. context.setPositions(coordinates) # Get the energy state = context.getState(getEnergy=True) energies[molecule] = state.getPotentialEnergy() return energies def compute_hydration_energy(molecule, parameters, platform_name="Reference"): """ Compute hydration energy of a specified molecule given the specified GBVI parameter set. ARGUMENTS molecule (OEMol) - molecule with GBVI atom types parameters (dict) - parameters for GBVI atom types RETURNS energy (float) - hydration energy in kcal/mol """ platform = openmm.Platform.getPlatformByName(platform_name) # Create OpenMM System. system = openmm.System() for atom in molecule.GetAtoms(): mass = OEGetDefaultMass(atom.GetAtomicNum()) system.addParticle(mass * units.amu) # Add GBVI term # gbvi_force = openmm.GBVISoftcoreForce() gbvi_force = openmm.GBVIForce() gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff gbvi_force.setSoluteDielectric(1) gbvi_force.setSolventDielectric(78) # Use scaling method. # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline) # gbvi_force.setQuinticLowerLimitFactor(0.75) # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers) # Build indexable list of atoms. atoms = [atom for atom in molecule.GetAtoms()] # Assign GB/VI parameters. 
for atom in molecule.GetAtoms(): atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype charge = atom.GetPartialCharge() * units.elementary_charge try: radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole except Exception, exception: print "Cannot find parameters for atomtype '%s' in molecule '%s'" % (atomtype, molecule.GetTitle()) print parameters.keys() raise exception # gamma *= -1.0 # DEBUG lambda_ = 1.0 # fully interacting # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce # Add bonds. for bond in molecule.GetBonds(): # Get atom indices. iatom = bond.GetBgnIdx() jatom = bond.GetEndIdx() # Get bond length. (xi, yi, zi) = molecule.GetCoords(atoms[iatom]) (xj, yj, zj) = molecule.GetCoords(atoms[jatom]) distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms # Identify bonded atoms to GBVI. gbvi_force.addBond(iatom, jatom, distance) # Add the force to the system. system.addForce(gbvi_force) # Build coordinate array. natoms = len(atoms) coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms) for (index,atom) in enumerate(atoms): (x,y,z) = molecule.GetCoords(atom) coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms) # Create OpenMM Context. timestep = 1.0 * units.femtosecond # arbitrary integrator = openmm.VerletIntegrator(timestep) context = openmm.Context(system, integrator, platform) # Set the coordinates. 
context.setPositions(coordinates) # Get the energy state = context.getState(getEnergy=True) energy = state.getPotentialEnergy() / units.kilocalories_per_mole if numpy.isnan(energy): energy = +1e6; return energy def hydration_energy_factory(molecule): def hydration_energy(**parameters): return compute_hydration_energy(molecule, parameters, platform_name="Reference") return hydration_energy #============================================================================================= # PyMC model #============================================================================================= def testfun(molecule_index, *x): print molecule_index return molecule_index def create_model(molecules, initial_parameters): # Define priors for parameters. model = dict() parameters = dict() # just the parameters for (key, value) in initial_parameters.iteritems(): (atomtype, parameter_name) = key.split('_') if parameter_name == 'gamma': stochastic = pymc.Uniform(key, value=value, lower=-10.0, upper=+10.0) elif parameter_name == 'radius': stochastic = pymc.Uniform(key, value=value, lower=1.0, upper=3.0) else: raise Exception("Unrecognized parameter name: %s" % parameter_name) model[key] = stochastic parameters[key] = stochastic # Define deterministic functions for hydration free energies. for (molecule_index, molecule) in enumerate(molecules): molecule_name = molecule.GetTitle() variable_name = "dg_gbvi_%08d" % molecule_index # Determine which parameters are involved in this molecule to limit number of parents for caching. parents = dict() for atom in molecule.GetAtoms(): atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype for parameter_name in ['gamma', 'radius']: stochastic_name = '%s_%s' % (atomtype,parameter_name) parents[stochastic_name] = parameters[stochastic_name] print "%s : " % molecule_name, print parents.keys() # Create deterministic variable for computed hydration free energy. 
function = hydration_energy_factory(molecule) model[variable_name] = pymc.Deterministic(eval=function, name=variable_name, parents=parents, doc=molecule_name, trace=True, verbose=1, dtype=float, plot=False, cache_depth=2) # Define error model log_sigma_min = math.log(0.01) # kcal/mol log_sigma_max = math.log(10.0) # kcal/mol log_sigma_guess = math.log(0.2) # kcal/mol model['log_sigma'] = pymc.Uniform('log_sigma', lower=log_sigma_min, upper=log_sigma_max, value=log_sigma_guess) model['sigma'] = pymc.Lambda('sigma', lambda log_sigma=model['log_sigma'] : math.exp(log_sigma) ) model['tau'] = pymc.Lambda('tau', lambda sigma=model['sigma'] : sigma**(-2) ) for (molecule_index, molecule) in enumerate(molecules): molecule_name = molecule.GetTitle() variable_name = "dg_exp_%08d" % molecule_index dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) # observed hydration free energy in kcal/mol model[variable_name] = pymc.Normal(mu=model['dg_gbvi_%08d' % molecule_index], tau=model['tau'], value=dg_exp, observed=True) return model #============================================================================================= # MAIN #============================================================================================= if __name__=="__main__": # Create command-line argument options. 
usage_string = """\ usage: %prog --types typefile --parameters paramfile --molecules molfile example: %prog --types parameters/gbvi.types --parameters parameters/gbvi-am1bcc.parameters --molecules datasets/solvation.sdf --mcmcDb MCMC_db_name """ version_string = "%prog %__version__" parser = OptionParser(usage=usage_string, version=version_string) parser.add_option("-t", "--types", metavar='TYPES', action="store", type="string", dest='atomtypes_filename', default='', help="Filename defining atomtypes as SMARTS atom matches.") parser.add_option("-p", "--parameters", metavar='PARAMETERS', action="store", type="string", dest='parameters_filename', default='', help="File containing initial parameter set.") parser.add_option("-m", "--molecules", metavar='MOLECULES', action="store", type="string", dest='molecules_filename', default='', help="Small molecule set (in any OpenEye compatible file format) containing 'dG(exp)' fields with experimental hydration free energies.") parser.add_option("-d", "--mcmcDb", metavar='MCMC_Db', action="store", type="string", dest='mcmcDb', default='', help="MCMC db name.") # Parse command-line arguments. (options,args) = parser.parse_args() # Ensure all required options have been specified. if options.atomtypes_filename=='' or options.parameters_filename=='' or options.molecules_filename=='' or options.mcmcDb == '': parser.print_help() parser.error("All input files must be specified.") # Read GBVI parameters. parameters = read_gbvi_parameters(options.parameters_filename) mcmcDbName = options.mcmcDb printString = "Starting " + sys.argv[0] + "\n" printString += ' atom types=<' + options.atomtypes_filename + ">\n" printString += ' parameters=<' + options.parameters_filename + ">\n" printString += ' molecule=<' + options.molecules_filename + ">\n" printString += ' mcmcDB=<' + options.mcmcDb + ">\n" sys.stderr.write( printString ) sys.stdout.write( printString ) # Construct atom typer. 
atom_typer = AtomTyper(options.atomtypes_filename, "gbvi_type") # Load and type all molecules in the specified dataset. print "Loading and typing all molecules in dataset..." start_time = time.time() molecules = list() input_molstream = oemolistream(options.molecules_filename) molecule = OECreateOEGraphMol() while OEReadMolecule(input_molstream, molecule): # Get molecule name. name = OEGetSDData(molecule, 'name').strip() molecule.SetTitle(name) # Append to list. molecule_copy = OEMol(molecule) molecules.append(molecule_copy) input_molstream.close() print "%d molecules read" % len(molecules) end_time = time.time() elapsed_time = end_time - start_time print "%.3f s elapsed" % elapsed_time # Add explicit hydrogens. for molecule in molecules: openeye.oechem.OEAddExplicitHydrogens(molecule) # Build a conformation for all molecules with Omega. print "Building conformations for all molecules..." import openeye.oeomega omega = openeye.oeomega.OEOmega() omega.SetMaxConfs(1) omega.SetFromCT(True) for molecule in molecules: #omega.SetFixMol(molecule) omega(molecule) end_time = time.time() elapsed_time = end_time - start_time print "%.3f s elapsed" % elapsed_time # Regularize all molecules through writing as mol2. print "Regularizing all molecules..." ligand_mol2_dirname = os.path.dirname(mcmcDbName) + '/mol2' if( not os.path.exists( ligand_mol2_dirname ) ): os.makedirs(ligand_mol2_dirname) ligand_mol2_filename = ligand_mol2_dirname + '/temp' + os.path.basename(mcmcDbName) + '.mol2' start_time = time.time() omolstream = openeye.oechem.oemolostream(ligand_mol2_filename) for molecule in molecules: # Write molecule as mol2, changing molecule through normalization. openeye.oechem.OEWriteMolecule(omolstream, molecule) omolstream.close() end_time = time.time() elapsed_time = end_time - start_time print "%.3f s elapsed" % elapsed_time # Assign AM1-BCC charges. print "Assigning AM1-BCC charges..." start_time = time.time() for molecule in molecules: # Assign AM1-BCC charges. 
if molecule.NumAtoms() == 1: # Use formal charges for ions. OEFormalPartialCharges(molecule) else: # Assign AM1-BCC charges for multiatom molecules. OEAssignPartialCharges(molecule, OECharges_AM1BCC, False) # use explicit hydrogens # Check to make sure we ended up with partial charges. if OEHasPartialCharges(molecule) == False: print "No charges on molecule: '%s'" % molecule.GetTitle() print "IUPAC name: %s" % OECreateIUPACName(molecule) # TODO: Write molecule out # Delete themolecule. molecules.remove(molecule) end_time = time.time() elapsed_time = end_time - start_time print "%.3f s elapsed" % elapsed_time print "%d molecules remaining" % len(molecules) # Type all molecules with GAFF parameters. start_time = time.time() typed_molecules = list() untyped_molecules = list() for molecule in molecules: # Assign GBVI types according to SMARTS rules. try: atom_typer.assignTypes(molecule) typed_molecules.append(OEGraphMol(molecule)) #atom_typer.debugTypes(molecule) except AtomTyper.TypingException as exception: print name print exception untyped_molecules.append(OEGraphMol(molecule)) end_time = time.time() elapsed_time = end_time - start_time print "%d molecules correctly typed" % (len(typed_molecules)) print "%d molecules missing some types" % (len(untyped_molecules)) print "%.3f s elapsed" % elapsed_time # Load updated parameter sets. parameter_sets = list() for key in parameters.keys(): # Read parameters. filename = mcmcDbName + '.txt/Chain_0/%s.txt' % key print "Parameter %s from file %s" %( key, filename ) infile = open(filename, 'r') lines = infile.readlines() infile.close() # Discard header lines = lines[3:] # Insert parameter. 
for (index, line) in enumerate(lines): elements = line.split() parameter = float(elements[0]) try: parameter_sets[index][key] = parameter except Exception: parameter_sets.append( dict() ) parameter_sets[index][key] = parameter for (index, parameter_set) in enumerate([parameters] + parameter_sets): # skip some #for (index, parameter_set) in enumerate([parameters] + parameter_sets[::10]): # skip some # Compute energies with all molecules. print "Computing all energies..." start_time = time.time() energies = compute_hydration_energies(typed_molecules, parameter_set) #energies = compute_hydration_energies_parallel(typed_molecules, parameter_set) end_time = time.time() elapsed_time = end_time - start_time print "%.3f s elapsed" % elapsed_time # # Print comparison. # for molecule in typed_molecules: # # Get metadata. # name = OEGetSDData(molecule, 'name').strip() # dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * units.kilocalories_per_mole # # Form output. # outstring = "%48s %8.3f %8.3f" % (name, dg_exp / units.kilocalories_per_mole, energies[molecule] / units.kilocalories_per_mole) # print outstring # Print summary statistics. signed_errors = numpy.zeros([len(typed_molecules)], numpy.float64) for (i, molecule) in enumerate(typed_molecules): # Get metadata. name = OEGetSDData(molecule, 'name').strip() energy = energies[molecule] / units.kilocalories_per_mole if( math.isnan(energy) ): print "%5d dG: nan %8.3f %s" % (i, dg_exp / units.kilocalories_per_mole, name) else: try: dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * units.kilocalories_per_mole signed_errors[i] = energies[molecule] / units.kilocalories_per_mole - dg_exp / units.kilocalories_per_mole except: print "Problem getting dG(exp) for molecule %d %s" % (i, name) print "iteration %8d : RMS error %8.3f kcal/mol" % (index, signed_errors.std())
normal
{ "blob_id": "0ac9e757fa827b311487169d0dc822951ce8c4bb", "index": 7167, "step-1": "#!/usr/bin/env python\n\n#=============================================================================================\n# MODULE DOCSTRING\n#=============================================================================================\n\n\"\"\"\nevaluate-gbvi.py\n\nEvaluate the GBVI model on hydration free energies of small molecules for multiple iterations of the Markov chain.\n\n\"\"\"\n#=============================================================================================\n# GLOBAL IMPORTS\n#=============================================================================================\n\nimport sys,string\nfrom openeye.oechem import *\nfrom optparse import OptionParser # For parsing of command line arguments\n\nimport os\nimport math\nimport numpy\nimport simtk.openmm as openmm\nimport simtk.unit as units\n\nimport openeye.oechem\nimport openeye.oequacpac\nimport openeye.oeiupac\n\nfrom openeye.oechem import *\nfrom openeye.oequacpac import *\nfrom openeye.oeszybki import *\nfrom openeye.oeiupac import *\n\nimport time\nimport pymc \n\n#=============================================================================================\n# Load OpenMM plugins.\n#=============================================================================================\n\nprint \"Loading OpenMM plugins...\"\n\nopenmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib'))\nopenmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib', 'plugins'))\n\n#=============================================================================================\n# Atom Typer\n#=============================================================================================\n\nclass AtomTyper(object):\n \"\"\"\n Atom typer\n\n Based on 'Patty', by Pat Walters.\n\n \"\"\"\n \n class TypingException(Exception):\n \"\"\"\n Atom typing exception.\n\n \"\"\"\n 
def __init__(self, molecule, atom):\n self.molecule = molecule\n self.atom = atom\n\n def __str__(self):\n return \"Atom not assigned: %6d %8s\" % (self.atom.GetIdx(), OEGetAtomicSymbol(self.atom.GetAtomicNum()))\n\n def __init__(self, infileName, tagname):\n self.pattyTag = OEGetTag(tagname) \n self.smartsList = []\n ifs = open(infileName)\n lines = ifs.readlines()\n for line in lines:\n # Strip trailing comments\n index = line.find('%')\n if index != -1:\n line = line[0:index]\n # Split into tokens.\n toks = string.split(line)\n if len(toks) == 2:\n smarts,type = toks\n pat = OESubSearch()\n pat.Init(smarts)\n pat.SetMaxMatches(0)\n self.smartsList.append([pat,type,smarts])\n\n def dump(self):\n for pat,type,smarts in self.smartsList:\n print pat,type,smarts\n\n def assignTypes(self,mol):\n # Assign null types.\n for atom in mol.GetAtoms():\n atom.SetStringData(self.pattyTag, \"\") \n\n # Assign atom types using rules.\n OEAssignAromaticFlags(mol)\n for pat,type,smarts in self.smartsList:\n for matchbase in pat.Match(mol):\n for matchpair in matchbase.GetAtoms():\n matchpair.target.SetStringData(self.pattyTag,type)\n\n # Check if any atoms remain unassigned.\n for atom in mol.GetAtoms():\n if atom.GetStringData(self.pattyTag)==\"\":\n raise AtomTyper.TypingException(mol, atom)\n\n def debugTypes(self,mol):\n for atom in mol.GetAtoms():\n print \"%6d %8s %8s\" % (atom.GetIdx(),OEGetAtomicSymbol(atom.GetAtomicNum()),atom.GetStringData(self.pattyTag))\n\n def getTypeList(self,mol):\n typeList = []\n for atom in mol.GetAtoms():\n typeList.append(atom.GetStringData(self.pattyTag))\n return typeList\n\n#=============================================================================================\n# Utility routines\n#=============================================================================================\n\ndef read_gbvi_parameters(filename):\n \"\"\"\n Read a GBVI parameter set from a file.\n\n ARGUMENTS\n\n filename (string) - the filename to read parameters 
from\n\n RETURNS\n\n parameters (dict) - parameters[(atomtype,parameter_name)] contains the dimensionless parameter \n \n \"\"\"\n\n parameters = dict()\n \n infile = open(filename, 'r')\n for line in infile:\n # Strip trailing comments\n index = line.find('%')\n if index != -1:\n line = line[0:index] \n\n # Parse parameters\n elements = line.split()\n if len(elements) == 3:\n [atomtype, radius, gamma] = elements\n parameters['%s_%s' % (atomtype,'radius')] = float(radius)\n parameters['%s_%s' % (atomtype,'gamma')] = float(gamma)\n\n return parameters \n\n#=============================================================================================\n# Computation of hydration free energies\n#=============================================================================================\n\ndef function(x):\n (molecule, parameters) = x\n return compute_hydration_energy(molecule, parameters) \n\ndef compute_hydration_energies_parallel(molecules, parameters):\n import multiprocessing\n\n # Create processor pool.\n nprocs = 8\n pool = multiprocessing.Pool(processes=nprocs)\n\n x = list()\n for molecule in molecules:\n x.append( (molecule, parameters) )\n\n # Distribute calculation.\n results = pool.map(function, x)\n\n return results\n\ndef compute_hydration_energies(molecules, parameters):\n \"\"\"\n Compute solvation energies of all specified molecules using given parameter set.\n\n ARGUMENTS\n\n molecules (list of OEMol) - molecules with atom types\n parameters (dict) - parameters for atom types\n\n RETURNS\n\n energies (dict) - energies[molecule] is the computed solvation energy of given molecule\n\n \"\"\"\n\n energies = dict() # energies[index] is the computed solvation energy of molecules[index]\n\n platform = openmm.Platform.getPlatformByName(\"Reference\")\n\n for molecule in molecules:\n # Create OpenMM System.\n system = openmm.System()\n for atom in molecule.GetAtoms():\n mass = OEGetDefaultMass(atom.GetAtomicNum())\n system.addParticle(mass * units.amu)\n\n # 
Add nonbonded term.\n # nonbonded_force = openmm.NonbondedSoftcoreForce()\n # nonbonded_force.setNonbondedMethod(openmm.NonbondedForce.NoCutoff)\n # for atom in molecule.GetAtoms():\n # charge = 0.0 * units.elementary_charge\n # sigma = 1.0 * units.angstrom\n # epsilon = 0.0 * units.kilocalories_per_mole\n # nonbonded_force.addParticle(charge, sigma, epsilon)\n # system.addForce(nonbonded_force)\n\n # Add GBVI term\n # gbvi_force = openmm.GBVISoftcoreForce()\n gbvi_force = openmm.GBVIForce() \n gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff\n gbvi_force.setSoluteDielectric(1)\n gbvi_force.setSolventDielectric(78)\n\n # Use scaling method.\n # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline)\n # gbvi_force.setQuinticLowerLimitFactor(0.75)\n # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers)\n\n # Build indexable list of atoms.\n atoms = [atom for atom in molecule.GetAtoms()] \n \n # Assign GB/VI parameters.\n for atom in molecule.GetAtoms(): \n atomtype = atom.GetStringData(\"gbvi_type\") # GBVI atomtype\n charge = atom.GetPartialCharge() * units.elementary_charge\n radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms\n gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole \n # gamma *= -1.0 # DEBUG\n lambda_ = 1.0 # fully interacting\n # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce\n gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce\n\n # Add bonds.\n for bond in molecule.GetBonds():\n # Get atom indices.\n iatom = bond.GetBgnIdx()\n jatom = bond.GetEndIdx()\n # Get bond length.\n (xi, yi, zi) = molecule.GetCoords(atoms[iatom])\n (xj, yj, zj) = molecule.GetCoords(atoms[jatom])\n distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms\n # Identify bonded atoms to GBVI.\n gbvi_force.addBond(iatom, jatom, distance)\n\n # Add the force to the system.\n system.addForce(gbvi_force)\n \n # 
Build coordinate array.\n natoms = len(atoms)\n coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms)\n for (index,atom) in enumerate(atoms):\n (x,y,z) = molecule.GetCoords(atom)\n coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms) \n \n # Create OpenMM Context.\n timestep = 1.0 * units.femtosecond # arbitrary\n integrator = openmm.VerletIntegrator(timestep)\n context = openmm.Context(system, integrator, platform)\n\n # Set the coordinates.\n context.setPositions(coordinates)\n \n # Get the energy\n state = context.getState(getEnergy=True)\n energies[molecule] = state.getPotentialEnergy()\n\n return energies\n\ndef compute_hydration_energy(molecule, parameters, platform_name=\"Reference\"):\n \"\"\"\n Compute hydration energy of a specified molecule given the specified GBVI parameter set.\n\n ARGUMENTS\n\n molecule (OEMol) - molecule with GBVI atom types\n parameters (dict) - parameters for GBVI atom types\n\n RETURNS\n\n energy (float) - hydration energy in kcal/mol\n\n \"\"\"\n\n platform = openmm.Platform.getPlatformByName(platform_name)\n\n # Create OpenMM System.\n system = openmm.System()\n for atom in molecule.GetAtoms():\n mass = OEGetDefaultMass(atom.GetAtomicNum())\n system.addParticle(mass * units.amu)\n\n # Add GBVI term\n # gbvi_force = openmm.GBVISoftcoreForce()\n gbvi_force = openmm.GBVIForce() \n gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff\n gbvi_force.setSoluteDielectric(1)\n gbvi_force.setSolventDielectric(78)\n \n # Use scaling method.\n # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline)\n # gbvi_force.setQuinticLowerLimitFactor(0.75)\n # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers)\n \n # Build indexable list of atoms.\n atoms = [atom for atom in molecule.GetAtoms()] \n \n # Assign GB/VI parameters.\n for atom in molecule.GetAtoms(): \n atomtype = atom.GetStringData(\"gbvi_type\") # GBVI atomtype\n charge = 
atom.GetPartialCharge() * units.elementary_charge\n try:\n radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms\n gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole\n except Exception, exception:\n print \"Cannot find parameters for atomtype '%s' in molecule '%s'\" % (atomtype, molecule.GetTitle())\n print parameters.keys()\n raise exception\n \n # gamma *= -1.0 # DEBUG\n lambda_ = 1.0 # fully interacting\n # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce\n gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce\n \n # Add bonds.\n for bond in molecule.GetBonds():\n # Get atom indices.\n iatom = bond.GetBgnIdx()\n jatom = bond.GetEndIdx()\n # Get bond length.\n (xi, yi, zi) = molecule.GetCoords(atoms[iatom])\n (xj, yj, zj) = molecule.GetCoords(atoms[jatom])\n distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms\n # Identify bonded atoms to GBVI.\n gbvi_force.addBond(iatom, jatom, distance)\n\n # Add the force to the system.\n system.addForce(gbvi_force)\n \n # Build coordinate array.\n natoms = len(atoms)\n coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms)\n for (index,atom) in enumerate(atoms):\n (x,y,z) = molecule.GetCoords(atom)\n coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms) \n \n # Create OpenMM Context.\n timestep = 1.0 * units.femtosecond # arbitrary\n integrator = openmm.VerletIntegrator(timestep)\n context = openmm.Context(system, integrator, platform)\n\n # Set the coordinates.\n context.setPositions(coordinates)\n \n # Get the energy\n state = context.getState(getEnergy=True)\n energy = state.getPotentialEnergy() / units.kilocalories_per_mole\n if numpy.isnan(energy):\n energy = +1e6;\n\n return energy\n\ndef hydration_energy_factory(molecule):\n def hydration_energy(**parameters):\n return compute_hydration_energy(molecule, parameters, platform_name=\"Reference\")\n return 
hydration_energy\n\n#=============================================================================================\n# PyMC model\n#=============================================================================================\n\ndef testfun(molecule_index, *x):\n print molecule_index\n return molecule_index\n\ndef create_model(molecules, initial_parameters):\n\n # Define priors for parameters.\n model = dict()\n parameters = dict() # just the parameters\n for (key, value) in initial_parameters.iteritems():\n (atomtype, parameter_name) = key.split('_')\n if parameter_name == 'gamma':\n stochastic = pymc.Uniform(key, value=value, lower=-10.0, upper=+10.0)\n elif parameter_name == 'radius':\n stochastic = pymc.Uniform(key, value=value, lower=1.0, upper=3.0)\n else:\n raise Exception(\"Unrecognized parameter name: %s\" % parameter_name)\n model[key] = stochastic\n parameters[key] = stochastic\n\n # Define deterministic functions for hydration free energies.\n for (molecule_index, molecule) in enumerate(molecules):\n molecule_name = molecule.GetTitle()\n variable_name = \"dg_gbvi_%08d\" % molecule_index\n # Determine which parameters are involved in this molecule to limit number of parents for caching.\n parents = dict()\n for atom in molecule.GetAtoms():\n atomtype = atom.GetStringData(\"gbvi_type\") # GBVI atomtype\n for parameter_name in ['gamma', 'radius']:\n stochastic_name = '%s_%s' % (atomtype,parameter_name)\n parents[stochastic_name] = parameters[stochastic_name]\n print \"%s : \" % molecule_name,\n print parents.keys()\n # Create deterministic variable for computed hydration free energy.\n function = hydration_energy_factory(molecule)\n model[variable_name] = pymc.Deterministic(eval=function,\n name=variable_name,\n parents=parents,\n doc=molecule_name,\n trace=True,\n verbose=1,\n dtype=float,\n plot=False,\n cache_depth=2)\n\n # Define error model\n log_sigma_min = math.log(0.01) # kcal/mol\n log_sigma_max = math.log(10.0) # kcal/mol\n log_sigma_guess = 
math.log(0.2) # kcal/mol\n model['log_sigma'] = pymc.Uniform('log_sigma', lower=log_sigma_min, upper=log_sigma_max, value=log_sigma_guess)\n model['sigma'] = pymc.Lambda('sigma', lambda log_sigma=model['log_sigma'] : math.exp(log_sigma) ) \n model['tau'] = pymc.Lambda('tau', lambda sigma=model['sigma'] : sigma**(-2) )\n for (molecule_index, molecule) in enumerate(molecules):\n molecule_name = molecule.GetTitle()\n variable_name = \"dg_exp_%08d\" % molecule_index\n dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) # observed hydration free energy in kcal/mol\n model[variable_name] = pymc.Normal(mu=model['dg_gbvi_%08d' % molecule_index], tau=model['tau'], value=dg_exp, observed=True) \n\n return model\n\n#=============================================================================================\n# MAIN\n#=============================================================================================\n\nif __name__==\"__main__\":\n\n # Create command-line argument options.\n usage_string = \"\"\"\\\n usage: %prog --types typefile --parameters paramfile --molecules molfile\n \n example: %prog --types parameters/gbvi.types --parameters parameters/gbvi-am1bcc.parameters --molecules datasets/solvation.sdf --mcmcDb MCMC_db_name\n \n \"\"\"\n version_string = \"%prog %__version__\"\n parser = OptionParser(usage=usage_string, version=version_string)\n\n parser.add_option(\"-t\", \"--types\", metavar='TYPES',\n action=\"store\", type=\"string\", dest='atomtypes_filename', default='',\n help=\"Filename defining atomtypes as SMARTS atom matches.\")\n parser.add_option(\"-p\", \"--parameters\", metavar='PARAMETERS',\n action=\"store\", type=\"string\", dest='parameters_filename', default='',\n help=\"File containing initial parameter set.\")\n parser.add_option(\"-m\", \"--molecules\", metavar='MOLECULES',\n action=\"store\", type=\"string\", dest='molecules_filename', default='',\n help=\"Small molecule set (in any OpenEye compatible file format) containing 'dG(exp)' fields with 
experimental hydration free energies.\")\n\n parser.add_option(\"-d\", \"--mcmcDb\", metavar='MCMC_Db',\n action=\"store\", type=\"string\", dest='mcmcDb', default='',\n help=\"MCMC db name.\")\n\n \n # Parse command-line arguments.\n (options,args) = parser.parse_args()\n \n # Ensure all required options have been specified.\n if options.atomtypes_filename=='' or options.parameters_filename=='' or options.molecules_filename=='' or options.mcmcDb == '':\n parser.print_help()\n parser.error(\"All input files must be specified.\")\n\n # Read GBVI parameters.\n parameters = read_gbvi_parameters(options.parameters_filename)\n\n mcmcDbName = options.mcmcDb\n printString = \"Starting \" + sys.argv[0] + \"\\n\"\n printString += ' atom types=<' + options.atomtypes_filename + \">\\n\"\n printString += ' parameters=<' + options.parameters_filename + \">\\n\"\n printString += ' molecule=<' + options.molecules_filename + \">\\n\"\n printString += ' mcmcDB=<' + options.mcmcDb + \">\\n\"\n sys.stderr.write( printString )\n sys.stdout.write( printString )\n\n \n # Construct atom typer.\n atom_typer = AtomTyper(options.atomtypes_filename, \"gbvi_type\")\n \n # Load and type all molecules in the specified dataset.\n print \"Loading and typing all molecules in dataset...\"\n start_time = time.time()\n molecules = list()\n input_molstream = oemolistream(options.molecules_filename)\n molecule = OECreateOEGraphMol()\n while OEReadMolecule(input_molstream, molecule):\n # Get molecule name.\n name = OEGetSDData(molecule, 'name').strip()\n molecule.SetTitle(name)\n # Append to list.\n molecule_copy = OEMol(molecule)\n molecules.append(molecule_copy)\n input_molstream.close()\n print \"%d molecules read\" % len(molecules)\n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%.3f s elapsed\" % elapsed_time\n\n # Add explicit hydrogens.\n for molecule in molecules:\n openeye.oechem.OEAddExplicitHydrogens(molecule) \n\n # Build a conformation for all molecules with 
Omega.\n print \"Building conformations for all molecules...\" \n import openeye.oeomega\n omega = openeye.oeomega.OEOmega()\n omega.SetMaxConfs(1)\n omega.SetFromCT(True)\n for molecule in molecules:\n #omega.SetFixMol(molecule)\n omega(molecule)\n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%.3f s elapsed\" % elapsed_time\n\n # Regularize all molecules through writing as mol2.\n print \"Regularizing all molecules...\"\n ligand_mol2_dirname = os.path.dirname(mcmcDbName) + '/mol2'\n if( not os.path.exists( ligand_mol2_dirname ) ):\n os.makedirs(ligand_mol2_dirname)\n ligand_mol2_filename = ligand_mol2_dirname + '/temp' + os.path.basename(mcmcDbName) + '.mol2' \n\n start_time = time.time() \n omolstream = openeye.oechem.oemolostream(ligand_mol2_filename) \n for molecule in molecules:\n # Write molecule as mol2, changing molecule through normalization. \n openeye.oechem.OEWriteMolecule(omolstream, molecule)\n omolstream.close()\n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%.3f s elapsed\" % elapsed_time\n \n # Assign AM1-BCC charges.\n print \"Assigning AM1-BCC charges...\"\n start_time = time.time()\n for molecule in molecules:\n # Assign AM1-BCC charges.\n if molecule.NumAtoms() == 1:\n # Use formal charges for ions.\n OEFormalPartialCharges(molecule) \n else:\n # Assign AM1-BCC charges for multiatom molecules.\n OEAssignPartialCharges(molecule, OECharges_AM1BCC, False) # use explicit hydrogens\n # Check to make sure we ended up with partial charges.\n if OEHasPartialCharges(molecule) == False:\n print \"No charges on molecule: '%s'\" % molecule.GetTitle()\n print \"IUPAC name: %s\" % OECreateIUPACName(molecule)\n # TODO: Write molecule out\n # Delete themolecule.\n molecules.remove(molecule)\n \n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%.3f s elapsed\" % elapsed_time\n print \"%d molecules remaining\" % len(molecules)\n \n # Type all molecules with GAFF parameters.\n 
start_time = time.time()\n typed_molecules = list()\n untyped_molecules = list()\n for molecule in molecules:\n # Assign GBVI types according to SMARTS rules.\n try:\n atom_typer.assignTypes(molecule)\n typed_molecules.append(OEGraphMol(molecule))\n #atom_typer.debugTypes(molecule)\n except AtomTyper.TypingException as exception:\n print name \n print exception\n untyped_molecules.append(OEGraphMol(molecule)) \n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%d molecules correctly typed\" % (len(typed_molecules))\n print \"%d molecules missing some types\" % (len(untyped_molecules))\n print \"%.3f s elapsed\" % elapsed_time\n\n # Load updated parameter sets.\n parameter_sets = list()\n for key in parameters.keys():\n # Read parameters.\n filename = mcmcDbName + '.txt/Chain_0/%s.txt' % key\n print \"Parameter %s from file %s\" %( key, filename ) \n infile = open(filename, 'r')\n lines = infile.readlines()\n infile.close()\n # Discard header\n lines = lines[3:]\n # Insert parameter.\n for (index, line) in enumerate(lines):\n elements = line.split()\n parameter = float(elements[0])\n try:\n parameter_sets[index][key] = parameter\n except Exception:\n parameter_sets.append( dict() )\n parameter_sets[index][key] = parameter\n\n for (index, parameter_set) in enumerate([parameters] + parameter_sets): # skip some\n #for (index, parameter_set) in enumerate([parameters] + parameter_sets[::10]): # skip some\n \n # Compute energies with all molecules.\n print \"Computing all energies...\"\n start_time = time.time()\n energies = compute_hydration_energies(typed_molecules, parameter_set)\n #energies = compute_hydration_energies_parallel(typed_molecules, parameter_set)\n end_time = time.time()\n elapsed_time = end_time - start_time\n print \"%.3f s elapsed\" % elapsed_time\n\n# # Print comparison.\n# for molecule in typed_molecules:\n# # Get metadata.\n# name = OEGetSDData(molecule, 'name').strip()\n# dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * 
units.kilocalories_per_mole \n# # Form output.\n# outstring = \"%48s %8.3f %8.3f\" % (name, dg_exp / units.kilocalories_per_mole, energies[molecule] / units.kilocalories_per_mole) \n# print outstring\n\n # Print summary statistics.\n signed_errors = numpy.zeros([len(typed_molecules)], numpy.float64)\n for (i, molecule) in enumerate(typed_molecules):\n # Get metadata.\n name = OEGetSDData(molecule, 'name').strip()\n energy = energies[molecule] / units.kilocalories_per_mole\n if( math.isnan(energy) ):\n print \"%5d dG: nan %8.3f %s\" % (i, dg_exp / units.kilocalories_per_mole, name)\n else:\n try:\n dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * units.kilocalories_per_mole\n signed_errors[i] = energies[molecule] / units.kilocalories_per_mole - dg_exp / units.kilocalories_per_mole\n except:\n print \"Problem getting dG(exp) for molecule %d %s\" % (i, name)\n\n print \"iteration %8d : RMS error %8.3f kcal/mol\" % (index, signed_errors.std())\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
# animation2.py
#    multiple-shot cannonball animation

from math import sqrt, sin, cos, radians, degrees
from graphics import *
from projectile import Projectile
from button import Button

# Frames per second of the animation loop; also fixes the physics time step
# (dt = 1/FRAME_RATE) so on-screen speed matches the simulated speed.
FRAME_RATE = 30


class Launcher:
    """Graphical cannon: a red base at the origin plus an arrow whose
    direction shows the launch angle and whose length shows the velocity.
    """

    def __init__(self, win):
        """Create initial launcher with angle 45 degrees and velocity 40.

        win is the GraphWin to draw the launcher in.
        """

        # draw the base shot of the launcher
        base = Circle(Point(0, 0), 3)
        base.setFill("red")
        base.setOutline("red")
        base.draw(win)

        # save the window and create initial angle and velocity
        self.win = win
        self.angle = radians(45.0)
        self.vel = 40.0

        # create initial "dummy" arrow, then replace it with the correct one
        self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)
        self.redraw()

    def redraw(self):
        """Undraw the arrow and draw a new one for the current values of
        angle and velocity.
        """

        self.arrow.undraw()
        # arrow tip = velocity vector decomposed into x/y components
        pt2 = Point(self.vel * cos(self.angle), self.vel * sin(self.angle))
        self.arrow = Line(Point(0, 0), pt2).draw(self.win)
        self.arrow.setArrow("last")
        self.arrow.setWidth(3)

    def adjAngle(self, amt):
        """Change the launch angle by amt degrees."""

        self.angle = self.angle + radians(amt)
        self.redraw()

    def adjVel(self, amt):
        """Change the launch velocity by amt."""

        self.vel = self.vel + amt
        self.redraw()

    def fire(self):
        """Launch a projectile with the current settings.

        Returns a ShotTracker animating the shot in this launcher's window.
        """

        return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)


class ShotTracker:

    """ Graphical depiction of a projectile flight using a Circle """

    def __init__(self, win, angle, velocity, height):
        """win is the GraphWin to display the shot, angle, velocity, and
        height are initial projectile parameters.
        """

        self.proj = Projectile(angle, velocity, height)
        self.marker = Circle(Point(0, height), 3)
        self.marker.setFill("red")
        self.marker.setOutline("red")
        self.marker.draw(win)

    def update(self, dt):
        """ Move the shot dt seconds farther along its flight """

        self.proj.update(dt)
        # move the marker by the delta between the model's position and
        # where the marker is currently drawn
        center = self.marker.getCenter()
        dx = self.proj.getX() - center.getX()
        dy = self.proj.getY() - center.getY()
        self.marker.move(dx, dy)

    def getX(self):
        """ return the current x coordinate of the shot's center """
        return self.proj.getX()

    def getY(self):
        """ return the current y coordinate of the shot's center """
        return self.proj.getY()

    def undraw(self):
        """ undraw the shot """
        self.marker.undraw()


class ProjectileApp:
    """Interactive multi-shot cannonball animation.

    Keys: Up/Down adjust the launch angle, Left/Right adjust the velocity,
    'f' fires a shot, 'q' quits.
    """

    def __init__(self):
        """Set up the window, ground line, launcher, and empty shot list."""

        self.win = GraphWin("Projectile Animation", 640, 480)
        self.win.setCoords(-10, -10, 210, 155)

        # draw the ground with labeled tick marks every 50 units
        Line(Point(-10, 0), Point(210, 0)).draw(self.win)
        for x in range(0, 210, 50):
            Text(Point(x, -7), str(x)).draw(self.win)
            Line(Point(x, 0), Point(x, 2)).draw(self.win)

        self.launcher = Launcher(self.win)
        self.shots = []  # ShotTrackers currently in flight

    def updateShots(self, dt):
        """Advance every shot dt seconds; undraw and drop shots that hit
        the ground (y < 0) or leave the right edge of the field (x >= 210).
        """

        alive = []
        for shot in self.shots:
            shot.update(dt)
            if shot.getY() >= 0 and shot.getX() < 210:
                alive.append(shot)
            else:
                shot.undraw()
        self.shots = alive

    def run(self):
        """Main event/animation loop; returns after the user presses 'q'."""

        while True:
            # one physics step per frame keeps animation speed consistent
            self.updateShots(1 / FRAME_RATE)

            key = self.win.checkKey()
            if key in ["q", "Q"]:
                break

            if key == "Up":
                self.launcher.adjAngle(5)
            elif key == "Down":
                self.launcher.adjAngle(-5)
            elif key == "Right":
                self.launcher.adjVel(5)
            elif key == "Left":
                self.launcher.adjVel(-5)
            elif key == "f":
                self.shots.append(self.launcher.fire())

            update(FRAME_RATE)  # throttle the loop to FRAME_RATE frames/sec
        self.win.close()


if __name__ == "__main__":
    ProjectileApp().run()
normal
{ "blob_id": "09aedd6cab0b8c6a05bbee5b336fcd38aea1f7b9", "index": 3202, "step-1": "<mask token>\n\n\nclass Launcher:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n 
self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n <mask token>\n <mask token>\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n <mask token>\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n 
Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n <mask token>\n <mask token>\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n 
\"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n base = Circle(Point(0, 0), 3)\n base.setFill('red')\n base.setOutline('red')\n base.draw(win)\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n self.arrow = Line(Point(0, 0), Point(0, 0)).draw(win)\n self.redraw()\n\n def redraw(self):\n 
\"\"\"undraw the arrow and draw a new one for the\n current values of angle and velocity.\n \"\"\"\n self.arrow.undraw()\n pt2 = Point(self.vel * cos(self.angle), self.vel * sin(self.angle))\n self.arrow = Line(Point(0, 0), pt2).draw(self.win)\n self.arrow.setArrow('last')\n self.arrow.setWidth(3)\n\n def adjAngle(self, amt):\n \"\"\" change angle by amt degrees \"\"\"\n self.angle = self.angle + radians(amt)\n self.redraw()\n\n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n self.vel = self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n\n\nclass ShotTracker:\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0, height), 3)\n self.marker.setFill('red')\n self.marker.setOutline('red')\n self.marker.draw(win)\n\n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx, dy)\n\n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin('Projectile Animation', 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10, 0), Point(210, 0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x, -7), str(x)).draw(self.win)\n Line(Point(x, 0), Point(x, 2)).draw(self.win)\n self.launcher = 
Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n while True:\n self.updateShots(1 / 30)\n key = self.win.checkKey()\n if key in ['q', 'Q']:\n break\n if key == 'Up':\n self.launcher.adjAngle(5)\n elif key == 'Down':\n self.launcher.adjAngle(-5)\n elif key == 'Right':\n self.launcher.adjVel(5)\n elif key == 'Left':\n self.launcher.adjVel(-5)\n elif key == 'f':\n self.shots.append(self.launcher.fire())\n update(30)\n self.win.close()\n\n\n<mask token>\n", "step-5": "# animation2.py\n\n# multiple-shot cannonball animation\n\nfrom math import sqrt, sin, cos, radians, degrees\nfrom graphics import *\nfrom projectile import Projectile\nfrom button import Button\n\nclass Launcher:\n\n def __init__(self, win):\n \"\"\"Create inital launcher with angle 45 degrees and velocity 40\n win is the GraphWin to draw the launcher in.\n \"\"\"\n \n # draw the base shot of the launcher\n base = Circle(Point(0,0), 3)\n base.setFill(\"red\")\n base.setOutline(\"red\")\n base.draw(win)\n\n # save the window and create initial angle and velocity\n self.win = win\n self.angle = radians(45.0)\n self.vel = 40.0\n \n # create inital \"dummy\" arrow\n self.arrow = Line(Point(0,0), Point(0,0)).draw(win)\n # replace it with the correct arrow\n self.redraw()\n\n \n def redraw(self):\n \"\"\"undraw the arrow and draw a new one for the\n current values of angle and velocity.\n \"\"\"\n \n self.arrow.undraw()\n pt2 = Point(self.vel*cos(self.angle), self.vel*sin(self.angle))\n self.arrow = Line(Point(0,0), pt2).draw(self.win)\n self.arrow.setArrow(\"last\")\n self.arrow.setWidth(3)\n\n \n def adjAngle(self, amt):\n \"\"\" change angle by amt degrees \"\"\"\n \n self.angle = self.angle+radians(amt)\n self.redraw()\n\n \n def adjVel(self, amt):\n \"\"\" change velocity by amt\"\"\"\n \n self.vel = 
self.vel + amt\n self.redraw()\n\n def fire(self):\n return ShotTracker(self.win, degrees(self.angle), self.vel, 0.0)\n \n\nclass ShotTracker:\n\n \"\"\" Graphical depiction of a projectile flight using a Circle \"\"\"\n\n def __init__(self, win, angle, velocity, height):\n \"\"\"win is the GraphWin to display the shot, angle, velocity, and\n height are initial projectile parameters.\n \"\"\"\n \n self.proj = Projectile(angle, velocity, height)\n self.marker = Circle(Point(0,height), 3)\n self.marker.setFill(\"red\")\n self.marker.setOutline(\"red\")\n self.marker.draw(win)\n\n \n def update(self, dt):\n \"\"\" Move the shot dt seconds farther along its flight \"\"\"\n \n self.proj.update(dt)\n center = self.marker.getCenter()\n dx = self.proj.getX() - center.getX()\n dy = self.proj.getY() - center.getY()\n self.marker.move(dx,dy)\n\n \n def getX(self):\n \"\"\" return the current x coordinate of the shot's center \"\"\"\n return self.proj.getX()\n\n def getY(self):\n \"\"\" return the current y coordinate of the shot's center \"\"\"\n return self.proj.getY()\n\n def undraw(self):\n \"\"\" undraw the shot \"\"\"\n self.marker.undraw()\n\n\nclass ProjectileApp:\n\n def __init__(self):\n self.win = GraphWin(\"Projectile Animation\", 640, 480)\n self.win.setCoords(-10, -10, 210, 155)\n Line(Point(-10,0), Point(210,0)).draw(self.win)\n for x in range(0, 210, 50):\n Text(Point(x,-7), str(x)).draw(self.win)\n Line(Point(x,0), Point(x,2)).draw(self.win)\n\n self.launcher = Launcher(self.win)\n self.shots = []\n\n def updateShots(self, dt):\n alive = []\n for shot in self.shots:\n shot.update(dt)\n if shot.getY() >= 0 and shot.getX() < 210:\n alive.append(shot)\n else:\n shot.undraw()\n self.shots = alive\n\n def run(self):\n \n # main event/animation lopp\n while True:\n self.updateShots(1/30)\n \n key = self.win.checkKey()\n if key in [\"q\", \"Q\"]:\n break\n\n if key == \"Up\":\n self.launcher.adjAngle(5)\n elif key == \"Down\":\n self.launcher.adjAngle(-5)\n elif key 
== \"Right\":\n self.launcher.adjVel(5)\n elif key == \"Left\":\n self.launcher.adjVel(-5)\n elif key == \"f\":\n self.shots.append(self.launcher.fire())\n \n update(30)\n \n self.win.close()\n \n\nif __name__ == \"__main__\":\n ProjectileApp().run()\n", "step-ids": [ 12, 14, 15, 17, 20 ] }
[ 12, 14, 15, 17, 20 ]
<|reserved_special_token_0|> def callback(data): global first_a global first_d global oldvar global base_throttle global peak_throttle global base_brake global peak_brake global button axis1 = -data.axes[1] axis3 = -data.axes[3] button1 = data.buttons[1] button4 = data.buttons[4] button5 = data.buttons[5] button_ = button1 + button4 + button5 if axis1 > 0.1: bval = int(axis1 * (peak_brake - base_brake) + base_brake) print(bval) ser.write(str(bval).encode('utf-8')) ser.write('a'.encode('utf-8')) print('Brake') elif axis1 < -0.1 and axis3 < 0.1: tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle) if abs(tval - oldvar) > 5: ser.write(str(tval).encode('utf-8')) ser.write('a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Throttle') oldvar = tval elif axis1 > -0.1 and axis1 < 0.1: ser.write('4000a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Zero Throttle') print(axis1) print(axis3) if button1 == 1: print('Emergency Brake') ser.write('4600a'.encode('utf-8')) ser.write('600a'.encode('utf-8')) if button4 and button5 == 0: if first_a == 0: ser.write('1000a'.encode('utf-8')) print('Joystick button 4 pressed.') first_a = 1 if button5 and button4 == 0: if first_d == 0: ser.write('2000a'.encode('utf-8')) print('Joystick button 5 pressed.') first_d = 1 if button - button_ != 0: if button4 == 0: first_a = 0 if button5 == 0: first_d = 0 ser.write('3000a'.encode('utf-8')) print('Joystick button released.') button = button_ def start(): rospy.Subscriber('joy', Joy, callback) rospy.init_node('Joy2Turtle') rospy.spin() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> if platform == 'linux' or platform == 'linux2': ser = serial.Serial('/dev/ttyACM0') elif platform == 'darwin': pass elif platform == 'win32': ser = serial.Serial('COM16') <|reserved_special_token_0|> def callback(data): global first_a global first_d global oldvar global base_throttle global peak_throttle global 
base_brake global peak_brake global button axis1 = -data.axes[1] axis3 = -data.axes[3] button1 = data.buttons[1] button4 = data.buttons[4] button5 = data.buttons[5] button_ = button1 + button4 + button5 if axis1 > 0.1: bval = int(axis1 * (peak_brake - base_brake) + base_brake) print(bval) ser.write(str(bval).encode('utf-8')) ser.write('a'.encode('utf-8')) print('Brake') elif axis1 < -0.1 and axis3 < 0.1: tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle) if abs(tval - oldvar) > 5: ser.write(str(tval).encode('utf-8')) ser.write('a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Throttle') oldvar = tval elif axis1 > -0.1 and axis1 < 0.1: ser.write('4000a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Zero Throttle') print(axis1) print(axis3) if button1 == 1: print('Emergency Brake') ser.write('4600a'.encode('utf-8')) ser.write('600a'.encode('utf-8')) if button4 and button5 == 0: if first_a == 0: ser.write('1000a'.encode('utf-8')) print('Joystick button 4 pressed.') first_a = 1 if button5 and button4 == 0: if first_d == 0: ser.write('2000a'.encode('utf-8')) print('Joystick button 5 pressed.') first_d = 1 if button - button_ != 0: if button4 == 0: first_a = 0 if button5 == 0: first_d = 0 ser.write('3000a'.encode('utf-8')) print('Joystick button released.') button = button_ def start(): rospy.Subscriber('joy', Joy, callback) rospy.init_node('Joy2Turtle') rospy.spin() if __name__ == '__main__': start() <|reserved_special_token_1|> <|reserved_special_token_0|> if platform == 'linux' or platform == 'linux2': ser = serial.Serial('/dev/ttyACM0') elif platform == 'darwin': pass elif platform == 'win32': ser = serial.Serial('COM16') <|reserved_special_token_0|> oldvar = 0 first_a = 0 first_d = 0 base_throttle = 5500 peak_throttle = 6500 base_brake = 450 peak_brake = 600 button = 0 def callback(data): global first_a global first_d global oldvar global base_throttle global peak_throttle global base_brake global 
peak_brake global button axis1 = -data.axes[1] axis3 = -data.axes[3] button1 = data.buttons[1] button4 = data.buttons[4] button5 = data.buttons[5] button_ = button1 + button4 + button5 if axis1 > 0.1: bval = int(axis1 * (peak_brake - base_brake) + base_brake) print(bval) ser.write(str(bval).encode('utf-8')) ser.write('a'.encode('utf-8')) print('Brake') elif axis1 < -0.1 and axis3 < 0.1: tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle) if abs(tval - oldvar) > 5: ser.write(str(tval).encode('utf-8')) ser.write('a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Throttle') oldvar = tval elif axis1 > -0.1 and axis1 < 0.1: ser.write('4000a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Zero Throttle') print(axis1) print(axis3) if button1 == 1: print('Emergency Brake') ser.write('4600a'.encode('utf-8')) ser.write('600a'.encode('utf-8')) if button4 and button5 == 0: if first_a == 0: ser.write('1000a'.encode('utf-8')) print('Joystick button 4 pressed.') first_a = 1 if button5 and button4 == 0: if first_d == 0: ser.write('2000a'.encode('utf-8')) print('Joystick button 5 pressed.') first_d = 1 if button - button_ != 0: if button4 == 0: first_a = 0 if button5 == 0: first_d = 0 ser.write('3000a'.encode('utf-8')) print('Joystick button released.') button = button_ def start(): rospy.Subscriber('joy', Joy, callback) rospy.init_node('Joy2Turtle') rospy.spin() if __name__ == '__main__': start() <|reserved_special_token_1|> import rospy from geometry_msgs.msg import Twist from sensor_msgs.msg import Joy import serial from sys import platform if platform == 'linux' or platform == 'linux2': ser = serial.Serial('/dev/ttyACM0') elif platform == 'darwin': pass elif platform == 'win32': ser = serial.Serial('COM16') <|reserved_special_token_0|> oldvar = 0 first_a = 0 first_d = 0 base_throttle = 5500 peak_throttle = 6500 base_brake = 450 peak_brake = 600 button = 0 def callback(data): global first_a global first_d global 
oldvar global base_throttle global peak_throttle global base_brake global peak_brake global button axis1 = -data.axes[1] axis3 = -data.axes[3] button1 = data.buttons[1] button4 = data.buttons[4] button5 = data.buttons[5] button_ = button1 + button4 + button5 if axis1 > 0.1: bval = int(axis1 * (peak_brake - base_brake) + base_brake) print(bval) ser.write(str(bval).encode('utf-8')) ser.write('a'.encode('utf-8')) print('Brake') elif axis1 < -0.1 and axis3 < 0.1: tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle) if abs(tval - oldvar) > 5: ser.write(str(tval).encode('utf-8')) ser.write('a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Throttle') oldvar = tval elif axis1 > -0.1 and axis1 < 0.1: ser.write('4000a'.encode('utf-8')) ser.write('450a'.encode('utf-8')) print('Zero Throttle') print(axis1) print(axis3) if button1 == 1: print('Emergency Brake') ser.write('4600a'.encode('utf-8')) ser.write('600a'.encode('utf-8')) if button4 and button5 == 0: if first_a == 0: ser.write('1000a'.encode('utf-8')) print('Joystick button 4 pressed.') first_a = 1 if button5 and button4 == 0: if first_d == 0: ser.write('2000a'.encode('utf-8')) print('Joystick button 5 pressed.') first_d = 1 if button - button_ != 0: if button4 == 0: first_a = 0 if button5 == 0: first_d = 0 ser.write('3000a'.encode('utf-8')) print('Joystick button released.') button = button_ def start(): rospy.Subscriber('joy', Joy, callback) rospy.init_node('Joy2Turtle') rospy.spin() if __name__ == '__main__': start() <|reserved_special_token_1|> #!/usr/bin/env python import rospy from geometry_msgs.msg import Twist from sensor_msgs.msg import Joy import serial from sys import platform if platform == "linux" or platform == "linux2": ser = serial.Serial('/dev/ttyACM0') elif platform == "darwin": pass elif platform == "win32": # Windows... 
ser = serial.Serial('COM16') """ In this test code we are testing basic vehicle control over the network we use ROS middleware to send the control commands This script runs at the remote driver end. Receives joystick messages (subscribed to Joy topic) then converts the joystick inputs into commands WE ARE NOT USING THIS METHOD NOW --- WE HAVE SEPERATED OUT ALL THE STREAMS FROM THE JOYSTICK """ oldvar = 0 first_a = 0 first_d = 0 # Configuatrion tuned for CAR in LOW speed base_throttle = 5500 peak_throttle = 6500 base_brake = 450 peak_brake = 600 button = 0 def callback(data): global first_a global first_d global oldvar global base_throttle global peak_throttle global base_brake global peak_brake global button # print data axis1 = -data.axes[1] axis3 = -data.axes[3] # in logitech axis 3 is axis 4 confirm with ashish button1 = data.buttons[1] button4 = data.buttons[4] button5 = data.buttons[5] button_ = button1+button4+button5 if axis1 > 0.1: bval = int((axis1) * (peak_brake - base_brake) + base_brake) print(bval) ser.write(str(bval).encode('utf-8')) ser.write("a".encode('utf-8')) #### ser.write("4000a".encode('utf-8')) #throttle released on braking print("Brake") elif (axis1 < -0.1 and axis3 < 0.1): tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle) if (abs(tval - oldvar) > 5): #print(tval) ser.write(str(tval).encode('utf-8')) ser.write("a".encode('utf-8')) ser.write("450a".encode('utf-8')) # brake released on acceleration print("Throttle") oldvar = tval elif (axis1 > -0.1 and axis1 < 0.1): ser.write("4000a".encode('utf-8')) ser.write("450a".encode('utf-8')) # brake released print("Zero Throttle") print (axis1) print (axis3) if button1 == 1: print("Emergency Brake") ser.write("4600a".encode('utf-8')) # throttle released ser.write("600a".encode('utf-8')) # brake engaged if (button4 and button5 == 0): if (first_a == 0): ser.write("1000a".encode('utf-8')) print("Joystick button 4 pressed.") first_a = 1 if (button5 and button4 
== 0): if (first_d == 0): ser.write("2000a".encode('utf-8')) print("Joystick button 5 pressed.") first_d = 1 if(button-button_!= 0): if(button4 == 0): first_a = 0 if(button5 == 0): first_d = 0 ser.write("3000a".encode('utf-8')) print("Joystick button released.") button = button_ # Intializes everything def start(): rospy.Subscriber("joy", Joy, callback) # starts the node rospy.init_node('Joy2Turtle') rospy.spin() if __name__ == '__main__': start()
flexible
{ "blob_id": "14a357f3dfb3d59f1d8cfd566edeaf8b0e5bb56d", "index": 374, "step-1": "<mask token>\n\n\ndef callback(data):\n global first_a\n global first_d\n global oldvar\n global base_throttle\n global peak_throttle\n global base_brake\n global peak_brake\n global button\n axis1 = -data.axes[1]\n axis3 = -data.axes[3]\n button1 = data.buttons[1]\n button4 = data.buttons[4]\n button5 = data.buttons[5]\n button_ = button1 + button4 + button5\n if axis1 > 0.1:\n bval = int(axis1 * (peak_brake - base_brake) + base_brake)\n print(bval)\n ser.write(str(bval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n print('Brake')\n elif axis1 < -0.1 and axis3 < 0.1:\n tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle -\n base_throttle) * 0.5 + base_throttle)\n if abs(tval - oldvar) > 5:\n ser.write(str(tval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Throttle')\n oldvar = tval\n elif axis1 > -0.1 and axis1 < 0.1:\n ser.write('4000a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Zero Throttle')\n print(axis1)\n print(axis3)\n if button1 == 1:\n print('Emergency Brake')\n ser.write('4600a'.encode('utf-8'))\n ser.write('600a'.encode('utf-8'))\n if button4 and button5 == 0:\n if first_a == 0:\n ser.write('1000a'.encode('utf-8'))\n print('Joystick button 4 pressed.')\n first_a = 1\n if button5 and button4 == 0:\n if first_d == 0:\n ser.write('2000a'.encode('utf-8'))\n print('Joystick button 5 pressed.')\n first_d = 1\n if button - button_ != 0:\n if button4 == 0:\n first_a = 0\n if button5 == 0:\n first_d = 0\n ser.write('3000a'.encode('utf-8'))\n print('Joystick button released.')\n button = button_\n\n\ndef start():\n rospy.Subscriber('joy', Joy, callback)\n rospy.init_node('Joy2Turtle')\n rospy.spin()\n\n\n<mask token>\n", "step-2": "<mask token>\nif platform == 'linux' or platform == 'linux2':\n ser = serial.Serial('/dev/ttyACM0')\nelif platform == 'darwin':\n pass\nelif platform == 'win32':\n ser = 
serial.Serial('COM16')\n<mask token>\n\n\ndef callback(data):\n global first_a\n global first_d\n global oldvar\n global base_throttle\n global peak_throttle\n global base_brake\n global peak_brake\n global button\n axis1 = -data.axes[1]\n axis3 = -data.axes[3]\n button1 = data.buttons[1]\n button4 = data.buttons[4]\n button5 = data.buttons[5]\n button_ = button1 + button4 + button5\n if axis1 > 0.1:\n bval = int(axis1 * (peak_brake - base_brake) + base_brake)\n print(bval)\n ser.write(str(bval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n print('Brake')\n elif axis1 < -0.1 and axis3 < 0.1:\n tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle -\n base_throttle) * 0.5 + base_throttle)\n if abs(tval - oldvar) > 5:\n ser.write(str(tval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Throttle')\n oldvar = tval\n elif axis1 > -0.1 and axis1 < 0.1:\n ser.write('4000a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Zero Throttle')\n print(axis1)\n print(axis3)\n if button1 == 1:\n print('Emergency Brake')\n ser.write('4600a'.encode('utf-8'))\n ser.write('600a'.encode('utf-8'))\n if button4 and button5 == 0:\n if first_a == 0:\n ser.write('1000a'.encode('utf-8'))\n print('Joystick button 4 pressed.')\n first_a = 1\n if button5 and button4 == 0:\n if first_d == 0:\n ser.write('2000a'.encode('utf-8'))\n print('Joystick button 5 pressed.')\n first_d = 1\n if button - button_ != 0:\n if button4 == 0:\n first_a = 0\n if button5 == 0:\n first_d = 0\n ser.write('3000a'.encode('utf-8'))\n print('Joystick button released.')\n button = button_\n\n\ndef start():\n rospy.Subscriber('joy', Joy, callback)\n rospy.init_node('Joy2Turtle')\n rospy.spin()\n\n\nif __name__ == '__main__':\n start()\n", "step-3": "<mask token>\nif platform == 'linux' or platform == 'linux2':\n ser = serial.Serial('/dev/ttyACM0')\nelif platform == 'darwin':\n pass\nelif platform == 'win32':\n ser = serial.Serial('COM16')\n<mask 
token>\noldvar = 0\nfirst_a = 0\nfirst_d = 0\nbase_throttle = 5500\npeak_throttle = 6500\nbase_brake = 450\npeak_brake = 600\nbutton = 0\n\n\ndef callback(data):\n global first_a\n global first_d\n global oldvar\n global base_throttle\n global peak_throttle\n global base_brake\n global peak_brake\n global button\n axis1 = -data.axes[1]\n axis3 = -data.axes[3]\n button1 = data.buttons[1]\n button4 = data.buttons[4]\n button5 = data.buttons[5]\n button_ = button1 + button4 + button5\n if axis1 > 0.1:\n bval = int(axis1 * (peak_brake - base_brake) + base_brake)\n print(bval)\n ser.write(str(bval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n print('Brake')\n elif axis1 < -0.1 and axis3 < 0.1:\n tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle -\n base_throttle) * 0.5 + base_throttle)\n if abs(tval - oldvar) > 5:\n ser.write(str(tval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Throttle')\n oldvar = tval\n elif axis1 > -0.1 and axis1 < 0.1:\n ser.write('4000a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Zero Throttle')\n print(axis1)\n print(axis3)\n if button1 == 1:\n print('Emergency Brake')\n ser.write('4600a'.encode('utf-8'))\n ser.write('600a'.encode('utf-8'))\n if button4 and button5 == 0:\n if first_a == 0:\n ser.write('1000a'.encode('utf-8'))\n print('Joystick button 4 pressed.')\n first_a = 1\n if button5 and button4 == 0:\n if first_d == 0:\n ser.write('2000a'.encode('utf-8'))\n print('Joystick button 5 pressed.')\n first_d = 1\n if button - button_ != 0:\n if button4 == 0:\n first_a = 0\n if button5 == 0:\n first_d = 0\n ser.write('3000a'.encode('utf-8'))\n print('Joystick button released.')\n button = button_\n\n\ndef start():\n rospy.Subscriber('joy', Joy, callback)\n rospy.init_node('Joy2Turtle')\n rospy.spin()\n\n\nif __name__ == '__main__':\n start()\n", "step-4": "import rospy\nfrom geometry_msgs.msg import Twist\nfrom sensor_msgs.msg import Joy\nimport serial\nfrom sys 
import platform\nif platform == 'linux' or platform == 'linux2':\n ser = serial.Serial('/dev/ttyACM0')\nelif platform == 'darwin':\n pass\nelif platform == 'win32':\n ser = serial.Serial('COM16')\n<mask token>\noldvar = 0\nfirst_a = 0\nfirst_d = 0\nbase_throttle = 5500\npeak_throttle = 6500\nbase_brake = 450\npeak_brake = 600\nbutton = 0\n\n\ndef callback(data):\n global first_a\n global first_d\n global oldvar\n global base_throttle\n global peak_throttle\n global base_brake\n global peak_brake\n global button\n axis1 = -data.axes[1]\n axis3 = -data.axes[3]\n button1 = data.buttons[1]\n button4 = data.buttons[4]\n button5 = data.buttons[5]\n button_ = button1 + button4 + button5\n if axis1 > 0.1:\n bval = int(axis1 * (peak_brake - base_brake) + base_brake)\n print(bval)\n ser.write(str(bval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n print('Brake')\n elif axis1 < -0.1 and axis3 < 0.1:\n tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle -\n base_throttle) * 0.5 + base_throttle)\n if abs(tval - oldvar) > 5:\n ser.write(str(tval).encode('utf-8'))\n ser.write('a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Throttle')\n oldvar = tval\n elif axis1 > -0.1 and axis1 < 0.1:\n ser.write('4000a'.encode('utf-8'))\n ser.write('450a'.encode('utf-8'))\n print('Zero Throttle')\n print(axis1)\n print(axis3)\n if button1 == 1:\n print('Emergency Brake')\n ser.write('4600a'.encode('utf-8'))\n ser.write('600a'.encode('utf-8'))\n if button4 and button5 == 0:\n if first_a == 0:\n ser.write('1000a'.encode('utf-8'))\n print('Joystick button 4 pressed.')\n first_a = 1\n if button5 and button4 == 0:\n if first_d == 0:\n ser.write('2000a'.encode('utf-8'))\n print('Joystick button 5 pressed.')\n first_d = 1\n if button - button_ != 0:\n if button4 == 0:\n first_a = 0\n if button5 == 0:\n first_d = 0\n ser.write('3000a'.encode('utf-8'))\n print('Joystick button released.')\n button = button_\n\n\ndef start():\n rospy.Subscriber('joy', Joy, callback)\n 
rospy.init_node('Joy2Turtle')\n rospy.spin()\n\n\nif __name__ == '__main__':\n start()\n", "step-5": "#!/usr/bin/env python\nimport rospy\nfrom geometry_msgs.msg import Twist\nfrom sensor_msgs.msg import Joy\n\nimport serial\nfrom sys import platform\nif platform == \"linux\" or platform == \"linux2\":\n ser = serial.Serial('/dev/ttyACM0')\nelif platform == \"darwin\":\n pass\nelif platform == \"win32\":\n # Windows...\n ser = serial.Serial('COM16')\n\"\"\"\nIn this test code we are testing basic vehicle control over the network\nwe use ROS middleware to send the control commands \nThis script runs at the remote driver end. \nReceives joystick messages (subscribed to Joy topic)\nthen converts the joystick inputs into commands\n\nWE ARE NOT USING THIS METHOD NOW \n--- WE HAVE SEPERATED OUT ALL THE STREAMS FROM THE JOYSTICK\n\n\"\"\"\n\noldvar = 0\nfirst_a = 0\nfirst_d = 0\n# Configuatrion tuned for CAR in LOW speed\nbase_throttle = 5500\npeak_throttle = 6500\nbase_brake = 450\npeak_brake = 600\nbutton = 0\n\n\ndef callback(data):\n global first_a\n global first_d\n global oldvar\n global base_throttle\n global peak_throttle\n global base_brake\n global peak_brake\n global button\n # print data\n axis1 = -data.axes[1]\n axis3 = -data.axes[3] # in logitech axis 3 is axis 4 confirm with ashish\n button1 = data.buttons[1]\n button4 = data.buttons[4]\n button5 = data.buttons[5]\n\n button_ = button1+button4+button5\n\n if axis1 > 0.1:\n bval = int((axis1) * (peak_brake - base_brake) + base_brake)\n print(bval)\n ser.write(str(bval).encode('utf-8'))\n ser.write(\"a\".encode('utf-8'))\n #### ser.write(\"4000a\".encode('utf-8')) #throttle released on braking\n print(\"Brake\")\n elif (axis1 < -0.1 and axis3 < 0.1):\n tval = int((axis1 * -1 + axis3 * -1) * (peak_throttle - base_throttle) * 0.5 + base_throttle)\n if (abs(tval - oldvar) > 5):\n #print(tval)\n ser.write(str(tval).encode('utf-8'))\n ser.write(\"a\".encode('utf-8'))\n ser.write(\"450a\".encode('utf-8')) # brake 
released on acceleration\n print(\"Throttle\")\n oldvar = tval\n elif (axis1 > -0.1 and axis1 < 0.1):\n ser.write(\"4000a\".encode('utf-8'))\n ser.write(\"450a\".encode('utf-8')) # brake released\n print(\"Zero Throttle\")\n print (axis1)\n print (axis3)\n\n if button1 == 1:\n print(\"Emergency Brake\")\n ser.write(\"4600a\".encode('utf-8')) # throttle released\n ser.write(\"600a\".encode('utf-8')) # brake engaged\n\n if (button4 and button5 == 0):\n if (first_a == 0):\n ser.write(\"1000a\".encode('utf-8'))\n print(\"Joystick button 4 pressed.\")\n first_a = 1\n if (button5 and button4 == 0):\n if (first_d == 0):\n ser.write(\"2000a\".encode('utf-8'))\n print(\"Joystick button 5 pressed.\")\n first_d = 1\n\n if(button-button_!= 0):\n if(button4 == 0):\n first_a = 0\n if(button5 == 0):\n first_d = 0\n ser.write(\"3000a\".encode('utf-8'))\n print(\"Joystick button released.\")\n button = button_\n\n# Intializes everything\ndef start():\n rospy.Subscriber(\"joy\", Joy, callback)\n # starts the node\n rospy.init_node('Joy2Turtle')\n rospy.spin()\n\n\nif __name__ == '__main__':\n start()\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2017-12-13 02:06 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import uuid class Migration(migrations.Migration): initial = True dependencies = [ ('stores', '0001_initial'), ] operations = [ migrations.CreateModel( name='Assistants', fields=[ ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), ('name_assistants', models.CharField(max_length=255)), ('phone_assistants', models.IntegerField()), ('email_assistants', models.EmailField(max_length=254)), ('address_assistants', models.TextField()), ('timestamp', models.DateField(auto_now=True)), ('fkstore', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assistants', to='stores.Store')), ], ), ]
normal
{ "blob_id": "e95de58828c63dc8ae24efff314665a308f6ce0c", "index": 983, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('stores', '0001_initial')]\n operations = [migrations.CreateModel(name='Assistants', fields=[('id',\n models.UUIDField(default=uuid.uuid4, editable=False, primary_key=\n True, serialize=False)), ('name_assistants', models.CharField(\n max_length=255)), ('phone_assistants', models.IntegerField()), (\n 'email_assistants', models.EmailField(max_length=254)), (\n 'address_assistants', models.TextField()), ('timestamp', models.\n DateField(auto_now=True)), ('fkstore', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, related_name='assistants', to=\n 'stores.Store'))])]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport uuid\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('stores', '0001_initial')]\n operations = [migrations.CreateModel(name='Assistants', fields=[('id',\n models.UUIDField(default=uuid.uuid4, editable=False, primary_key=\n True, serialize=False)), ('name_assistants', models.CharField(\n max_length=255)), ('phone_assistants', models.IntegerField()), (\n 'email_assistants', models.EmailField(max_length=254)), (\n 'address_assistants', models.TextField()), ('timestamp', models.\n DateField(auto_now=True)), ('fkstore', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, related_name='assistants', to=\n 'stores.Store'))])]\n", "step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.7 on 2017-12-13 02:06\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport uuid\n\n\nclass Migration(migrations.Migration):\n\n initial = 
True\n\n dependencies = [\n ('stores', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Assistants',\n fields=[\n ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),\n ('name_assistants', models.CharField(max_length=255)),\n ('phone_assistants', models.IntegerField()),\n ('email_assistants', models.EmailField(max_length=254)),\n ('address_assistants', models.TextField()),\n ('timestamp', models.DateField(auto_now=True)),\n ('fkstore', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assistants', to='stores.Store')),\n ],\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/env python # -*- coding: utf-8 -*- import functools import os import platform import sys import webbrowser import config from pushbullet import Pushbullet class Zui: def __init__(self): self.pb = Pushbullet(self.api_key()) self.target = self.make_devices() self.dayone = config.URL_SCHEME self.clear, self.pause = self.check_platform() def api_key(self): if config.API_KEY: return config.API_KEY else: webbrowser.open('https://www.pushbullet.com/account') API_KEY = input('Copy and Paste Access Token: ') self.config_setting(API_KEY) return API_KEY def config_setting(self, api_key): with open('config.py', 'r') as rf: setting = rf.readlines() setting[0] = 'API_KEY = "{0}"\n'.format(api_key) with open('config.py', 'w') as wf: wf.writelines(setting) wf.flush() def make_devices(self): for d in self.pb.devices: if config.PUSH_TARGET == d.nickname: return d else: new_device = self.pb.new_device(config.PUSH_TARGET) # model argument was not used, only nickname self.pb.edit_device( new_device, nickname=config.PUSH_TARGET, model=config.PUSH_TARGET ) self.make_devices() def clear_notepad(f): functools.wraps(f) def wraps(*args): os.system(args[0].clear) result = f(*args) os.system(args[0].clear) return result return wraps @clear_notepad def push_to_dayone(self): '''Pushbullet couldn't link then whitespace in URL. So, it doesn't push_link, just push_note. Unavilable DayOne URL shceme. 
''' try: # body = self.dayone + self.notepad() body = self.notepad() return self.pb.push_note('', body, device=self.target) except KeyboardInterrupt as e: return False def notepad(self): try: print('Push: {}, Close: C-c'.format(self.pause)) lines = [line for line in sys.stdin.readlines()] return ''.join(lines) except KeyboardInterrupt as e: raise e def check_platform(self): cp = { 'Windows': ( 'CLS', 'C-z' ), 'Darwin': ( 'clear', 'C-d' ), } return cp[platform.system()][0], cp[platform.system()][1] def main(): z = Zui() while z.push_to_dayone(): pass else: print('Bye.') if __name__ == '__main__': main()
normal
{ "blob_id": "66cc9ca3d8cbe9690da841e43cef217f3518122c", "index": 7939, "step-1": "<mask token>\n\n\nclass Zui:\n\n def __init__(self):\n self.pb = Pushbullet(self.api_key())\n self.target = self.make_devices()\n self.dayone = config.URL_SCHEME\n self.clear, self.pause = self.check_platform()\n\n def api_key(self):\n if config.API_KEY:\n return config.API_KEY\n else:\n webbrowser.open('https://www.pushbullet.com/account')\n API_KEY = input('Copy and Paste Access Token: ')\n self.config_setting(API_KEY)\n return API_KEY\n\n def config_setting(self, api_key):\n with open('config.py', 'r') as rf:\n setting = rf.readlines()\n setting[0] = 'API_KEY = \"{0}\"\\n'.format(api_key)\n with open('config.py', 'w') as wf:\n wf.writelines(setting)\n wf.flush()\n\n def make_devices(self):\n for d in self.pb.devices:\n if config.PUSH_TARGET == d.nickname:\n return d\n else:\n new_device = self.pb.new_device(config.PUSH_TARGET)\n self.pb.edit_device(new_device, nickname=config.PUSH_TARGET,\n model=config.PUSH_TARGET)\n self.make_devices()\n\n def clear_notepad(f):\n functools.wraps(f)\n\n def wraps(*args):\n os.system(args[0].clear)\n result = f(*args)\n os.system(args[0].clear)\n return result\n return wraps\n\n @clear_notepad\n def push_to_dayone(self):\n \"\"\"Pushbullet couldn't link then whitespace in URL.\n So, it doesn't push_link, just push_note.\n Unavilable DayOne URL shceme.\n \"\"\"\n try:\n body = self.notepad()\n return self.pb.push_note('', body, device=self.target)\n except KeyboardInterrupt as e:\n return False\n <mask token>\n\n def check_platform(self):\n cp = {'Windows': ('CLS', 'C-z'), 'Darwin': ('clear', 'C-d')}\n return cp[platform.system()][0], cp[platform.system()][1]\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Zui:\n\n def __init__(self):\n self.pb = Pushbullet(self.api_key())\n self.target = self.make_devices()\n self.dayone = config.URL_SCHEME\n self.clear, self.pause = self.check_platform()\n\n def api_key(self):\n if config.API_KEY:\n 
return config.API_KEY\n else:\n webbrowser.open('https://www.pushbullet.com/account')\n API_KEY = input('Copy and Paste Access Token: ')\n self.config_setting(API_KEY)\n return API_KEY\n\n def config_setting(self, api_key):\n with open('config.py', 'r') as rf:\n setting = rf.readlines()\n setting[0] = 'API_KEY = \"{0}\"\\n'.format(api_key)\n with open('config.py', 'w') as wf:\n wf.writelines(setting)\n wf.flush()\n\n def make_devices(self):\n for d in self.pb.devices:\n if config.PUSH_TARGET == d.nickname:\n return d\n else:\n new_device = self.pb.new_device(config.PUSH_TARGET)\n self.pb.edit_device(new_device, nickname=config.PUSH_TARGET,\n model=config.PUSH_TARGET)\n self.make_devices()\n\n def clear_notepad(f):\n functools.wraps(f)\n\n def wraps(*args):\n os.system(args[0].clear)\n result = f(*args)\n os.system(args[0].clear)\n return result\n return wraps\n\n @clear_notepad\n def push_to_dayone(self):\n \"\"\"Pushbullet couldn't link then whitespace in URL.\n So, it doesn't push_link, just push_note.\n Unavilable DayOne URL shceme.\n \"\"\"\n try:\n body = self.notepad()\n return self.pb.push_note('', body, device=self.target)\n except KeyboardInterrupt as e:\n return False\n\n def notepad(self):\n try:\n print('Push: {}, Close: C-c'.format(self.pause))\n lines = [line for line in sys.stdin.readlines()]\n return ''.join(lines)\n except KeyboardInterrupt as e:\n raise e\n\n def check_platform(self):\n cp = {'Windows': ('CLS', 'C-z'), 'Darwin': ('clear', 'C-d')}\n return cp[platform.system()][0], cp[platform.system()][1]\n\n\ndef main():\n z = Zui()\n while z.push_to_dayone():\n pass\n else:\n print('Bye.')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Zui:\n\n def __init__(self):\n self.pb = Pushbullet(self.api_key())\n self.target = self.make_devices()\n self.dayone = config.URL_SCHEME\n self.clear, self.pause = self.check_platform()\n\n def api_key(self):\n if config.API_KEY:\n return config.API_KEY\n else:\n 
webbrowser.open('https://www.pushbullet.com/account')\n API_KEY = input('Copy and Paste Access Token: ')\n self.config_setting(API_KEY)\n return API_KEY\n\n def config_setting(self, api_key):\n with open('config.py', 'r') as rf:\n setting = rf.readlines()\n setting[0] = 'API_KEY = \"{0}\"\\n'.format(api_key)\n with open('config.py', 'w') as wf:\n wf.writelines(setting)\n wf.flush()\n\n def make_devices(self):\n for d in self.pb.devices:\n if config.PUSH_TARGET == d.nickname:\n return d\n else:\n new_device = self.pb.new_device(config.PUSH_TARGET)\n self.pb.edit_device(new_device, nickname=config.PUSH_TARGET,\n model=config.PUSH_TARGET)\n self.make_devices()\n\n def clear_notepad(f):\n functools.wraps(f)\n\n def wraps(*args):\n os.system(args[0].clear)\n result = f(*args)\n os.system(args[0].clear)\n return result\n return wraps\n\n @clear_notepad\n def push_to_dayone(self):\n \"\"\"Pushbullet couldn't link then whitespace in URL.\n So, it doesn't push_link, just push_note.\n Unavilable DayOne URL shceme.\n \"\"\"\n try:\n body = self.notepad()\n return self.pb.push_note('', body, device=self.target)\n except KeyboardInterrupt as e:\n return False\n\n def notepad(self):\n try:\n print('Push: {}, Close: C-c'.format(self.pause))\n lines = [line for line in sys.stdin.readlines()]\n return ''.join(lines)\n except KeyboardInterrupt as e:\n raise e\n\n def check_platform(self):\n cp = {'Windows': ('CLS', 'C-z'), 'Darwin': ('clear', 'C-d')}\n return cp[platform.system()][0], cp[platform.system()][1]\n\n\ndef main():\n z = Zui()\n while z.push_to_dayone():\n pass\n else:\n print('Bye.')\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "import functools\nimport os\nimport platform\nimport sys\nimport webbrowser\nimport config\nfrom pushbullet import Pushbullet\n\n\nclass Zui:\n\n def __init__(self):\n self.pb = Pushbullet(self.api_key())\n self.target = self.make_devices()\n self.dayone = config.URL_SCHEME\n self.clear, self.pause = self.check_platform()\n\n def 
api_key(self):\n if config.API_KEY:\n return config.API_KEY\n else:\n webbrowser.open('https://www.pushbullet.com/account')\n API_KEY = input('Copy and Paste Access Token: ')\n self.config_setting(API_KEY)\n return API_KEY\n\n def config_setting(self, api_key):\n with open('config.py', 'r') as rf:\n setting = rf.readlines()\n setting[0] = 'API_KEY = \"{0}\"\\n'.format(api_key)\n with open('config.py', 'w') as wf:\n wf.writelines(setting)\n wf.flush()\n\n def make_devices(self):\n for d in self.pb.devices:\n if config.PUSH_TARGET == d.nickname:\n return d\n else:\n new_device = self.pb.new_device(config.PUSH_TARGET)\n self.pb.edit_device(new_device, nickname=config.PUSH_TARGET,\n model=config.PUSH_TARGET)\n self.make_devices()\n\n def clear_notepad(f):\n functools.wraps(f)\n\n def wraps(*args):\n os.system(args[0].clear)\n result = f(*args)\n os.system(args[0].clear)\n return result\n return wraps\n\n @clear_notepad\n def push_to_dayone(self):\n \"\"\"Pushbullet couldn't link then whitespace in URL.\n So, it doesn't push_link, just push_note.\n Unavilable DayOne URL shceme.\n \"\"\"\n try:\n body = self.notepad()\n return self.pb.push_note('', body, device=self.target)\n except KeyboardInterrupt as e:\n return False\n\n def notepad(self):\n try:\n print('Push: {}, Close: C-c'.format(self.pause))\n lines = [line for line in sys.stdin.readlines()]\n return ''.join(lines)\n except KeyboardInterrupt as e:\n raise e\n\n def check_platform(self):\n cp = {'Windows': ('CLS', 'C-z'), 'Darwin': ('clear', 'C-d')}\n return cp[platform.system()][0], cp[platform.system()][1]\n\n\ndef main():\n z = Zui()\n while z.push_to_dayone():\n pass\n else:\n print('Bye.')\n\n\nif __name__ == '__main__':\n main()\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport functools\nimport os\nimport platform\nimport sys\nimport webbrowser\n\nimport config\nfrom pushbullet import Pushbullet\n\n\nclass Zui:\n\n def __init__(self):\n self.pb = Pushbullet(self.api_key())\n 
self.target = self.make_devices()\n self.dayone = config.URL_SCHEME\n self.clear, self.pause = self.check_platform()\n\n def api_key(self):\n if config.API_KEY:\n return config.API_KEY\n else:\n webbrowser.open('https://www.pushbullet.com/account')\n API_KEY = input('Copy and Paste Access Token: ')\n self.config_setting(API_KEY)\n return API_KEY\n\n def config_setting(self, api_key):\n with open('config.py', 'r') as rf:\n setting = rf.readlines()\n setting[0] = 'API_KEY = \"{0}\"\\n'.format(api_key)\n with open('config.py', 'w') as wf:\n wf.writelines(setting)\n wf.flush()\n\n def make_devices(self):\n for d in self.pb.devices:\n if config.PUSH_TARGET == d.nickname:\n return d\n else:\n new_device = self.pb.new_device(config.PUSH_TARGET)\n # model argument was not used, only nickname\n self.pb.edit_device(\n new_device,\n nickname=config.PUSH_TARGET,\n model=config.PUSH_TARGET\n )\n self.make_devices()\n\n def clear_notepad(f):\n functools.wraps(f)\n def wraps(*args):\n os.system(args[0].clear)\n result = f(*args)\n os.system(args[0].clear)\n return result\n return wraps\n\n @clear_notepad\n def push_to_dayone(self):\n '''Pushbullet couldn't link then whitespace in URL.\n So, it doesn't push_link, just push_note.\n Unavilable DayOne URL shceme.\n '''\n try:\n # body = self.dayone + self.notepad()\n body = self.notepad()\n return self.pb.push_note('', body, device=self.target)\n except KeyboardInterrupt as e:\n return False\n\n def notepad(self):\n try:\n print('Push: {}, Close: C-c'.format(self.pause))\n lines = [line for line in sys.stdin.readlines()]\n return ''.join(lines)\n except KeyboardInterrupt as e:\n raise e\n\n def check_platform(self):\n cp = {\n 'Windows': (\n 'CLS',\n 'C-z'\n ),\n 'Darwin': (\n 'clear',\n 'C-d'\n ),\n }\n return cp[platform.system()][0], cp[platform.system()][1]\n\n\ndef main():\n z = Zui()\n while z.push_to_dayone():\n pass\n else:\n print('Bye.')\n\n\nif __name__ == '__main__':\n main()\n", "step-ids": [ 8, 10, 11, 12, 13 ] }
[ 8, 10, 11, 12, 13 ]
<|reserved_special_token_0|> class TestVerified(TestCase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class TestTrusted(TestCase): def setUp(self): self.instance = Trusted() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestRevoked(TestCase): def setUp(self): self.instance = Revoked() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) <|reserved_special_token_1|> <|reserved_special_token_0|> class TestStatement(TestCase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class TestVerified(TestCase): def setUp(self): self.instance = Verified() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestTrusted(TestCase): def setUp(self): self.instance = Trusted() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestRevoked(TestCase): def setUp(self): self.instance = Revoked() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) <|reserved_special_token_1|> <|reserved_special_token_0|> class TestStatement(TestCase): <|reserved_special_token_0|> def tearDown(self): del self.instance <|reserved_special_token_0|> class TestVerified(TestCase): def setUp(self): self.instance = Verified() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestTrusted(TestCase): def setUp(self): self.instance = Trusted() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestRevoked(TestCase): def setUp(self): self.instance = Revoked() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) 
<|reserved_special_token_1|> from unittest import TestCase from angelos.document.statements import Statement, Verified, Trusted, Revoked class TestStatement(TestCase): def setUp(self): self.instance = Statement() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestVerified(TestCase): def setUp(self): self.instance = Verified() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestTrusted(TestCase): def setUp(self): self.instance = Trusted() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestRevoked(TestCase): def setUp(self): self.instance = Revoked() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) <|reserved_special_token_1|> # # Copyright (c) 2018-2020 by Kristoffer Paulsson <kristoffer.paulsson@talenten.se>. # # This software is available under the terms of the MIT license. Parts are licensed under # different terms if stated. 
The legal terms are attached to the LICENSE file and are # made available on: # # https://opensource.org/licenses/MIT # # SPDX-License-Identifier: MIT # # Contributors: # Kristoffer Paulsson - initial implementation # from unittest import TestCase from angelos.document.statements import Statement, Verified, Trusted, Revoked class TestStatement(TestCase): def setUp(self): self.instance = Statement() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestVerified(TestCase): def setUp(self): self.instance = Verified() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestTrusted(TestCase): def setUp(self): self.instance = Trusted() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules()) class TestRevoked(TestCase): def setUp(self): self.instance = Revoked() def tearDown(self): del self.instance def test_apply_rules(self): self.assertTrue(self.instance.apply_rules())
flexible
{ "blob_id": "f494dc99febfad99b371d72f542556a9024bc27d", "index": 5333, "step-1": "<mask token>\n\n\nclass TestVerified(TestCase):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestTrusted(TestCase):\n\n def setUp(self):\n self.instance = Trusted()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestRevoked(TestCase):\n\n def setUp(self):\n self.instance = Revoked()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n", "step-2": "<mask token>\n\n\nclass TestStatement(TestCase):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestVerified(TestCase):\n\n def setUp(self):\n self.instance = Verified()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestTrusted(TestCase):\n\n def setUp(self):\n self.instance = Trusted()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestRevoked(TestCase):\n\n def setUp(self):\n self.instance = Revoked()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n", "step-3": "<mask token>\n\n\nclass TestStatement(TestCase):\n <mask token>\n\n def tearDown(self):\n del self.instance\n <mask token>\n\n\nclass TestVerified(TestCase):\n\n def setUp(self):\n self.instance = Verified()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestTrusted(TestCase):\n\n def setUp(self):\n self.instance = Trusted()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestRevoked(TestCase):\n\n def setUp(self):\n self.instance = Revoked()\n\n def tearDown(self):\n del self.instance\n\n 
def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n", "step-4": "from unittest import TestCase\nfrom angelos.document.statements import Statement, Verified, Trusted, Revoked\n\n\nclass TestStatement(TestCase):\n\n def setUp(self):\n self.instance = Statement()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestVerified(TestCase):\n\n def setUp(self):\n self.instance = Verified()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestTrusted(TestCase):\n\n def setUp(self):\n self.instance = Trusted()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestRevoked(TestCase):\n\n def setUp(self):\n self.instance = Revoked()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n", "step-5": "#\n# Copyright (c) 2018-2020 by Kristoffer Paulsson <kristoffer.paulsson@talenten.se>.\n#\n# This software is available under the terms of the MIT license. Parts are licensed under\n# different terms if stated. 
The legal terms are attached to the LICENSE file and are\n# made available on:\n#\n# https://opensource.org/licenses/MIT\n#\n# SPDX-License-Identifier: MIT\n#\n# Contributors:\n# Kristoffer Paulsson - initial implementation\n#\nfrom unittest import TestCase\n\nfrom angelos.document.statements import Statement, Verified, Trusted, Revoked\n\n\nclass TestStatement(TestCase):\n def setUp(self):\n self.instance = Statement()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestVerified(TestCase):\n def setUp(self):\n self.instance = Verified()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestTrusted(TestCase):\n def setUp(self):\n self.instance = Trusted()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())\n\n\nclass TestRevoked(TestCase):\n def setUp(self):\n self.instance = Revoked()\n\n def tearDown(self):\n del self.instance\n\n def test_apply_rules(self):\n self.assertTrue(self.instance.apply_rules())", "step-ids": [ 9, 13, 14, 17, 18 ] }
[ 9, 13, 14, 17, 18 ]
<|reserved_special_token_0|> class Position(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> class Employee(models.Model): nom = models.CharField(max_length=100) prenom = models.CharField(max_length=100) age = models.CharField(max_length=15) sexe = models.ForeignKey(Position, on_delete=models.CASCADE) portable = models.CharField(max_length=100) email = models.CharField(max_length=100) formation = models.CharField(max_length=100000) experiences1 = models.CharField(max_length=100000) experiences2 = models.CharField(max_length=100000) experiences3 = models.CharField(max_length=100000) competences = models.CharField(max_length=100000) divers = models.CharField(max_length=1000) linkedin = models.CharField(max_length=1000) CV = models.FileField(upload_to='media/pdf') """ from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfinterp import PDFPageInterpreter from pdfminer.converter import TextConverter from pdfminer.pdfpage import PDFPage from io import BytesIO import argparse rsrcmgr = PDFResourceManager() retstr = BytesIO() device = TextConverter(rsrcmgr, retstr) #with open(path, "rb") as fp: # open in 'rb' mode to read PDF bytes interpreter = PDFPageInterpreter(rsrcmgr, device) for page in PDFPage.get_pages(CV, check_extractable=True): interpreter.process_page(page) device.close() text = retstr.getvalue() retstr.close() pdf_text = text.decode("utf-8") #########""" def __str__(self): return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self .competences) + ' ' + str(self.experiences1) + ' ' + str(self. 
experiences2) + ' ' + str(self.experiences3) <|reserved_special_token_1|> <|reserved_special_token_0|> class Position(models.Model): <|reserved_special_token_0|> def __str__(self): return self.title class Employee(models.Model): nom = models.CharField(max_length=100) prenom = models.CharField(max_length=100) age = models.CharField(max_length=15) sexe = models.ForeignKey(Position, on_delete=models.CASCADE) portable = models.CharField(max_length=100) email = models.CharField(max_length=100) formation = models.CharField(max_length=100000) experiences1 = models.CharField(max_length=100000) experiences2 = models.CharField(max_length=100000) experiences3 = models.CharField(max_length=100000) competences = models.CharField(max_length=100000) divers = models.CharField(max_length=1000) linkedin = models.CharField(max_length=1000) CV = models.FileField(upload_to='media/pdf') """ from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfinterp import PDFPageInterpreter from pdfminer.converter import TextConverter from pdfminer.pdfpage import PDFPage from io import BytesIO import argparse rsrcmgr = PDFResourceManager() retstr = BytesIO() device = TextConverter(rsrcmgr, retstr) #with open(path, "rb") as fp: # open in 'rb' mode to read PDF bytes interpreter = PDFPageInterpreter(rsrcmgr, device) for page in PDFPage.get_pages(CV, check_extractable=True): interpreter.process_page(page) device.close() text = retstr.getvalue() retstr.close() pdf_text = text.decode("utf-8") #########""" def __str__(self): return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self .competences) + ' ' + str(self.experiences1) + ' ' + str(self. 
experiences2) + ' ' + str(self.experiences3) <|reserved_special_token_1|> <|reserved_special_token_0|> class Position(models.Model): title = models.CharField(max_length=50) def __str__(self): return self.title class Employee(models.Model): nom = models.CharField(max_length=100) prenom = models.CharField(max_length=100) age = models.CharField(max_length=15) sexe = models.ForeignKey(Position, on_delete=models.CASCADE) portable = models.CharField(max_length=100) email = models.CharField(max_length=100) formation = models.CharField(max_length=100000) experiences1 = models.CharField(max_length=100000) experiences2 = models.CharField(max_length=100000) experiences3 = models.CharField(max_length=100000) competences = models.CharField(max_length=100000) divers = models.CharField(max_length=1000) linkedin = models.CharField(max_length=1000) CV = models.FileField(upload_to='media/pdf') """ from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfinterp import PDFPageInterpreter from pdfminer.converter import TextConverter from pdfminer.pdfpage import PDFPage from io import BytesIO import argparse rsrcmgr = PDFResourceManager() retstr = BytesIO() device = TextConverter(rsrcmgr, retstr) #with open(path, "rb") as fp: # open in 'rb' mode to read PDF bytes interpreter = PDFPageInterpreter(rsrcmgr, device) for page in PDFPage.get_pages(CV, check_extractable=True): interpreter.process_page(page) device.close() text = retstr.getvalue() retstr.close() pdf_text = text.decode("utf-8") #########""" def __str__(self): return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self .competences) + ' ' + str(self.experiences1) + ' ' + str(self. 
experiences2) + ' ' + str(self.experiences3) <|reserved_special_token_1|> from django.db import models class Position(models.Model): title = models.CharField(max_length=50) def __str__(self): return self.title class Employee(models.Model): nom = models.CharField(max_length=100) prenom = models.CharField(max_length=100) age = models.CharField(max_length=15) sexe = models.ForeignKey(Position, on_delete=models.CASCADE) portable = models.CharField(max_length=100) email = models.CharField(max_length=100) formation = models.CharField(max_length=100000) experiences1 = models.CharField(max_length=100000) experiences2 = models.CharField(max_length=100000) experiences3 = models.CharField(max_length=100000) competences = models.CharField(max_length=100000) divers = models.CharField(max_length=1000) linkedin = models.CharField(max_length=1000) CV = models.FileField(upload_to='media/pdf') """ from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfinterp import PDFPageInterpreter from pdfminer.converter import TextConverter from pdfminer.pdfpage import PDFPage from io import BytesIO import argparse rsrcmgr = PDFResourceManager() retstr = BytesIO() device = TextConverter(rsrcmgr, retstr) #with open(path, "rb") as fp: # open in 'rb' mode to read PDF bytes interpreter = PDFPageInterpreter(rsrcmgr, device) for page in PDFPage.get_pages(CV, check_extractable=True): interpreter.process_page(page) device.close() text = retstr.getvalue() retstr.close() pdf_text = text.decode("utf-8") #########""" def __str__(self): return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self .competences) + ' ' + str(self.experiences1) + ' ' + str(self. experiences2) + ' ' + str(self.experiences3) <|reserved_special_token_1|> from django.db import models # Create your models here. 
class Position(models.Model): title = models.CharField(max_length=50) def __str__(self): return self.title class Employee(models.Model): nom = models.CharField(max_length=100) prenom = models.CharField(max_length=100) age= models.CharField(max_length=15) sexe= models.ForeignKey(Position,on_delete=models.CASCADE) portable = models.CharField(max_length=100) email = models.CharField(max_length=100) formation = models.CharField(max_length=100000) experiences1 = models.CharField(max_length=100000) experiences2 = models.CharField(max_length=100000) experiences3 = models.CharField(max_length=100000) competences = models.CharField(max_length=100000) divers = models.CharField(max_length=1000) linkedin = models.CharField(max_length=1000) CV=models.FileField(upload_to ='media/pdf') """ from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfinterp import PDFPageInterpreter from pdfminer.converter import TextConverter from pdfminer.pdfpage import PDFPage from io import BytesIO import argparse rsrcmgr = PDFResourceManager() retstr = BytesIO() device = TextConverter(rsrcmgr, retstr) #with open(path, "rb") as fp: # open in 'rb' mode to read PDF bytes interpreter = PDFPageInterpreter(rsrcmgr, device) for page in PDFPage.get_pages(CV, check_extractable=True): interpreter.process_page(page) device.close() text = retstr.getvalue() retstr.close() pdf_text = text.decode("utf-8") #########""" def __str__(self): return (str(self.CV)+"*"+" "+str(self.formation)+" "+str(self.competences)+" "+str(self.experiences1)+" "+str(self.experiences2)+" "+str(self.experiences3))
flexible
{ "blob_id": "5ab20c1cd2dc0d0ad881ee52008d00c2317084f9", "index": 5308, "step-1": "<mask token>\n\n\nclass Position(models.Model):\n <mask token>\n <mask token>\n\n\nclass Employee(models.Model):\n nom = models.CharField(max_length=100)\n prenom = models.CharField(max_length=100)\n age = models.CharField(max_length=15)\n sexe = models.ForeignKey(Position, on_delete=models.CASCADE)\n portable = models.CharField(max_length=100)\n email = models.CharField(max_length=100)\n formation = models.CharField(max_length=100000)\n experiences1 = models.CharField(max_length=100000)\n experiences2 = models.CharField(max_length=100000)\n experiences3 = models.CharField(max_length=100000)\n competences = models.CharField(max_length=100000)\n divers = models.CharField(max_length=1000)\n linkedin = models.CharField(max_length=1000)\n CV = models.FileField(upload_to='media/pdf')\n \"\"\"\n from pdfminer.pdfinterp import PDFResourceManager\n from pdfminer.pdfinterp import PDFPageInterpreter\n from pdfminer.converter import TextConverter\n from pdfminer.pdfpage import PDFPage\n from io import BytesIO \n import argparse\n rsrcmgr = PDFResourceManager()\n retstr = BytesIO()\n device = TextConverter(rsrcmgr, retstr)\n #with open(path, \"rb\") as fp: # open in 'rb' mode to read PDF bytes\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n for page in PDFPage.get_pages(CV, check_extractable=True):\n interpreter.process_page(page)\n device.close() \n text = retstr.getvalue()\n retstr.close()\n pdf_text = text.decode(\"utf-8\") \n #########\"\"\"\n\n def __str__(self):\n return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self\n .competences) + ' ' + str(self.experiences1) + ' ' + str(self.\n experiences2) + ' ' + str(self.experiences3)\n", "step-2": "<mask token>\n\n\nclass Position(models.Model):\n <mask token>\n\n def __str__(self):\n return self.title\n\n\nclass Employee(models.Model):\n nom = models.CharField(max_length=100)\n prenom = models.CharField(max_length=100)\n 
age = models.CharField(max_length=15)\n sexe = models.ForeignKey(Position, on_delete=models.CASCADE)\n portable = models.CharField(max_length=100)\n email = models.CharField(max_length=100)\n formation = models.CharField(max_length=100000)\n experiences1 = models.CharField(max_length=100000)\n experiences2 = models.CharField(max_length=100000)\n experiences3 = models.CharField(max_length=100000)\n competences = models.CharField(max_length=100000)\n divers = models.CharField(max_length=1000)\n linkedin = models.CharField(max_length=1000)\n CV = models.FileField(upload_to='media/pdf')\n \"\"\"\n from pdfminer.pdfinterp import PDFResourceManager\n from pdfminer.pdfinterp import PDFPageInterpreter\n from pdfminer.converter import TextConverter\n from pdfminer.pdfpage import PDFPage\n from io import BytesIO \n import argparse\n rsrcmgr = PDFResourceManager()\n retstr = BytesIO()\n device = TextConverter(rsrcmgr, retstr)\n #with open(path, \"rb\") as fp: # open in 'rb' mode to read PDF bytes\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n for page in PDFPage.get_pages(CV, check_extractable=True):\n interpreter.process_page(page)\n device.close() \n text = retstr.getvalue()\n retstr.close()\n pdf_text = text.decode(\"utf-8\") \n #########\"\"\"\n\n def __str__(self):\n return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self\n .competences) + ' ' + str(self.experiences1) + ' ' + str(self.\n experiences2) + ' ' + str(self.experiences3)\n", "step-3": "<mask token>\n\n\nclass Position(models.Model):\n title = models.CharField(max_length=50)\n\n def __str__(self):\n return self.title\n\n\nclass Employee(models.Model):\n nom = models.CharField(max_length=100)\n prenom = models.CharField(max_length=100)\n age = models.CharField(max_length=15)\n sexe = models.ForeignKey(Position, on_delete=models.CASCADE)\n portable = models.CharField(max_length=100)\n email = models.CharField(max_length=100)\n formation = models.CharField(max_length=100000)\n experiences1 = 
models.CharField(max_length=100000)\n experiences2 = models.CharField(max_length=100000)\n experiences3 = models.CharField(max_length=100000)\n competences = models.CharField(max_length=100000)\n divers = models.CharField(max_length=1000)\n linkedin = models.CharField(max_length=1000)\n CV = models.FileField(upload_to='media/pdf')\n \"\"\"\n from pdfminer.pdfinterp import PDFResourceManager\n from pdfminer.pdfinterp import PDFPageInterpreter\n from pdfminer.converter import TextConverter\n from pdfminer.pdfpage import PDFPage\n from io import BytesIO \n import argparse\n rsrcmgr = PDFResourceManager()\n retstr = BytesIO()\n device = TextConverter(rsrcmgr, retstr)\n #with open(path, \"rb\") as fp: # open in 'rb' mode to read PDF bytes\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n for page in PDFPage.get_pages(CV, check_extractable=True):\n interpreter.process_page(page)\n device.close() \n text = retstr.getvalue()\n retstr.close()\n pdf_text = text.decode(\"utf-8\") \n #########\"\"\"\n\n def __str__(self):\n return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self\n .competences) + ' ' + str(self.experiences1) + ' ' + str(self.\n experiences2) + ' ' + str(self.experiences3)\n", "step-4": "from django.db import models\n\n\nclass Position(models.Model):\n title = models.CharField(max_length=50)\n\n def __str__(self):\n return self.title\n\n\nclass Employee(models.Model):\n nom = models.CharField(max_length=100)\n prenom = models.CharField(max_length=100)\n age = models.CharField(max_length=15)\n sexe = models.ForeignKey(Position, on_delete=models.CASCADE)\n portable = models.CharField(max_length=100)\n email = models.CharField(max_length=100)\n formation = models.CharField(max_length=100000)\n experiences1 = models.CharField(max_length=100000)\n experiences2 = models.CharField(max_length=100000)\n experiences3 = models.CharField(max_length=100000)\n competences = models.CharField(max_length=100000)\n divers = models.CharField(max_length=1000)\n 
linkedin = models.CharField(max_length=1000)\n CV = models.FileField(upload_to='media/pdf')\n \"\"\"\n from pdfminer.pdfinterp import PDFResourceManager\n from pdfminer.pdfinterp import PDFPageInterpreter\n from pdfminer.converter import TextConverter\n from pdfminer.pdfpage import PDFPage\n from io import BytesIO \n import argparse\n rsrcmgr = PDFResourceManager()\n retstr = BytesIO()\n device = TextConverter(rsrcmgr, retstr)\n #with open(path, \"rb\") as fp: # open in 'rb' mode to read PDF bytes\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n for page in PDFPage.get_pages(CV, check_extractable=True):\n interpreter.process_page(page)\n device.close() \n text = retstr.getvalue()\n retstr.close()\n pdf_text = text.decode(\"utf-8\") \n #########\"\"\"\n\n def __str__(self):\n return str(self.CV) + '*' + ' ' + str(self.formation) + ' ' + str(self\n .competences) + ' ' + str(self.experiences1) + ' ' + str(self.\n experiences2) + ' ' + str(self.experiences3)\n", "step-5": "from django.db import models\n\n# Create your models here.\nclass Position(models.Model):\n title = models.CharField(max_length=50)\n\n def __str__(self):\n return self.title\n\nclass Employee(models.Model):\n nom = models.CharField(max_length=100)\n prenom = models.CharField(max_length=100)\n age= models.CharField(max_length=15)\n sexe= models.ForeignKey(Position,on_delete=models.CASCADE)\n portable = models.CharField(max_length=100)\n email = models.CharField(max_length=100)\n formation = models.CharField(max_length=100000)\n experiences1 = models.CharField(max_length=100000)\n experiences2 = models.CharField(max_length=100000)\n experiences3 = models.CharField(max_length=100000)\n competences = models.CharField(max_length=100000)\n divers = models.CharField(max_length=1000)\n linkedin = models.CharField(max_length=1000)\n CV=models.FileField(upload_to ='media/pdf') \n \"\"\"\n from pdfminer.pdfinterp import PDFResourceManager\n from pdfminer.pdfinterp import PDFPageInterpreter\n from 
pdfminer.converter import TextConverter\n from pdfminer.pdfpage import PDFPage\n from io import BytesIO \n import argparse\n rsrcmgr = PDFResourceManager()\n retstr = BytesIO()\n device = TextConverter(rsrcmgr, retstr)\n #with open(path, \"rb\") as fp: # open in 'rb' mode to read PDF bytes\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n for page in PDFPage.get_pages(CV, check_extractable=True):\n interpreter.process_page(page)\n device.close() \n text = retstr.getvalue()\n retstr.close()\n pdf_text = text.decode(\"utf-8\") \n #########\"\"\"\n def __str__(self):\n return (str(self.CV)+\"*\"+\" \"+str(self.formation)+\" \"+str(self.competences)+\" \"+str(self.experiences1)+\" \"+str(self.experiences2)+\" \"+str(self.experiences3))", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> db.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id, ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type ='string', label=T('Name')), Field('f_content', type='text', represent= lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')), Field('f_public', type='boolean', default=False, label=T( 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()), writable=False, readable=False), Field('f_created_on', 'datetime', default=request.now, label=T('Created On'), writable=False, readable= False), Field('f_modified_on', 'datetime', default=request.now, label=T ('Modified On'), writable=False, readable=False, update=request.now), Field('f_created_by', db.auth_user, default=auth.user_id, label=T( 'Created By'), writable=False, readable=False), Field('f_modified_by', db.auth_user, default=auth.user_id, label=T('Modified By'), writable= False, readable=False, update=auth.user_id), format='%(f_name)s', migrate=settings.migrate) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> crud.settings.formstyle = 'table2cols' db.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id, ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type ='string', label=T('Name')), Field('f_content', type='text', represent= lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')), Field('f_public', type='boolean', default=False, label=T( 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()), writable=False, readable=False), Field('f_created_on', 'datetime', default=request.now, label=T('Created On'), writable=False, readable= False), Field('f_modified_on', 'datetime', default=request.now, label=T ('Modified On'), writable=False, readable=False, update=request.now), Field('f_created_by', db.auth_user, default=auth.user_id, label=T( 'Created By'), 
writable=False, readable=False), Field('f_modified_by', db.auth_user, default=auth.user_id, label=T('Modified By'), writable= False, readable=False, update=auth.user_id), format='%(f_name)s', migrate=settings.migrate) db.t_form.f_name.default = 'Example: Job Application' db.t_form.f_content.default = """ # Job Application ## Instuructions - please complete the form - export it in PDF - print it - sign it - fax it to 111-111-1111 ## Job Application Questionaire ### Personal data -------- **first name:** | ``first_name``:input_text **last name:** | ``last_name``:input_text **email:** | ``email``:input_text -------- ### Skills ``skills``:input_area ### Signature ``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]] Signature: ..................................... Date: ``today``:input_date """ <|reserved_special_token_1|> import uuid crud.settings.formstyle = 'table2cols' db.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id, ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type ='string', label=T('Name')), Field('f_content', type='text', represent= lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')), Field('f_public', type='boolean', default=False, label=T( 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()), writable=False, readable=False), Field('f_created_on', 'datetime', default=request.now, label=T('Created On'), writable=False, readable= False), Field('f_modified_on', 'datetime', default=request.now, label=T ('Modified On'), writable=False, readable=False, update=request.now), Field('f_created_by', db.auth_user, default=auth.user_id, label=T( 'Created By'), writable=False, readable=False), Field('f_modified_by', db.auth_user, default=auth.user_id, label=T('Modified By'), writable= False, readable=False, update=auth.user_id), format='%(f_name)s', migrate=settings.migrate) db.t_form.f_name.default = 'Example: Job Application' db.t_form.f_content.default = """ # Job 
Application ## Instuructions - please complete the form - export it in PDF - print it - sign it - fax it to 111-111-1111 ## Job Application Questionaire ### Personal data -------- **first name:** | ``first_name``:input_text **last name:** | ``last_name``:input_text **email:** | ``email``:input_text -------- ### Skills ``skills``:input_area ### Signature ``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]] Signature: ..................................... Date: ``today``:input_date """ <|reserved_special_token_1|> ### we prepend t_ to tablenames and f_ to fieldnames for disambiguity import uuid crud.settings.formstyle="table2cols" ######################################## db.define_table('t_form', Field('id','id', represent=lambda id:SPAN(id,' ',A('view',_href=URL('form_read',args=id)))), Field('f_name', type='string', label=T('Name')), Field('f_content', type='text', represent=lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')), Field('f_public', type='boolean', default=False, label=T('Available to all users?')), Field('f_uuid',default=str(uuid.uuid4()), writable=False,readable=False), Field('f_created_on','datetime',default=request.now, label=T('Created On'),writable=False,readable=False), Field('f_modified_on','datetime',default=request.now, label=T('Modified On'),writable=False,readable=False, update=request.now), Field('f_created_by',db.auth_user,default=auth.user_id, label=T('Created By'),writable=False,readable=False), Field('f_modified_by',db.auth_user,default=auth.user_id, label=T('Modified By'),writable=False,readable=False, update=auth.user_id), format='%(f_name)s', migrate=settings.migrate) db.t_form.f_name.default="Example: Job Application" db.t_form.f_content.default=""" # Job Application ## Instuructions - please complete the form - export it in PDF - print it - sign it - fax it to 111-111-1111 ## Job Application Questionaire ### Personal data -------- **first name:** | ``first_name``:input_text **last name:** | 
``last_name``:input_text **email:** | ``email``:input_text -------- ### Skills ``skills``:input_area ### Signature ``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]] Signature: ..................................... Date: ``today``:input_date """
flexible
{ "blob_id": "e2e275c48f28843931412f8e620f1be90289b40c", "index": 8184, "step-1": "<mask token>\n", "step-2": "<mask token>\ndb.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id,\n ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type\n ='string', label=T('Name')), Field('f_content', type='text', represent=\n lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')),\n Field('f_public', type='boolean', default=False, label=T(\n 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()),\n writable=False, readable=False), Field('f_created_on', 'datetime',\n default=request.now, label=T('Created On'), writable=False, readable=\n False), Field('f_modified_on', 'datetime', default=request.now, label=T\n ('Modified On'), writable=False, readable=False, update=request.now),\n Field('f_created_by', db.auth_user, default=auth.user_id, label=T(\n 'Created By'), writable=False, readable=False), Field('f_modified_by',\n db.auth_user, default=auth.user_id, label=T('Modified By'), writable=\n False, readable=False, update=auth.user_id), format='%(f_name)s',\n migrate=settings.migrate)\n<mask token>\n", "step-3": "<mask token>\ncrud.settings.formstyle = 'table2cols'\ndb.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id,\n ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type\n ='string', label=T('Name')), Field('f_content', type='text', represent=\n lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')),\n Field('f_public', type='boolean', default=False, label=T(\n 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()),\n writable=False, readable=False), Field('f_created_on', 'datetime',\n default=request.now, label=T('Created On'), writable=False, readable=\n False), Field('f_modified_on', 'datetime', default=request.now, label=T\n ('Modified On'), writable=False, readable=False, update=request.now),\n Field('f_created_by', db.auth_user, 
default=auth.user_id, label=T(\n 'Created By'), writable=False, readable=False), Field('f_modified_by',\n db.auth_user, default=auth.user_id, label=T('Modified By'), writable=\n False, readable=False, update=auth.user_id), format='%(f_name)s',\n migrate=settings.migrate)\ndb.t_form.f_name.default = 'Example: Job Application'\ndb.t_form.f_content.default = \"\"\"\n# Job Application\n## Instuructions\n- please complete the form\n- export it in PDF\n- print it\n- sign it\n- fax it to 111-111-1111\n## Job Application Questionaire\n### Personal data\n--------\n**first name:** | ``first_name``:input_text\n**last name:** | ``last_name``:input_text\n**email:** | ``email``:input_text\n--------\n### Skills\n``skills``:input_area\n### Signature\n``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]]\n\nSignature: ..................................... Date: ``today``:input_date \n\"\"\"\n", "step-4": "import uuid\ncrud.settings.formstyle = 'table2cols'\ndb.define_table('t_form', Field('id', 'id', represent=lambda id: SPAN(id,\n ' ', A('view', _href=URL('form_read', args=id)))), Field('f_name', type\n ='string', label=T('Name')), Field('f_content', type='text', represent=\n lambda x: MARKMIN(x), comment='WIKI (markmin)', label=T('Content')),\n Field('f_public', type='boolean', default=False, label=T(\n 'Available to all users?')), Field('f_uuid', default=str(uuid.uuid4()),\n writable=False, readable=False), Field('f_created_on', 'datetime',\n default=request.now, label=T('Created On'), writable=False, readable=\n False), Field('f_modified_on', 'datetime', default=request.now, label=T\n ('Modified On'), writable=False, readable=False, update=request.now),\n Field('f_created_by', db.auth_user, default=auth.user_id, label=T(\n 'Created By'), writable=False, readable=False), Field('f_modified_by',\n db.auth_user, default=auth.user_id, label=T('Modified By'), writable=\n False, readable=False, update=auth.user_id), format='%(f_name)s',\n 
migrate=settings.migrate)\ndb.t_form.f_name.default = 'Example: Job Application'\ndb.t_form.f_content.default = \"\"\"\n# Job Application\n## Instuructions\n- please complete the form\n- export it in PDF\n- print it\n- sign it\n- fax it to 111-111-1111\n## Job Application Questionaire\n### Personal data\n--------\n**first name:** | ``first_name``:input_text\n**last name:** | ``last_name``:input_text\n**email:** | ``email``:input_text\n--------\n### Skills\n``skills``:input_area\n### Signature\n``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]]\n\nSignature: ..................................... Date: ``today``:input_date \n\"\"\"\n", "step-5": "### we prepend t_ to tablenames and f_ to fieldnames for disambiguity\nimport uuid\n\ncrud.settings.formstyle=\"table2cols\"\n\n########################################\ndb.define_table('t_form',\n Field('id','id',\n represent=lambda id:SPAN(id,' ',A('view',_href=URL('form_read',args=id)))),\n Field('f_name', type='string',\n label=T('Name')),\n Field('f_content', type='text',\n represent=lambda x: MARKMIN(x),\n comment='WIKI (markmin)',\n label=T('Content')),\n Field('f_public', type='boolean', default=False,\n label=T('Available to all users?')),\n Field('f_uuid',default=str(uuid.uuid4()),\n writable=False,readable=False),\n Field('f_created_on','datetime',default=request.now,\n label=T('Created On'),writable=False,readable=False),\n Field('f_modified_on','datetime',default=request.now,\n label=T('Modified On'),writable=False,readable=False,\n update=request.now),\n Field('f_created_by',db.auth_user,default=auth.user_id,\n label=T('Created By'),writable=False,readable=False),\n Field('f_modified_by',db.auth_user,default=auth.user_id,\n label=T('Modified By'),writable=False,readable=False,\n update=auth.user_id),\n format='%(f_name)s',\n migrate=settings.migrate)\n\ndb.t_form.f_name.default=\"Example: Job Application\"\ndb.t_form.f_content.default=\"\"\"\n# Job Application\n## Instuructions\n- 
please complete the form\n- export it in PDF\n- print it\n- sign it\n- fax it to 111-111-1111\n## Job Application Questionaire\n### Personal data\n--------\n**first name:** | ``first_name``:input_text\n**last name:** | ``last_name``:input_text\n**email:** | ``email``:input_text\n--------\n### Skills\n``skills``:input_area\n### Signature\n``accept``:input_bool Accept [[Confidentiality Agreement http://example.com]]\n\nSignature: ..................................... Date: ``today``:input_date \n\"\"\"\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class XiciSpider(CrawlSpider): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def parse_author(self, response): author_item = get_author_item(response) yield author_item def parse_post(self, response): post_item = get_post_item(response) for item_or_request in self.parse_comment(response, post_item): yield item_or_request <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class XiciSpider(CrawlSpider): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def parse_author(self, response): author_item = get_author_item(response) yield author_item def parse_post(self, response): post_item = get_post_item(response) for item_or_request in self.parse_comment(response, post_item): yield item_or_request def parse_comment(self, response, post_item=None): if not post_item: post_item = response.meta['post_item'] for comment_item in get_comment_list(response): post_item['comment_ids'].append(comment_item['comment_id']) yield comment_item comment_next_page = get_comment_next_page(response) if comment_next_page: yield Request(url=comment_next_page, callback=self. 
parse_comment, meta={'post_item': post_item}) else: yield post_item <|reserved_special_token_1|> <|reserved_special_token_0|> class XiciSpider(CrawlSpider): name = 'xici' start_urls = ['http://www.xici.net'] post_extract = LxmlLinkExtractor(allow=('/d\\d+.htm',), allow_domains= 'xici.net', deny_domains=('account.xici.net',)) author_extract = LxmlLinkExtractor(allow=('/u\\d+$', '/u\\d+/$'), allow_domains=('xici.net',), deny_domains=('account.xici.net',)) follow_extract = LxmlLinkExtractor(allow_domains=('xici.net',), deny=( '/help/',), deny_domains=('account.xici.net',)) rules = Rule(author_extract, follow=True, callback='parse_author'), Rule( post_extract, follow=True, callback='parse_post'), Rule(follow_extract, follow=True) def parse_author(self, response): author_item = get_author_item(response) yield author_item def parse_post(self, response): post_item = get_post_item(response) for item_or_request in self.parse_comment(response, post_item): yield item_or_request def parse_comment(self, response, post_item=None): if not post_item: post_item = response.meta['post_item'] for comment_item in get_comment_list(response): post_item['comment_ids'].append(comment_item['comment_id']) yield comment_item comment_next_page = get_comment_next_page(response) if comment_next_page: yield Request(url=comment_next_page, callback=self. 
parse_comment, meta={'post_item': post_item}) else: yield post_item <|reserved_special_token_1|> from scrapy import Request from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor from scrapy.spiders import CrawlSpider from scrapy.spiders import Rule from xici_bbs.spiders.author import get_author_item from xici_bbs.spiders.comment import get_comment_list, get_comment_next_page from xici_bbs.spiders.post import get_post_item class XiciSpider(CrawlSpider): name = 'xici' start_urls = ['http://www.xici.net'] post_extract = LxmlLinkExtractor(allow=('/d\\d+.htm',), allow_domains= 'xici.net', deny_domains=('account.xici.net',)) author_extract = LxmlLinkExtractor(allow=('/u\\d+$', '/u\\d+/$'), allow_domains=('xici.net',), deny_domains=('account.xici.net',)) follow_extract = LxmlLinkExtractor(allow_domains=('xici.net',), deny=( '/help/',), deny_domains=('account.xici.net',)) rules = Rule(author_extract, follow=True, callback='parse_author'), Rule( post_extract, follow=True, callback='parse_post'), Rule(follow_extract, follow=True) def parse_author(self, response): author_item = get_author_item(response) yield author_item def parse_post(self, response): post_item = get_post_item(response) for item_or_request in self.parse_comment(response, post_item): yield item_or_request def parse_comment(self, response, post_item=None): if not post_item: post_item = response.meta['post_item'] for comment_item in get_comment_list(response): post_item['comment_ids'].append(comment_item['comment_id']) yield comment_item comment_next_page = get_comment_next_page(response) if comment_next_page: yield Request(url=comment_next_page, callback=self. parse_comment, meta={'post_item': post_item}) else: yield post_item <|reserved_special_token_1|> #! 
/usr/bin python3 # -*- coding: utf-8 -*- from scrapy import Request from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor from scrapy.spiders import CrawlSpider from scrapy.spiders import Rule from xici_bbs.spiders.author import get_author_item from xici_bbs.spiders.comment import get_comment_list, get_comment_next_page from xici_bbs.spiders.post import get_post_item class XiciSpider(CrawlSpider): name = 'xici' start_urls = ['http://www.xici.net'] post_extract = LxmlLinkExtractor( allow=( '/d\d+.htm', ), allow_domains=( 'xici.net' ), # deny=( # # ), deny_domains=( 'account.xici.net', ) ) author_extract = LxmlLinkExtractor( allow=( '/u\d+$', '/u\d+/$', ), allow_domains=( 'xici.net', ), # deny=( # # ), deny_domains=( 'account.xici.net', ) ) follow_extract = LxmlLinkExtractor( # allow=( # '/s/[0-9]+', # ), allow_domains=( 'xici.net', ), deny=( '/help/', ), deny_domains=( 'account.xici.net', # 'life.xici.net', ) ) rules = ( Rule(author_extract, follow=True, callback='parse_author'), Rule(post_extract, follow=True, callback='parse_post'), # Rule(follow_extract, follow=True, callback='parse_follow'), Rule(follow_extract, follow=True), ) # a_count = 0 # p_count = 0 # f_count = 0 def parse_author(self, response): # self.a_count += 1 # print('author: ', self.a_count, ' ', response.url) author_item = get_author_item(response) yield author_item def parse_post(self, response): # self.p_count += 1 # print('post: ', self.p_count, ' ', response.url) post_item = get_post_item(response) for item_or_request in self.parse_comment(response, post_item): yield item_or_request # def parse_follow(self, response): # self.f_count += 1 # print('follow: ', self.f_count, ' ', response.url) def parse_comment(self, response, post_item=None): if not post_item: post_item = response.meta['post_item'] for comment_item in get_comment_list(response): post_item['comment_ids'].append(comment_item['comment_id']) yield comment_item comment_next_page = get_comment_next_page(response) if 
comment_next_page: yield Request( url=comment_next_page, callback=self.parse_comment, meta={ 'post_item': post_item, } ) else: yield post_item
flexible
{ "blob_id": "f1eaba91e27dc063f3decd7b6a4fe4e40f7ed721", "index": 7948, "step-1": "<mask token>\n\n\nclass XiciSpider(CrawlSpider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def parse_author(self, response):\n author_item = get_author_item(response)\n yield author_item\n\n def parse_post(self, response):\n post_item = get_post_item(response)\n for item_or_request in self.parse_comment(response, post_item):\n yield item_or_request\n <mask token>\n", "step-2": "<mask token>\n\n\nclass XiciSpider(CrawlSpider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def parse_author(self, response):\n author_item = get_author_item(response)\n yield author_item\n\n def parse_post(self, response):\n post_item = get_post_item(response)\n for item_or_request in self.parse_comment(response, post_item):\n yield item_or_request\n\n def parse_comment(self, response, post_item=None):\n if not post_item:\n post_item = response.meta['post_item']\n for comment_item in get_comment_list(response):\n post_item['comment_ids'].append(comment_item['comment_id'])\n yield comment_item\n comment_next_page = get_comment_next_page(response)\n if comment_next_page:\n yield Request(url=comment_next_page, callback=self.\n parse_comment, meta={'post_item': post_item})\n else:\n yield post_item\n", "step-3": "<mask token>\n\n\nclass XiciSpider(CrawlSpider):\n name = 'xici'\n start_urls = ['http://www.xici.net']\n post_extract = LxmlLinkExtractor(allow=('/d\\\\d+.htm',), allow_domains=\n 'xici.net', deny_domains=('account.xici.net',))\n author_extract = LxmlLinkExtractor(allow=('/u\\\\d+$', '/u\\\\d+/$'),\n allow_domains=('xici.net',), deny_domains=('account.xici.net',))\n follow_extract = LxmlLinkExtractor(allow_domains=('xici.net',), deny=(\n '/help/',), deny_domains=('account.xici.net',))\n rules = Rule(author_extract, follow=True, callback='parse_author'), Rule(\n post_extract, follow=True, 
callback='parse_post'), Rule(follow_extract,\n follow=True)\n\n def parse_author(self, response):\n author_item = get_author_item(response)\n yield author_item\n\n def parse_post(self, response):\n post_item = get_post_item(response)\n for item_or_request in self.parse_comment(response, post_item):\n yield item_or_request\n\n def parse_comment(self, response, post_item=None):\n if not post_item:\n post_item = response.meta['post_item']\n for comment_item in get_comment_list(response):\n post_item['comment_ids'].append(comment_item['comment_id'])\n yield comment_item\n comment_next_page = get_comment_next_page(response)\n if comment_next_page:\n yield Request(url=comment_next_page, callback=self.\n parse_comment, meta={'post_item': post_item})\n else:\n yield post_item\n", "step-4": "from scrapy import Request\nfrom scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor\nfrom scrapy.spiders import CrawlSpider\nfrom scrapy.spiders import Rule\nfrom xici_bbs.spiders.author import get_author_item\nfrom xici_bbs.spiders.comment import get_comment_list, get_comment_next_page\nfrom xici_bbs.spiders.post import get_post_item\n\n\nclass XiciSpider(CrawlSpider):\n name = 'xici'\n start_urls = ['http://www.xici.net']\n post_extract = LxmlLinkExtractor(allow=('/d\\\\d+.htm',), allow_domains=\n 'xici.net', deny_domains=('account.xici.net',))\n author_extract = LxmlLinkExtractor(allow=('/u\\\\d+$', '/u\\\\d+/$'),\n allow_domains=('xici.net',), deny_domains=('account.xici.net',))\n follow_extract = LxmlLinkExtractor(allow_domains=('xici.net',), deny=(\n '/help/',), deny_domains=('account.xici.net',))\n rules = Rule(author_extract, follow=True, callback='parse_author'), Rule(\n post_extract, follow=True, callback='parse_post'), Rule(follow_extract,\n follow=True)\n\n def parse_author(self, response):\n author_item = get_author_item(response)\n yield author_item\n\n def parse_post(self, response):\n post_item = get_post_item(response)\n for item_or_request in 
self.parse_comment(response, post_item):\n yield item_or_request\n\n def parse_comment(self, response, post_item=None):\n if not post_item:\n post_item = response.meta['post_item']\n for comment_item in get_comment_list(response):\n post_item['comment_ids'].append(comment_item['comment_id'])\n yield comment_item\n comment_next_page = get_comment_next_page(response)\n if comment_next_page:\n yield Request(url=comment_next_page, callback=self.\n parse_comment, meta={'post_item': post_item})\n else:\n yield post_item\n", "step-5": "#! /usr/bin python3\n# -*- coding: utf-8 -*-\nfrom scrapy import Request\nfrom scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor\nfrom scrapy.spiders import CrawlSpider\nfrom scrapy.spiders import Rule\n\nfrom xici_bbs.spiders.author import get_author_item\nfrom xici_bbs.spiders.comment import get_comment_list, get_comment_next_page\nfrom xici_bbs.spiders.post import get_post_item\n\n\nclass XiciSpider(CrawlSpider):\n name = 'xici'\n\n start_urls = ['http://www.xici.net']\n\n post_extract = LxmlLinkExtractor(\n allow=(\n '/d\\d+.htm',\n ),\n allow_domains=(\n 'xici.net'\n ),\n # deny=(\n #\n # ),\n deny_domains=(\n 'account.xici.net',\n )\n )\n\n author_extract = LxmlLinkExtractor(\n allow=(\n '/u\\d+$',\n '/u\\d+/$',\n ),\n allow_domains=(\n 'xici.net',\n ),\n # deny=(\n #\n # ),\n deny_domains=(\n 'account.xici.net',\n )\n )\n\n follow_extract = LxmlLinkExtractor(\n # allow=(\n # '/s/[0-9]+',\n # ),\n allow_domains=(\n 'xici.net',\n ),\n deny=(\n '/help/',\n ),\n deny_domains=(\n 'account.xici.net',\n # 'life.xici.net',\n )\n )\n\n rules = (\n Rule(author_extract, follow=True, callback='parse_author'),\n Rule(post_extract, follow=True, callback='parse_post'),\n # Rule(follow_extract, follow=True, callback='parse_follow'),\n Rule(follow_extract, follow=True),\n )\n\n # a_count = 0\n # p_count = 0\n # f_count = 0\n\n def parse_author(self, response):\n # self.a_count += 1\n # print('author: ', self.a_count, ' ', response.url)\n 
author_item = get_author_item(response)\n\n yield author_item\n\n def parse_post(self, response):\n # self.p_count += 1\n # print('post: ', self.p_count, ' ', response.url)\n post_item = get_post_item(response)\n\n for item_or_request in self.parse_comment(response, post_item):\n yield item_or_request\n\n # def parse_follow(self, response):\n # self.f_count += 1\n # print('follow: ', self.f_count, ' ', response.url)\n\n def parse_comment(self, response, post_item=None):\n if not post_item:\n post_item = response.meta['post_item']\n\n for comment_item in get_comment_list(response):\n post_item['comment_ids'].append(comment_item['comment_id'])\n\n yield comment_item\n\n comment_next_page = get_comment_next_page(response)\n if comment_next_page:\n yield Request(\n url=comment_next_page,\n callback=self.parse_comment,\n meta={\n 'post_item': post_item,\n }\n )\n\n else:\n yield post_item\n\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
from .parapred import main main()
normal
{ "blob_id": "96cb2754db2740767dfb145078ed17969e85123d", "index": 843, "step-1": "<mask token>\n", "step-2": "<mask token>\nmain()\n", "step-3": "from .parapred import main\nmain()\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> grid.fit(X, y) <|reserved_special_token_0|> print('Result for {} configurations'.format(len(parameters))) for p in parameters: print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k: '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys( ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], 200.0 * p['std_test_score'])) <|reserved_special_token_0|> f.write('\n'.join(map(lambda o: str(o), pred))) f.close() <|reserved_special_token_1|> <|reserved_special_token_0|> n_jobs = max(1, cpu_count() - 1) test_size = 0.2 X, y, quantitatives = load_train_dataset() pipe, search_grid = get_voting_classifier() cv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5) grid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs, return_train_score=True, refit=True, scoring='accuracy') grid.fit(X, y) parameters = get_best_hyper_parameters(grid) print('Result for {} configurations'.format(len(parameters))) for p in parameters: print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k: '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys( ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], 200.0 * p['std_test_score'])) prediction_file = '{}/predictions.csv'.format(path.dirname(path.abspath( __file__))) pred = grid.predict(load_test_dataset()) f = open(prediction_file, 'w') f.write('\n'.join(map(lambda o: str(o), pred))) f.close() <|reserved_special_token_1|> from os import path from sklearn.model_selection import StratifiedShuffleSplit from sklearn.pipeline import Pipeline from sta211.datasets import load_train_dataset, load_test_dataset, find_best_train_dataset from sklearn.model_selection import GridSearchCV from sta211.selection import get_naive_bayes, get_mlp, get_svm, get_gradient_boosting, get_random_forest, get_best_hyper_parameters, get_extra_trees, get_adaboost, get_voting_classifier from 
multiprocessing import cpu_count n_jobs = max(1, cpu_count() - 1) test_size = 0.2 X, y, quantitatives = load_train_dataset() pipe, search_grid = get_voting_classifier() cv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5) grid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs, return_train_score=True, refit=True, scoring='accuracy') grid.fit(X, y) parameters = get_best_hyper_parameters(grid) print('Result for {} configurations'.format(len(parameters))) for p in parameters: print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k: '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys( ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], 200.0 * p['std_test_score'])) prediction_file = '{}/predictions.csv'.format(path.dirname(path.abspath( __file__))) pred = grid.predict(load_test_dataset()) f = open(prediction_file, 'w') f.write('\n'.join(map(lambda o: str(o), pred))) f.close() <|reserved_special_token_1|> from os import path from sklearn.model_selection import StratifiedShuffleSplit from sklearn.pipeline import Pipeline from sta211.datasets import load_train_dataset, load_test_dataset, find_best_train_dataset from sklearn.model_selection import GridSearchCV from sta211.selection import get_naive_bayes, get_mlp, get_svm, get_gradient_boosting, get_random_forest, get_best_hyper_parameters, get_extra_trees, get_adaboost, get_voting_classifier from multiprocessing import cpu_count n_jobs = max(1, cpu_count()-1) test_size = 0.20 X, y, quantitatives = load_train_dataset() # Manual aggregation pipe, search_grid = get_voting_classifier() # pipes, search_grid = get_svm() # pipe = Pipeline(pipes) cv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5) grid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs, return_train_score=True, refit=True, scoring="accuracy") grid.fit(X, y) parameters = get_best_hyper_parameters(grid) print("Result for {} configurations".format(len(parameters))) for p 
in parameters: print("{};{:.2f}%;{:.4f}%;±{:.4f}%".format( ", ".join(map(lambda k: "{}={}".format(k.split("__")[1], p["params"][k]), p["params"].keys())), 100.0 * p["mean_train_score"], 100.0 * p["mean_test_score"], 200.0 * p["std_test_score"] )) # print("Results: Train: {:.2f}%, Test: {:.2f}% std:{:.4f} for {}".format(100 * p["mean_train_score"], 100 * p["mean_test_score"], p["std_test_score"], p["params"])) prediction_file = "{}/predictions.csv".format(path.dirname(path.abspath(__file__))) pred = grid.predict(load_test_dataset()) f = open(prediction_file, "w") f.write("\n".join(map(lambda o: str(o), pred))) f.close()
flexible
{ "blob_id": "c99878dbd5610c8a58f00912e111b1eef9d3893e", "index": 7782, "step-1": "<mask token>\n", "step-2": "<mask token>\ngrid.fit(X, y)\n<mask token>\nprint('Result for {} configurations'.format(len(parameters)))\nfor p in parameters:\n print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k:\n '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys(\n ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], \n 200.0 * p['std_test_score']))\n<mask token>\nf.write('\\n'.join(map(lambda o: str(o), pred)))\nf.close()\n", "step-3": "<mask token>\nn_jobs = max(1, cpu_count() - 1)\ntest_size = 0.2\nX, y, quantitatives = load_train_dataset()\npipe, search_grid = get_voting_classifier()\ncv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5)\ngrid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs,\n return_train_score=True, refit=True, scoring='accuracy')\ngrid.fit(X, y)\nparameters = get_best_hyper_parameters(grid)\nprint('Result for {} configurations'.format(len(parameters)))\nfor p in parameters:\n print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k:\n '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys(\n ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], \n 200.0 * p['std_test_score']))\nprediction_file = '{}/predictions.csv'.format(path.dirname(path.abspath(\n __file__)))\npred = grid.predict(load_test_dataset())\nf = open(prediction_file, 'w')\nf.write('\\n'.join(map(lambda o: str(o), pred)))\nf.close()\n", "step-4": "from os import path\nfrom sklearn.model_selection import StratifiedShuffleSplit\nfrom sklearn.pipeline import Pipeline\nfrom sta211.datasets import load_train_dataset, load_test_dataset, find_best_train_dataset\nfrom sklearn.model_selection import GridSearchCV\nfrom sta211.selection import get_naive_bayes, get_mlp, get_svm, get_gradient_boosting, get_random_forest, get_best_hyper_parameters, get_extra_trees, get_adaboost, get_voting_classifier\nfrom 
multiprocessing import cpu_count\nn_jobs = max(1, cpu_count() - 1)\ntest_size = 0.2\nX, y, quantitatives = load_train_dataset()\npipe, search_grid = get_voting_classifier()\ncv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5)\ngrid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs,\n return_train_score=True, refit=True, scoring='accuracy')\ngrid.fit(X, y)\nparameters = get_best_hyper_parameters(grid)\nprint('Result for {} configurations'.format(len(parameters)))\nfor p in parameters:\n print('{};{:.2f}%;{:.4f}%;±{:.4f}%'.format(', '.join(map(lambda k:\n '{}={}'.format(k.split('__')[1], p['params'][k]), p['params'].keys(\n ))), 100.0 * p['mean_train_score'], 100.0 * p['mean_test_score'], \n 200.0 * p['std_test_score']))\nprediction_file = '{}/predictions.csv'.format(path.dirname(path.abspath(\n __file__)))\npred = grid.predict(load_test_dataset())\nf = open(prediction_file, 'w')\nf.write('\\n'.join(map(lambda o: str(o), pred)))\nf.close()\n", "step-5": "from os import path\nfrom sklearn.model_selection import StratifiedShuffleSplit\nfrom sklearn.pipeline import Pipeline\nfrom sta211.datasets import load_train_dataset, load_test_dataset, find_best_train_dataset\nfrom sklearn.model_selection import GridSearchCV\nfrom sta211.selection import get_naive_bayes, get_mlp, get_svm, get_gradient_boosting, get_random_forest, get_best_hyper_parameters, get_extra_trees, get_adaboost, get_voting_classifier\nfrom multiprocessing import cpu_count\n\n\nn_jobs = max(1, cpu_count()-1)\ntest_size = 0.20\n\nX, y, quantitatives = load_train_dataset()\n\n# Manual aggregation\npipe, search_grid = get_voting_classifier()\n\n# pipes, search_grid = get_svm()\n# pipe = Pipeline(pipes)\n\ncv = StratifiedShuffleSplit(test_size=test_size, random_state=0, n_splits=5)\ngrid = GridSearchCV(pipe, search_grid, cv=cv, n_jobs=n_jobs, return_train_score=True, refit=True, scoring=\"accuracy\")\ngrid.fit(X, y)\n\nparameters = get_best_hyper_parameters(grid)\nprint(\"Result for 
{} configurations\".format(len(parameters)))\nfor p in parameters:\n print(\"{};{:.2f}%;{:.4f}%;±{:.4f}%\".format(\n \", \".join(map(lambda k: \"{}={}\".format(k.split(\"__\")[1], p[\"params\"][k]), p[\"params\"].keys())),\n 100.0 * p[\"mean_train_score\"],\n 100.0 * p[\"mean_test_score\"],\n 200.0 * p[\"std_test_score\"]\n ))\n\n # print(\"Results: Train: {:.2f}%, Test: {:.2f}% std:{:.4f} for {}\".format(100 * p[\"mean_train_score\"], 100 * p[\"mean_test_score\"], p[\"std_test_score\"], p[\"params\"]))\n\nprediction_file = \"{}/predictions.csv\".format(path.dirname(path.abspath(__file__)))\npred = grid.predict(load_test_dataset())\nf = open(prediction_file, \"w\")\nf.write(\"\\n\".join(map(lambda o: str(o), pred)))\nf.close()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# https://kyu9341.github.io/algorithm/2020/03/11/algorithm14226/ # https://developingbear.tistory.com/138 # https://devbelly.tistory.com/108 # 이모티콘 s개 생성 # 3가지 연산 이용 # bfs 이용 => visited를 이모티콘 방문 여부 2차원 배열 => 이모티콘의 수 와 클립보드에 저장된 이모티콘의 갯수를 이용 from collections import deque s = int(input()) q = deque() # visited[이모티콘의 수][클리보드의 이모티콘 수] visited = [[False] * 1001 for _ in range(1001)] visited[1][0] = True # 이모티콘의 수, 클립보드의 수, 시간 q.append((1, 0, 0)) while q: e, clip, t = q.popleft() if e == s: print(t) exit(0) if 0 < e < 1001: if visited[e][e] is False: visited[e][e] = True q.append((e, e, t + 1)) # clip이 0 이상 조건이 필요없음 어차피 위에서 e가 0보다 큰걸로 조건 수행했으므로 if e + clip < 1001 and visited[e + clip][clip] is False: visited[e + clip][clip] = True q.append((e + clip, clip, t + 1)) # e가 1000을 넘을때만 수행하는 것이 아닌 모든 경우에 대해서 탐색을 하기 위해서 e에 대한 조건을 걸지 않음 if visited[e - 1][clip] is False: visited[e - 1][clip] = True q.append((e - 1, clip, t + 1))
normal
{ "blob_id": "0c14a6fa8b25e1791a6eb9c71290db8bb316819a", "index": 5684, "step-1": "<mask token>\n", "step-2": "<mask token>\nq.append((1, 0, 0))\nwhile q:\n e, clip, t = q.popleft()\n if e == s:\n print(t)\n exit(0)\n if 0 < e < 1001:\n if visited[e][e] is False:\n visited[e][e] = True\n q.append((e, e, t + 1))\n if e + clip < 1001 and visited[e + clip][clip] is False:\n visited[e + clip][clip] = True\n q.append((e + clip, clip, t + 1))\n if visited[e - 1][clip] is False:\n visited[e - 1][clip] = True\n q.append((e - 1, clip, t + 1))\n", "step-3": "<mask token>\ns = int(input())\nq = deque()\nvisited = [([False] * 1001) for _ in range(1001)]\nvisited[1][0] = True\nq.append((1, 0, 0))\nwhile q:\n e, clip, t = q.popleft()\n if e == s:\n print(t)\n exit(0)\n if 0 < e < 1001:\n if visited[e][e] is False:\n visited[e][e] = True\n q.append((e, e, t + 1))\n if e + clip < 1001 and visited[e + clip][clip] is False:\n visited[e + clip][clip] = True\n q.append((e + clip, clip, t + 1))\n if visited[e - 1][clip] is False:\n visited[e - 1][clip] = True\n q.append((e - 1, clip, t + 1))\n", "step-4": "from collections import deque\ns = int(input())\nq = deque()\nvisited = [([False] * 1001) for _ in range(1001)]\nvisited[1][0] = True\nq.append((1, 0, 0))\nwhile q:\n e, clip, t = q.popleft()\n if e == s:\n print(t)\n exit(0)\n if 0 < e < 1001:\n if visited[e][e] is False:\n visited[e][e] = True\n q.append((e, e, t + 1))\n if e + clip < 1001 and visited[e + clip][clip] is False:\n visited[e + clip][clip] = True\n q.append((e + clip, clip, t + 1))\n if visited[e - 1][clip] is False:\n visited[e - 1][clip] = True\n q.append((e - 1, clip, t + 1))\n", "step-5": "# https://kyu9341.github.io/algorithm/2020/03/11/algorithm14226/\n# https://developingbear.tistory.com/138\n# https://devbelly.tistory.com/108\n# 이모티콘 s개 생성\n# 3가지 연산 이용\n# bfs 이용 => visited를 이모티콘 방문 여부 2차원 배열 => 이모티콘의 수 와 클립보드에 저장된 이모티콘의 갯수를 이용\nfrom collections import deque\ns = int(input())\nq = deque()\n# visited[이모티콘의 
수][클리보드의 이모티콘 수]\nvisited = [[False] * 1001 for _ in range(1001)]\nvisited[1][0] = True\n# 이모티콘의 수, 클립보드의 수, 시간\nq.append((1, 0, 0))\nwhile q:\n e, clip, t = q.popleft()\n if e == s:\n print(t)\n exit(0)\n\n if 0 < e < 1001:\n if visited[e][e] is False:\n visited[e][e] = True\n q.append((e, e, t + 1))\n # clip이 0 이상 조건이 필요없음 어차피 위에서 e가 0보다 큰걸로 조건 수행했으므로\n if e + clip < 1001 and visited[e + clip][clip] is False:\n visited[e + clip][clip] = True\n q.append((e + clip, clip, t + 1))\n # e가 1000을 넘을때만 수행하는 것이 아닌 모든 경우에 대해서 탐색을 하기 위해서 e에 대한 조건을 걸지 않음\n if visited[e - 1][clip] is False:\n visited[e - 1][clip] = True\n q.append((e - 1, clip, t + 1))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class OrderQuerySet(QuerySet): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class ProductQuerySet(QuerySet): <|reserved_special_token_0|> <|reserved_special_token_0|> class OrderQuerySet(QuerySet): def not_cancelled(self): return self.filter(cancelled=False) def open(self): return self.filter(open__isnull=False) def paid(self): return self.filter(paid=True) def unpaid(self): return self.filter(paid=False) def cancelled(self): return self.filter(cancelled=True) <|reserved_special_token_1|> <|reserved_special_token_0|> class ProductQuerySet(QuerySet): def available(self): return self.filter(available_in__contains=timezone.now(), category__public=True) <|reserved_special_token_0|> class OrderQuerySet(QuerySet): def not_cancelled(self): return self.filter(cancelled=False) def open(self): return self.filter(open__isnull=False) def paid(self): return self.filter(paid=True) def unpaid(self): return self.filter(paid=False) def cancelled(self): return self.filter(cancelled=True) <|reserved_special_token_1|> <|reserved_special_token_0|> class ProductQuerySet(QuerySet): def available(self): return self.filter(available_in__contains=timezone.now(), category__public=True) def annotate_subproducts(self): from .models import SubProductRelation subproducts = SubProductRelation.objects.filter(bundle_product= OuterRef('pk')) return self.annotate(has_subproducts=Exists(subproducts)) class OrderQuerySet(QuerySet): def not_cancelled(self): return self.filter(cancelled=False) def open(self): return self.filter(open__isnull=False) def paid(self): return self.filter(paid=True) def unpaid(self): return self.filter(paid=False) def cancelled(self): return self.filter(cancelled=True) <|reserved_special_token_1|> from django.db.models import Exists from django.db.models import OuterRef from django.db.models 
import QuerySet from django.utils import timezone class ProductQuerySet(QuerySet): def available(self): return self.filter(available_in__contains=timezone.now(), category__public=True) def annotate_subproducts(self): from .models import SubProductRelation subproducts = SubProductRelation.objects.filter( bundle_product=OuterRef("pk"), ) return self.annotate( has_subproducts=Exists(subproducts), ) class OrderQuerySet(QuerySet): def not_cancelled(self): return self.filter(cancelled=False) def open(self): return self.filter(open__isnull=False) def paid(self): return self.filter(paid=True) def unpaid(self): return self.filter(paid=False) def cancelled(self): return self.filter(cancelled=True)
flexible
{ "blob_id": "3fdf67c3e0e4c3aa8a3fed09102aca0272b5ff4f", "index": 6938, "step-1": "<mask token>\n\n\nclass OrderQuerySet(QuerySet):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass ProductQuerySet(QuerySet):\n <mask token>\n <mask token>\n\n\nclass OrderQuerySet(QuerySet):\n\n def not_cancelled(self):\n return self.filter(cancelled=False)\n\n def open(self):\n return self.filter(open__isnull=False)\n\n def paid(self):\n return self.filter(paid=True)\n\n def unpaid(self):\n return self.filter(paid=False)\n\n def cancelled(self):\n return self.filter(cancelled=True)\n", "step-3": "<mask token>\n\n\nclass ProductQuerySet(QuerySet):\n\n def available(self):\n return self.filter(available_in__contains=timezone.now(),\n category__public=True)\n <mask token>\n\n\nclass OrderQuerySet(QuerySet):\n\n def not_cancelled(self):\n return self.filter(cancelled=False)\n\n def open(self):\n return self.filter(open__isnull=False)\n\n def paid(self):\n return self.filter(paid=True)\n\n def unpaid(self):\n return self.filter(paid=False)\n\n def cancelled(self):\n return self.filter(cancelled=True)\n", "step-4": "<mask token>\n\n\nclass ProductQuerySet(QuerySet):\n\n def available(self):\n return self.filter(available_in__contains=timezone.now(),\n category__public=True)\n\n def annotate_subproducts(self):\n from .models import SubProductRelation\n subproducts = SubProductRelation.objects.filter(bundle_product=\n OuterRef('pk'))\n return self.annotate(has_subproducts=Exists(subproducts))\n\n\nclass OrderQuerySet(QuerySet):\n\n def not_cancelled(self):\n return self.filter(cancelled=False)\n\n def open(self):\n return self.filter(open__isnull=False)\n\n def paid(self):\n return self.filter(paid=True)\n\n def unpaid(self):\n return self.filter(paid=False)\n\n def cancelled(self):\n return self.filter(cancelled=True)\n", "step-5": "from django.db.models import Exists\nfrom django.db.models import OuterRef\nfrom 
django.db.models import QuerySet\nfrom django.utils import timezone\n\n\nclass ProductQuerySet(QuerySet):\n def available(self):\n return self.filter(available_in__contains=timezone.now(), category__public=True)\n\n def annotate_subproducts(self):\n from .models import SubProductRelation\n\n subproducts = SubProductRelation.objects.filter(\n bundle_product=OuterRef(\"pk\"),\n )\n return self.annotate(\n has_subproducts=Exists(subproducts),\n )\n\n\nclass OrderQuerySet(QuerySet):\n def not_cancelled(self):\n return self.filter(cancelled=False)\n\n def open(self):\n return self.filter(open__isnull=False)\n\n def paid(self):\n return self.filter(paid=True)\n\n def unpaid(self):\n return self.filter(paid=False)\n\n def cancelled(self):\n return self.filter(cancelled=True)\n", "step-ids": [ 1, 7, 8, 9, 11 ] }
[ 1, 7, 8, 9, 11 ]
from __future__ import division import numpy as np import matplotlib.pyplot as plt #import matplotlib.cbook as cbook import Image from matplotlib import _png from matplotlib.offsetbox import OffsetImage import scipy.io import pylab #for question 1 (my data) def resample(ms,srate): return int(round(ms/1000*srate)) def formatdata(data,Params): """ reads in TrialsMTX data structure, pulls out relevant data """ mndata = dict() alltrials = np.array([]) for k in range(len(Params["conditions"])): conditionmean = data[0,k].mean(axis = 0) mndata.update({Params["conditions"][k]: {'data' : data[0,k].mean(axis = 0), 'cmax' : conditionmean.max(), 'cmin' : conditionmean.min()}}) return mndata def traces(mndata,Params,srate,imagepath): """ plots traces of high gamma data for the trial duration. separated by condition, with brain & elec position """ #plot high gamma traces #data should be bandpassed (todo) #resample to srate st = resample(Params["st"],srate) en = resample(Params["en"],srate) bl_en = resample(Params["bl_en"],srate) bl_st = resample(Params["bl_st"],srate) plot_tp = resample(Params["plot"],srate) cue = resample(500,srate) colors = ['red','orange','green','blue'] x = np.array(range(st,en+1)) f, (ax,ax2) = plt.subplots(1,2, sharex = False) ax.axhline(y = 0,color = 'k',linewidth=2) ax.axvline(x = 0,color='k',linewidth=2) ax.axvline(x = cue,color = 'gray',linewidth = 2) ax.axvline(x = cue+cue,color = 'gray',linewidth = 2) ax.axvspan(cue, cue+cue, facecolor='0.5', alpha=0.25,label = 'cue') for j in range(len(Params["conditions"])): condition = Params['conditions'][j] y = mndata[condition]['data'] ax.plot(x,y, label = condition,linewidth = 2,color = colors[j]) ax.set_ylim((-30,85)) ax.set_xlim(st,en) ax.legend() ax.xaxis.set_ticklabels(['', '0', '','500', '', '1000', '', '1500', '', '2000','','2500','', '3000'],minor=False) ax.xaxis.set_ticks(range(st,en,plot_tp)) ax.set_xlabel("time (ms)") ax.set_ylabel("% change baseline") ax.set_title('Analytic Amplitude - High Gamma 
(70-150Hz)', fontsize = 18) #plot brain with elec location #brain = plt.imread(imagepath) #aa = pylab.mean(brain,2) #ax2.imshow(aa) #a2.gray() #brain = Image.open(imagepath) #ax2.set_axis_off() #im = plt.imshow(brain, origin = 'lower') #brain = _png.read_png(imagepath) #imagebox = OffsetImage(brain,zoom =5) #ab = AnnotationBbox(imagebox,) im = Image.open(imagepath) ax2.imshow(im,aspect = 'auto',origin = 'lower') ax2.set_xlim((0,750)) ax2.set_title('Electrode Location',fontsize = 18) return f, (ax, ax2) #for question 2 (stocks data) def readdata(filename): """ reads in a txt file with 2 columns of numbers and 1 header (dates and values) """ dt = np.dtype([('date','int'),('val','<f8')]) data = np.loadtxt(filename,dtype = dt,skiprows = 1) return data def plotstocksdata(datadict,formats): """ takes in dict of data structures and Params indicating when to start/end also takes in formats dictionary. keys must match datadict, values are the linewidth/color to plot """ #plot data f = plt.figure() ax1 = plt.subplot(111) data = datadict["yahoo"] yahoo = ax1.plot(data['date'],data['val'],formats["yahoo"], label = 'Yahoo Stock Value',linewidth = 1.5) data = datadict["google"] google = ax1.plot(data['date'],data['val'],formats["google"], label = 'Google Stock Value',linewidth = 1.5) ax2 = ax1.twinx() data = datadict["nytmp"] nytmp = ax2.plot(data['date'],data['val'],formats["nytmp"],label = 'NY Mon. 
High Temp',linewidth=1.5) ax1.set_xlabel('Date (MJD)') ax1.set_ylabel('Value (Dollars') ax1.set_ylim((-20,765)) ax1.yaxis.set_minor_locator(plt.MultipleLocator(20)) ax1.set_xlim((48800, 55600)) ax1.xaxis.set_minor_locator(plt.MultipleLocator(200)) #plt.show() #ISAAC EDIT ax2.set_ylim((-150, 100)) ax2.set_ylim((-150, 100)) ax2.set_ylabel('Temperature ($^\circ$F)') ax2.yaxis.set_minor_locator(plt.MultipleLocator(10)) plt.title('New York Temperature, Google, and Yahoo!', fontname = 'serif',fontsize = 18) plts = yahoo+google+nytmp labels = [l.get_label() for l in plts] ax1.legend(plts, labels, loc=(0.025,0.5) ,frameon=False, prop={'size':11}, markerscale = 2) plt.show() def answer_hw(): #QUESTION 1 #load data #dataDir = "/Users/matar/Documents/Courses/PythonClass/HW2/data/" dataDir = "data/" #ISAAC EDIT imagepath = dataDir + 'e37.png' matdata = scipy.io.loadmat(dataDir+'TrialsMTX',struct_as_record = True) data = matdata["TrialsMTX"]['data'][0,0] #define parameters Params={"f1":70, "f2": 150, "st" :-250, "en":3000, "plot":250, "bl_st" : -250, "bl_en":0, "caxis":200, "conditions":['20','40','60','80']} subjdata = scipy.io.loadmat(dataDir+"subj_globals") srate = subjdata["srate"][0,0] #format data mndata = formatdata(data, Params) print '-'*40 print "question 1 : plotting traces" print '-'*40 traces(mndata,Params,srate,imagepath) #ideally would like to separate the traces func from the brain image, but can't figure out how to plot 2 funcs as subplots of same image #QUESTION 2 formats = {'google' : 'b', 'nytmp' : 'r--', 'yahoo' :'purple'} #dataDir = "/Users/matar/Documents/Courses/PythonClass/HW2/hw2_data/" dataDir = "hw2_data/" #ISAAC EDIT datadict = {'nytmp': readdata(dataDir+'ny_temps.txt'), 'google': readdata(dataDir+'google_data.txt'), 'yahoo': readdata(dataDir+'yahoo_data.txt')} print '-'*40 print "question 2 : plotting stock data" print '-'*40 plotstocksdata(datadict,formats)
normal
{ "blob_id": "a81ee0a855c8a731bafe4967b776e3f93ef78c2a", "index": 8908, "step-1": "from __future__ import division\nimport numpy as np\nimport matplotlib.pyplot as plt\n#import matplotlib.cbook as cbook\nimport Image\nfrom matplotlib import _png\nfrom matplotlib.offsetbox import OffsetImage\nimport scipy.io\nimport pylab\n\n#for question 1 (my data)\ndef resample(ms,srate):\n\treturn int(round(ms/1000*srate))\n\ndef formatdata(data,Params):\n\t\"\"\"\n\treads in TrialsMTX data structure, pulls out relevant data\n\t\"\"\"\n\tmndata = dict()\n\talltrials = np.array([])\n\tfor k in range(len(Params[\"conditions\"])):\n\t\tconditionmean = data[0,k].mean(axis = 0)\n\t\tmndata.update({Params[\"conditions\"][k]: {'data' : data[0,k].mean(axis = 0), 'cmax' : conditionmean.max(), 'cmin' : conditionmean.min()}})\n\treturn mndata\n\ndef traces(mndata,Params,srate,imagepath):\n\t\"\"\"\n\tplots traces of high gamma data for the trial duration. separated by condition, with brain & elec position\n\t\"\"\"\n\t#plot high gamma traces\n\t#data should be bandpassed (todo)\n\t#resample to srate\n\tst = resample(Params[\"st\"],srate)\n\ten = resample(Params[\"en\"],srate)\n\tbl_en = resample(Params[\"bl_en\"],srate)\n\tbl_st = resample(Params[\"bl_st\"],srate)\n\tplot_tp = resample(Params[\"plot\"],srate)\n\tcue = resample(500,srate)\n\t\n\tcolors = ['red','orange','green','blue']\n\tx = np.array(range(st,en+1))\n\tf, (ax,ax2) = plt.subplots(1,2, sharex = False)\n\tax.axhline(y = 0,color = 'k',linewidth=2)\n\tax.axvline(x = 0,color='k',linewidth=2)\n\tax.axvline(x = cue,color = 'gray',linewidth = 2)\n\tax.axvline(x = cue+cue,color = 'gray',linewidth = 2)\n\tax.axvspan(cue, cue+cue, facecolor='0.5', alpha=0.25,label = 'cue')\n\n\tfor j in range(len(Params[\"conditions\"])):\n\t\tcondition = Params['conditions'][j]\n\t\ty = mndata[condition]['data']\n\t\tax.plot(x,y, label = condition,linewidth = 2,color = 
colors[j])\n\t\n\tax.set_ylim((-30,85))\n\tax.set_xlim(st,en)\n\tax.legend()\n\tax.xaxis.set_ticklabels(['', '0', '','500', '', '1000', '', '1500', '', '2000','','2500','', '3000'],minor=False)\n\tax.xaxis.set_ticks(range(st,en,plot_tp))\n\n\tax.set_xlabel(\"time (ms)\")\n\tax.set_ylabel(\"% change baseline\")\n\tax.set_title('Analytic Amplitude - High Gamma (70-150Hz)', fontsize = 18)\n\n\t#plot brain with elec location\n\t#brain = plt.imread(imagepath)\n\t#aa = pylab.mean(brain,2)\n\t#ax2.imshow(aa)\n\t#a2.gray()\n\n\t#brain = Image.open(imagepath)\n\t#ax2.set_axis_off()\n\t#im = plt.imshow(brain, origin = 'lower')\n\n\t#brain = _png.read_png(imagepath)\n\t#imagebox = OffsetImage(brain,zoom =5)\n\t#ab = AnnotationBbox(imagebox,)\n\n\tim = Image.open(imagepath)\n\tax2.imshow(im,aspect = 'auto',origin = 'lower')\n\tax2.set_xlim((0,750))\n\tax2.set_title('Electrode Location',fontsize = 18)\n\n\n\n\treturn f, (ax, ax2)\n\n\n#for question 2 (stocks data)\ndef readdata(filename):\n\t\"\"\" \n\treads in a txt file with 2 columns of numbers and 1 header (dates and values)\n\t\"\"\"\n\tdt = np.dtype([('date','int'),('val','<f8')])\n\tdata = np.loadtxt(filename,dtype = dt,skiprows = 1)\n\treturn data\n\ndef plotstocksdata(datadict,formats):\n\t\"\"\"\n\ttakes in dict of data structures and Params indicating when to start/end\n\talso takes in formats dictionary. keys must match datadict, values are the linewidth/color to plot\n\t\"\"\"\n\t#plot data\n\tf = plt.figure()\n\tax1 = plt.subplot(111)\n\tdata = datadict[\"yahoo\"]\n\tyahoo = ax1.plot(data['date'],data['val'],formats[\"yahoo\"], label = 'Yahoo Stock Value',linewidth = 1.5)\n\tdata = datadict[\"google\"]\n\tgoogle = ax1.plot(data['date'],data['val'],formats[\"google\"], label = 'Google Stock Value',linewidth = 1.5)\n\tax2 = ax1.twinx()\n\tdata = datadict[\"nytmp\"]\n\tnytmp = ax2.plot(data['date'],data['val'],formats[\"nytmp\"],label = 'NY Mon. 
High Temp',linewidth=1.5)\n\tax1.set_xlabel('Date (MJD)')\n\tax1.set_ylabel('Value (Dollars')\n\tax1.set_ylim((-20,765))\n\tax1.yaxis.set_minor_locator(plt.MultipleLocator(20))\n\tax1.set_xlim((48800, 55600))\n\tax1.xaxis.set_minor_locator(plt.MultipleLocator(200))\n\t#plt.show() #ISAAC EDIT\n\tax2.set_ylim((-150, 100))\n\tax2.set_ylim((-150, 100))\n\tax2.set_ylabel('Temperature ($^\\circ$F)')\n\tax2.yaxis.set_minor_locator(plt.MultipleLocator(10))\n\tplt.title('New York Temperature, Google, and Yahoo!', fontname = 'serif',fontsize = 18)\n\tplts = yahoo+google+nytmp\n\tlabels = [l.get_label() for l in plts]\n\tax1.legend(plts, labels, loc=(0.025,0.5) ,frameon=False, prop={'size':11}, markerscale = 2)\n\tplt.show()\n\n\ndef answer_hw():\n\t#QUESTION 1\n\t#load data\n\t#dataDir = \"/Users/matar/Documents/Courses/PythonClass/HW2/data/\"\n\tdataDir = \"data/\" #ISAAC EDIT\n\timagepath = dataDir + 'e37.png'\n\tmatdata = scipy.io.loadmat(dataDir+'TrialsMTX',struct_as_record = True)\n\tdata = matdata[\"TrialsMTX\"]['data'][0,0]\n\n\t#define parameters\n\tParams={\"f1\":70, \"f2\": 150, \"st\" :-250, \"en\":3000, \"plot\":250, \"bl_st\" : -250, \"bl_en\":0, \"caxis\":200, \"conditions\":['20','40','60','80']}\n\tsubjdata = scipy.io.loadmat(dataDir+\"subj_globals\")\n\tsrate = subjdata[\"srate\"][0,0]\n\n\t#format data\n\tmndata = formatdata(data, Params)\n\n\tprint '-'*40\n\tprint \"question 1 : plotting traces\"\n\tprint '-'*40\n\ttraces(mndata,Params,srate,imagepath) \n\t#ideally would like to separate the traces func from the brain image, but can't figure out how to plot 2 funcs as subplots of same image\n\n\n\t#QUESTION 2\n\tformats = {'google' : 'b', 'nytmp' : 'r--', 'yahoo' :'purple'}\n\t#dataDir = \"/Users/matar/Documents/Courses/PythonClass/HW2/hw2_data/\"\n\tdataDir = \"hw2_data/\" #ISAAC EDIT\n\tdatadict = {'nytmp': readdata(dataDir+'ny_temps.txt'), 'google': readdata(dataDir+'google_data.txt'), 'yahoo': readdata(dataDir+'yahoo_data.txt')}\n\tprint 
'-'*40\n\tprint \"question 2 : plotting stock data\"\n\tprint '-'*40\n\tplotstocksdata(datadict,formats)", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import os from multiprocessing import Pool import glob import click import logging import pandas as pd from src.resampling.resampling import Resampler # Default paths path_in = 'data/hecktor_nii/' path_out = 'data/resampled/' path_bb = 'data/bbox.csv' @click.command() @click.argument('input_folder', type=click.Path(exists=True), default=path_in) @click.argument('output_folder', type=click.Path(), default=path_out) @click.argument('bounding_boxes_file', type=click.Path(), default=path_bb) @click.option('--cores', type=click.INT, default=12, help='The number of workers for parallelization.') @click.option('--resampling', type=click.FLOAT, nargs=3, default=(1, 1, 1), help='Expect 3 positive floats describing the output ' 'resolution of the resampling. To avoid resampling ' 'on one or more dimension a value of -1 can be fed ' 'e.g. --resampling 1.0 1.0 -1 will resample the x ' 'and y axis at 1 mm/px and left the z axis untouched.') @click.option('--order', type=click.INT, nargs=1, default=3, help='The order of the spline interpolation used to resample') def main(input_folder, output_folder, bounding_boxes_file, cores, resampling, order): """ This command line interface allows to resample NIFTI files within a given bounding box contain in BOUNDING_BOXES_FILE. The images are resampled with spline interpolation of degree --order (default=3) and the segmentation are resampled by nearest neighbor interpolation. INPUT_FOLDER is the path of the folder containing the NIFTI to resample. OUTPUT_FOLDER is the path of the folder where to store the resampled NIFTI files. BOUNDING_BOXES_FILE is the path of the .csv file containing the bounding boxes of each patient. 
""" logger = logging.getLogger(__name__) logger.info('Resampling') if not os.path.exists(output_folder): os.mkdir(output_folder) print('resampling is {}'.format(str(resampling))) bb_df = pd.read_csv(bounding_boxes_file) bb_df = bb_df.set_index('PatientID') files_list = [ f for f in glob.glob(input_folder + '/**/*.nii.gz', recursive=True) ] resampler = Resampler(bb_df, output_folder, order, resampling=resampling) with Pool(cores) as p: p.map(resampler, files_list) if __name__ == '__main__': log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' logging.basicConfig(level=logging.INFO, format=log_fmt) logging.captureWarnings(True) main()
normal
{ "blob_id": "3479276d4769518aa60dcd4e1bb41a8a1a7d6517", "index": 315, "step-1": "<mask token>\n\n\n@click.command()\n@click.argument('input_folder', type=click.Path(exists=True), default=path_in)\n@click.argument('output_folder', type=click.Path(), default=path_out)\n@click.argument('bounding_boxes_file', type=click.Path(), default=path_bb)\n@click.option('--cores', type=click.INT, default=12, help=\n 'The number of workers for parallelization.')\n@click.option('--resampling', type=click.FLOAT, nargs=3, default=(1, 1, 1),\n help=\n 'Expect 3 positive floats describing the output resolution of the resampling. To avoid resampling on one or more dimension a value of -1 can be fed e.g. --resampling 1.0 1.0 -1 will resample the x and y axis at 1 mm/px and left the z axis untouched.'\n )\n@click.option('--order', type=click.INT, nargs=1, default=3, help=\n 'The order of the spline interpolation used to resample')\ndef main(input_folder, output_folder, bounding_boxes_file, cores,\n resampling, order):\n \"\"\" This command line interface allows to resample NIFTI files within a\n given bounding box contain in BOUNDING_BOXES_FILE. 
The images are\n resampled with spline interpolation\n of degree --order (default=3) and the segmentation are resampled\n by nearest neighbor interpolation.\n\n INPUT_FOLDER is the path of the folder containing the NIFTI to\n resample.\n OUTPUT_FOLDER is the path of the folder where to store the\n resampled NIFTI files.\n BOUNDING_BOXES_FILE is the path of the .csv file containing the\n bounding boxes of each patient.\n \"\"\"\n logger = logging.getLogger(__name__)\n logger.info('Resampling')\n if not os.path.exists(output_folder):\n os.mkdir(output_folder)\n print('resampling is {}'.format(str(resampling)))\n bb_df = pd.read_csv(bounding_boxes_file)\n bb_df = bb_df.set_index('PatientID')\n files_list = [f for f in glob.glob(input_folder + '/**/*.nii.gz',\n recursive=True)]\n resampler = Resampler(bb_df, output_folder, order, resampling=resampling)\n with Pool(cores) as p:\n p.map(resampler, files_list)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@click.command()\n@click.argument('input_folder', type=click.Path(exists=True), default=path_in)\n@click.argument('output_folder', type=click.Path(), default=path_out)\n@click.argument('bounding_boxes_file', type=click.Path(), default=path_bb)\n@click.option('--cores', type=click.INT, default=12, help=\n 'The number of workers for parallelization.')\n@click.option('--resampling', type=click.FLOAT, nargs=3, default=(1, 1, 1),\n help=\n 'Expect 3 positive floats describing the output resolution of the resampling. To avoid resampling on one or more dimension a value of -1 can be fed e.g. 
--resampling 1.0 1.0 -1 will resample the x and y axis at 1 mm/px and left the z axis untouched.'\n )\n@click.option('--order', type=click.INT, nargs=1, default=3, help=\n 'The order of the spline interpolation used to resample')\ndef main(input_folder, output_folder, bounding_boxes_file, cores,\n resampling, order):\n \"\"\" This command line interface allows to resample NIFTI files within a\n given bounding box contain in BOUNDING_BOXES_FILE. The images are\n resampled with spline interpolation\n of degree --order (default=3) and the segmentation are resampled\n by nearest neighbor interpolation.\n\n INPUT_FOLDER is the path of the folder containing the NIFTI to\n resample.\n OUTPUT_FOLDER is the path of the folder where to store the\n resampled NIFTI files.\n BOUNDING_BOXES_FILE is the path of the .csv file containing the\n bounding boxes of each patient.\n \"\"\"\n logger = logging.getLogger(__name__)\n logger.info('Resampling')\n if not os.path.exists(output_folder):\n os.mkdir(output_folder)\n print('resampling is {}'.format(str(resampling)))\n bb_df = pd.read_csv(bounding_boxes_file)\n bb_df = bb_df.set_index('PatientID')\n files_list = [f for f in glob.glob(input_folder + '/**/*.nii.gz',\n recursive=True)]\n resampler = Resampler(bb_df, output_folder, order, resampling=resampling)\n with Pool(cores) as p:\n p.map(resampler, files_list)\n\n\nif __name__ == '__main__':\n log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n logging.basicConfig(level=logging.INFO, format=log_fmt)\n logging.captureWarnings(True)\n main()\n", "step-3": "<mask token>\npath_in = 'data/hecktor_nii/'\npath_out = 'data/resampled/'\npath_bb = 'data/bbox.csv'\n\n\n@click.command()\n@click.argument('input_folder', type=click.Path(exists=True), default=path_in)\n@click.argument('output_folder', type=click.Path(), default=path_out)\n@click.argument('bounding_boxes_file', type=click.Path(), default=path_bb)\n@click.option('--cores', type=click.INT, default=12, help=\n 'The 
number of workers for parallelization.')\n@click.option('--resampling', type=click.FLOAT, nargs=3, default=(1, 1, 1),\n help=\n 'Expect 3 positive floats describing the output resolution of the resampling. To avoid resampling on one or more dimension a value of -1 can be fed e.g. --resampling 1.0 1.0 -1 will resample the x and y axis at 1 mm/px and left the z axis untouched.'\n )\n@click.option('--order', type=click.INT, nargs=1, default=3, help=\n 'The order of the spline interpolation used to resample')\ndef main(input_folder, output_folder, bounding_boxes_file, cores,\n resampling, order):\n \"\"\" This command line interface allows to resample NIFTI files within a\n given bounding box contain in BOUNDING_BOXES_FILE. The images are\n resampled with spline interpolation\n of degree --order (default=3) and the segmentation are resampled\n by nearest neighbor interpolation.\n\n INPUT_FOLDER is the path of the folder containing the NIFTI to\n resample.\n OUTPUT_FOLDER is the path of the folder where to store the\n resampled NIFTI files.\n BOUNDING_BOXES_FILE is the path of the .csv file containing the\n bounding boxes of each patient.\n \"\"\"\n logger = logging.getLogger(__name__)\n logger.info('Resampling')\n if not os.path.exists(output_folder):\n os.mkdir(output_folder)\n print('resampling is {}'.format(str(resampling)))\n bb_df = pd.read_csv(bounding_boxes_file)\n bb_df = bb_df.set_index('PatientID')\n files_list = [f for f in glob.glob(input_folder + '/**/*.nii.gz',\n recursive=True)]\n resampler = Resampler(bb_df, output_folder, order, resampling=resampling)\n with Pool(cores) as p:\n p.map(resampler, files_list)\n\n\nif __name__ == '__main__':\n log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n logging.basicConfig(level=logging.INFO, format=log_fmt)\n logging.captureWarnings(True)\n main()\n", "step-4": "import os\nfrom multiprocessing import Pool\nimport glob\nimport click\nimport logging\nimport pandas as pd\nfrom src.resampling.resampling 
import Resampler\npath_in = 'data/hecktor_nii/'\npath_out = 'data/resampled/'\npath_bb = 'data/bbox.csv'\n\n\n@click.command()\n@click.argument('input_folder', type=click.Path(exists=True), default=path_in)\n@click.argument('output_folder', type=click.Path(), default=path_out)\n@click.argument('bounding_boxes_file', type=click.Path(), default=path_bb)\n@click.option('--cores', type=click.INT, default=12, help=\n 'The number of workers for parallelization.')\n@click.option('--resampling', type=click.FLOAT, nargs=3, default=(1, 1, 1),\n help=\n 'Expect 3 positive floats describing the output resolution of the resampling. To avoid resampling on one or more dimension a value of -1 can be fed e.g. --resampling 1.0 1.0 -1 will resample the x and y axis at 1 mm/px and left the z axis untouched.'\n )\n@click.option('--order', type=click.INT, nargs=1, default=3, help=\n 'The order of the spline interpolation used to resample')\ndef main(input_folder, output_folder, bounding_boxes_file, cores,\n resampling, order):\n \"\"\" This command line interface allows to resample NIFTI files within a\n given bounding box contain in BOUNDING_BOXES_FILE. 
The images are\n resampled with spline interpolation\n of degree --order (default=3) and the segmentation are resampled\n by nearest neighbor interpolation.\n\n INPUT_FOLDER is the path of the folder containing the NIFTI to\n resample.\n OUTPUT_FOLDER is the path of the folder where to store the\n resampled NIFTI files.\n BOUNDING_BOXES_FILE is the path of the .csv file containing the\n bounding boxes of each patient.\n \"\"\"\n logger = logging.getLogger(__name__)\n logger.info('Resampling')\n if not os.path.exists(output_folder):\n os.mkdir(output_folder)\n print('resampling is {}'.format(str(resampling)))\n bb_df = pd.read_csv(bounding_boxes_file)\n bb_df = bb_df.set_index('PatientID')\n files_list = [f for f in glob.glob(input_folder + '/**/*.nii.gz',\n recursive=True)]\n resampler = Resampler(bb_df, output_folder, order, resampling=resampling)\n with Pool(cores) as p:\n p.map(resampler, files_list)\n\n\nif __name__ == '__main__':\n log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n logging.basicConfig(level=logging.INFO, format=log_fmt)\n logging.captureWarnings(True)\n main()\n", "step-5": "import os\nfrom multiprocessing import Pool\nimport glob\n\nimport click\nimport logging\nimport pandas as pd\n\nfrom src.resampling.resampling import Resampler\n\n# Default paths\npath_in = 'data/hecktor_nii/'\npath_out = 'data/resampled/'\npath_bb = 'data/bbox.csv'\n\n\n@click.command()\n@click.argument('input_folder', type=click.Path(exists=True), default=path_in)\n@click.argument('output_folder', type=click.Path(), default=path_out)\n@click.argument('bounding_boxes_file', type=click.Path(), default=path_bb)\n@click.option('--cores',\n type=click.INT,\n default=12,\n help='The number of workers for parallelization.')\n@click.option('--resampling',\n type=click.FLOAT,\n nargs=3,\n default=(1, 1, 1),\n help='Expect 3 positive floats describing the output '\n 'resolution of the resampling. 
To avoid resampling '\n 'on one or more dimension a value of -1 can be fed '\n 'e.g. --resampling 1.0 1.0 -1 will resample the x '\n 'and y axis at 1 mm/px and left the z axis untouched.')\n@click.option('--order',\n type=click.INT,\n nargs=1,\n default=3,\n help='The order of the spline interpolation used to resample')\ndef main(input_folder, output_folder, bounding_boxes_file, cores, resampling,\n order):\n \"\"\" This command line interface allows to resample NIFTI files within a\n given bounding box contain in BOUNDING_BOXES_FILE. The images are\n resampled with spline interpolation\n of degree --order (default=3) and the segmentation are resampled\n by nearest neighbor interpolation.\n\n INPUT_FOLDER is the path of the folder containing the NIFTI to\n resample.\n OUTPUT_FOLDER is the path of the folder where to store the\n resampled NIFTI files.\n BOUNDING_BOXES_FILE is the path of the .csv file containing the\n bounding boxes of each patient.\n \"\"\"\n logger = logging.getLogger(__name__)\n logger.info('Resampling')\n\n if not os.path.exists(output_folder):\n os.mkdir(output_folder)\n print('resampling is {}'.format(str(resampling)))\n bb_df = pd.read_csv(bounding_boxes_file)\n bb_df = bb_df.set_index('PatientID')\n files_list = [\n f for f in glob.glob(input_folder + '/**/*.nii.gz', recursive=True)\n ]\n resampler = Resampler(bb_df, output_folder, order, resampling=resampling)\n with Pool(cores) as p:\n p.map(resampler, files_list)\n\n\nif __name__ == '__main__':\n log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n logging.basicConfig(level=logging.INFO, format=log_fmt)\n logging.captureWarnings(True)\n\n main()\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution(object): def maxDistToClosest(self, seats): """ :type seats: List[int] :rtype: int """ start = 0 end = 0 length = len(seats) max_distance = 0 for i in range(len(seats)): seat = seats[i] if seat == 1: if start == 0 or end == length - 1: max_distance = max(max_distance, end - start + 1) else: max_distance = max(max_distance, (end - start + 1) / 2 + (end - start + 1) % 2) if i + 1 < length: start = end = i + 1 else: end = i if start == 0 or end == length - 1: max_distance = max(max_distance, end - start + 1) else: max_distance = max(max_distance, (end - start + 1) / 2 + (end - start + 1) % 2) return max_distance
flexible
{ "blob_id": "2b8b502381e35ef8e56bc150114a8a4831782c5a", "index": 3819, "step-1": "<mask token>\n", "step-2": "class Solution(object):\n <mask token>\n", "step-3": "class Solution(object):\n\n def maxDistToClosest(self, seats):\n \"\"\"\n :type seats: List[int]\n :rtype: int\n \"\"\"\n start = 0\n end = 0\n length = len(seats)\n max_distance = 0\n for i in range(len(seats)):\n seat = seats[i]\n if seat == 1:\n if start == 0 or end == length - 1:\n max_distance = max(max_distance, end - start + 1)\n else:\n max_distance = max(max_distance, (end - start + 1) / 2 +\n (end - start + 1) % 2)\n if i + 1 < length:\n start = end = i + 1\n else:\n end = i\n if start == 0 or end == length - 1:\n max_distance = max(max_distance, end - start + 1)\n else:\n max_distance = max(max_distance, (end - start + 1) / 2 + (end -\n start + 1) % 2)\n return max_distance\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
from django.urls import path from . import views urlpatterns = [ path('', views.home, name='VitaminSHE-home'), path('signup/', views.signup, name='VitaminSHE-signup'), path('login/', views.login, name='VitaminSHE-login'), path('healthcheck/', views.healthcheck, name='VitaminSHE-healthcheck'), path('food/', views.food, name='VitaminSHE-food'), path('book/', views.book, name='VitaminSHE-book'), path('why/', views.why, name='VitaminSHE-why'), ]
normal
{ "blob_id": "33aa5c5ab75a26705875b55baf61f7f996cb69cd", "index": 1280, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = [path('', views.home, name='VitaminSHE-home'), path('signup/',\n views.signup, name='VitaminSHE-signup'), path('login/', views.login,\n name='VitaminSHE-login'), path('healthcheck/', views.healthcheck, name=\n 'VitaminSHE-healthcheck'), path('food/', views.food, name=\n 'VitaminSHE-food'), path('book/', views.book, name='VitaminSHE-book'),\n path('why/', views.why, name='VitaminSHE-why')]\n", "step-3": "from django.urls import path\nfrom . import views\nurlpatterns = [path('', views.home, name='VitaminSHE-home'), path('signup/',\n views.signup, name='VitaminSHE-signup'), path('login/', views.login,\n name='VitaminSHE-login'), path('healthcheck/', views.healthcheck, name=\n 'VitaminSHE-healthcheck'), path('food/', views.food, name=\n 'VitaminSHE-food'), path('book/', views.book, name='VitaminSHE-book'),\n path('why/', views.why, name='VitaminSHE-why')]\n", "step-4": "from django.urls import path\nfrom . import views\nurlpatterns = [\n path('', views.home, name='VitaminSHE-home'),\n path('signup/', views.signup, name='VitaminSHE-signup'),\n path('login/', views.login, name='VitaminSHE-login'),\n path('healthcheck/', views.healthcheck, name='VitaminSHE-healthcheck'),\n path('food/', views.food, name='VitaminSHE-food'),\n path('book/', views.book, name='VitaminSHE-book'),\n path('why/', views.why, name='VitaminSHE-why'),\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import sys import time def initialize(x: object) -> object: # Create initialization data and take a lot of time data = [] starttimeinmillis = int(round(time.time())) c =0 file1 = sys.argv[x] with open(file1) as datafile: for line in datafile: c+=1 if(c%100==0): print(".",sep='', end='',flush=True) data.append([int(l) for l in line.split()]) rows = len(data) cols = len(data[0]) # print(data) #print("rows=", rows, " cols=", cols) print("time took:",int(round(time.time()))-starttimeinmillis,"seconds") return data
normal
{ "blob_id": "91f3aae4e74f371cadaf10385510bc1c80063f55", "index": 7765, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef initialize(x: object) ->object:\n data = []\n starttimeinmillis = int(round(time.time()))\n c = 0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c += 1\n if c % 100 == 0:\n print('.', sep='', end='', flush=True)\n data.append([int(l) for l in line.split()])\n rows = len(data)\n cols = len(data[0])\n print('time took:', int(round(time.time())) - starttimeinmillis, 'seconds')\n return data\n", "step-3": "import sys\nimport time\n\n\ndef initialize(x: object) ->object:\n data = []\n starttimeinmillis = int(round(time.time()))\n c = 0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c += 1\n if c % 100 == 0:\n print('.', sep='', end='', flush=True)\n data.append([int(l) for l in line.split()])\n rows = len(data)\n cols = len(data[0])\n print('time took:', int(round(time.time())) - starttimeinmillis, 'seconds')\n return data\n", "step-4": "import sys\nimport time\ndef initialize(x: object) -> object:\n # Create initialization data and take a lot of time\n\n data = []\n starttimeinmillis = int(round(time.time()))\n\n c =0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c+=1\n if(c%100==0):\n print(\".\",sep='', end='',flush=True)\n data.append([int(l) for l in line.split()])\n\n rows = len(data)\n cols = len(data[0])\n # print(data)\n\n #print(\"rows=\", rows, \" cols=\", cols)\n print(\"time took:\",int(round(time.time()))-starttimeinmillis,\"seconds\")\n return data\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def test_emojize_win32(mocker): mocker.patch('sys.platform', 'win32') assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_emojize_linux(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_linux_ioerror(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', side_effect=IOError) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_wsl(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_console_writer_quiet(mocker): click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=False) for method in log_methods: getattr(console, method)('test') click_echo.assert_not_called() def test_console_writer_verbose(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=True) for method in log_methods: getattr(console, method)('test') fmt = console.format_strings.get(method, '{msg}') message = reqwire.helpers.cli.emojize(fmt.format(msg='test')) click_echo.assert_called_once_with(message) click_echo.reset_mock() <|reserved_special_token_0|> def test_main_remove(cli_runner): from reqwire.cli import main result = cli_runner.invoke(main, ['remove', 'Flask']) assert result.exit_code == 0, result.output <|reserved_special_token_1|> <|reserved_special_token_0|> def test_emojize_win32(mocker): 
mocker.patch('sys.platform', 'win32') assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_emojize_linux(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_linux_ioerror(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', side_effect=IOError) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_wsl(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_console_writer_quiet(mocker): click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=False) for method in log_methods: getattr(console, method)('test') click_echo.assert_not_called() def test_console_writer_verbose(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=True) for method in log_methods: getattr(console, method)('test') fmt = console.format_strings.get(method, '{msg}') message = reqwire.helpers.cli.emojize(fmt.format(msg='test')) click_echo.assert_called_once_with(message) click_echo.reset_mock() def test_build_with_pip_compile_options(cli_runner, mocker): from reqwire.cli import main pip_compile = mocker.patch.object(sh, 'pip_compile') result = cli_runner.invoke(main, ['build', '-t', 'main', '--', '--no-header']) assert result.exit_code == 0, result.output assert pip_compile.call_args[0][2] == '--no-header' def test_main_remove(cli_runner): from 
reqwire.cli import main result = cli_runner.invoke(main, ['remove', 'Flask']) assert result.exit_code == 0, result.output <|reserved_special_token_1|> <|reserved_special_token_0|> log_methods = 'echo', 'error', 'fatal', 'info', 'warn', 'warning' def test_emojize_win32(mocker): mocker.patch('sys.platform', 'win32') assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_emojize_linux(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_linux_ioerror(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', side_effect=IOError) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_wsl(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_console_writer_quiet(mocker): click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=False) for method in log_methods: getattr(console, method)('test') click_echo.assert_not_called() def test_console_writer_verbose(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=True) for method in log_methods: getattr(console, method)('test') fmt = console.format_strings.get(method, '{msg}') message = reqwire.helpers.cli.emojize(fmt.format(msg='test')) click_echo.assert_called_once_with(message) click_echo.reset_mock() def test_build_with_pip_compile_options(cli_runner, mocker): from reqwire.cli import 
main pip_compile = mocker.patch.object(sh, 'pip_compile') result = cli_runner.invoke(main, ['build', '-t', 'main', '--', '--no-header']) assert result.exit_code == 0, result.output assert pip_compile.call_args[0][2] == '--no-header' def test_main_remove(cli_runner): from reqwire.cli import main result = cli_runner.invoke(main, ['remove', 'Flask']) assert result.exit_code == 0, result.output <|reserved_special_token_1|> from __future__ import absolute_import import sh import reqwire.helpers.cli log_methods = 'echo', 'error', 'fatal', 'info', 'warn', 'warning' def test_emojize_win32(mocker): mocker.patch('sys.platform', 'win32') assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_emojize_linux(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_linux_ioerror(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', side_effect=IOError) assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8' ) == b'\xf0\x9f\x91\x8d' def test_emojize_wsl(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)')) assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8' ) == b'foo' def test_console_writer_quiet(mocker): click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=False) for method in log_methods: getattr(console, method)('test') click_echo.assert_not_called() def test_console_writer_verbose(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open(read_data= 'Linux version 4.4.0-31-generic (gcc version 5.3.1)')) click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=True) for 
method in log_methods: getattr(console, method)('test') fmt = console.format_strings.get(method, '{msg}') message = reqwire.helpers.cli.emojize(fmt.format(msg='test')) click_echo.assert_called_once_with(message) click_echo.reset_mock() def test_build_with_pip_compile_options(cli_runner, mocker): from reqwire.cli import main pip_compile = mocker.patch.object(sh, 'pip_compile') result = cli_runner.invoke(main, ['build', '-t', 'main', '--', '--no-header']) assert result.exit_code == 0, result.output assert pip_compile.call_args[0][2] == '--no-header' def test_main_remove(cli_runner): from reqwire.cli import main result = cli_runner.invoke(main, ['remove', 'Flask']) assert result.exit_code == 0, result.output <|reserved_special_token_1|> # -*- coding: utf-8 -*- from __future__ import absolute_import import sh import reqwire.helpers.cli log_methods = ( 'echo', 'error', 'fatal', 'info', 'warn', 'warning', ) def test_emojize_win32(mocker): mocker.patch('sys.platform', 'win32') assert reqwire.helpers.cli.emojize( ':thumbs_up_sign: foo').encode('utf-8') == b'foo' def test_emojize_linux(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open( read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)')) assert reqwire.helpers.cli.emojize( ':thumbs_up_sign:').encode('utf-8') == b'\xf0\x9f\x91\x8d' def test_emojize_linux_ioerror(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', side_effect=IOError) assert reqwire.helpers.cli.emojize( ':thumbs_up_sign:').encode('utf-8') == b'\xf0\x9f\x91\x8d' def test_emojize_wsl(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open( read_data='Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)')) assert reqwire.helpers.cli.emojize( ':thumbs_up_sign: foo').encode('utf-8') == b'foo' def test_console_writer_quiet(mocker): click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=False) for method in log_methods: 
getattr(console, method)('test') click_echo.assert_not_called() def test_console_writer_verbose(mocker): mocker.patch('sys.platform', 'linux') mocker.patch('io.open', mocker.mock_open( read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)')) click_echo = mocker.patch('click.echo') console = reqwire.helpers.cli.ConsoleWriter(verbose=True) for method in log_methods: getattr(console, method)('test') fmt = console.format_strings.get(method, '{msg}') message = reqwire.helpers.cli.emojize(fmt.format(msg='test')) click_echo.assert_called_once_with(message) click_echo.reset_mock() def test_build_with_pip_compile_options(cli_runner, mocker): from reqwire.cli import main pip_compile = mocker.patch.object(sh, 'pip_compile') result = cli_runner.invoke(main, ['build', '-t', 'main', '--', '--no-header']) assert result.exit_code == 0, result.output assert pip_compile.call_args[0][2] == '--no-header' def test_main_remove(cli_runner): from reqwire.cli import main result = cli_runner.invoke(main, ['remove', 'Flask']) assert result.exit_code == 0, result.output
flexible
{ "blob_id": "1a7a2c2cfb2aa94401defd7a7a500f7dd2e7e0aa", "index": 9680, "step-1": "<mask token>\n\n\ndef test_emojize_win32(mocker):\n mocker.patch('sys.platform', 'win32')\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_emojize_linux(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_linux_ioerror(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', side_effect=IOError)\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_wsl(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_console_writer_quiet(mocker):\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=False)\n for method in log_methods:\n getattr(console, method)('test')\n click_echo.assert_not_called()\n\n\ndef test_console_writer_verbose(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=True)\n for method in log_methods:\n getattr(console, method)('test')\n fmt = console.format_strings.get(method, '{msg}')\n message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))\n click_echo.assert_called_once_with(message)\n click_echo.reset_mock()\n\n\n<mask token>\n\n\ndef test_main_remove(cli_runner):\n from reqwire.cli import main\n result = cli_runner.invoke(main, ['remove', 
'Flask'])\n assert result.exit_code == 0, result.output\n", "step-2": "<mask token>\n\n\ndef test_emojize_win32(mocker):\n mocker.patch('sys.platform', 'win32')\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_emojize_linux(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_linux_ioerror(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', side_effect=IOError)\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_wsl(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_console_writer_quiet(mocker):\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=False)\n for method in log_methods:\n getattr(console, method)('test')\n click_echo.assert_not_called()\n\n\ndef test_console_writer_verbose(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=True)\n for method in log_methods:\n getattr(console, method)('test')\n fmt = console.format_strings.get(method, '{msg}')\n message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))\n click_echo.assert_called_once_with(message)\n click_echo.reset_mock()\n\n\ndef test_build_with_pip_compile_options(cli_runner, mocker):\n from reqwire.cli import main\n pip_compile = mocker.patch.object(sh, 
'pip_compile')\n result = cli_runner.invoke(main, ['build', '-t', 'main', '--',\n '--no-header'])\n assert result.exit_code == 0, result.output\n assert pip_compile.call_args[0][2] == '--no-header'\n\n\ndef test_main_remove(cli_runner):\n from reqwire.cli import main\n result = cli_runner.invoke(main, ['remove', 'Flask'])\n assert result.exit_code == 0, result.output\n", "step-3": "<mask token>\nlog_methods = 'echo', 'error', 'fatal', 'info', 'warn', 'warning'\n\n\ndef test_emojize_win32(mocker):\n mocker.patch('sys.platform', 'win32')\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_emojize_linux(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_linux_ioerror(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', side_effect=IOError)\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_wsl(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_console_writer_quiet(mocker):\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=False)\n for method in log_methods:\n getattr(console, method)('test')\n click_echo.assert_not_called()\n\n\ndef test_console_writer_verbose(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=True)\n for method in 
log_methods:\n getattr(console, method)('test')\n fmt = console.format_strings.get(method, '{msg}')\n message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))\n click_echo.assert_called_once_with(message)\n click_echo.reset_mock()\n\n\ndef test_build_with_pip_compile_options(cli_runner, mocker):\n from reqwire.cli import main\n pip_compile = mocker.patch.object(sh, 'pip_compile')\n result = cli_runner.invoke(main, ['build', '-t', 'main', '--',\n '--no-header'])\n assert result.exit_code == 0, result.output\n assert pip_compile.call_args[0][2] == '--no-header'\n\n\ndef test_main_remove(cli_runner):\n from reqwire.cli import main\n result = cli_runner.invoke(main, ['remove', 'Flask'])\n assert result.exit_code == 0, result.output\n", "step-4": "from __future__ import absolute_import\nimport sh\nimport reqwire.helpers.cli\nlog_methods = 'echo', 'error', 'fatal', 'info', 'warn', 'warning'\n\n\ndef test_emojize_win32(mocker):\n mocker.patch('sys.platform', 'win32')\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_emojize_linux(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_linux_ioerror(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', side_effect=IOError)\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign:').encode('utf-8'\n ) == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_wsl(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))\n assert reqwire.helpers.cli.emojize(':thumbs_up_sign: foo').encode('utf-8'\n ) == b'foo'\n\n\ndef test_console_writer_quiet(mocker):\n click_echo = mocker.patch('click.echo')\n console = 
reqwire.helpers.cli.ConsoleWriter(verbose=False)\n for method in log_methods:\n getattr(console, method)('test')\n click_echo.assert_not_called()\n\n\ndef test_console_writer_verbose(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(read_data=\n 'Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=True)\n for method in log_methods:\n getattr(console, method)('test')\n fmt = console.format_strings.get(method, '{msg}')\n message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))\n click_echo.assert_called_once_with(message)\n click_echo.reset_mock()\n\n\ndef test_build_with_pip_compile_options(cli_runner, mocker):\n from reqwire.cli import main\n pip_compile = mocker.patch.object(sh, 'pip_compile')\n result = cli_runner.invoke(main, ['build', '-t', 'main', '--',\n '--no-header'])\n assert result.exit_code == 0, result.output\n assert pip_compile.call_args[0][2] == '--no-header'\n\n\ndef test_main_remove(cli_runner):\n from reqwire.cli import main\n result = cli_runner.invoke(main, ['remove', 'Flask'])\n assert result.exit_code == 0, result.output\n", "step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\n\nimport sh\n\nimport reqwire.helpers.cli\n\n\nlog_methods = (\n 'echo',\n 'error',\n 'fatal',\n 'info',\n 'warn',\n 'warning',\n)\n\n\ndef test_emojize_win32(mocker):\n mocker.patch('sys.platform', 'win32')\n assert reqwire.helpers.cli.emojize(\n ':thumbs_up_sign: foo').encode('utf-8') == b'foo'\n\n\ndef test_emojize_linux(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(\n read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n assert reqwire.helpers.cli.emojize(\n ':thumbs_up_sign:').encode('utf-8') == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_linux_ioerror(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', 
side_effect=IOError)\n assert reqwire.helpers.cli.emojize(\n ':thumbs_up_sign:').encode('utf-8') == b'\\xf0\\x9f\\x91\\x8d'\n\n\ndef test_emojize_wsl(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(\n read_data='Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))\n assert reqwire.helpers.cli.emojize(\n ':thumbs_up_sign: foo').encode('utf-8') == b'foo'\n\n\ndef test_console_writer_quiet(mocker):\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=False)\n for method in log_methods:\n getattr(console, method)('test')\n click_echo.assert_not_called()\n\n\ndef test_console_writer_verbose(mocker):\n mocker.patch('sys.platform', 'linux')\n mocker.patch('io.open', mocker.mock_open(\n read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)'))\n click_echo = mocker.patch('click.echo')\n console = reqwire.helpers.cli.ConsoleWriter(verbose=True)\n for method in log_methods:\n getattr(console, method)('test')\n fmt = console.format_strings.get(method, '{msg}')\n message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))\n click_echo.assert_called_once_with(message)\n click_echo.reset_mock()\n\n\ndef test_build_with_pip_compile_options(cli_runner, mocker):\n from reqwire.cli import main\n pip_compile = mocker.patch.object(sh, 'pip_compile')\n result = cli_runner.invoke(main, ['build', '-t', 'main', '--',\n '--no-header'])\n assert result.exit_code == 0, result.output\n assert pip_compile.call_args[0][2] == '--no-header'\n\n\ndef test_main_remove(cli_runner):\n from reqwire.cli import main\n result = cli_runner.invoke(main, ['remove', 'Flask'])\n assert result.exit_code == 0, result.output\n", "step-ids": [ 7, 8, 9, 10, 11 ] }
[ 7, 8, 9, 10, 11 ]
import turtle red = range(4); for i in red: turtle.forward(200) turtle.left(90) turtle.done()
normal
{ "blob_id": "38fceb57977cb792be1a63e8571cd222facdf656", "index": 1142, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in red:\n turtle.forward(200)\n turtle.left(90)\nturtle.done()\n", "step-3": "<mask token>\nred = range(4)\nfor i in red:\n turtle.forward(200)\n turtle.left(90)\nturtle.done()\n", "step-4": "import turtle\nred = range(4)\nfor i in red:\n turtle.forward(200)\n turtle.left(90)\nturtle.done()\n", "step-5": "import turtle\n\nred = range(4);\nfor i in red:\n\tturtle.forward(200)\n\tturtle.left(90)\n\nturtle.done()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class RestAdminAppConfig(AppConfig): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class RestAdminAppConfig(AppConfig): name = 'libraries.django_rest_admin' verbose_name = 'Rest Admin' loaded = False def ready(self): autodiscover() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class RestAdminAppConfig(AppConfig): name = 'libraries.django_rest_admin' verbose_name = 'Rest Admin' loaded = False def ready(self): autodiscover() def autodiscover(): """ Automatic discovering of rest_admin.py file inside apps. similar to what Django admin does. """ from .register import rest_admin if not RestAdminAppConfig.loaded: for app in settings.INSTALLED_APPS: try: app_path = importlib.import_module(app).__path__ except AttributeError: continue if not importlib.find_loader('rest_admin', app_path): continue importlib.import_module('%s.rest_admin' % app) RestAdminAppConfig.loaded = True <|reserved_special_token_1|> from django.apps import AppConfig from django.conf import settings import importlib import importlib.util class RestAdminAppConfig(AppConfig): name = 'libraries.django_rest_admin' verbose_name = 'Rest Admin' loaded = False def ready(self): autodiscover() def autodiscover(): """ Automatic discovering of rest_admin.py file inside apps. similar to what Django admin does. 
""" from .register import rest_admin if not RestAdminAppConfig.loaded: for app in settings.INSTALLED_APPS: try: app_path = importlib.import_module(app).__path__ except AttributeError: continue if not importlib.find_loader('rest_admin', app_path): continue importlib.import_module('%s.rest_admin' % app) RestAdminAppConfig.loaded = True <|reserved_special_token_1|> from django.apps import AppConfig from django.conf import settings import importlib import importlib.util class RestAdminAppConfig(AppConfig): name = 'libraries.django_rest_admin' verbose_name = 'Rest Admin' loaded = False def ready(self): autodiscover() def autodiscover(): """ Automatic discovering of rest_admin.py file inside apps. similar to what Django admin does. """ from .register import rest_admin if not RestAdminAppConfig.loaded: for app in settings.INSTALLED_APPS: # For each app, we need to look for an rest_admin.py inside that app's # package. We can't use os.path here -- recall that modules may be # imported different ways (think zip files) -- so we need to get # the app's __path__ and look for rest_admin.py on that path. # Step 1: find out the app's __path__ Import errors here will (and # should) bubble up, but a missing __path__ (which is legal, but weird) # fails silently -- apps that do weird things with __path__ might # need to roll their own rest_admin registration. try: app_path = importlib.import_module(app).__path__ except AttributeError: continue # Step 2: use imp.find_module to find the app's rest_admin.py. For some # reason imp.find_module raises ImportError if the app can't be found # but doesn't actually try to import the module. So skip this app if # its rest_admin.py doesn't exist # try: # importlib.util.find_spec('rest_admin', app_path) # # imp.find_module('rest_admin', app_path) # except ImportError: # continue # if not importlib.find_loader('rest_admin', app_path): continue # Step 3: import the app's admin file. If this has errors we want them # to bubble up. 
importlib.import_module("%s.rest_admin" % app) # autodiscover was successful, reset loading flag. RestAdminAppConfig.loaded = True
flexible
{ "blob_id": "a41d00c86d0bdab1bced77c275e56c3569af4f4e", "index": 921, "step-1": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. \n \"\"\"\n from .register import rest_admin\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n if not importlib.find_loader('rest_admin', app_path):\n continue\n importlib.import_module('%s.rest_admin' % app)\n RestAdminAppConfig.loaded = True\n", "step-4": "from django.apps import AppConfig\nfrom django.conf import settings\nimport importlib\nimport importlib.util\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. 
\n \"\"\"\n from .register import rest_admin\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n if not importlib.find_loader('rest_admin', app_path):\n continue\n importlib.import_module('%s.rest_admin' % app)\n RestAdminAppConfig.loaded = True\n", "step-5": "from django.apps import AppConfig\nfrom django.conf import settings\nimport importlib\nimport importlib.util\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. \n \"\"\"\n from .register import rest_admin\n\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n # For each app, we need to look for an rest_admin.py inside that app's\n # package. We can't use os.path here -- recall that modules may be\n # imported different ways (think zip files) -- so we need to get\n # the app's __path__ and look for rest_admin.py on that path.\n\n # Step 1: find out the app's __path__ Import errors here will (and\n # should) bubble up, but a missing __path__ (which is legal, but weird)\n # fails silently -- apps that do weird things with __path__ might\n # need to roll their own rest_admin registration.\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n\n # Step 2: use imp.find_module to find the app's rest_admin.py. For some\n # reason imp.find_module raises ImportError if the app can't be found\n # but doesn't actually try to import the module. 
So skip this app if\n # its rest_admin.py doesn't exist\n # try:\n # importlib.util.find_spec('rest_admin', app_path)\n # # imp.find_module('rest_admin', app_path)\n # except ImportError:\n # continue\n #\n if not importlib.find_loader('rest_admin', app_path):\n continue\n\n # Step 3: import the app's admin file. If this has errors we want them\n # to bubble up.\n importlib.import_module(\"%s.rest_admin\" % app)\n\n # autodiscover was successful, reset loading flag.\n RestAdminAppConfig.loaded = True\n", "step-ids": [ 1, 3, 4, 5, 6 ] }
[ 1, 3, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def countdown(n): def next(): nonlocal n r = n n -= 1 return r return next <|reserved_special_token_0|> <|reserved_special_token_1|> def countdown(n): def next(): nonlocal n r = n n -= 1 return r return next <|reserved_special_token_0|> while True: v = a() if not v: break <|reserved_special_token_1|> def countdown(n): def next(): nonlocal n r = n n -= 1 return r return next a = countdown(12) while True: v = a() if not v: break
flexible
{ "blob_id": "01eef391f6d37d1e74cb032c5b27e1d8fc4395da", "index": 6122, "step-1": "<mask token>\n", "step-2": "def countdown(n):\n\n def next():\n nonlocal n\n r = n\n n -= 1\n return r\n return next\n\n\n<mask token>\n", "step-3": "def countdown(n):\n\n def next():\n nonlocal n\n r = n\n n -= 1\n return r\n return next\n\n\n<mask token>\nwhile True:\n v = a()\n if not v:\n break\n", "step-4": "def countdown(n):\n\n def next():\n nonlocal n\n r = n\n n -= 1\n return r\n return next\n\n\na = countdown(12)\nwhile True:\n v = a()\n if not v:\n break\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def enable_download(driver, directory): """ :param driver: Selenium web driver :param directory: Directory to store the file This function allows the Selenium web driver to store the file in the given directory. """ driver.command_executor._commands['send_command' ] = 'POST', '/session/$sessionId/chromium/send_command' params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior': 'allow', 'downloadPath': directory}} driver.execute('send_command', params) <|reserved_special_token_1|> <|reserved_special_token_0|> YEAR = dt.today().year BINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'} CHROME_DRIVER_PATH = ( 'C:\\Users\\pavithra\\Downloads\\chromedriver_win32\\chromedriver.exe') EXTRACTED_DIR = ( 'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\csv_files' ) ZIP_DIR = ( 'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\zip_files' ) HEADLESS_OPTIONS = {'headless': '--headless', 'window_size': '--window-size=1920x1080'} DOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR, 'download.prompt_for_download': False} def enable_download(driver, directory): """ :param driver: Selenium web driver :param directory: Directory to store the file This function allows the Selenium web driver to store the file in the given directory. 
""" driver.command_executor._commands['send_command' ] = 'POST', '/session/$sessionId/chromium/send_command' params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior': 'allow', 'downloadPath': directory}} driver.execute('send_command', params) <|reserved_special_token_1|> from datetime import datetime as dt YEAR = dt.today().year BINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'} CHROME_DRIVER_PATH = ( 'C:\\Users\\pavithra\\Downloads\\chromedriver_win32\\chromedriver.exe') EXTRACTED_DIR = ( 'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\csv_files' ) ZIP_DIR = ( 'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\zip_files' ) HEADLESS_OPTIONS = {'headless': '--headless', 'window_size': '--window-size=1920x1080'} DOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR, 'download.prompt_for_download': False} def enable_download(driver, directory): """ :param driver: Selenium web driver :param directory: Directory to store the file This function allows the Selenium web driver to store the file in the given directory. 
""" driver.command_executor._commands['send_command' ] = 'POST', '/session/$sessionId/chromium/send_command' params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior': 'allow', 'downloadPath': directory}} driver.execute('send_command', params) <|reserved_special_token_1|> from datetime import datetime as dt YEAR = dt.today().year BINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'} CHROME_DRIVER_PATH = r'C:\Users\pavithra\Downloads\chromedriver_win32\chromedriver.exe' EXTRACTED_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy' \ r'\csv_files' ZIP_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy\zip_files' HEADLESS_OPTIONS = {'headless': '--headless', 'window_size': '--window-size=1920x1080'} DOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR, 'download.prompt_for_download': False} def enable_download(driver, directory): """ :param driver: Selenium web driver :param directory: Directory to store the file This function allows the Selenium web driver to store the file in the given directory. """ driver.command_executor._commands["send_command"] = ("POST", '/session/$sessionId/chromium/send_command') params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior': 'allow', 'downloadPath': directory}} driver.execute("send_command", params)
flexible
{ "blob_id": "95422348c8db9753830cc0a7c8785c05b44886b1", "index": 842, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n", "step-3": "<mask token>\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n", "step-4": "from datetime import datetime as dt\nYEAR = dt.today().year\nBINARY_LOCATION = 
{'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n", "step-5": "from datetime import datetime as dt\n\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = r'C:\\Users\\pavithra\\Downloads\\chromedriver_win32\\chromedriver.exe'\nEXTRACTED_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy' \\\n r'\\csv_files'\nZIP_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\zip_files'\nHEADLESS_OPTIONS = {'headless': '--headless',\n 'window_size': '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n 
\"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands[\"send_command\"] = (\"POST\", '/session/$sessionId/chromium/send_command')\n params = {'cmd': 'Page.setDownloadBehavior',\n 'params': {'behavior': 'allow',\n 'downloadPath': directory}}\n driver.execute(\"send_command\", params)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# B. A New Technique # TLE (Time limit exceeded) from sys import stdin, stdout t = int(input()) for _ in range(t): n, m = map(int, input().split()) rows = [0] * n a_column = list() for r in range(n): tmp = list(input().split()) rows[r] = tmp a_column.append(tmp[0]) sorted_a_column = sorted(a_column) found = False for c in range(m): if not found: tmp_c = list(input().split()) if sorted(tmp_c) == sorted_a_column: found = True output = str() for num in tmp_c: index = a_column.index(num) output += ' '.join(rows[index]) output += '\n' print(output, end='') else: stdin.__next__()
normal
{ "blob_id": "9004314951f77b14bab1aba9ae93eb49c8197a8d", "index": 4409, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n", "step-3": "<mask token>\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n", "step-4": "from sys import stdin, stdout\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n", "step-5": "# B. 
A New Technique\n# TLE (Time limit exceeded)\n\nfrom sys import stdin, stdout\n\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n\n rows = [0] * n\n\n a_column = list()\n\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def merge(self, intervals): intervals.sort() arr = [] for i in intervals: if len(arr) == 0 or arr[-1][1] < i[0]: arr.append(i) else: arr[-1][1] = max(arr[-1][1], i[1]) return arr
flexible
{ "blob_id": "a65dfca1773c1e4101ebfb953e0f617a2c345695", "index": 334, "step-1": "<mask token>\n", "step-2": "def merge(self, intervals):\n intervals.sort()\n arr = []\n for i in intervals:\n if len(arr) == 0 or arr[-1][1] < i[0]:\n arr.append(i)\n else:\n arr[-1][1] = max(arr[-1][1], i[1])\n return arr\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> class AuthService: <|reserved_special_token_0|> <|reserved_special_token_0|> def __get_connection(self) ->HTTPConnection: """ Creates a new connection to the authentication server. --- Returns: The connection object. """ return HTTPConnection(self.__host, self.__port) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class AuthService: <|reserved_special_token_0|> def __init__(self, host: str, port: int): """ Constructor method. Initializes the client. --- Parameters: - host: The authentication service host string. - port: The authentication service port number. """ self.__host: str = host self.__port: int = port def __get_connection(self) ->HTTPConnection: """ Creates a new connection to the authentication server. --- Returns: The connection object. """ return HTTPConnection(self.__host, self.__port) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class AuthService: """ REST client to connect to the authentication service. """ def __init__(self, host: str, port: int): """ Constructor method. Initializes the client. --- Parameters: - host: The authentication service host string. - port: The authentication service port number. """ self.__host: str = host self.__port: int = port def __get_connection(self) ->HTTPConnection: """ Creates a new connection to the authentication server. --- Returns: The connection object. """ return HTTPConnection(self.__host, self.__port) def has_right(self, username: str, right: str) ->bool: """ Determines whether a given user from the authentication server has a certain right or not. --- Parameters: - username: The user name string. - right: The right name. Returns: True if the user has the given right Throws: - NotFoundError: if the user does not have the right, the user does not exist, or the right does not exist. - HTTPException: On an unhandled 500 error. 
""" form: str = urlencode({'username': username, 'right': right}) headers: dict = {'Content-type': 'application/x-www-form-urlencoded'} connection: HTTPConnection = self.__get_connection() connection.request('GET', '/users/' + str(username) + '/rights/' + str(right), form, headers) response: HTTPResponse = connection.getresponse() if response.status == 200: return True if response.status == 404: raise NotFoundError() if response.status == 500: raise HTTPException('Server error') return False <|reserved_special_token_1|> <|reserved_special_token_0|> from urllib.parse import urlencode from http.client import HTTPConnection, HTTPResponse, HTTPException from dms2021sensor.data.rest.exc import NotFoundError class AuthService: """ REST client to connect to the authentication service. """ def __init__(self, host: str, port: int): """ Constructor method. Initializes the client. --- Parameters: - host: The authentication service host string. - port: The authentication service port number. """ self.__host: str = host self.__port: int = port def __get_connection(self) ->HTTPConnection: """ Creates a new connection to the authentication server. --- Returns: The connection object. """ return HTTPConnection(self.__host, self.__port) def has_right(self, username: str, right: str) ->bool: """ Determines whether a given user from the authentication server has a certain right or not. --- Parameters: - username: The user name string. - right: The right name. Returns: True if the user has the given right Throws: - NotFoundError: if the user does not have the right, the user does not exist, or the right does not exist. - HTTPException: On an unhandled 500 error. 
""" form: str = urlencode({'username': username, 'right': right}) headers: dict = {'Content-type': 'application/x-www-form-urlencoded'} connection: HTTPConnection = self.__get_connection() connection.request('GET', '/users/' + str(username) + '/rights/' + str(right), form, headers) response: HTTPResponse = connection.getresponse() if response.status == 200: return True if response.status == 404: raise NotFoundError() if response.status == 500: raise HTTPException('Server error') return False <|reserved_special_token_1|> """ AuthService class module. """ from urllib.parse import urlencode from http.client import HTTPConnection, HTTPResponse, HTTPException from dms2021sensor.data.rest.exc import NotFoundError class AuthService(): """ REST client to connect to the authentication service. """ def __init__(self, host: str, port: int): """ Constructor method. Initializes the client. --- Parameters: - host: The authentication service host string. - port: The authentication service port number. """ self.__host: str = host self.__port: int = port def __get_connection(self) -> HTTPConnection: """ Creates a new connection to the authentication server. --- Returns: The connection object. """ return HTTPConnection(self.__host, self.__port) def has_right(self, username: str, right: str) -> bool: """ Determines whether a given user from the authentication server has a certain right or not. --- Parameters: - username: The user name string. - right: The right name. Returns: True if the user has the given right Throws: - NotFoundError: if the user does not have the right, the user does not exist, or the right does not exist. - HTTPException: On an unhandled 500 error. 
""" form: str = urlencode({'username': username, 'right': right}) headers: dict = { 'Content-type': 'application/x-www-form-urlencoded' } connection: HTTPConnection = self.__get_connection() connection.request('GET', '/users/'+str(username)+'/rights/'+str(right), form, headers) response: HTTPResponse = connection.getresponse() if response.status == 200: return True if response.status == 404: raise NotFoundError() if response.status == 500: raise HTTPException('Server error') return False
flexible
{ "blob_id": "1438a268780217e647999ba031aa4a50a6912d2f", "index": 3069, "step-1": "<mask token>\n\n\nclass AuthService:\n <mask token>\n <mask token>\n\n def __get_connection(self) ->HTTPConnection:\n \"\"\" Creates a new connection to the authentication server.\n ---\n Returns:\n The connection object.\n \"\"\"\n return HTTPConnection(self.__host, self.__port)\n <mask token>\n", "step-2": "<mask token>\n\n\nclass AuthService:\n <mask token>\n\n def __init__(self, host: str, port: int):\n \"\"\" Constructor method.\n\n Initializes the client.\n ---\n Parameters:\n - host: The authentication service host string.\n - port: The authentication service port number.\n \"\"\"\n self.__host: str = host\n self.__port: int = port\n\n def __get_connection(self) ->HTTPConnection:\n \"\"\" Creates a new connection to the authentication server.\n ---\n Returns:\n The connection object.\n \"\"\"\n return HTTPConnection(self.__host, self.__port)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass AuthService:\n \"\"\" REST client to connect to the authentication service.\n \"\"\"\n\n def __init__(self, host: str, port: int):\n \"\"\" Constructor method.\n\n Initializes the client.\n ---\n Parameters:\n - host: The authentication service host string.\n - port: The authentication service port number.\n \"\"\"\n self.__host: str = host\n self.__port: int = port\n\n def __get_connection(self) ->HTTPConnection:\n \"\"\" Creates a new connection to the authentication server.\n ---\n Returns:\n The connection object.\n \"\"\"\n return HTTPConnection(self.__host, self.__port)\n\n def has_right(self, username: str, right: str) ->bool:\n \"\"\" Determines whether a given user from the authentication server\n has a certain right or not.\n ---\n Parameters:\n - username: The user name string.\n - right: The right name.\n Returns:\n True if the user has the given right\n Throws:\n - NotFoundError: if the user does not have the right, the user does not\n exist, or the right does not exist.\n 
- HTTPException: On an unhandled 500 error.\n \"\"\"\n form: str = urlencode({'username': username, 'right': right})\n headers: dict = {'Content-type': 'application/x-www-form-urlencoded'}\n connection: HTTPConnection = self.__get_connection()\n connection.request('GET', '/users/' + str(username) + '/rights/' +\n str(right), form, headers)\n response: HTTPResponse = connection.getresponse()\n if response.status == 200:\n return True\n if response.status == 404:\n raise NotFoundError()\n if response.status == 500:\n raise HTTPException('Server error')\n return False\n", "step-4": "<mask token>\nfrom urllib.parse import urlencode\nfrom http.client import HTTPConnection, HTTPResponse, HTTPException\nfrom dms2021sensor.data.rest.exc import NotFoundError\n\n\nclass AuthService:\n \"\"\" REST client to connect to the authentication service.\n \"\"\"\n\n def __init__(self, host: str, port: int):\n \"\"\" Constructor method.\n\n Initializes the client.\n ---\n Parameters:\n - host: The authentication service host string.\n - port: The authentication service port number.\n \"\"\"\n self.__host: str = host\n self.__port: int = port\n\n def __get_connection(self) ->HTTPConnection:\n \"\"\" Creates a new connection to the authentication server.\n ---\n Returns:\n The connection object.\n \"\"\"\n return HTTPConnection(self.__host, self.__port)\n\n def has_right(self, username: str, right: str) ->bool:\n \"\"\" Determines whether a given user from the authentication server\n has a certain right or not.\n ---\n Parameters:\n - username: The user name string.\n - right: The right name.\n Returns:\n True if the user has the given right\n Throws:\n - NotFoundError: if the user does not have the right, the user does not\n exist, or the right does not exist.\n - HTTPException: On an unhandled 500 error.\n \"\"\"\n form: str = urlencode({'username': username, 'right': right})\n headers: dict = {'Content-type': 'application/x-www-form-urlencoded'}\n connection: HTTPConnection = 
self.__get_connection()\n connection.request('GET', '/users/' + str(username) + '/rights/' +\n str(right), form, headers)\n response: HTTPResponse = connection.getresponse()\n if response.status == 200:\n return True\n if response.status == 404:\n raise NotFoundError()\n if response.status == 500:\n raise HTTPException('Server error')\n return False\n", "step-5": "\"\"\" AuthService class module.\n\"\"\"\n\nfrom urllib.parse import urlencode\nfrom http.client import HTTPConnection, HTTPResponse, HTTPException\nfrom dms2021sensor.data.rest.exc import NotFoundError\n\n\nclass AuthService():\n \"\"\" REST client to connect to the authentication service.\n \"\"\"\n\n def __init__(self, host: str, port: int):\n \"\"\" Constructor method.\n\n Initializes the client.\n ---\n Parameters:\n - host: The authentication service host string.\n - port: The authentication service port number.\n \"\"\"\n self.__host: str = host\n self.__port: int = port\n\n def __get_connection(self) -> HTTPConnection:\n \"\"\" Creates a new connection to the authentication server.\n ---\n Returns:\n The connection object.\n \"\"\"\n return HTTPConnection(self.__host, self.__port)\n\n def has_right(self, username: str, right: str) -> bool:\n \"\"\" Determines whether a given user from the authentication server\n has a certain right or not.\n ---\n Parameters:\n - username: The user name string.\n - right: The right name.\n Returns:\n True if the user has the given right\n Throws:\n - NotFoundError: if the user does not have the right, the user does not\n exist, or the right does not exist.\n - HTTPException: On an unhandled 500 error.\n \"\"\"\n form: str = urlencode({'username': username, 'right': right})\n headers: dict = {\n 'Content-type': 'application/x-www-form-urlencoded'\n }\n connection: HTTPConnection = self.__get_connection()\n connection.request('GET', '/users/'+str(username)+'/rights/'+str(right), form, headers)\n response: HTTPResponse = connection.getresponse()\n if response.status 
== 200:\n return True\n if response.status == 404:\n raise NotFoundError()\n if response.status == 500:\n raise HTTPException('Server error')\n return False\n", "step-ids": [ 2, 3, 5, 6, 7 ] }
[ 2, 3, 5, 6, 7 ]
import re _camel_words = re.compile(r"([A-Z][a-z0-9_]+)") def _camel_to_snake(s): """ Convert CamelCase to snake_case. """ return "_".join( [ i.lower() for i in _camel_words.split(s)[1::2] ] )
normal
{ "blob_id": "6c9f9363a95ea7dc97ccb45d0922f0531c5cfec9", "index": 6572, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef _camel_to_snake(s):\n \"\"\" Convert CamelCase to snake_case.\n \"\"\"\n return '_'.join([i.lower() for i in _camel_words.split(s)[1::2]])\n", "step-3": "<mask token>\n_camel_words = re.compile('([A-Z][a-z0-9_]+)')\n\n\ndef _camel_to_snake(s):\n \"\"\" Convert CamelCase to snake_case.\n \"\"\"\n return '_'.join([i.lower() for i in _camel_words.split(s)[1::2]])\n", "step-4": "import re\n_camel_words = re.compile('([A-Z][a-z0-9_]+)')\n\n\ndef _camel_to_snake(s):\n \"\"\" Convert CamelCase to snake_case.\n \"\"\"\n return '_'.join([i.lower() for i in _camel_words.split(s)[1::2]])\n", "step-5": "import re\n\n\n_camel_words = re.compile(r\"([A-Z][a-z0-9_]+)\")\n\n\ndef _camel_to_snake(s):\n \"\"\" Convert CamelCase to snake_case.\n \"\"\"\n return \"_\".join(\n [\n i.lower() for i in _camel_words.split(s)[1::2]\n ]\n )\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def test_unfinished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, queued=True) db_session.add(build) job = factories.JobFactory(build=build, in_progress=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.in_progress assert build.result == Result.unknown <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def test_unfinished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, queued=True) db_session.add(build) job = factories.JobFactory(build=build, in_progress=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.in_progress assert build.result == Result.unknown <|reserved_special_token_0|> def test_failing_tests(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, in_progress=True) db_session.add(build) job = factories.JobFactory(build=build, passed=True) db_session.add(job) factories.TestCaseFactory(job=job, failed=True) aggregate_build_stats_for_job(job.id) assert job.result == Result.failed reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id)) assert len(reasons) == 1 assert reasons[0].reason == FailureReason.Code.failing_tests <|reserved_special_token_1|> <|reserved_special_token_0|> def test_unfinished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. 
repository_id])) build = factories.BuildFactory(source=default_source, queued=True) db_session.add(build) job = factories.JobFactory(build=build, in_progress=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.in_progress assert build.result == Result.unknown def test_finished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, in_progress=True) db_session.add(build) job = factories.JobFactory(build=build, failed=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.finished assert build.result == Result.failed def test_failing_tests(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, in_progress=True) db_session.add(build) job = factories.JobFactory(build=build, passed=True) db_session.add(job) factories.TestCaseFactory(job=job, failed=True) aggregate_build_stats_for_job(job.id) assert job.result == Result.failed reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id)) assert len(reasons) == 1 assert reasons[0].reason == FailureReason.Code.failing_tests <|reserved_special_token_1|> from zeus import auth, factories from zeus.constants import Result, Status from zeus.models import FailureReason from zeus.tasks import aggregate_build_stats_for_job def test_unfinished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. 
repository_id])) build = factories.BuildFactory(source=default_source, queued=True) db_session.add(build) job = factories.JobFactory(build=build, in_progress=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.in_progress assert build.result == Result.unknown def test_finished_job(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, in_progress=True) db_session.add(build) job = factories.JobFactory(build=build, failed=True) db_session.add(job) aggregate_build_stats_for_job(job.id) assert build.status == Status.finished assert build.result == Result.failed def test_failing_tests(mocker, db_session, default_source): auth.set_current_tenant(auth.Tenant(repository_ids=[default_source. repository_id])) build = factories.BuildFactory(source=default_source, in_progress=True) db_session.add(build) job = factories.JobFactory(build=build, passed=True) db_session.add(job) factories.TestCaseFactory(job=job, failed=True) aggregate_build_stats_for_job(job.id) assert job.result == Result.failed reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id)) assert len(reasons) == 1 assert reasons[0].reason == FailureReason.Code.failing_tests
flexible
{ "blob_id": "71b78b1347456420c3fc29605887d20ba5bff06e", "index": 4313, "step-1": "<mask token>\n\n\ndef test_unfinished_job(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, queued=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, in_progress=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.in_progress\n assert build.result == Result.unknown\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef test_unfinished_job(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, queued=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, in_progress=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.in_progress\n assert build.result == Result.unknown\n\n\n<mask token>\n\n\ndef test_failing_tests(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, in_progress=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, passed=True)\n db_session.add(job)\n factories.TestCaseFactory(job=job, failed=True)\n aggregate_build_stats_for_job(job.id)\n assert job.result == Result.failed\n reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id))\n assert len(reasons) == 1\n assert reasons[0].reason == FailureReason.Code.failing_tests\n", "step-3": "<mask token>\n\n\ndef test_unfinished_job(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, queued=True)\n db_session.add(build)\n job = 
factories.JobFactory(build=build, in_progress=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.in_progress\n assert build.result == Result.unknown\n\n\ndef test_finished_job(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, in_progress=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, failed=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.finished\n assert build.result == Result.failed\n\n\ndef test_failing_tests(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, in_progress=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, passed=True)\n db_session.add(job)\n factories.TestCaseFactory(job=job, failed=True)\n aggregate_build_stats_for_job(job.id)\n assert job.result == Result.failed\n reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id))\n assert len(reasons) == 1\n assert reasons[0].reason == FailureReason.Code.failing_tests\n", "step-4": "from zeus import auth, factories\nfrom zeus.constants import Result, Status\nfrom zeus.models import FailureReason\nfrom zeus.tasks import aggregate_build_stats_for_job\n\n\ndef test_unfinished_job(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, queued=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, in_progress=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.in_progress\n assert build.result == Result.unknown\n\n\ndef test_finished_job(mocker, db_session, default_source):\n 
auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, in_progress=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, failed=True)\n db_session.add(job)\n aggregate_build_stats_for_job(job.id)\n assert build.status == Status.finished\n assert build.result == Result.failed\n\n\ndef test_failing_tests(mocker, db_session, default_source):\n auth.set_current_tenant(auth.Tenant(repository_ids=[default_source.\n repository_id]))\n build = factories.BuildFactory(source=default_source, in_progress=True)\n db_session.add(build)\n job = factories.JobFactory(build=build, passed=True)\n db_session.add(job)\n factories.TestCaseFactory(job=job, failed=True)\n aggregate_build_stats_for_job(job.id)\n assert job.result == Result.failed\n reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id))\n assert len(reasons) == 1\n assert reasons[0].reason == FailureReason.Code.failing_tests\n", "step-5": null, "step-ids": [ 1, 2, 3, 4 ] }
[ 1, 2, 3, 4 ]
import torch import torch.nn as nn import torch.nn.functional as F import numpy as np from collections import OrderedDict from functools import reduce class ArcTan(nn.Module): def __init__(self): super(ArcTan,self).__init__() def forward(self, x): return torch.arctan(x) / 1.5708 class Params(): def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0): self.dim_act = dim_act self.dim_in = 0 self.dim_h = 0 self.dropout = 0.0 self.model_name = "DockRLParams" self.init_params() self.act = ArcTan() def init_params(self): self.params = np.random.randn(self.dim_act) self.num_params = self.dim_act def forward(self, obs): return self.get_params() def get_params(self): return self.params def set_params(self, params): assert params.shape == self.params.shape self.params = params def reset(self): pass class GraphNN(nn.Module): def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.00): super(GraphNN, self).__init__() self.ligand_dim = dim_in self.dim_h = dim_h self.dim_act = dim_act self.model_name = "DockRLGraphNN" # This is a guesstimate based on: # https://pymolwiki.org/index.php/Displaying_Biochemical_Properties self.bond_cutoff = 3.6 self.number_updates = 16 self.dropout = dropout self.initialize_gnn() self.reset() my_params = self.get_params() self.num_params = my_params.shape[0] def initialize_gnn(self): # vertices MLP, with 8 element key and query vectors for self-attention self.edge_model = nn.Sequential(\ nn.Linear(self.ligand_dim, self.dim_h),\ nn.LeakyReLU(),\ nn.Linear(self.dim_h, self.dim_h),\ nn.LeakyReLU(),\ nn.Dropout(p=self.dropout),\ nn.Linear(self.dim_h, self.ligand_dim + 2 * self.dim_h) ) self.encoder = nn.Sequential(\ nn.Linear(2*self.ligand_dim, self.ligand_dim),\ ArcTan() ) self.action_layer = nn.Sequential(\ nn.Linear(self.ligand_dim, self.dim_h),\ nn.LeakyReLU(),\ nn.Linear(self.dim_h, self.dim_act)\ ) def get_distance(self, node_0, node_1): return torch.sum(torch.sqrt(torch.abs(node_0 - node_1)**2)) def build_graph(self, x): self.graph = 
torch.zeros(x.shape[0],x.shape[0]) for ii in range(x.shape[0]): node_ii = x[ii, 0:3] for jj in range(x.shape[0]): node_jj = x[jj, 0:3] distance = self.get_distance(node_ii, node_jj) if distance <= self.bond_cutoff: self.graph[ii, jj] = 1.0 self.graph = self.graph * (1 - torch.eye(self.graph.shape[0])) def forward(self, x, return_codes=False, template=None): if type(x) != torch.Tensor: x = torch.Tensor(x) if template is not None: self.build_graph(template.detach()) else: self.build_graph(x.detach()) new_graph = torch.Tensor() #torch.zeros_like(x) codes = torch.Tensor() #torch.zeros(x.shape[0], self.dim_h) temp_input = [torch.Tensor()] #orch.Tensor() #torch.zeros(x.shape[0], self.dim_h+8+8) for kk in range(x.shape[0]): # loop through nodes for each node for ll in range(x.shape[0]): if self.graph[kk,ll]: temp_input[-1] = torch.cat([temp_input[-1],\ self.edge_model(x[ll]).unsqueeze(0)]) keys = temp_input[-1][:,-self.dim_h*2:-self.dim_h] queries = temp_input[-1][:,-self.dim_h:] attention = torch.zeros(1, keys.shape[0]) for mm in range(keys.shape[0]): attention[:, mm] = torch.matmul(queries[mm], keys[mm].T) attention = torch.softmax(attention, dim=1) my_input = torch.sum(attention.T \ * temp_input[-1][:,:self.ligand_dim],dim=0) my_input = torch.cat([x[kk], my_input]) #this is where the cell gating would happen (TODO) codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)]) new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)]) #self.decoder(codes[-1]).unsqueeze(0)]) if return_codes: return codes, new_graph else: return new_graph def get_actions(self, x): if type(x) != torch.Tensor: x = torch.Tensor(x) my_template = x for ii in range(self.number_updates): x = self.forward(x, template=my_template) x = torch.mean(x, dim=0) x = self.action_layer(x) return x def get_params(self): params = np.array([]) for param in self.edge_model.named_parameters(): params = np.append(params, param[1].detach().numpy().ravel()) for param in self.encoder.named_parameters(): params = 
np.append(params, param[1].detach().numpy().ravel()) # for param in self.decoder.named_parameters(): # params = np.append(params, param[1].detach().numpy().ravel()) for param in self.action_layer.named_parameters(): params = np.append(params, param[1].detach().numpy().ravel()) return params def set_params(self, my_params): if my_params is None: my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var)) param_start = 0 for name, param in self.edge_model.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) for name, param in self.encoder.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) # for name, param in self.decoder.named_parameters(): # # param_stop = param_start + reduce(lambda x,y: x*y, param.shape) # # param[:] = torch.nn.Parameter(torch.Tensor(\ # my_params[param_start:param_stop].reshape(param.shape))) for name, param in self.action_layer.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) def reset(self): # initialize using gated cell states here later (maybe) pass class MLP(nn.Module): def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0): super(MLP, self).__init__() self.dim_in = dim_in self.dim_act = dim_act self.dim_h = 32 self.dropout = dropout self.model_name = "DockRLMLP" self.init_params() def init_params(self): self.model = nn.Sequential(\ nn.Linear(self.dim_in, self.dim_h),\ nn.ReLU(),\ nn.Linear(self.dim_h, self.dim_h),\ nn.ReLU(),\ nn.Dropout(p=self.dropout),\ nn.Linear(self.dim_h, self.dim_act)\ ) self.num_params = self.get_params().shape[0] def forward(self, x): x = torch.Tensor(x) if len(x.shape) == 1: 
x = x.unsqueeze(0) x = self.model(x) return x def get_actions(self, x): act = self.forward(x) act = torch.mean(act, dim=0, keepdim=True) return act def get_params(self): params = np.array([]) for param in self.model.named_parameters(): params = np.append(params, param[1].detach().numpy().ravel()) return params def set_params(self, my_params): if my_params is None: my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var)) param_start = 0 for name, param in self.model.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) def reset(self): pass class MRNN(nn.Module): def __init__(self, dim_in=6, dim_act=5): super(MRNN, self).__init__() self.dim_in = dim_in self.dim_act = dim_act self.dim_h = 8 self.init_params() def init_params(self): self.g = nn.Sequential(OrderedDict([\ ("g", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\ ("act_g", nn.Sigmoid())])) self.j = nn.Sequential(OrderedDict([\ ("j", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\ ("act_j", nn.Tanh())])) self.w_h2y = nn.Sequential(OrderedDict([\ ("w_h2y", nn.Linear(self.dim_h, self.dim_act))])) self.cell_state = torch.zeros((1,self.dim_h)) self.num_params = self.get_params().shape[0] def forward(self, x): x = torch.Tensor(x) if len(x.shape) == 1: x = x.unsqueeze(0) x = torch.cat((self.cell_state, x), axis=-1) g_out = self.g(x) j_out = (1.0 - g_out) * self.j(x) self.cell_state = g_out * self.cell_state + j_out y = self.w_h2y(self.cell_state) return y def get_action(self, x): act = self.forward(x) return act.detach().cpu().numpy() def get_params(self): params = np.array([]) for param in self.g.named_parameters(): params = np.append(params, param[1].detach().numpy().ravel()) for param in self.j.named_parameters(): params = np.append(params, param[1].detach().numpy().ravel()) for param in self.w_h2y.named_parameters(): params = 
np.append(params, param[1].detach().numpy().ravel()) return params def set_params(self, my_params): if my_params is None: my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var)) param_start = 0 for name, param in self.g.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) for name, param in self.j.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) for name, param in self.w_h2y.named_parameters(): param_stop = param_start + reduce(lambda x,y: x*y, param.shape) param[:] = torch.nn.Parameter(torch.Tensor(\ my_params[param_start:param_stop].reshape(param.shape))) def reset(self): self.cell_state *= 0. if __name__ == "__main__": mrnn = MRNN() temp = mrnn.forward(np.random.randn(1,6)) print(temp)
normal
{ "blob_id": "1c1673b5e54bafef9f36a2583115f8135c112ab4", "index": 1922, "step-1": "<mask token>\n\n\nclass GraphNN(nn.Module):\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):\n super(GraphNN, self).__init__()\n self.ligand_dim = dim_in\n self.dim_h = dim_h\n self.dim_act = dim_act\n self.model_name = 'DockRLGraphNN'\n self.bond_cutoff = 3.6\n self.number_updates = 16\n self.dropout = dropout\n self.initialize_gnn()\n self.reset()\n my_params = self.get_params()\n self.num_params = my_params.shape[0]\n\n def initialize_gnn(self):\n self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.\n LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h, \n self.ligand_dim + 2 * self.dim_h))\n self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.\n ligand_dim), ArcTan())\n self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))\n\n def get_distance(self, node_0, node_1):\n return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))\n\n def build_graph(self, x):\n self.graph = torch.zeros(x.shape[0], x.shape[0])\n for ii in range(x.shape[0]):\n node_ii = x[ii, 0:3]\n for jj in range(x.shape[0]):\n node_jj = x[jj, 0:3]\n distance = self.get_distance(node_ii, node_jj)\n if distance <= self.bond_cutoff:\n self.graph[ii, jj] = 1.0\n self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))\n\n def forward(self, x, return_codes=False, template=None):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n if template is not None:\n self.build_graph(template.detach())\n else:\n self.build_graph(x.detach())\n new_graph = torch.Tensor()\n codes = torch.Tensor()\n temp_input = [torch.Tensor()]\n for kk in range(x.shape[0]):\n for ll in range(x.shape[0]):\n if self.graph[kk, ll]:\n temp_input[-1] = torch.cat([temp_input[-1], self.\n edge_model(x[ll]).unsqueeze(0)])\n keys = temp_input[-1][:, 
-self.dim_h * 2:-self.dim_h]\n queries = temp_input[-1][:, -self.dim_h:]\n attention = torch.zeros(1, keys.shape[0])\n for mm in range(keys.shape[0]):\n attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)\n attention = torch.softmax(attention, dim=1)\n my_input = torch.sum(attention.T * temp_input[-1][:, :self.\n ligand_dim], dim=0)\n my_input = torch.cat([x[kk], my_input])\n codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])\n new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])\n if return_codes:\n return codes, new_graph\n else:\n return new_graph\n\n def get_actions(self, x):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n my_template = x\n for ii in range(self.number_updates):\n x = self.forward(x, template=my_template)\n x = torch.mean(x, dim=0)\n x = self.action_layer(x)\n return x\n\n def get_params(self):\n params = np.array([])\n for param in self.edge_model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.encoder.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.action_layer.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.edge_model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.encoder.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.action_layer.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, 
param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MLP(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):\n super(MLP, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 32\n self.dropout = dropout\n self.model_name = 'DockRLMLP'\n self.init_params()\n\n def init_params(self):\n self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.\n ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.\n Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = self.model(x)\n return x\n\n def get_actions(self, x):\n act = self.forward(x)\n act = torch.mean(act, dim=0, keepdim=True)\n return act\n\n def get_params(self):\n params = np.array([])\n for param in self.model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MRNN(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5):\n super(MRNN, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 8\n self.init_params()\n\n def init_params(self):\n self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))\n self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))\n self.w_h2y 
= nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.\n dim_h, self.dim_act))]))\n self.cell_state = torch.zeros((1, self.dim_h))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = torch.cat((self.cell_state, x), axis=-1)\n g_out = self.g(x)\n j_out = (1.0 - g_out) * self.j(x)\n self.cell_state = g_out * self.cell_state + j_out\n y = self.w_h2y(self.cell_state)\n return y\n\n def get_action(self, x):\n act = self.forward(x)\n return act.detach().cpu().numpy()\n\n def get_params(self):\n params = np.array([])\n for param in self.g.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.j.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.w_h2y.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.g.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.j.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.w_h2y.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n self.cell_state *= 0.0\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Params:\n <mask token>\n\n def init_params(self):\n self.params = np.random.randn(self.dim_act)\n 
self.num_params = self.dim_act\n\n def forward(self, obs):\n return self.get_params()\n\n def get_params(self):\n return self.params\n <mask token>\n\n def reset(self):\n pass\n\n\nclass GraphNN(nn.Module):\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):\n super(GraphNN, self).__init__()\n self.ligand_dim = dim_in\n self.dim_h = dim_h\n self.dim_act = dim_act\n self.model_name = 'DockRLGraphNN'\n self.bond_cutoff = 3.6\n self.number_updates = 16\n self.dropout = dropout\n self.initialize_gnn()\n self.reset()\n my_params = self.get_params()\n self.num_params = my_params.shape[0]\n\n def initialize_gnn(self):\n self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.\n LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h, \n self.ligand_dim + 2 * self.dim_h))\n self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.\n ligand_dim), ArcTan())\n self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))\n\n def get_distance(self, node_0, node_1):\n return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))\n\n def build_graph(self, x):\n self.graph = torch.zeros(x.shape[0], x.shape[0])\n for ii in range(x.shape[0]):\n node_ii = x[ii, 0:3]\n for jj in range(x.shape[0]):\n node_jj = x[jj, 0:3]\n distance = self.get_distance(node_ii, node_jj)\n if distance <= self.bond_cutoff:\n self.graph[ii, jj] = 1.0\n self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))\n\n def forward(self, x, return_codes=False, template=None):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n if template is not None:\n self.build_graph(template.detach())\n else:\n self.build_graph(x.detach())\n new_graph = torch.Tensor()\n codes = torch.Tensor()\n temp_input = [torch.Tensor()]\n for kk in range(x.shape[0]):\n for ll in range(x.shape[0]):\n if self.graph[kk, ll]:\n temp_input[-1] = 
torch.cat([temp_input[-1], self.\n edge_model(x[ll]).unsqueeze(0)])\n keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]\n queries = temp_input[-1][:, -self.dim_h:]\n attention = torch.zeros(1, keys.shape[0])\n for mm in range(keys.shape[0]):\n attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)\n attention = torch.softmax(attention, dim=1)\n my_input = torch.sum(attention.T * temp_input[-1][:, :self.\n ligand_dim], dim=0)\n my_input = torch.cat([x[kk], my_input])\n codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])\n new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])\n if return_codes:\n return codes, new_graph\n else:\n return new_graph\n\n def get_actions(self, x):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n my_template = x\n for ii in range(self.number_updates):\n x = self.forward(x, template=my_template)\n x = torch.mean(x, dim=0)\n x = self.action_layer(x)\n return x\n\n def get_params(self):\n params = np.array([])\n for param in self.edge_model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.encoder.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.action_layer.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.edge_model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.encoder.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in 
self.action_layer.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MLP(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):\n super(MLP, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 32\n self.dropout = dropout\n self.model_name = 'DockRLMLP'\n self.init_params()\n\n def init_params(self):\n self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.\n ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.\n Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = self.model(x)\n return x\n\n def get_actions(self, x):\n act = self.forward(x)\n act = torch.mean(act, dim=0, keepdim=True)\n return act\n\n def get_params(self):\n params = np.array([])\n for param in self.model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MRNN(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5):\n super(MRNN, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 8\n self.init_params()\n\n def init_params(self):\n self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))\n self.j = 
nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))\n self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.\n dim_h, self.dim_act))]))\n self.cell_state = torch.zeros((1, self.dim_h))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = torch.cat((self.cell_state, x), axis=-1)\n g_out = self.g(x)\n j_out = (1.0 - g_out) * self.j(x)\n self.cell_state = g_out * self.cell_state + j_out\n y = self.w_h2y(self.cell_state)\n return y\n\n def get_action(self, x):\n act = self.forward(x)\n return act.detach().cpu().numpy()\n\n def get_params(self):\n params = np.array([])\n for param in self.g.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.j.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.w_h2y.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.g.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.j.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.w_h2y.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n self.cell_state *= 0.0\n\n\n<mask token>\n", "step-3": 
"<mask token>\n\n\nclass ArcTan(nn.Module):\n <mask token>\n <mask token>\n\n\nclass Params:\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):\n self.dim_act = dim_act\n self.dim_in = 0\n self.dim_h = 0\n self.dropout = 0.0\n self.model_name = 'DockRLParams'\n self.init_params()\n self.act = ArcTan()\n\n def init_params(self):\n self.params = np.random.randn(self.dim_act)\n self.num_params = self.dim_act\n\n def forward(self, obs):\n return self.get_params()\n\n def get_params(self):\n return self.params\n\n def set_params(self, params):\n assert params.shape == self.params.shape\n self.params = params\n\n def reset(self):\n pass\n\n\nclass GraphNN(nn.Module):\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):\n super(GraphNN, self).__init__()\n self.ligand_dim = dim_in\n self.dim_h = dim_h\n self.dim_act = dim_act\n self.model_name = 'DockRLGraphNN'\n self.bond_cutoff = 3.6\n self.number_updates = 16\n self.dropout = dropout\n self.initialize_gnn()\n self.reset()\n my_params = self.get_params()\n self.num_params = my_params.shape[0]\n\n def initialize_gnn(self):\n self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.\n LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h, \n self.ligand_dim + 2 * self.dim_h))\n self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.\n ligand_dim), ArcTan())\n self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))\n\n def get_distance(self, node_0, node_1):\n return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))\n\n def build_graph(self, x):\n self.graph = torch.zeros(x.shape[0], x.shape[0])\n for ii in range(x.shape[0]):\n node_ii = x[ii, 0:3]\n for jj in range(x.shape[0]):\n node_jj = x[jj, 0:3]\n distance = self.get_distance(node_ii, node_jj)\n if distance <= self.bond_cutoff:\n self.graph[ii, jj] = 1.0\n self.graph = 
self.graph * (1 - torch.eye(self.graph.shape[0]))\n\n def forward(self, x, return_codes=False, template=None):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n if template is not None:\n self.build_graph(template.detach())\n else:\n self.build_graph(x.detach())\n new_graph = torch.Tensor()\n codes = torch.Tensor()\n temp_input = [torch.Tensor()]\n for kk in range(x.shape[0]):\n for ll in range(x.shape[0]):\n if self.graph[kk, ll]:\n temp_input[-1] = torch.cat([temp_input[-1], self.\n edge_model(x[ll]).unsqueeze(0)])\n keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]\n queries = temp_input[-1][:, -self.dim_h:]\n attention = torch.zeros(1, keys.shape[0])\n for mm in range(keys.shape[0]):\n attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)\n attention = torch.softmax(attention, dim=1)\n my_input = torch.sum(attention.T * temp_input[-1][:, :self.\n ligand_dim], dim=0)\n my_input = torch.cat([x[kk], my_input])\n codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])\n new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])\n if return_codes:\n return codes, new_graph\n else:\n return new_graph\n\n def get_actions(self, x):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n my_template = x\n for ii in range(self.number_updates):\n x = self.forward(x, template=my_template)\n x = torch.mean(x, dim=0)\n x = self.action_layer(x)\n return x\n\n def get_params(self):\n params = np.array([])\n for param in self.edge_model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.encoder.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.action_layer.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in 
self.edge_model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.encoder.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.action_layer.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MLP(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):\n super(MLP, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 32\n self.dropout = dropout\n self.model_name = 'DockRLMLP'\n self.init_params()\n\n def init_params(self):\n self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.\n ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.\n Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = self.model(x)\n return x\n\n def get_actions(self, x):\n act = self.forward(x)\n act = torch.mean(act, dim=0, keepdim=True)\n return act\n\n def get_params(self):\n params = np.array([])\n for param in self.model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = 
torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MRNN(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5):\n super(MRNN, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 8\n self.init_params()\n\n def init_params(self):\n self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))\n self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))\n self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.\n dim_h, self.dim_act))]))\n self.cell_state = torch.zeros((1, self.dim_h))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = torch.cat((self.cell_state, x), axis=-1)\n g_out = self.g(x)\n j_out = (1.0 - g_out) * self.j(x)\n self.cell_state = g_out * self.cell_state + j_out\n y = self.w_h2y(self.cell_state)\n return y\n\n def get_action(self, x):\n act = self.forward(x)\n return act.detach().cpu().numpy()\n\n def get_params(self):\n params = np.array([])\n for param in self.g.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.j.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.w_h2y.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.g.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.j.named_parameters():\n 
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.w_h2y.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n self.cell_state *= 0.0\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass ArcTan(nn.Module):\n\n def __init__(self):\n super(ArcTan, self).__init__()\n <mask token>\n\n\nclass Params:\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):\n self.dim_act = dim_act\n self.dim_in = 0\n self.dim_h = 0\n self.dropout = 0.0\n self.model_name = 'DockRLParams'\n self.init_params()\n self.act = ArcTan()\n\n def init_params(self):\n self.params = np.random.randn(self.dim_act)\n self.num_params = self.dim_act\n\n def forward(self, obs):\n return self.get_params()\n\n def get_params(self):\n return self.params\n\n def set_params(self, params):\n assert params.shape == self.params.shape\n self.params = params\n\n def reset(self):\n pass\n\n\nclass GraphNN(nn.Module):\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):\n super(GraphNN, self).__init__()\n self.ligand_dim = dim_in\n self.dim_h = dim_h\n self.dim_act = dim_act\n self.model_name = 'DockRLGraphNN'\n self.bond_cutoff = 3.6\n self.number_updates = 16\n self.dropout = dropout\n self.initialize_gnn()\n self.reset()\n my_params = self.get_params()\n self.num_params = my_params.shape[0]\n\n def initialize_gnn(self):\n self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.\n LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h, \n self.ligand_dim + 2 * self.dim_h))\n self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.\n ligand_dim), ArcTan())\n self.action_layer = 
nn.Sequential(nn.Linear(self.ligand_dim, self.\n dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))\n\n def get_distance(self, node_0, node_1):\n return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))\n\n def build_graph(self, x):\n self.graph = torch.zeros(x.shape[0], x.shape[0])\n for ii in range(x.shape[0]):\n node_ii = x[ii, 0:3]\n for jj in range(x.shape[0]):\n node_jj = x[jj, 0:3]\n distance = self.get_distance(node_ii, node_jj)\n if distance <= self.bond_cutoff:\n self.graph[ii, jj] = 1.0\n self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))\n\n def forward(self, x, return_codes=False, template=None):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n if template is not None:\n self.build_graph(template.detach())\n else:\n self.build_graph(x.detach())\n new_graph = torch.Tensor()\n codes = torch.Tensor()\n temp_input = [torch.Tensor()]\n for kk in range(x.shape[0]):\n for ll in range(x.shape[0]):\n if self.graph[kk, ll]:\n temp_input[-1] = torch.cat([temp_input[-1], self.\n edge_model(x[ll]).unsqueeze(0)])\n keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]\n queries = temp_input[-1][:, -self.dim_h:]\n attention = torch.zeros(1, keys.shape[0])\n for mm in range(keys.shape[0]):\n attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)\n attention = torch.softmax(attention, dim=1)\n my_input = torch.sum(attention.T * temp_input[-1][:, :self.\n ligand_dim], dim=0)\n my_input = torch.cat([x[kk], my_input])\n codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])\n new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])\n if return_codes:\n return codes, new_graph\n else:\n return new_graph\n\n def get_actions(self, x):\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n my_template = x\n for ii in range(self.number_updates):\n x = self.forward(x, template=my_template)\n x = torch.mean(x, dim=0)\n x = self.action_layer(x)\n return x\n\n def get_params(self):\n params = np.array([])\n for param in 
self.edge_model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.encoder.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.action_layer.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.edge_model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.encoder.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.action_layer.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MLP(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):\n super(MLP, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 32\n self.dropout = dropout\n self.model_name = 'DockRLMLP'\n self.init_params()\n\n def init_params(self):\n self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.\n ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.\n Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = self.model(x)\n return x\n\n def get_actions(self, x):\n act = self.forward(x)\n act = torch.mean(act, dim=0, 
keepdim=True)\n return act\n\n def get_params(self):\n params = np.array([])\n for param in self.model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.model.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\n\nclass MRNN(nn.Module):\n\n def __init__(self, dim_in=6, dim_act=5):\n super(MRNN, self).__init__()\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 8\n self.init_params()\n\n def init_params(self):\n self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))\n self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +\n self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))\n self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.\n dim_h, self.dim_act))]))\n self.cell_state = torch.zeros((1, self.dim_h))\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n x = torch.Tensor(x)\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n x = torch.cat((self.cell_state, x), axis=-1)\n g_out = self.g(x)\n j_out = (1.0 - g_out) * self.j(x)\n self.cell_state = g_out * self.cell_state + j_out\n y = self.w_h2y(self.cell_state)\n return y\n\n def get_action(self, x):\n act = self.forward(x)\n return act.detach().cpu().numpy()\n\n def get_params(self):\n params = np.array([])\n for param in self.g.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.j.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n for param in self.w_h2y.named_parameters():\n params = 
np.append(params, param[1].detach().numpy().ravel())\n return params\n\n def set_params(self, my_params):\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params\n ) * torch.sqrt(torch.tensor(self.var))\n param_start = 0\n for name, param in self.g.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.j.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n for name, param in self.w_h2y.named_parameters():\n param_stop = param_start + reduce(lambda x, y: x * y, param.shape)\n param[:] = torch.nn.Parameter(torch.Tensor(my_params[\n param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n self.cell_state *= 0.0\n\n\n<mask token>\n", "step-5": "import torch \nimport torch.nn as nn\nimport torch.nn.functional as F\n\nimport numpy as np\n\nfrom collections import OrderedDict\nfrom functools import reduce\n\nclass ArcTan(nn.Module):\n\n def __init__(self):\n super(ArcTan,self).__init__()\n\n def forward(self, x):\n\n return torch.arctan(x) / 1.5708\n\nclass Params():\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):\n \n self.dim_act = dim_act\n self.dim_in = 0\n self.dim_h = 0\n self.dropout = 0.0\n self.model_name = \"DockRLParams\"\n\n self.init_params()\n self.act = ArcTan()\n\n def init_params(self):\n\n self.params = np.random.randn(self.dim_act)\n self.num_params = self.dim_act\n\n def forward(self, obs):\n return self.get_params()\n\n def get_params(self):\n return self.params\n\n def set_params(self, params):\n assert params.shape == self.params.shape\n\n self.params = params \n\n def reset(self):\n pass\n\n\n\nclass GraphNN(nn.Module):\n\n def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.00):\n 
super(GraphNN, self).__init__()\n \n self.ligand_dim = dim_in\n self.dim_h = dim_h\n self.dim_act = dim_act\n self.model_name = \"DockRLGraphNN\"\n # This is a guesstimate based on: \n # https://pymolwiki.org/index.php/Displaying_Biochemical_Properties\n self.bond_cutoff = 3.6\n self.number_updates = 16\n self.dropout = dropout\n\n self.initialize_gnn()\n self.reset()\n\n my_params = self.get_params()\n self.num_params = my_params.shape[0]\n\n def initialize_gnn(self):\n\n # vertices MLP, with 8 element key and query vectors for self-attention\n self.edge_model = nn.Sequential(\\\n nn.Linear(self.ligand_dim, self.dim_h),\\\n nn.LeakyReLU(),\\\n nn.Linear(self.dim_h, self.dim_h),\\\n nn.LeakyReLU(),\\\n nn.Dropout(p=self.dropout),\\\n nn.Linear(self.dim_h, self.ligand_dim + 2 * self.dim_h)\n )\n\n self.encoder = nn.Sequential(\\\n nn.Linear(2*self.ligand_dim, self.ligand_dim),\\\n ArcTan()\n )\n\n self.action_layer = nn.Sequential(\\\n nn.Linear(self.ligand_dim, self.dim_h),\\\n nn.LeakyReLU(),\\\n nn.Linear(self.dim_h, self.dim_act)\\\n )\n \n def get_distance(self, node_0, node_1):\n\n return torch.sum(torch.sqrt(torch.abs(node_0 - node_1)**2))\n\n def build_graph(self, x):\n\n self.graph = torch.zeros(x.shape[0],x.shape[0])\n\n for ii in range(x.shape[0]):\n node_ii = x[ii, 0:3]\n for jj in range(x.shape[0]):\n node_jj = x[jj, 0:3]\n\n distance = self.get_distance(node_ii, node_jj)\n if distance <= self.bond_cutoff:\n self.graph[ii, jj] = 1.0\n \n self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))\n\n def forward(self, x, return_codes=False, template=None):\n\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n\n if template is not None:\n self.build_graph(template.detach())\n else:\n self.build_graph(x.detach())\n \n new_graph = torch.Tensor() #torch.zeros_like(x)\n codes = torch.Tensor() #torch.zeros(x.shape[0], self.dim_h)\n temp_input = [torch.Tensor()] \n #orch.Tensor() #torch.zeros(x.shape[0], self.dim_h+8+8)\n\n\n for kk in 
range(x.shape[0]):\n # loop through nodes for each node\n for ll in range(x.shape[0]):\n if self.graph[kk,ll]:\n temp_input[-1] = torch.cat([temp_input[-1],\\\n self.edge_model(x[ll]).unsqueeze(0)])\n\n keys = temp_input[-1][:,-self.dim_h*2:-self.dim_h]\n queries = temp_input[-1][:,-self.dim_h:]\n\n attention = torch.zeros(1, keys.shape[0])\n\n for mm in range(keys.shape[0]):\n attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)\n\n attention = torch.softmax(attention, dim=1)\n\n my_input = torch.sum(attention.T \\\n * temp_input[-1][:,:self.ligand_dim],dim=0)\n my_input = torch.cat([x[kk], my_input])\n\n #this is where the cell gating would happen (TODO)\n codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])\n\n new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])\n #self.decoder(codes[-1]).unsqueeze(0)])\n\n\n if return_codes:\n return codes, new_graph\n else:\n return new_graph\n\n\n def get_actions(self, x):\n\n if type(x) != torch.Tensor:\n x = torch.Tensor(x)\n\n my_template = x\n\n for ii in range(self.number_updates):\n x = self.forward(x, template=my_template)\n\n x = torch.mean(x, dim=0)\n\n x = self.action_layer(x)\n\n return x\n\n def get_params(self):\n params = np.array([])\n\n for param in self.edge_model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n for param in self.encoder.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n# for param in self.decoder.named_parameters():\n# params = np.append(params, param[1].detach().numpy().ravel())\n\n for param in self.action_layer.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n return params\n\n def set_params(self, my_params):\n\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var))\n\n param_start = 0\n for name, param in self.edge_model.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: 
x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n for name, param in self.encoder.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n# for name, param in self.decoder.named_parameters():\n#\n# param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n#\n# param[:] = torch.nn.Parameter(torch.Tensor(\\\n# my_params[param_start:param_stop].reshape(param.shape)))\n\n for name, param in self.action_layer.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n # initialize using gated cell states here later (maybe)\n pass\n\nclass MLP(nn.Module):\n def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):\n super(MLP, self).__init__()\n\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 32\n self.dropout = dropout\n self.model_name = \"DockRLMLP\"\n\n self.init_params()\n\n def init_params(self):\n\n self.model = nn.Sequential(\\\n nn.Linear(self.dim_in, self.dim_h),\\\n nn.ReLU(),\\\n nn.Linear(self.dim_h, self.dim_h),\\\n nn.ReLU(),\\\n nn.Dropout(p=self.dropout),\\\n nn.Linear(self.dim_h, self.dim_act)\\\n )\n\n self.num_params = self.get_params().shape[0]\n\n def forward(self, x):\n\n x = torch.Tensor(x)\n\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n\n\n x = self.model(x)\n\n\n return x\n\n def get_actions(self, x):\n\n act = self.forward(x)\n act = torch.mean(act, dim=0, keepdim=True)\n return act\n\n def get_params(self):\n params = np.array([])\n\n for param in self.model.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n return params\n\n def set_params(self, my_params):\n\n if my_params is None:\n my_params = 
self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var))\n\n param_start = 0\n for name, param in self.model.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n pass\n\nclass MRNN(nn.Module):\n def __init__(self, dim_in=6, dim_act=5):\n super(MRNN, self).__init__()\n\n self.dim_in = dim_in\n self.dim_act = dim_act\n self.dim_h = 8\n\n self.init_params()\n\n\n def init_params(self):\n\n self.g = nn.Sequential(OrderedDict([\\\n (\"g\", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\\\n (\"act_g\", nn.Sigmoid())]))\n\n self.j = nn.Sequential(OrderedDict([\\\n (\"j\", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\\\n (\"act_j\", nn.Tanh())]))\n\n self.w_h2y = nn.Sequential(OrderedDict([\\\n (\"w_h2y\", nn.Linear(self.dim_h, self.dim_act))]))\n\n self.cell_state = torch.zeros((1,self.dim_h))\n\n self.num_params = self.get_params().shape[0]\n \n def forward(self, x):\n \n x = torch.Tensor(x)\n\n if len(x.shape) == 1:\n x = x.unsqueeze(0)\n\n x = torch.cat((self.cell_state, x), axis=-1)\n\n g_out = self.g(x) \n\n j_out = (1.0 - g_out) * self.j(x)\n\n self.cell_state = g_out * self.cell_state + j_out\n\n y = self.w_h2y(self.cell_state) \n\n return y\n \n def get_action(self, x):\n\n act = self.forward(x)\n return act.detach().cpu().numpy()\n\n def get_params(self):\n params = np.array([])\n\n for param in self.g.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n for param in self.j.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n for param in self.w_h2y.named_parameters():\n params = np.append(params, param[1].detach().numpy().ravel())\n\n return params\n\n def set_params(self, my_params):\n\n if my_params is None:\n my_params = self.init_mean + torch.randn(self.num_params) * 
torch.sqrt(torch.tensor(self.var))\n\n param_start = 0\n for name, param in self.g.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n for name, param in self.j.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n for name, param in self.w_h2y.named_parameters():\n\n param_stop = param_start + reduce(lambda x,y: x*y, param.shape)\n\n param[:] = torch.nn.Parameter(torch.Tensor(\\\n my_params[param_start:param_stop].reshape(param.shape)))\n\n def reset(self):\n self.cell_state *= 0. \n\n\nif __name__ == \"__main__\":\n\n mrnn = MRNN()\n\n temp = mrnn.forward(np.random.randn(1,6))\n print(temp)\n", "step-ids": [ 26, 31, 34, 35, 39 ] }
[ 26, 31, 34, 35, 39 ]
from django.urls import path from .views import * from .utils import * app_name = 'gymapp' urlpatterns = [ # CLIENT PATHS ## # CLIENT PATHS ## # CLIENT PATHS ## # CLIENT PATHS ## # general pages path('', ClientHomeView.as_view(), name='clienthome'), path('about/', ClientAboutView.as_view(), name='clientabout'), path('contact/', ClientContactCreateView.as_view(), name='clientcontact'), # path('makeanappointment/', ClientAppointmentCreateView.as_view(), # name='clientappointmentcreate'), path('products/', ClientProductListView.as_view(), name='clientproductlist'), path('product/<int:pk>/detail/',ClientProductDetailView.as_view(), name='clientproductdetail'), path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'), path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(), name='clienttrainerdetail'), path('services/', ClientServiceListView.as_view(), name='clientservicelist'), path('services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name='clientservicedetail'), path('schedule/<slug:slug>/detail/', ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path('testimonial/', TestimonialListView.as_view(), name='testimoniallist'), # path('slider/', # SliderListView.as_view(), name='sliderlist'), path('facilities/', ClientFacilityListView.as_view(), name='clientfacilitylist'), path('facilities/<slug:slug>/details', ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path('events/', ClientEventListView.as_view(), name='clienteventlist'), path('events/<slug:slug>/details', ClientEventDetailView.as_view(), name='clienteventdetail'), path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'), path('notices/<slug:slug>/details', ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path('pages/<slug:slug>/details', ClientPageDetailView.as_view(), name='clientpagedetail'), path('images/', ClientImageListView.as_view(), name='clientimagelist'), path('videos/', 
ClientVideoListView.as_view(), name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'), path('blogs/<slug:slug>/details', ClientBlogDetailView.as_view(), name='clientblogdetail'), path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'), path('subscribe/', ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'), path('search/result/', SearchResultView.as_view(), name="searchresult"), path('login/', ClientLoginView.as_view(), name='clientlogin'), path('logout/', ClientLogoutView.as_view(), name='clientlogout'), path('register/', ClientRegistrationView.as_view(), name='clientcreate'), path('cart_update',cart_update,name = 'cart_update'), path('carts/<int:pk>/items/total/',ClientCartTotalView.as_view(), name='clientcarttotal'), ]
normal
{ "blob_id": "48a4331e4b26ea81f1c52ae76db1e92a57cb378c", "index": 2654, "step-1": "<mask token>\n", "step-2": "<mask token>\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n 
ClientBlogDetailView.as_view(), name='clientblogdetail'), path(\n 'schedules/', ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n", "step-3": "from django.urls import path\nfrom .views import *\nfrom .utils import *\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), 
name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'), path(\n 'schedules/', ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n", "step-4": "from django.urls import path\nfrom .views import *\nfrom .utils import *\n\n\napp_name = 'gymapp'\n\nurlpatterns = [\n\n\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n\n # general pages\n\n path('', ClientHomeView.as_view(), name='clienthome'),\n path('about/', ClientAboutView.as_view(), name='clientabout'),\n path('contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n # path('makeanappointment/', ClientAppointmentCreateView.as_view(),\n # 
name='clientappointmentcreate'),\n path('products/', ClientProductListView.as_view(), name='clientproductlist'),\n path('product/<int:pk>/detail/',ClientProductDetailView.as_view(), \n name='clientproductdetail'),\n path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'),\n path('services/', ClientServiceListView.as_view(),\n name='clientservicelist'),\n path('services/<slug:slug>/detail/',\n ClientServiceDetailView.as_view(), name='clientservicedetail'),\n path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'),\n path('testimonial/',\n TestimonialListView.as_view(), name='testimoniallist'),\n # path('slider/',\n # SliderListView.as_view(), name='sliderlist'),\n path('facilities/', ClientFacilityListView.as_view(),\n name='clientfacilitylist'),\n path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'),\n path('events/', ClientEventListView.as_view(),\n name='clienteventlist'),\n path('events/<slug:slug>/details',\n ClientEventDetailView.as_view(), name='clienteventdetail'),\n path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'),\n path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'),\n path('pages/<slug:slug>/details',\n ClientPageDetailView.as_view(), name='clientpagedetail'),\n path('images/', ClientImageListView.as_view(), name='clientimagelist'),\n path('videos/', ClientVideoListView.as_view(), name='clientvideolist'),\n path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'),\n path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'),\n path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'),\n path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'),\n path('subscribe/', 
ClientSubscriberCreateView.as_view(),\n name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name=\"searchresult\"),\n path('login/', ClientLoginView.as_view(), name='clientlogin'),\n path('logout/', ClientLogoutView.as_view(), name='clientlogout'),\n path('register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update',cart_update,name = 'cart_update'),\n path('carts/<int:pk>/items/total/',ClientCartTotalView.as_view(), name='clientcarttotal'),\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# -*- coding: utf-8 -*- import sys, io,re import regex from collections import defaultdict import datetime import json def update_key(data_base, url,kkey): keys_saved = regex.get_data('<key>\s(.+?)\s<',data_base[url]['key']) if kkey not in keys_saved: data_base[url]['key'] = data_base[url]['key'][:-1] data_base[url]['key'] += ' <key> ' + kkey + ' <\key>\n' return True return False def check_date(data_base,key_word): date = 0 for url in data_base: for key in data_base[url]: if key_word == key: try: d = int(re.sub(r'-', '', data_base[url][key])) if date < d: date = d except ValueError: continue if date != 0: date = str(date) year = int(date[0:4]) if date[4] != '0': month = int(date[4:6]) elif date[4] == '0': month = int(date[5]) if date[6] != '0': day = int(date[6:8]) elif date[6] == '0': day = int(date[7]) date = (datetime.date(year, month, day) - datetime.timedelta(1)).isoformat() return int(re.sub(r'-', '', date)) else: return 0 def load_keywords_info(): try: with open('keywords.json', 'r') as fp: data = json.load(fp) return data except json.decoder.JSONDecodeError: return defaultdict(str) def save_keywords_info(data): with open('keywords.json', 'w') as fp: json.dump(data, fp) def load_url_info(): try: with open('urls.json', 'r') as fp: data = json.load(fp) return data except json.decoder.JSONDecodeError: return defaultdict(list) def save_url_info(data): with open('urls.json', 'w') as fp: json.dump(data, fp) def load_previous(data_base): previous = [] try: file = open("news.bank","r",encoding='utf8'); for line in file: previous.append(line) i = 0 while i < len(previous): url = regex.get_data('>\s(.+?)\s<',previous[i+4])[0] key = regex.get_data('>\s(.+?)\s<',previous[i+1])[0] #date = regex.get_data('>\s(.+?)\s<',previous[i+5])[0] data_base[key].append(url) #data_base[url][key] = date #data_base[url] = defaultdict(str) #data_base[id]['id'] = previous[i] #data_base[key]['key'] = previous[i] #data_base[url]['title'] = previous[i+1] #data_base[url]['source'] = 
previous[i+2] #data_base[url]['url'] = previous[i+3] #data_base[url]['date'] = previous[i+4] #data_base[url]['author'] = previous[i+5] #data_base[url]['content1'] = previous[i+6] #data_base[url]['content2'] = previous[i+7] i += 10 except FileNotFoundError: pass def check_last_update(url,date): count = 0 for u in url: d = regex.get_data('\S+\/(\d+\/\d+\/\d+)\S+',u)[0] d = int(re.sub(r'/', '', d)) if d < date: return count count += 1 return -1 def MinEditDist(s1, s2): if len(s1) > len(s2): s1, s2 = s2, s1 distances = range(len(s1) + 1) for i2, c2 in enumerate(s2): distances_ = [i2+1] for i1, c1 in enumerate(s1): if c1 == c2: distances_.append(distances[i1]) else: distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1]))) distances = distances_ return distances[-1]
normal
{ "blob_id": "50a5d3431693b402c15b557357eaf9a85fc02b0b", "index": 2921, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef update_key(data_base, url, kkey):\n keys_saved = regex.get_data('<key>\\\\s(.+?)\\\\s<', data_base[url]['key'])\n if kkey not in keys_saved:\n data_base[url]['key'] = data_base[url]['key'][:-1]\n data_base[url]['key'] += ' <key> ' + kkey + ' <\\\\key>\\n'\n return True\n return False\n\n\ndef check_date(data_base, key_word):\n date = 0\n for url in data_base:\n for key in data_base[url]:\n if key_word == key:\n try:\n d = int(re.sub('-', '', data_base[url][key]))\n if date < d:\n date = d\n except ValueError:\n continue\n if date != 0:\n date = str(date)\n year = int(date[0:4])\n if date[4] != '0':\n month = int(date[4:6])\n elif date[4] == '0':\n month = int(date[5])\n if date[6] != '0':\n day = int(date[6:8])\n elif date[6] == '0':\n day = int(date[7])\n date = (datetime.date(year, month, day) - datetime.timedelta(1)\n ).isoformat()\n return int(re.sub('-', '', date))\n else:\n return 0\n\n\ndef load_keywords_info():\n try:\n with open('keywords.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(str)\n\n\n<mask token>\n\n\ndef load_url_info():\n try:\n with open('urls.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(list)\n\n\ndef save_url_info(data):\n with open('urls.json', 'w') as fp:\n json.dump(data, fp)\n\n\ndef load_previous(data_base):\n previous = []\n try:\n file = open('news.bank', 'r', encoding='utf8')\n for line in file:\n previous.append(line)\n i = 0\n while i < len(previous):\n url = regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 4])[0]\n key = regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 1])[0]\n data_base[key].append(url)\n i += 10\n except FileNotFoundError:\n pass\n\n\n<mask token>\n\n\ndef MinEditDist(s1, s2):\n if len(s1) > len(s2):\n s1, s2 = s2, s1\n distances = 
range(len(s1) + 1)\n for i2, c2 in enumerate(s2):\n distances_ = [i2 + 1]\n for i1, c1 in enumerate(s1):\n if c1 == c2:\n distances_.append(distances[i1])\n else:\n distances_.append(1 + min((distances[i1], distances[i1 + 1],\n distances_[-1])))\n distances = distances_\n return distances[-1]\n", "step-3": "<mask token>\n\n\ndef update_key(data_base, url, kkey):\n keys_saved = regex.get_data('<key>\\\\s(.+?)\\\\s<', data_base[url]['key'])\n if kkey not in keys_saved:\n data_base[url]['key'] = data_base[url]['key'][:-1]\n data_base[url]['key'] += ' <key> ' + kkey + ' <\\\\key>\\n'\n return True\n return False\n\n\ndef check_date(data_base, key_word):\n date = 0\n for url in data_base:\n for key in data_base[url]:\n if key_word == key:\n try:\n d = int(re.sub('-', '', data_base[url][key]))\n if date < d:\n date = d\n except ValueError:\n continue\n if date != 0:\n date = str(date)\n year = int(date[0:4])\n if date[4] != '0':\n month = int(date[4:6])\n elif date[4] == '0':\n month = int(date[5])\n if date[6] != '0':\n day = int(date[6:8])\n elif date[6] == '0':\n day = int(date[7])\n date = (datetime.date(year, month, day) - datetime.timedelta(1)\n ).isoformat()\n return int(re.sub('-', '', date))\n else:\n return 0\n\n\ndef load_keywords_info():\n try:\n with open('keywords.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(str)\n\n\ndef save_keywords_info(data):\n with open('keywords.json', 'w') as fp:\n json.dump(data, fp)\n\n\ndef load_url_info():\n try:\n with open('urls.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(list)\n\n\ndef save_url_info(data):\n with open('urls.json', 'w') as fp:\n json.dump(data, fp)\n\n\ndef load_previous(data_base):\n previous = []\n try:\n file = open('news.bank', 'r', encoding='utf8')\n for line in file:\n previous.append(line)\n i = 0\n while i < len(previous):\n url = 
regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 4])[0]\n key = regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 1])[0]\n data_base[key].append(url)\n i += 10\n except FileNotFoundError:\n pass\n\n\n<mask token>\n\n\ndef MinEditDist(s1, s2):\n if len(s1) > len(s2):\n s1, s2 = s2, s1\n distances = range(len(s1) + 1)\n for i2, c2 in enumerate(s2):\n distances_ = [i2 + 1]\n for i1, c1 in enumerate(s1):\n if c1 == c2:\n distances_.append(distances[i1])\n else:\n distances_.append(1 + min((distances[i1], distances[i1 + 1],\n distances_[-1])))\n distances = distances_\n return distances[-1]\n", "step-4": "<mask token>\n\n\ndef update_key(data_base, url, kkey):\n keys_saved = regex.get_data('<key>\\\\s(.+?)\\\\s<', data_base[url]['key'])\n if kkey not in keys_saved:\n data_base[url]['key'] = data_base[url]['key'][:-1]\n data_base[url]['key'] += ' <key> ' + kkey + ' <\\\\key>\\n'\n return True\n return False\n\n\ndef check_date(data_base, key_word):\n date = 0\n for url in data_base:\n for key in data_base[url]:\n if key_word == key:\n try:\n d = int(re.sub('-', '', data_base[url][key]))\n if date < d:\n date = d\n except ValueError:\n continue\n if date != 0:\n date = str(date)\n year = int(date[0:4])\n if date[4] != '0':\n month = int(date[4:6])\n elif date[4] == '0':\n month = int(date[5])\n if date[6] != '0':\n day = int(date[6:8])\n elif date[6] == '0':\n day = int(date[7])\n date = (datetime.date(year, month, day) - datetime.timedelta(1)\n ).isoformat()\n return int(re.sub('-', '', date))\n else:\n return 0\n\n\ndef load_keywords_info():\n try:\n with open('keywords.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(str)\n\n\ndef save_keywords_info(data):\n with open('keywords.json', 'w') as fp:\n json.dump(data, fp)\n\n\ndef load_url_info():\n try:\n with open('urls.json', 'r') as fp:\n data = json.load(fp)\n return data\n except json.decoder.JSONDecodeError:\n return defaultdict(list)\n\n\ndef 
save_url_info(data):\n with open('urls.json', 'w') as fp:\n json.dump(data, fp)\n\n\ndef load_previous(data_base):\n previous = []\n try:\n file = open('news.bank', 'r', encoding='utf8')\n for line in file:\n previous.append(line)\n i = 0\n while i < len(previous):\n url = regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 4])[0]\n key = regex.get_data('>\\\\s(.+?)\\\\s<', previous[i + 1])[0]\n data_base[key].append(url)\n i += 10\n except FileNotFoundError:\n pass\n\n\ndef check_last_update(url, date):\n count = 0\n for u in url:\n d = regex.get_data('\\\\S+\\\\/(\\\\d+\\\\/\\\\d+\\\\/\\\\d+)\\\\S+', u)[0]\n d = int(re.sub('/', '', d))\n if d < date:\n return count\n count += 1\n return -1\n\n\ndef MinEditDist(s1, s2):\n if len(s1) > len(s2):\n s1, s2 = s2, s1\n distances = range(len(s1) + 1)\n for i2, c2 in enumerate(s2):\n distances_ = [i2 + 1]\n for i1, c1 in enumerate(s1):\n if c1 == c2:\n distances_.append(distances[i1])\n else:\n distances_.append(1 + min((distances[i1], distances[i1 + 1],\n distances_[-1])))\n distances = distances_\n return distances[-1]\n", "step-5": "# -*- coding: utf-8 -*-\r\nimport sys, io,re\r\nimport regex\r\nfrom collections import defaultdict\r\nimport datetime\r\nimport json\r\n\r\n\r\ndef update_key(data_base, url,kkey):\r\n keys_saved = regex.get_data('<key>\\s(.+?)\\s<',data_base[url]['key'])\r\n\r\n if kkey not in keys_saved:\r\n data_base[url]['key'] = data_base[url]['key'][:-1]\r\n data_base[url]['key'] += ' <key> ' + kkey + ' <\\key>\\n'\r\n return True\r\n\r\n return False\r\n\r\ndef check_date(data_base,key_word):\r\n date = 0\r\n\r\n for url in data_base:\r\n for key in data_base[url]:\r\n if key_word == key:\r\n try:\r\n d = int(re.sub(r'-', '', data_base[url][key]))\r\n if date < d:\r\n date = d\r\n except ValueError:\r\n continue\r\n\r\n\r\n if date != 0:\r\n date = str(date)\r\n year = int(date[0:4])\r\n if date[4] != '0':\r\n month = int(date[4:6])\r\n elif date[4] == '0':\r\n month = int(date[5])\r\n if date[6] != 
'0':\r\n day = int(date[6:8])\r\n elif date[6] == '0':\r\n day = int(date[7])\r\n\r\n\r\n\r\n date = (datetime.date(year, month, day) - datetime.timedelta(1)).isoformat()\r\n return int(re.sub(r'-', '', date))\r\n else:\r\n return 0\r\n \r\ndef load_keywords_info():\r\n try:\r\n with open('keywords.json', 'r') as fp:\r\n data = json.load(fp)\r\n return data\r\n except json.decoder.JSONDecodeError:\r\n return defaultdict(str)\r\n\r\n\r\ndef save_keywords_info(data):\r\n with open('keywords.json', 'w') as fp:\r\n json.dump(data, fp)\r\n\r\n\r\ndef load_url_info():\r\n try:\r\n with open('urls.json', 'r') as fp:\r\n data = json.load(fp)\r\n return data\r\n except json.decoder.JSONDecodeError:\r\n return defaultdict(list)\r\n\r\n\r\ndef save_url_info(data):\r\n with open('urls.json', 'w') as fp:\r\n json.dump(data, fp)\r\n\r\ndef load_previous(data_base):\r\n previous = []\r\n try:\r\n file = open(\"news.bank\",\"r\",encoding='utf8');\r\n for line in file:\r\n previous.append(line)\r\n\r\n\r\n \r\n i = 0\r\n while i < len(previous):\r\n\r\n url = regex.get_data('>\\s(.+?)\\s<',previous[i+4])[0]\r\n key = regex.get_data('>\\s(.+?)\\s<',previous[i+1])[0] \r\n #date = regex.get_data('>\\s(.+?)\\s<',previous[i+5])[0] \r\n\r\n data_base[key].append(url)\r\n\r\n #data_base[url][key] = date\r\n #data_base[url] = defaultdict(str)\r\n #data_base[id]['id'] = previous[i]\r\n #data_base[key]['key'] = previous[i]\r\n #data_base[url]['title'] = previous[i+1]\r\n #data_base[url]['source'] = previous[i+2]\r\n #data_base[url]['url'] = previous[i+3]\r\n #data_base[url]['date'] = previous[i+4]\r\n #data_base[url]['author'] = previous[i+5]\r\n #data_base[url]['content1'] = previous[i+6]\r\n #data_base[url]['content2'] = previous[i+7]\r\n\r\n i += 10\r\n\r\n\r\n except FileNotFoundError:\r\n pass\r\n\r\ndef check_last_update(url,date):\r\n count = 0\r\n for u in url:\r\n d = regex.get_data('\\S+\\/(\\d+\\/\\d+\\/\\d+)\\S+',u)[0]\r\n d = int(re.sub(r'/', '', d))\r\n if d < date:\r\n return 
count\r\n\r\n count += 1\r\n\r\n return -1\r\n\r\n\r\ndef MinEditDist(s1, s2):\r\n if len(s1) > len(s2):\r\n s1, s2 = s2, s1\r\n\r\n distances = range(len(s1) + 1)\r\n for i2, c2 in enumerate(s2):\r\n distances_ = [i2+1]\r\n for i1, c1 in enumerate(s1):\r\n if c1 == c2:\r\n distances_.append(distances[i1])\r\n else:\r\n distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))\r\n distances = distances_\r\n return distances[-1]\r\n", "step-ids": [ 0, 7, 8, 9, 11 ] }
[ 0, 7, 8, 9, 11 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class warning_test(paw_test): <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class warning_test(paw_test): def test_warning_badchars(self): self.paw.cset_lookup(self.badchar) self.assertEqual(1, self.paw.wcount) <|reserved_special_token_1|> from .base import paw_test class warning_test(paw_test): def test_warning_badchars(self): self.paw.cset_lookup(self.badchar) self.assertEqual(1, self.paw.wcount)
flexible
{ "blob_id": "b4c6075aabe833f6fe23471f608d928edd25ef63", "index": 372, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass warning_test(paw_test):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n", "step-4": "from .base import paw_test\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from .linked_list import LinkedList class Queue: def __init__(self): self.list = LinkedList() def enqueue(self, value): self.list.insert_last(value) def dequeue(self): element = self.list.get_head() self.list.remove_first() return element def front(self): return self.list.get_tail() def rear(self): return self.list.get_head()
normal
{ "blob_id": "4830da6bee6b19a5e5a82a73d2f3b220ca59d28b", "index": 9025, "step-1": "<mask token>\n\n\nclass Queue:\n <mask token>\n <mask token>\n <mask token>\n\n def front(self):\n return self.list.get_tail()\n\n def rear(self):\n return self.list.get_head()\n", "step-2": "<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.list = LinkedList()\n <mask token>\n\n def dequeue(self):\n element = self.list.get_head()\n self.list.remove_first()\n return element\n\n def front(self):\n return self.list.get_tail()\n\n def rear(self):\n return self.list.get_head()\n", "step-3": "<mask token>\n\n\nclass Queue:\n\n def __init__(self):\n self.list = LinkedList()\n\n def enqueue(self, value):\n self.list.insert_last(value)\n\n def dequeue(self):\n element = self.list.get_head()\n self.list.remove_first()\n return element\n\n def front(self):\n return self.list.get_tail()\n\n def rear(self):\n return self.list.get_head()\n", "step-4": "from .linked_list import LinkedList\n\n\nclass Queue:\n\n def __init__(self):\n self.list = LinkedList()\n\n def enqueue(self, value):\n self.list.insert_last(value)\n\n def dequeue(self):\n element = self.list.get_head()\n self.list.remove_first()\n return element\n\n def front(self):\n return self.list.get_tail()\n\n def rear(self):\n return self.list.get_head()\n", "step-5": null, "step-ids": [ 3, 5, 6, 7 ] }
[ 3, 5, 6, 7 ]
from django.http import HttpResponse from django.shortcuts import render from .models import game def index(request): all_games = game.objects.all() context = { 'all_games' : all_games } return render(request,'game/index.html',context) def gameview(response): return HttpResponse("<h1>Ludo King</h1>")
normal
{ "blob_id": "6623ac194e380c9554d72a1b20bf860b958dda97", "index": 5961, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n", "step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n", "step-5": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\ndef index(request):\n all_games = game.objects.all()\n context = {\n 'all_games' : all_games\n }\n return render(request,'game/index.html',context)\n\ndef gameview(response):\n return HttpResponse(\"<h1>Ludo King</h1>\")\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def calc(): height = v_height.get() base = v_base.get() print(f'height is {height}') print(f'Basal length is {base}') length = math.isqrt(height * height + base * base) print('Lenght is {:.2f}'.format(length)) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> GUI.title('My Cal Program') GUI.geometry('500x500') def calc(): height = v_height.get() base = v_base.get() print(f'height is {height}') print(f'Basal length is {base}') length = math.isqrt(height * height + base * base) print('Lenght is {:.2f}'.format(length)) <|reserved_special_token_0|> L1.pack() <|reserved_special_token_0|> E1.pack(pady=8, ipady=7, ipadx=17) <|reserved_special_token_0|> L2.pack() <|reserved_special_token_0|> E2.pack(pady=8, ipady=7, ipadx=17) <|reserved_special_token_0|> B1.pack() <|reserved_special_token_0|> v_result.set('----Result----') <|reserved_special_token_0|> Result.pack() GUI.mainloop() <|reserved_special_token_1|> <|reserved_special_token_0|> GUI = Tk() GUI.title('My Cal Program') GUI.geometry('500x500') def calc(): height = v_height.get() base = v_base.get() print(f'height is {height}') print(f'Basal length is {base}') length = math.isqrt(height * height + base * base) print('Lenght is {:.2f}'.format(length)) <|reserved_special_token_0|> v_height = IntVar() v_base = IntVar() L1 = Label(text='Please input height', foreground='red', font=( 'Angsana New', 15)) L1.pack() E1 = ttk.Entry(GUI, textvariable=v_height) E1.pack(pady=8, ipady=7, ipadx=17) L2 = Label(text='Please input basal length', foreground='red', font=( 'Angsana New', 15)) L2.pack() E2 = ttk.Entry(GUI, textvariable=v_base) E2.pack(pady=8, ipady=7, ipadx=17) B1 = ttk.Button(text='Calculate', command=calc) B1.pack() v_result = StringVar() v_result.set('----Result----') Result = ttk.Label(GUI, textvariable=v_result, foreground='green', font=( 'Angsana New', 15)) Result.pack() GUI.mainloop() <|reserved_special_token_1|> from tkinter import * from tkinter 
import ttk import math GUI = Tk() GUI.title('My Cal Program') GUI.geometry('500x500') def calc(): height = v_height.get() base = v_base.get() print(f'height is {height}') print(f'Basal length is {base}') length = math.isqrt(height * height + base * base) print('Lenght is {:.2f}'.format(length)) <|reserved_special_token_0|> v_height = IntVar() v_base = IntVar() L1 = Label(text='Please input height', foreground='red', font=( 'Angsana New', 15)) L1.pack() E1 = ttk.Entry(GUI, textvariable=v_height) E1.pack(pady=8, ipady=7, ipadx=17) L2 = Label(text='Please input basal length', foreground='red', font=( 'Angsana New', 15)) L2.pack() E2 = ttk.Entry(GUI, textvariable=v_base) E2.pack(pady=8, ipady=7, ipadx=17) B1 = ttk.Button(text='Calculate', command=calc) B1.pack() v_result = StringVar() v_result.set('----Result----') Result = ttk.Label(GUI, textvariable=v_result, foreground='green', font=( 'Angsana New', 15)) Result.pack() GUI.mainloop() <|reserved_special_token_1|> #GUIcal.py from tkinter import * from tkinter import ttk import math GUI=Tk() GUI.title('My Cal Program') GUI.geometry('500x500') def calc(): height=v_height.get() base=v_base.get()#ดึงค่ามาจากv_base print(f'height is {height}') print(f'Basal length is {base}') length= math.isqrt((height*height)+(base*base)) print('Lenght is {:.2f}'.format(length)) ###For attach picture ''' IMG=PhotoImage(file='pythagorus-theorem.png').subsample(3) IM1=Label(GUI,image=IMG) IM1.pack() ''' v_height=IntVar() v_base=IntVar() L1=Label(text='Please input height',foreground='red',font=('Angsana New',15)) L1.pack() E1=ttk.Entry(GUI,textvariable=v_height) E1.pack(pady=8,ipady=7,ipadx=17) L2=Label(text='Please input basal length',foreground='red',font=('Angsana New',15)) L2.pack() E2=ttk.Entry(GUI,textvariable=v_base) E2.pack(pady=8,ipady=7,ipadx=17) B1=ttk.Button(text='Calculate',command=calc) B1.pack() v_result=StringVar() v_result.set('----Result----') Result=ttk.Label(GUI,textvariable=v_result,foreground='green',font=('Angsana 
New',15)) Result.pack() GUI.mainloop()
flexible
{ "blob_id": "77d7fb49ed4c3e78b148cd446e9a5c6a0e6fac8b", "index": 835, "step-1": "<mask token>\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<mask token>\n", "step-2": "<mask token>\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<mask token>\nL1.pack()\n<mask token>\nE1.pack(pady=8, ipady=7, ipadx=17)\n<mask token>\nL2.pack()\n<mask token>\nE2.pack(pady=8, ipady=7, ipadx=17)\n<mask token>\nB1.pack()\n<mask token>\nv_result.set('----Result----')\n<mask token>\nResult.pack()\nGUI.mainloop()\n", "step-3": "<mask token>\nGUI = Tk()\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<mask token>\nv_height = IntVar()\nv_base = IntVar()\nL1 = Label(text='Please input height', foreground='red', font=(\n 'Angsana New', 15))\nL1.pack()\nE1 = ttk.Entry(GUI, textvariable=v_height)\nE1.pack(pady=8, ipady=7, ipadx=17)\nL2 = Label(text='Please input basal length', foreground='red', font=(\n 'Angsana New', 15))\nL2.pack()\nE2 = ttk.Entry(GUI, textvariable=v_base)\nE2.pack(pady=8, ipady=7, ipadx=17)\nB1 = ttk.Button(text='Calculate', command=calc)\nB1.pack()\nv_result = StringVar()\nv_result.set('----Result----')\nResult = ttk.Label(GUI, textvariable=v_result, foreground='green', font=(\n 'Angsana New', 15))\nResult.pack()\nGUI.mainloop()\n", "step-4": "from tkinter import *\nfrom tkinter import ttk\nimport math\nGUI = 
Tk()\nGUI.title('My Cal Program')\nGUI.geometry('500x500')\n\n\ndef calc():\n height = v_height.get()\n base = v_base.get()\n print(f'height is {height}')\n print(f'Basal length is {base}')\n length = math.isqrt(height * height + base * base)\n print('Lenght is {:.2f}'.format(length))\n\n\n<mask token>\nv_height = IntVar()\nv_base = IntVar()\nL1 = Label(text='Please input height', foreground='red', font=(\n 'Angsana New', 15))\nL1.pack()\nE1 = ttk.Entry(GUI, textvariable=v_height)\nE1.pack(pady=8, ipady=7, ipadx=17)\nL2 = Label(text='Please input basal length', foreground='red', font=(\n 'Angsana New', 15))\nL2.pack()\nE2 = ttk.Entry(GUI, textvariable=v_base)\nE2.pack(pady=8, ipady=7, ipadx=17)\nB1 = ttk.Button(text='Calculate', command=calc)\nB1.pack()\nv_result = StringVar()\nv_result.set('----Result----')\nResult = ttk.Label(GUI, textvariable=v_result, foreground='green', font=(\n 'Angsana New', 15))\nResult.pack()\nGUI.mainloop()\n", "step-5": "#GUIcal.py\r\nfrom tkinter import *\r\nfrom tkinter import ttk\r\nimport math\r\n\r\nGUI=Tk()\r\nGUI.title('My Cal Program')\r\nGUI.geometry('500x500')\r\n\r\ndef calc():\r\n\theight=v_height.get()\r\n\tbase=v_base.get()#ดึงค่ามาจากv_base\r\n\tprint(f'height is {height}')\r\n\tprint(f'Basal length is {base}')\r\n\tlength= math.isqrt((height*height)+(base*base))\r\n\tprint('Lenght is {:.2f}'.format(length))\r\n\t\r\n###For attach picture\r\n'''\r\nIMG=PhotoImage(file='pythagorus-theorem.png').subsample(3)\r\nIM1=Label(GUI,image=IMG)\r\nIM1.pack()\r\n'''\r\nv_height=IntVar()\r\nv_base=IntVar()\r\n\r\nL1=Label(text='Please input height',foreground='red',font=('Angsana New',15))\r\nL1.pack()\r\nE1=ttk.Entry(GUI,textvariable=v_height)\r\nE1.pack(pady=8,ipady=7,ipadx=17)\r\n\r\n\r\nL2=Label(text='Please input basal length',foreground='red',font=('Angsana 
New',15))\r\nL2.pack()\r\nE2=ttk.Entry(GUI,textvariable=v_base)\r\nE2.pack(pady=8,ipady=7,ipadx=17)\r\n\r\n\r\nB1=ttk.Button(text='Calculate',command=calc)\r\nB1.pack()\r\n\r\nv_result=StringVar()\r\nv_result.set('----Result----')\r\nResult=ttk.Label(GUI,textvariable=v_result,foreground='green',font=('Angsana New',15))\r\nResult.pack()\r\n\r\nGUI.mainloop()\r\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class Point: def __init__(self, x: int, y: int): self.x = x self.y = y def create_point(self): point = [self.x, self.y] return point @staticmethod def calculate_distance(point_1: [], point_2: []): side_a = abs(point_1.x - point_2.x) side_b = abs(point_1.y - point_2.y) side_c = math.sqrt(side_a ** 2 + side_b ** 2) return side_c <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Point: def __init__(self, x: int, y: int): self.x = x self.y = y def create_point(self): point = [self.x, self.y] return point @staticmethod def calculate_distance(point_1: [], point_2: []): side_a = abs(point_1.x - point_2.x) side_b = abs(point_1.y - point_2.y) side_c = math.sqrt(side_a ** 2 + side_b ** 2) return side_c <|reserved_special_token_0|> while n > 0: n -= 1 a, b = [int(x) for x in input().split()] point = Point(a, b).create_point() total_points.append(point) <|reserved_special_token_0|> for index_1 in range(len(total_points)): for index_2 in range(len(total_points)): if index_1 != index_2: segment = Point(total_points[index_1][0], total_points[index_2][0]) segment_list.append(segment) <|reserved_special_token_1|> <|reserved_special_token_0|> class Point: def __init__(self, x: int, y: int): self.x = x self.y = y def create_point(self): point = [self.x, self.y] return point @staticmethod def calculate_distance(point_1: [], point_2: []): side_a = abs(point_1.x - point_2.x) side_b = abs(point_1.y - point_2.y) side_c = math.sqrt(side_a ** 2 + side_b ** 2) return side_c n = int(input()) total_points = [] while n > 0: n -= 1 a, b = [int(x) for x in input().split()] point = Point(a, b).create_point() total_points.append(point) segment_list = [] for index_1 in range(len(total_points)): for index_2 in range(len(total_points)): if index_1 != index_2: segment = Point(total_points[index_1][0], total_points[index_2][0]) segment_list.append(segment) <|reserved_special_token_1|> import math class Point: def __init__(self, x: 
int, y: int): self.x = x self.y = y def create_point(self): point = [self.x, self.y] return point @staticmethod def calculate_distance(point_1: [], point_2: []): side_a = abs(point_1.x - point_2.x) side_b = abs(point_1.y - point_2.y) side_c = math.sqrt(side_a ** 2 + side_b ** 2) return side_c n = int(input()) total_points = [] while n > 0: n -= 1 a, b = [int(x) for x in input().split()] point = Point(a, b).create_point() total_points.append(point) segment_list = [] for index_1 in range(len(total_points)): for index_2 in range(len(total_points)): if index_1 != index_2: segment = Point(total_points[index_1][0], total_points[index_2][0]) segment_list.append(segment)
flexible
{ "blob_id": "cda7595e46528739cad49a5d62a80bc7b2087157", "index": 1911, "step-1": "<mask token>\n\n\nclass Point:\n\n def __init__(self, x: int, y: int):\n self.x = x\n self.y = y\n\n def create_point(self):\n point = [self.x, self.y]\n return point\n\n @staticmethod\n def calculate_distance(point_1: [], point_2: []):\n side_a = abs(point_1.x - point_2.x)\n side_b = abs(point_1.y - point_2.y)\n side_c = math.sqrt(side_a ** 2 + side_b ** 2)\n return side_c\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Point:\n\n def __init__(self, x: int, y: int):\n self.x = x\n self.y = y\n\n def create_point(self):\n point = [self.x, self.y]\n return point\n\n @staticmethod\n def calculate_distance(point_1: [], point_2: []):\n side_a = abs(point_1.x - point_2.x)\n side_b = abs(point_1.y - point_2.y)\n side_c = math.sqrt(side_a ** 2 + side_b ** 2)\n return side_c\n\n\n<mask token>\nwhile n > 0:\n n -= 1\n a, b = [int(x) for x in input().split()]\n point = Point(a, b).create_point()\n total_points.append(point)\n<mask token>\nfor index_1 in range(len(total_points)):\n for index_2 in range(len(total_points)):\n if index_1 != index_2:\n segment = Point(total_points[index_1][0], total_points[index_2][0])\n segment_list.append(segment)\n", "step-3": "<mask token>\n\n\nclass Point:\n\n def __init__(self, x: int, y: int):\n self.x = x\n self.y = y\n\n def create_point(self):\n point = [self.x, self.y]\n return point\n\n @staticmethod\n def calculate_distance(point_1: [], point_2: []):\n side_a = abs(point_1.x - point_2.x)\n side_b = abs(point_1.y - point_2.y)\n side_c = math.sqrt(side_a ** 2 + side_b ** 2)\n return side_c\n\n\nn = int(input())\ntotal_points = []\nwhile n > 0:\n n -= 1\n a, b = [int(x) for x in input().split()]\n point = Point(a, b).create_point()\n total_points.append(point)\nsegment_list = []\nfor index_1 in range(len(total_points)):\n for index_2 in range(len(total_points)):\n if index_1 != index_2:\n segment = Point(total_points[index_1][0], 
total_points[index_2][0])\n segment_list.append(segment)\n", "step-4": "import math\n\n\nclass Point:\n\n def __init__(self, x: int, y: int):\n self.x = x\n self.y = y\n\n def create_point(self):\n point = [self.x, self.y]\n return point\n\n @staticmethod\n def calculate_distance(point_1: [], point_2: []):\n side_a = abs(point_1.x - point_2.x)\n side_b = abs(point_1.y - point_2.y)\n side_c = math.sqrt(side_a ** 2 + side_b ** 2)\n return side_c\n\n\nn = int(input())\ntotal_points = []\nwhile n > 0:\n n -= 1\n a, b = [int(x) for x in input().split()]\n point = Point(a, b).create_point()\n total_points.append(point)\nsegment_list = []\nfor index_1 in range(len(total_points)):\n for index_2 in range(len(total_points)):\n if index_1 != index_2:\n segment = Point(total_points[index_1][0], total_points[index_2][0])\n segment_list.append(segment)\n", "step-5": null, "step-ids": [ 4, 5, 6, 7 ] }
[ 4, 5, 6, 7 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def test(): tup = File.readInput('file.txt') graph = tup[0] edgeData = tup[1] ctrl = Controller(graph, edgeData) vertices = ctrl.nrVertices() itv = verticesIterator(vertices) assert itv.valid() cont = 0 while itv.valid(): cont += 1 e = itv.getCurrent() itv.next() e = itv.getCurrent() assert ctrl.existsVertex(e) assert cont == ctrl.nrVertices() itv.first() assert itv.valid() ite = EdgesIterator(graph.getInbound(3)) assert ite.valid() assert ite.getCurrent() == 1 ite.next() assert ite.valid() assert ite.getCurrent() == 2 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def test(): tup = File.readInput('file.txt') graph = tup[0] edgeData = tup[1] ctrl = Controller(graph, edgeData) vertices = ctrl.nrVertices() itv = verticesIterator(vertices) assert itv.valid() cont = 0 while itv.valid(): cont += 1 e = itv.getCurrent() itv.next() e = itv.getCurrent() assert ctrl.existsVertex(e) assert cont == ctrl.nrVertices() itv.first() assert itv.valid() ite = EdgesIterator(graph.getInbound(3)) assert ite.valid() assert ite.getCurrent() == 1 ite.next() assert ite.valid() assert ite.getCurrent() == 2 test() <|reserved_special_token_1|> <|reserved_special_token_0|> from Graph import Graph from Controller import * from Iterators.Vertices import * from File import File from Iterators.EdgesIterator import EdgesIterator def test(): tup = File.readInput('file.txt') graph = tup[0] edgeData = tup[1] ctrl = Controller(graph, edgeData) vertices = ctrl.nrVertices() itv = verticesIterator(vertices) assert itv.valid() cont = 0 while itv.valid(): cont += 1 e = itv.getCurrent() itv.next() e = itv.getCurrent() assert ctrl.existsVertex(e) assert cont == ctrl.nrVertices() itv.first() assert itv.valid() ite = EdgesIterator(graph.getInbound(3)) assert ite.valid() assert ite.getCurrent() == 1 ite.next() assert ite.valid() assert ite.getCurrent() == 2 test() 
<|reserved_special_token_1|> ''' Created on Mar 27, 2019 @author: Iulia ''' from Graph import Graph from Controller import * from Iterators.Vertices import * from File import File from Iterators.EdgesIterator import EdgesIterator def test(): tup = File.readInput("file.txt") graph = tup[0] edgeData = tup[1] ctrl = Controller(graph, edgeData) vertices = ctrl.nrVertices() itv = verticesIterator(vertices) assert(itv.valid()) cont = 0 while (itv.valid()): cont += 1 e = itv.getCurrent() itv.next() e = itv.getCurrent() assert(ctrl.existsVertex(e)) assert(cont == ctrl.nrVertices()) itv.first() assert(itv.valid()) ################ ite = EdgesIterator(graph.getInbound(3)) assert(ite.valid()) assert(ite.getCurrent() == 1) ite.next() assert(ite.valid()) assert(ite.getCurrent() == 2) test()
flexible
{ "blob_id": "b01ff71792895bb8839e09ae8c4a449405349990", "index": 7066, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test():\n tup = File.readInput('file.txt')\n graph = tup[0]\n edgeData = tup[1]\n ctrl = Controller(graph, edgeData)\n vertices = ctrl.nrVertices()\n itv = verticesIterator(vertices)\n assert itv.valid()\n cont = 0\n while itv.valid():\n cont += 1\n e = itv.getCurrent()\n itv.next()\n e = itv.getCurrent()\n assert ctrl.existsVertex(e)\n assert cont == ctrl.nrVertices()\n itv.first()\n assert itv.valid()\n ite = EdgesIterator(graph.getInbound(3))\n assert ite.valid()\n assert ite.getCurrent() == 1\n ite.next()\n assert ite.valid()\n assert ite.getCurrent() == 2\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef test():\n tup = File.readInput('file.txt')\n graph = tup[0]\n edgeData = tup[1]\n ctrl = Controller(graph, edgeData)\n vertices = ctrl.nrVertices()\n itv = verticesIterator(vertices)\n assert itv.valid()\n cont = 0\n while itv.valid():\n cont += 1\n e = itv.getCurrent()\n itv.next()\n e = itv.getCurrent()\n assert ctrl.existsVertex(e)\n assert cont == ctrl.nrVertices()\n itv.first()\n assert itv.valid()\n ite = EdgesIterator(graph.getInbound(3))\n assert ite.valid()\n assert ite.getCurrent() == 1\n ite.next()\n assert ite.valid()\n assert ite.getCurrent() == 2\n\n\ntest()\n", "step-4": "<mask token>\nfrom Graph import Graph\nfrom Controller import *\nfrom Iterators.Vertices import *\nfrom File import File\nfrom Iterators.EdgesIterator import EdgesIterator\n\n\ndef test():\n tup = File.readInput('file.txt')\n graph = tup[0]\n edgeData = tup[1]\n ctrl = Controller(graph, edgeData)\n vertices = ctrl.nrVertices()\n itv = verticesIterator(vertices)\n assert itv.valid()\n cont = 0\n while itv.valid():\n cont += 1\n e = itv.getCurrent()\n itv.next()\n e = itv.getCurrent()\n assert ctrl.existsVertex(e)\n assert cont == ctrl.nrVertices()\n itv.first()\n assert itv.valid()\n ite = EdgesIterator(graph.getInbound(3))\n assert 
ite.valid()\n assert ite.getCurrent() == 1\n ite.next()\n assert ite.valid()\n assert ite.getCurrent() == 2\n\n\ntest()\n", "step-5": "'''\nCreated on Mar 27, 2019\n\n@author: Iulia\n'''\n\nfrom Graph import Graph\nfrom Controller import *\nfrom Iterators.Vertices import *\nfrom File import File\nfrom Iterators.EdgesIterator import EdgesIterator\n\ndef test():\n tup = File.readInput(\"file.txt\")\n graph = tup[0]\n edgeData = tup[1]\n ctrl = Controller(graph, edgeData)\n \n vertices = ctrl.nrVertices()\n \n itv = verticesIterator(vertices)\n assert(itv.valid())\n \n cont = 0\n while (itv.valid()):\n cont += 1\n e = itv.getCurrent()\n itv.next() \n e = itv.getCurrent()\n assert(ctrl.existsVertex(e))\n assert(cont == ctrl.nrVertices())\n itv.first()\n assert(itv.valid())\n \n ################\n ite = EdgesIterator(graph.getInbound(3))\n assert(ite.valid())\n assert(ite.getCurrent() == 1)\n ite.next()\n assert(ite.valid())\n assert(ite.getCurrent() == 2)\n \ntest()\n ", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class UpYunStore(object): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def __init__(self, uri): assert uri.startswith('upyun://') self.session = requests.Session() self.bucket, self.prefix = uri[8:].split('/', 1) def stat_file(self, path, info): """ TODO fetch and return file meta info from cloud """ return {} def persist_file(self, path, buf, info, meta=None, headers=None): """Upload file to Azure blob storage""" headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR, self.SIGNATURE), 'Date': format_date_time(int(time.time()))} url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket, self.prefix, path) def upload(): try: res = requests.put(url, headers=headers, data=buf) if res.status_code != 200: logger.info( 'failed to upload file %s to upyun, response code: %s, text:\n%s' , path, res.status_code, res.text) else: logger.debug('uploaded file %s to upyun', path) except Exception: logger.warn('upload file %s to upyun failed', path, exc_info=True) return threads.deferToThread(upload) class MbCrawlImagesPipeline(FilesPipeline): STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES) STORE_SCHEMES['upyun'] = UpYunStore @classmethod def from_settings(cls, settings): upyunStore = cls.STORE_SCHEMES['upyun'] upyunStore.OPERATOR = settings['UPYUN_OPERATOR'] UpYunStore.SIGNATURE = settings['SIGNATURE'] return super().from_settings(settings) <|reserved_special_token_1|> <|reserved_special_token_0|> class UpYunStore(object): OPERATOR = None SIGNATURE = None HEADERS = {'Cache-Control': 'max-age=172800'} def __init__(self, uri): assert uri.startswith('upyun://') self.session = requests.Session() self.bucket, self.prefix = uri[8:].split('/', 1) def stat_file(self, path, info): """ TODO fetch and return file meta info from cloud """ return {} def persist_file(self, path, buf, info, meta=None, headers=None): """Upload file to Azure blob storage""" headers = {'Authorization': 'UPYUN: 
{}:{}'.format(self.OPERATOR, self.SIGNATURE), 'Date': format_date_time(int(time.time()))} url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket, self.prefix, path) def upload(): try: res = requests.put(url, headers=headers, data=buf) if res.status_code != 200: logger.info( 'failed to upload file %s to upyun, response code: %s, text:\n%s' , path, res.status_code, res.text) else: logger.debug('uploaded file %s to upyun', path) except Exception: logger.warn('upload file %s to upyun failed', path, exc_info=True) return threads.deferToThread(upload) class MbCrawlImagesPipeline(FilesPipeline): STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES) STORE_SCHEMES['upyun'] = UpYunStore @classmethod def from_settings(cls, settings): upyunStore = cls.STORE_SCHEMES['upyun'] upyunStore.OPERATOR = settings['UPYUN_OPERATOR'] UpYunStore.SIGNATURE = settings['SIGNATURE'] return super().from_settings(settings) <|reserved_special_token_1|> <|reserved_special_token_0|> class DBStorePipeline(object): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def spider_closed(self, spider): self.dbpool.close() <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def _handle_error(self, e): logger.error('failed to track item to DB: %s', e) class UpYunStore(object): OPERATOR = None SIGNATURE = None HEADERS = {'Cache-Control': 'max-age=172800'} def __init__(self, uri): assert uri.startswith('upyun://') self.session = requests.Session() self.bucket, self.prefix = uri[8:].split('/', 1) def stat_file(self, path, info): """ TODO fetch and return file meta info from cloud """ return {} def persist_file(self, path, buf, info, meta=None, headers=None): """Upload file to Azure blob storage""" headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR, self.SIGNATURE), 'Date': format_date_time(int(time.time()))} url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket, self.prefix, path) def upload(): try: res = 
requests.put(url, headers=headers, data=buf) if res.status_code != 200: logger.info( 'failed to upload file %s to upyun, response code: %s, text:\n%s' , path, res.status_code, res.text) else: logger.debug('uploaded file %s to upyun', path) except Exception: logger.warn('upload file %s to upyun failed', path, exc_info=True) return threads.deferToThread(upload) class MbCrawlImagesPipeline(FilesPipeline): STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES) STORE_SCHEMES['upyun'] = UpYunStore @classmethod def from_settings(cls, settings): upyunStore = cls.STORE_SCHEMES['upyun'] upyunStore.OPERATOR = settings['UPYUN_OPERATOR'] UpYunStore.SIGNATURE = settings['SIGNATURE'] return super().from_settings(settings) <|reserved_special_token_1|> <|reserved_special_token_0|> class DBStorePipeline(object): <|reserved_special_token_0|> @classmethod def from_crawler(cls, crawler): instance = cls(crawler.stats, crawler.settings) crawler.signals.connect(instance.spider_closed, signals.spider_closed) return instance def __init__(self, stats, settings): self.dbpool = adbapi.ConnectionPool('psycopg2', settings['DB_DSN']) self.stats = stats def spider_closed(self, spider): self.dbpool.close() def process_item(self, item, spider): table = getattr(item, 'db_table', None) if not table: return item query = self.dbpool.runInteraction(self._save_item, table, item) query.addErrback(self._handle_error) return item <|reserved_special_token_0|> <|reserved_special_token_0|> def _handle_error(self, e): logger.error('failed to track item to DB: %s', e) class UpYunStore(object): OPERATOR = None SIGNATURE = None HEADERS = {'Cache-Control': 'max-age=172800'} def __init__(self, uri): assert uri.startswith('upyun://') self.session = requests.Session() self.bucket, self.prefix = uri[8:].split('/', 1) def stat_file(self, path, info): """ TODO fetch and return file meta info from cloud """ return {} def persist_file(self, path, buf, info, meta=None, headers=None): """Upload file to Azure blob storage""" 
headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR, self.SIGNATURE), 'Date': format_date_time(int(time.time()))} url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket, self.prefix, path) def upload(): try: res = requests.put(url, headers=headers, data=buf) if res.status_code != 200: logger.info( 'failed to upload file %s to upyun, response code: %s, text:\n%s' , path, res.status_code, res.text) else: logger.debug('uploaded file %s to upyun', path) except Exception: logger.warn('upload file %s to upyun failed', path, exc_info=True) return threads.deferToThread(upload) class MbCrawlImagesPipeline(FilesPipeline): STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES) STORE_SCHEMES['upyun'] = UpYunStore @classmethod def from_settings(cls, settings): upyunStore = cls.STORE_SCHEMES['upyun'] upyunStore.OPERATOR = settings['UPYUN_OPERATOR'] UpYunStore.SIGNATURE = settings['SIGNATURE'] return super().from_settings(settings) <|reserved_special_token_1|> # -*- coding: utf-8 -*- # Item pipelines import logging import hashlib from wsgiref.handlers import format_date_time import time import itertools import psycopg2 from psycopg2.extensions import AsIs from psycopg2.extras import Json import requests from scrapy import signals from scrapy.pipelines.files import FilesPipeline from twisted.enterprise import adbapi from twisted.internet import threads logger = logging.getLogger(__name__) class DBStorePipeline(object): ''' This class save the crawled item to a PostgreSQL table The db operation is async and managed by the twisted reactor loop. 
(References from https://gist.github.com/tzermias/6982723) ''' @classmethod def from_crawler(cls, crawler): instance = cls(crawler.stats, crawler.settings) crawler.signals.connect(instance.spider_closed, signals.spider_closed) return instance def __init__(self, stats, settings): # Instantiate DB self.dbpool = adbapi.ConnectionPool('psycopg2', settings['DB_DSN']) self.stats = stats def spider_closed(self, spider): self.dbpool.close() def process_item(self, item, spider): table = getattr(item, "db_table", None) if not table: return item query = self.dbpool.runInteraction(self._save_item, table, item) query.addErrback(self._handle_error) return item def _save_item(self, tx, table, item): skip_fields = getattr(item, "db_skip_fields", []) cols = [k for k in item if k not in skip_fields] self._insert_row(tx, table, cols, item) self.stats.inc_value('database/records_added') if hasattr(item, "db_helper_table_rows"): helper_table, helper_rows = item.db_helper_table_rows() if helper_rows: self._insert_row(tx, helper_table, helper_rows[0].keys(), *helper_rows) self.stats.inc_value( 'database/records_added', len(helper_rows)) return item def _insert_row(self, tx, table, cols, *rows): val_fmt = "({})".format(",".join(itertools.repeat("%s", len(cols)))) def mk_row_param(row): return tuple(row[k] for k in cols) data_str = ','.join(tx.mogrify(val_fmt, mk_row_param(row)).decode('utf-8') for row in rows) q = "INSERT INTO {} ({}) VALUES ".format(table, ",".join(cols)) tx.execute(q + data_str) def _handle_error(self, e): logger.error("failed to track item to DB: %s", e) class UpYunStore(object): OPERATOR = None SIGNATURE = None HEADERS = { 'Cache-Control': 'max-age=172800', } def __init__(self, uri): assert uri.startswith('upyun://') self.session = requests.Session() self.bucket, self.prefix = uri[8:].split("/", 1) def stat_file(self, path, info): """ TODO fetch and return file meta info from cloud """ return {} def persist_file(self, path, buf, info, meta=None, headers=None): 
"""Upload file to Azure blob storage""" headers = { "Authorization": "UPYUN: {}:{}".format(self.OPERATOR, self.SIGNATURE), "Date": format_date_time(int(time.time())), } url = "http://v0.api.upyun.com:5000/{}/{}{}".format( self.bucket, self.prefix, path) def upload(): try: res = requests.put(url, headers=headers, data=buf) if res.status_code != 200: logger.info( "failed to upload file %s to upyun, response code: %s, text:\n%s", path, res.status_code, res.text) else: logger.debug("uploaded file %s to upyun", path) except Exception: logger.warn("upload file %s to upyun failed", path, exc_info=True) return threads.deferToThread(upload) class MbCrawlImagesPipeline(FilesPipeline): STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES) STORE_SCHEMES["upyun"] = UpYunStore @classmethod def from_settings(cls, settings): upyunStore = cls.STORE_SCHEMES["upyun"] upyunStore.OPERATOR = settings["UPYUN_OPERATOR"] UpYunStore.SIGNATURE = settings["SIGNATURE"] return super().from_settings(settings)
flexible
{ "blob_id": "d08e4c85890dab7cb421fa994ef1947d8919d58f", "index": 8547, "step-1": "<mask token>\n\n\nclass UpYunStore(object):\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, uri):\n assert uri.startswith('upyun://')\n self.session = requests.Session()\n self.bucket, self.prefix = uri[8:].split('/', 1)\n\n def stat_file(self, path, info):\n \"\"\"\n TODO fetch and return file meta info from cloud\n \"\"\"\n return {}\n\n def persist_file(self, path, buf, info, meta=None, headers=None):\n \"\"\"Upload file to Azure blob storage\"\"\"\n headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR,\n self.SIGNATURE), 'Date': format_date_time(int(time.time()))}\n url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket,\n self.prefix, path)\n\n def upload():\n try:\n res = requests.put(url, headers=headers, data=buf)\n if res.status_code != 200:\n logger.info(\n 'failed to upload file %s to upyun, response code: %s, text:\\n%s'\n , path, res.status_code, res.text)\n else:\n logger.debug('uploaded file %s to upyun', path)\n except Exception:\n logger.warn('upload file %s to upyun failed', path,\n exc_info=True)\n return threads.deferToThread(upload)\n\n\nclass MbCrawlImagesPipeline(FilesPipeline):\n STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES)\n STORE_SCHEMES['upyun'] = UpYunStore\n\n @classmethod\n def from_settings(cls, settings):\n upyunStore = cls.STORE_SCHEMES['upyun']\n upyunStore.OPERATOR = settings['UPYUN_OPERATOR']\n UpYunStore.SIGNATURE = settings['SIGNATURE']\n return super().from_settings(settings)\n", "step-2": "<mask token>\n\n\nclass UpYunStore(object):\n OPERATOR = None\n SIGNATURE = None\n HEADERS = {'Cache-Control': 'max-age=172800'}\n\n def __init__(self, uri):\n assert uri.startswith('upyun://')\n self.session = requests.Session()\n self.bucket, self.prefix = uri[8:].split('/', 1)\n\n def stat_file(self, path, info):\n \"\"\"\n TODO fetch and return file meta info from cloud\n \"\"\"\n return {}\n\n def 
persist_file(self, path, buf, info, meta=None, headers=None):\n \"\"\"Upload file to Azure blob storage\"\"\"\n headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR,\n self.SIGNATURE), 'Date': format_date_time(int(time.time()))}\n url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket,\n self.prefix, path)\n\n def upload():\n try:\n res = requests.put(url, headers=headers, data=buf)\n if res.status_code != 200:\n logger.info(\n 'failed to upload file %s to upyun, response code: %s, text:\\n%s'\n , path, res.status_code, res.text)\n else:\n logger.debug('uploaded file %s to upyun', path)\n except Exception:\n logger.warn('upload file %s to upyun failed', path,\n exc_info=True)\n return threads.deferToThread(upload)\n\n\nclass MbCrawlImagesPipeline(FilesPipeline):\n STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES)\n STORE_SCHEMES['upyun'] = UpYunStore\n\n @classmethod\n def from_settings(cls, settings):\n upyunStore = cls.STORE_SCHEMES['upyun']\n upyunStore.OPERATOR = settings['UPYUN_OPERATOR']\n UpYunStore.SIGNATURE = settings['SIGNATURE']\n return super().from_settings(settings)\n", "step-3": "<mask token>\n\n\nclass DBStorePipeline(object):\n <mask token>\n <mask token>\n <mask token>\n\n def spider_closed(self, spider):\n self.dbpool.close()\n <mask token>\n <mask token>\n <mask token>\n\n def _handle_error(self, e):\n logger.error('failed to track item to DB: %s', e)\n\n\nclass UpYunStore(object):\n OPERATOR = None\n SIGNATURE = None\n HEADERS = {'Cache-Control': 'max-age=172800'}\n\n def __init__(self, uri):\n assert uri.startswith('upyun://')\n self.session = requests.Session()\n self.bucket, self.prefix = uri[8:].split('/', 1)\n\n def stat_file(self, path, info):\n \"\"\"\n TODO fetch and return file meta info from cloud\n \"\"\"\n return {}\n\n def persist_file(self, path, buf, info, meta=None, headers=None):\n \"\"\"Upload file to Azure blob storage\"\"\"\n headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR,\n 
self.SIGNATURE), 'Date': format_date_time(int(time.time()))}\n url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket,\n self.prefix, path)\n\n def upload():\n try:\n res = requests.put(url, headers=headers, data=buf)\n if res.status_code != 200:\n logger.info(\n 'failed to upload file %s to upyun, response code: %s, text:\\n%s'\n , path, res.status_code, res.text)\n else:\n logger.debug('uploaded file %s to upyun', path)\n except Exception:\n logger.warn('upload file %s to upyun failed', path,\n exc_info=True)\n return threads.deferToThread(upload)\n\n\nclass MbCrawlImagesPipeline(FilesPipeline):\n STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES)\n STORE_SCHEMES['upyun'] = UpYunStore\n\n @classmethod\n def from_settings(cls, settings):\n upyunStore = cls.STORE_SCHEMES['upyun']\n upyunStore.OPERATOR = settings['UPYUN_OPERATOR']\n UpYunStore.SIGNATURE = settings['SIGNATURE']\n return super().from_settings(settings)\n", "step-4": "<mask token>\n\n\nclass DBStorePipeline(object):\n <mask token>\n\n @classmethod\n def from_crawler(cls, crawler):\n instance = cls(crawler.stats, crawler.settings)\n crawler.signals.connect(instance.spider_closed, signals.spider_closed)\n return instance\n\n def __init__(self, stats, settings):\n self.dbpool = adbapi.ConnectionPool('psycopg2', settings['DB_DSN'])\n self.stats = stats\n\n def spider_closed(self, spider):\n self.dbpool.close()\n\n def process_item(self, item, spider):\n table = getattr(item, 'db_table', None)\n if not table:\n return item\n query = self.dbpool.runInteraction(self._save_item, table, item)\n query.addErrback(self._handle_error)\n return item\n <mask token>\n <mask token>\n\n def _handle_error(self, e):\n logger.error('failed to track item to DB: %s', e)\n\n\nclass UpYunStore(object):\n OPERATOR = None\n SIGNATURE = None\n HEADERS = {'Cache-Control': 'max-age=172800'}\n\n def __init__(self, uri):\n assert uri.startswith('upyun://')\n self.session = requests.Session()\n self.bucket, self.prefix = 
uri[8:].split('/', 1)\n\n def stat_file(self, path, info):\n \"\"\"\n TODO fetch and return file meta info from cloud\n \"\"\"\n return {}\n\n def persist_file(self, path, buf, info, meta=None, headers=None):\n \"\"\"Upload file to Azure blob storage\"\"\"\n headers = {'Authorization': 'UPYUN: {}:{}'.format(self.OPERATOR,\n self.SIGNATURE), 'Date': format_date_time(int(time.time()))}\n url = 'http://v0.api.upyun.com:5000/{}/{}{}'.format(self.bucket,\n self.prefix, path)\n\n def upload():\n try:\n res = requests.put(url, headers=headers, data=buf)\n if res.status_code != 200:\n logger.info(\n 'failed to upload file %s to upyun, response code: %s, text:\\n%s'\n , path, res.status_code, res.text)\n else:\n logger.debug('uploaded file %s to upyun', path)\n except Exception:\n logger.warn('upload file %s to upyun failed', path,\n exc_info=True)\n return threads.deferToThread(upload)\n\n\nclass MbCrawlImagesPipeline(FilesPipeline):\n STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES)\n STORE_SCHEMES['upyun'] = UpYunStore\n\n @classmethod\n def from_settings(cls, settings):\n upyunStore = cls.STORE_SCHEMES['upyun']\n upyunStore.OPERATOR = settings['UPYUN_OPERATOR']\n UpYunStore.SIGNATURE = settings['SIGNATURE']\n return super().from_settings(settings)\n", "step-5": "# -*- coding: utf-8 -*-\n\n# Item pipelines\nimport logging\nimport hashlib\nfrom wsgiref.handlers import format_date_time\nimport time\nimport itertools\n\nimport psycopg2\nfrom psycopg2.extensions import AsIs\nfrom psycopg2.extras import Json\nimport requests\nfrom scrapy import signals\nfrom scrapy.pipelines.files import FilesPipeline\nfrom twisted.enterprise import adbapi\nfrom twisted.internet import threads\n\nlogger = logging.getLogger(__name__)\n\n\nclass DBStorePipeline(object):\n '''\n This class save the crawled item to a PostgreSQL table\n The db operation is async and managed by the twisted reactor loop.\n (References from https://gist.github.com/tzermias/6982723)\n '''\n\n @classmethod\n def 
from_crawler(cls, crawler):\n instance = cls(crawler.stats, crawler.settings)\n crawler.signals.connect(instance.spider_closed, signals.spider_closed)\n return instance\n\n def __init__(self, stats, settings):\n # Instantiate DB\n self.dbpool = adbapi.ConnectionPool('psycopg2', settings['DB_DSN'])\n self.stats = stats\n\n def spider_closed(self, spider):\n self.dbpool.close()\n\n def process_item(self, item, spider):\n table = getattr(item, \"db_table\", None)\n if not table:\n return item\n\n query = self.dbpool.runInteraction(self._save_item, table, item)\n query.addErrback(self._handle_error)\n return item\n\n def _save_item(self, tx, table, item):\n\n skip_fields = getattr(item, \"db_skip_fields\", [])\n\n cols = [k for k in item if k not in skip_fields]\n self._insert_row(tx, table, cols, item)\n self.stats.inc_value('database/records_added')\n if hasattr(item, \"db_helper_table_rows\"):\n helper_table, helper_rows = item.db_helper_table_rows()\n if helper_rows:\n self._insert_row(tx, helper_table,\n helper_rows[0].keys(), *helper_rows)\n self.stats.inc_value(\n 'database/records_added', len(helper_rows))\n\n return item\n\n def _insert_row(self, tx, table, cols, *rows):\n val_fmt = \"({})\".format(\",\".join(itertools.repeat(\"%s\", len(cols))))\n\n def mk_row_param(row):\n return tuple(row[k] for k in cols)\n data_str = ','.join(tx.mogrify(val_fmt, mk_row_param(row)).decode('utf-8')\n for row in rows)\n q = \"INSERT INTO {} ({}) VALUES \".format(table, \",\".join(cols))\n tx.execute(q + data_str)\n\n def _handle_error(self, e):\n logger.error(\"failed to track item to DB: %s\", e)\n\n\nclass UpYunStore(object):\n\n OPERATOR = None\n SIGNATURE = None\n\n HEADERS = {\n 'Cache-Control': 'max-age=172800',\n }\n\n def __init__(self, uri):\n assert uri.startswith('upyun://')\n self.session = requests.Session()\n self.bucket, self.prefix = uri[8:].split(\"/\", 1)\n\n def stat_file(self, path, info):\n \"\"\"\n TODO fetch and return file meta info from cloud\n 
\"\"\"\n return {}\n\n def persist_file(self, path, buf, info, meta=None, headers=None):\n \"\"\"Upload file to Azure blob storage\"\"\"\n headers = {\n \"Authorization\": \"UPYUN: {}:{}\".format(self.OPERATOR, self.SIGNATURE),\n \"Date\": format_date_time(int(time.time())),\n }\n url = \"http://v0.api.upyun.com:5000/{}/{}{}\".format(\n self.bucket, self.prefix, path)\n\n def upload():\n try:\n res = requests.put(url, headers=headers, data=buf)\n if res.status_code != 200:\n logger.info(\n \"failed to upload file %s to upyun, response code: %s, text:\\n%s\",\n path, res.status_code, res.text)\n else:\n logger.debug(\"uploaded file %s to upyun\", path)\n except Exception:\n logger.warn(\"upload file %s to upyun failed\",\n path, exc_info=True)\n return threads.deferToThread(upload)\n\n\nclass MbCrawlImagesPipeline(FilesPipeline):\n STORE_SCHEMES = dict(FilesPipeline.STORE_SCHEMES)\n STORE_SCHEMES[\"upyun\"] = UpYunStore\n\n @classmethod\n def from_settings(cls, settings):\n upyunStore = cls.STORE_SCHEMES[\"upyun\"]\n upyunStore.OPERATOR = settings[\"UPYUN_OPERATOR\"]\n UpYunStore.SIGNATURE = settings[\"SIGNATURE\"]\n return super().from_settings(settings)\n", "step-ids": [ 7, 8, 11, 14, 20 ] }
[ 7, 8, 11, 14, 20 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def test_readme_escaping() ->None: """Ensure the demo matches expected.""" assert main() == '<div>&lt;span&gt;Escaping&lt;/span&gt;</div>' <|reserved_special_token_1|> <|reserved_special_token_0|> from . import main def test_readme_escaping() ->None: """Ensure the demo matches expected.""" assert main() == '<div>&lt;span&gt;Escaping&lt;/span&gt;</div>' <|reserved_special_token_1|> """Test an example.""" from . import main def test_readme_escaping() -> None: """Ensure the demo matches expected.""" assert main() == "<div>&lt;span&gt;Escaping&lt;/span&gt;</div>"
flexible
{ "blob_id": "7b459aad399a31f61b8686e1919b38d5538924b8", "index": 2014, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test_readme_escaping() ->None:\n \"\"\"Ensure the demo matches expected.\"\"\"\n assert main() == '<div>&lt;span&gt;Escaping&lt;/span&gt;</div>'\n", "step-3": "<mask token>\nfrom . import main\n\n\ndef test_readme_escaping() ->None:\n \"\"\"Ensure the demo matches expected.\"\"\"\n assert main() == '<div>&lt;span&gt;Escaping&lt;/span&gt;</div>'\n", "step-4": "\"\"\"Test an example.\"\"\"\nfrom . import main\n\n\ndef test_readme_escaping() -> None:\n \"\"\"Ensure the demo matches expected.\"\"\"\n assert main() == \"<div>&lt;span&gt;Escaping&lt;/span&gt;</div>\"\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> with open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile: reader = csv.DictReader(csvfile) for row in reader: print(row) print("Let's configure the subnets on the Old BD") print("First Let's log in") print('What is the ip address of the APIC?') <|reserved_special_token_0|> print('whats the name of the tenant?') <|reserved_special_token_0|> print('what is the name of the app profile?') <|reserved_special_token_0|> print('what is the name of the old BD?') <|reserved_special_token_0|> print('what is the name of the network?') <|reserved_special_token_0|> print('what is the name of the network IP?') <|reserved_special_token_0|> print('what is the name of the netmask?') <|reserved_special_token_0|> print('what is the name of the epg?') <|reserved_special_token_0|> print(response.text.encode('utf8')) <|reserved_special_token_0|> print(response.text.encode('utf8')) <|reserved_special_token_0|> print(response.text.encode('utf8')) <|reserved_special_token_1|> <|reserved_special_token_0|> with open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile: reader = csv.DictReader(csvfile) for row in reader: print(row) print("Let's configure the subnets on the Old BD") print("First Let's log in") print('What is the ip address of the APIC?') apic = input() user = getpass('What is you username?') password = getpass('What is your password?') print('whats the name of the tenant?') tenant = input() print('what is the name of the app profile?') app_profile = input() print('what is the name of the old BD?') old_bd = input() print('what is the name of the network?') subnet_network = input() print('what is the name of the network IP?') subnet_ip = input() print('what is the name of the netmask?') subnet_mask = input() print('what is the name of the epg?') epg = input() s = requests.session() url = 'https://%s/api/aaaLogin.json' % apic payload = ( 
'{\r\n\t"aaaUser":{\r\n\t\t"attributes":{\r\n\t\t\t"name": "%s",\r\n\t\t\t"pwd":"%s"\r\n\t\t}\r\n\t}\r\n}' % (user, password)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) url = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask) payload = ( '{"fvSubnet":{"attributes":{"dn":"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]","ip":"%s.%s/%s","scope":"public","rn":"subnet-[%s.%s/%s]","status":"created"},"children":[]}}\r\n' % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) url = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant, app_profile, epg) payload = ( '{"fvAEPg":{"attributes":{"dn":"uni/tn-%s/ap-%s/epg-%s","name":"%s","rn":"%s","status":"created"},"children":[{"fvRsBd":{"attributes":{"tnFvBDName":"%s","status":"created,modified"},"children":[]}}]}}\r\n' % (tenant, app_profile, epg, epg, epg, old_bd)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) <|reserved_special_token_1|> import requests from getpass import getpass import csv with open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile: reader = csv.DictReader(csvfile) for row in reader: print(row) print("Let's configure the subnets on the Old BD") print("First Let's log in") print('What is the ip address of the APIC?') apic = input() user = getpass('What is you username?') password = getpass('What is your password?') print('whats the name of the tenant?') tenant = input() print('what is the name of the app profile?') app_profile = 
input() print('what is the name of the old BD?') old_bd = input() print('what is the name of the network?') subnet_network = input() print('what is the name of the network IP?') subnet_ip = input() print('what is the name of the netmask?') subnet_mask = input() print('what is the name of the epg?') epg = input() s = requests.session() url = 'https://%s/api/aaaLogin.json' % apic payload = ( '{\r\n\t"aaaUser":{\r\n\t\t"attributes":{\r\n\t\t\t"name": "%s",\r\n\t\t\t"pwd":"%s"\r\n\t\t}\r\n\t}\r\n}' % (user, password)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) url = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask) payload = ( '{"fvSubnet":{"attributes":{"dn":"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]","ip":"%s.%s/%s","scope":"public","rn":"subnet-[%s.%s/%s]","status":"created"},"children":[]}}\r\n' % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) url = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant, app_profile, epg) payload = ( '{"fvAEPg":{"attributes":{"dn":"uni/tn-%s/ap-%s/epg-%s","name":"%s","rn":"%s","status":"created"},"children":[{"fvRsBd":{"attributes":{"tnFvBDName":"%s","status":"created,modified"},"children":[]}}]}}\r\n' % (tenant, app_profile, epg, epg, epg, old_bd)) headers = {'Content-Type': 'application/json'} response = s.request('POST', url, headers=headers, data=payload, verify=False) print(response.text.encode('utf8')) <|reserved_special_token_1|> #! 
/user/bin/env python import requests from getpass import getpass import csv # Set up the variables with open("ACI PostMan Variable Values.csv", encoding='utf-8-sig') as csvfile: reader = csv.DictReader(csvfile) for row in reader: print(row) print("Let's configure the subnets on the Old BD") print("First Let's log in") print('What is the ip address of the APIC?') apic = input() user = getpass('What is you username?') password = getpass('What is your password?') print('whats the name of the tenant?') tenant = input() print('what is the name of the app profile?') app_profile = input() print('what is the name of the old BD?') old_bd = input() print('what is the name of the network?') subnet_network = input() print('what is the name of the network IP?') subnet_ip = input() print('what is the name of the netmask?') subnet_mask = input() print('what is the name of the epg?') epg = input() # set session persistance for all the API calls s = requests.session() # first call to authenticate into the apic url = "https://%s/api/aaaLogin.json" % (apic) payload = "{\r\n\t\"aaaUser\":{\r\n\t\t\"attributes\":{\r\n\t\t\t\"name\": \"%s\",\r\n\t\t\t\"pwd\":\"%s\"\r\n\t\t}\r\n\t}\r\n}" % (user, password) headers = { 'Content-Type': 'application/json' } response = s.request("POST", url, headers=headers, data = payload, verify = False) print(response.text.encode('utf8')) # Create Subnets under Old BD url = "https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json" % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask) payload = "{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\r\n" % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask) headers = { 'Content-Type': 'application/json' } response = s.request("POST", url, headers=headers, data = payload, 
verify = False) print(response.text.encode('utf8')) # create EPG's for demo url = "https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json" % (apic, tenant, app_profile, epg) payload = "{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\r\n" % (tenant, app_profile, epg, epg, epg, old_bd) headers = { 'Content-Type': 'application/json' } response = s.request("POST", url, headers=headers, data = payload, verify = False) print(response.text.encode('utf8'))
flexible
{ "blob_id": "bdc9856bfc61127d6bca31658b1faf3da09f5b86", "index": 161, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\n<mask token>\nprint('whats the name of the tenant?')\n<mask token>\nprint('what is the name of the app profile?')\n<mask token>\nprint('what is the name of the old BD?')\n<mask token>\nprint('what is the name of the network?')\n<mask token>\nprint('what is the name of the network IP?')\n<mask token>\nprint('what is the name of the netmask?')\n<mask token>\nprint('what is the name of the epg?')\n<mask token>\nprint(response.text.encode('utf8'))\n<mask token>\nprint(response.text.encode('utf8'))\n<mask token>\nprint(response.text.encode('utf8'))\n", "step-3": "<mask token>\nwith open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\napic = input()\nuser = getpass('What is you username?')\npassword = getpass('What is your password?')\nprint('whats the name of the tenant?')\ntenant = input()\nprint('what is the name of the app profile?')\napp_profile = input()\nprint('what is the name of the old BD?')\nold_bd = input()\nprint('what is the name of the network?')\nsubnet_network = input()\nprint('what is the name of the network IP?')\nsubnet_ip = input()\nprint('what is the name of the netmask?')\nsubnet_mask = input()\nprint('what is the name of the epg?')\nepg = input()\ns = requests.session()\nurl = 'https://%s/api/aaaLogin.json' % apic\npayload = (\n '{\\r\\n\\t\"aaaUser\":{\\r\\n\\t\\t\"attributes\":{\\r\\n\\t\\t\\t\"name\": 
\"%s\",\\r\\n\\t\\t\\t\"pwd\":\"%s\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}'\n % (user, password))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic,\n tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\npayload = (\n '{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\\r\\n'\n % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask,\n subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip,\n subnet_mask))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant,\n app_profile, epg)\npayload = (\n '{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\\r\\n'\n % (tenant, app_profile, epg, epg, epg, old_bd))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\n", "step-4": "import requests\nfrom getpass import getpass\nimport csv\nwith open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\napic = input()\nuser = getpass('What is you username?')\npassword = getpass('What is your password?')\nprint('whats the name of the tenant?')\ntenant = 
input()\nprint('what is the name of the app profile?')\napp_profile = input()\nprint('what is the name of the old BD?')\nold_bd = input()\nprint('what is the name of the network?')\nsubnet_network = input()\nprint('what is the name of the network IP?')\nsubnet_ip = input()\nprint('what is the name of the netmask?')\nsubnet_mask = input()\nprint('what is the name of the epg?')\nepg = input()\ns = requests.session()\nurl = 'https://%s/api/aaaLogin.json' % apic\npayload = (\n '{\\r\\n\\t\"aaaUser\":{\\r\\n\\t\\t\"attributes\":{\\r\\n\\t\\t\\t\"name\": \"%s\",\\r\\n\\t\\t\\t\"pwd\":\"%s\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}'\n % (user, password))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic,\n tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\npayload = (\n '{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\\r\\n'\n % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask,\n subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip,\n subnet_mask))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant,\n app_profile, epg)\npayload = (\n '{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\\r\\n'\n % (tenant, app_profile, epg, epg, epg, old_bd))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, 
verify=False)\nprint(response.text.encode('utf8'))\n", "step-5": "#! /user/bin/env python\r\n\r\nimport requests\r\nfrom getpass import getpass\r\nimport csv\r\n\r\n# Set up the variables\r\n\r\nwith open(\"ACI PostMan Variable Values.csv\", encoding='utf-8-sig') as csvfile:\r\n reader = csv.DictReader(csvfile)\r\n for row in reader: \r\n print(row)\r\n\r\nprint(\"Let's configure the subnets on the Old BD\")\r\nprint(\"First Let's log in\")\r\nprint('What is the ip address of the APIC?')\r\napic = input()\r\nuser = getpass('What is you username?')\r\npassword = getpass('What is your password?')\r\nprint('whats the name of the tenant?')\r\ntenant = input()\r\nprint('what is the name of the app profile?')\r\napp_profile = input()\r\nprint('what is the name of the old BD?')\r\nold_bd = input()\r\nprint('what is the name of the network?')\r\nsubnet_network = input()\r\nprint('what is the name of the network IP?')\r\nsubnet_ip = input()\r\nprint('what is the name of the netmask?')\r\nsubnet_mask = input()\r\nprint('what is the name of the epg?')\r\nepg = input()\r\n\r\n\r\n# set session persistance for all the API calls\r\n\r\ns = requests.session()\r\n\r\n# first call to authenticate into the apic\r\n\r\nurl = \"https://%s/api/aaaLogin.json\" % (apic)\r\n\r\npayload = \"{\\r\\n\\t\\\"aaaUser\\\":{\\r\\n\\t\\t\\\"attributes\\\":{\\r\\n\\t\\t\\t\\\"name\\\": \\\"%s\\\",\\r\\n\\t\\t\\t\\\"pwd\\\":\\\"%s\\\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}\" % (user, password)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n# Create Subnets under Old BD\r\n\r\nurl = \"https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json\" % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\r\n\r\npayload = 
\"{\\\"fvSubnet\\\":{\\\"attributes\\\":{\\\"dn\\\":\\\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\\\",\\\"ip\\\":\\\"%s.%s/%s\\\",\\\"scope\\\":\\\"public\\\",\\\"rn\\\":\\\"subnet-[%s.%s/%s]\\\",\\\"status\\\":\\\"created\\\"},\\\"children\\\":[]}}\\r\\n\" % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n# create EPG's for demo\r\n\r\nurl = \"https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json\" % (apic, tenant, app_profile, epg)\r\n\r\npayload = \"{\\\"fvAEPg\\\":{\\\"attributes\\\":{\\\"dn\\\":\\\"uni/tn-%s/ap-%s/epg-%s\\\",\\\"name\\\":\\\"%s\\\",\\\"rn\\\":\\\"%s\\\",\\\"status\\\":\\\"created\\\"},\\\"children\\\":[{\\\"fvRsBd\\\":{\\\"attributes\\\":{\\\"tnFvBDName\\\":\\\"%s\\\",\\\"status\\\":\\\"created,modified\\\"},\\\"children\\\":[]}}]}}\\r\\n\" % (tenant, app_profile, epg, epg, epg, old_bd)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from urllib import request from urllib import error from urllib.request import urlretrieve import os, re from bs4 import BeautifulSoup import configparser from apng2gif import apng2gif config = configparser.ConfigParser() config.read('crawler.config') # 下載儲存位置 directoryLocation = os.getcwd() + '\\img' # 設置要爬的頁面 urlList = config['lineStoreUrl']['url'].split(',') downLoadType = '貼圖' # 設置User-Agent headers = ("User_Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0") # 自定義opener opener = request.build_opener() opener.addheaders = [headers] request.install_opener(opener) def saveImg(imgurl, downLoadType): fileLocation = directoryLocation + "\\" + downLoadType + "\\" + title if not os.path.exists(fileLocation): os.makedirs(fileLocation) file = fileLocation + "\\" + str(count + 1) + ".png" urlretrieve(imgurl, filename=file) return file def getTitle(content): soup = BeautifulSoup(content, 'html.parser') title = soup.find('p', 'mdCMN38Item01Ttl').text return title def downloadImageList(imgurl): # if animationUrl download animation png ,else download imageurl animationUrl = imgurl[:-7] + '_animation@2x.png' try: file = saveImg(animationUrl, '動圖') apng2gif(file) except error.URLError as err: saveImg(imgurl, downLoadType) for i in range(0, len(urlList)): downLoadType = '貼圖' content = request.urlopen(urlList[i]).read().decode("utf-8", "ignore") rule = '(https.*sticker@2x\.png)' # 正則匹配 ruleEmoji = '(https.*/\d{3}\.png)' title = getTitle(content) title = re.sub('\s', '', title) title = re.sub('[\W_]+', '', title) print('開始下載 ' + title) imglist = re.compile(rule).findall(content) # 獲取圖片列表 if len(imglist) == 0: imglist = re.compile(ruleEmoji).findall(content) # 小表情規則 downLoadType = '小表情' for count in range(0, len(imglist)): imgurl = downloadImageList(imglist[count]) print('第', count + 1, '張下載完成!') print("已全部下載完成")
normal
{ "blob_id": "7bcdd6c5c6e41b076e476e1db35b663e34d74a67", "index": 1885, "step-1": "<mask token>\n\n\ndef saveImg(imgurl, downLoadType):\n fileLocation = directoryLocation + '\\\\' + downLoadType + '\\\\' + title\n if not os.path.exists(fileLocation):\n os.makedirs(fileLocation)\n file = fileLocation + '\\\\' + str(count + 1) + '.png'\n urlretrieve(imgurl, filename=file)\n return file\n\n\ndef getTitle(content):\n soup = BeautifulSoup(content, 'html.parser')\n title = soup.find('p', 'mdCMN38Item01Ttl').text\n return title\n\n\ndef downloadImageList(imgurl):\n animationUrl = imgurl[:-7] + '_animation@2x.png'\n try:\n file = saveImg(animationUrl, '動圖')\n apng2gif(file)\n except error.URLError as err:\n saveImg(imgurl, downLoadType)\n\n\n<mask token>\n", "step-2": "<mask token>\nconfig.read('crawler.config')\n<mask token>\nrequest.install_opener(opener)\n\n\ndef saveImg(imgurl, downLoadType):\n fileLocation = directoryLocation + '\\\\' + downLoadType + '\\\\' + title\n if not os.path.exists(fileLocation):\n os.makedirs(fileLocation)\n file = fileLocation + '\\\\' + str(count + 1) + '.png'\n urlretrieve(imgurl, filename=file)\n return file\n\n\ndef getTitle(content):\n soup = BeautifulSoup(content, 'html.parser')\n title = soup.find('p', 'mdCMN38Item01Ttl').text\n return title\n\n\ndef downloadImageList(imgurl):\n animationUrl = imgurl[:-7] + '_animation@2x.png'\n try:\n file = saveImg(animationUrl, '動圖')\n apng2gif(file)\n except error.URLError as err:\n saveImg(imgurl, downLoadType)\n\n\nfor i in range(0, len(urlList)):\n downLoadType = '貼圖'\n content = request.urlopen(urlList[i]).read().decode('utf-8', 'ignore')\n rule = '(https.*sticker@2x\\\\.png)'\n ruleEmoji = '(https.*/\\\\d{3}\\\\.png)'\n title = getTitle(content)\n title = re.sub('\\\\s', '', title)\n title = re.sub('[\\\\W_]+', '', title)\n print('開始下載 ' + title)\n imglist = re.compile(rule).findall(content)\n if len(imglist) == 0:\n imglist = re.compile(ruleEmoji).findall(content)\n downLoadType = '小表情'\n 
for count in range(0, len(imglist)):\n imgurl = downloadImageList(imglist[count])\n print('第', count + 1, '張下載完成!')\nprint('已全部下載完成')\n", "step-3": "<mask token>\nconfig = configparser.ConfigParser()\nconfig.read('crawler.config')\ndirectoryLocation = os.getcwd() + '\\\\img'\nurlList = config['lineStoreUrl']['url'].split(',')\ndownLoadType = '貼圖'\nheaders = ('User_Agent',\n 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0'\n )\nopener = request.build_opener()\nopener.addheaders = [headers]\nrequest.install_opener(opener)\n\n\ndef saveImg(imgurl, downLoadType):\n fileLocation = directoryLocation + '\\\\' + downLoadType + '\\\\' + title\n if not os.path.exists(fileLocation):\n os.makedirs(fileLocation)\n file = fileLocation + '\\\\' + str(count + 1) + '.png'\n urlretrieve(imgurl, filename=file)\n return file\n\n\ndef getTitle(content):\n soup = BeautifulSoup(content, 'html.parser')\n title = soup.find('p', 'mdCMN38Item01Ttl').text\n return title\n\n\ndef downloadImageList(imgurl):\n animationUrl = imgurl[:-7] + '_animation@2x.png'\n try:\n file = saveImg(animationUrl, '動圖')\n apng2gif(file)\n except error.URLError as err:\n saveImg(imgurl, downLoadType)\n\n\nfor i in range(0, len(urlList)):\n downLoadType = '貼圖'\n content = request.urlopen(urlList[i]).read().decode('utf-8', 'ignore')\n rule = '(https.*sticker@2x\\\\.png)'\n ruleEmoji = '(https.*/\\\\d{3}\\\\.png)'\n title = getTitle(content)\n title = re.sub('\\\\s', '', title)\n title = re.sub('[\\\\W_]+', '', title)\n print('開始下載 ' + title)\n imglist = re.compile(rule).findall(content)\n if len(imglist) == 0:\n imglist = re.compile(ruleEmoji).findall(content)\n downLoadType = '小表情'\n for count in range(0, len(imglist)):\n imgurl = downloadImageList(imglist[count])\n print('第', count + 1, '張下載完成!')\nprint('已全部下載完成')\n", "step-4": "from urllib import request\nfrom urllib import error\nfrom urllib.request import urlretrieve\nimport os, 
re\nfrom bs4 import BeautifulSoup\nimport configparser\nfrom apng2gif import apng2gif\nconfig = configparser.ConfigParser()\nconfig.read('crawler.config')\ndirectoryLocation = os.getcwd() + '\\\\img'\nurlList = config['lineStoreUrl']['url'].split(',')\ndownLoadType = '貼圖'\nheaders = ('User_Agent',\n 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0'\n )\nopener = request.build_opener()\nopener.addheaders = [headers]\nrequest.install_opener(opener)\n\n\ndef saveImg(imgurl, downLoadType):\n fileLocation = directoryLocation + '\\\\' + downLoadType + '\\\\' + title\n if not os.path.exists(fileLocation):\n os.makedirs(fileLocation)\n file = fileLocation + '\\\\' + str(count + 1) + '.png'\n urlretrieve(imgurl, filename=file)\n return file\n\n\ndef getTitle(content):\n soup = BeautifulSoup(content, 'html.parser')\n title = soup.find('p', 'mdCMN38Item01Ttl').text\n return title\n\n\ndef downloadImageList(imgurl):\n animationUrl = imgurl[:-7] + '_animation@2x.png'\n try:\n file = saveImg(animationUrl, '動圖')\n apng2gif(file)\n except error.URLError as err:\n saveImg(imgurl, downLoadType)\n\n\nfor i in range(0, len(urlList)):\n downLoadType = '貼圖'\n content = request.urlopen(urlList[i]).read().decode('utf-8', 'ignore')\n rule = '(https.*sticker@2x\\\\.png)'\n ruleEmoji = '(https.*/\\\\d{3}\\\\.png)'\n title = getTitle(content)\n title = re.sub('\\\\s', '', title)\n title = re.sub('[\\\\W_]+', '', title)\n print('開始下載 ' + title)\n imglist = re.compile(rule).findall(content)\n if len(imglist) == 0:\n imglist = re.compile(ruleEmoji).findall(content)\n downLoadType = '小表情'\n for count in range(0, len(imglist)):\n imgurl = downloadImageList(imglist[count])\n print('第', count + 1, '張下載完成!')\nprint('已全部下載完成')\n", "step-5": "from urllib import request\nfrom urllib import error\nfrom urllib.request import urlretrieve\nimport os, re\nfrom bs4 import BeautifulSoup\nimport configparser\nfrom apng2gif import 
apng2gif\n\nconfig = configparser.ConfigParser()\nconfig.read('crawler.config')\n# 下載儲存位置\ndirectoryLocation = os.getcwd() + '\\\\img'\n# 設置要爬的頁面\nurlList = config['lineStoreUrl']['url'].split(',')\ndownLoadType = '貼圖'\n\n# 設置User-Agent\nheaders = (\"User_Agent\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0\")\n# 自定義opener\nopener = request.build_opener()\nopener.addheaders = [headers]\nrequest.install_opener(opener)\n\n\ndef saveImg(imgurl, downLoadType):\n fileLocation = directoryLocation + \"\\\\\" + downLoadType + \"\\\\\" + title\n if not os.path.exists(fileLocation):\n os.makedirs(fileLocation)\n file = fileLocation + \"\\\\\" + str(count + 1) + \".png\"\n urlretrieve(imgurl, filename=file)\n return file\n\n\ndef getTitle(content):\n soup = BeautifulSoup(content, 'html.parser')\n title = soup.find('p', 'mdCMN38Item01Ttl').text\n return title\n\n\ndef downloadImageList(imgurl):\n # if animationUrl download animation png ,else download imageurl\n animationUrl = imgurl[:-7] + '_animation@2x.png'\n try:\n file = saveImg(animationUrl, '動圖')\n apng2gif(file)\n except error.URLError as err:\n saveImg(imgurl, downLoadType)\n\n\nfor i in range(0, len(urlList)):\n downLoadType = '貼圖'\n content = request.urlopen(urlList[i]).read().decode(\"utf-8\", \"ignore\")\n rule = '(https.*sticker@2x\\.png)' # 正則匹配\n ruleEmoji = '(https.*/\\d{3}\\.png)'\n title = getTitle(content)\n title = re.sub('\\s', '', title)\n title = re.sub('[\\W_]+', '', title)\n\n print('開始下載 ' + title)\n imglist = re.compile(rule).findall(content) # 獲取圖片列表\n if len(imglist) == 0:\n imglist = re.compile(ruleEmoji).findall(content) # 小表情規則\n downLoadType = '小表情'\n for count in range(0, len(imglist)):\n imgurl = downloadImageList(imglist[count])\n\n print('第', count + 1, '張下載完成!')\nprint(\"已全部下載完成\")\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
"""2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20? """ from fractions import gcd def smallest_divisible(nmax=20): smallest = 1 for i in range(1, nmax+1): if smallest % i: smallest *= i/gcd(i, smallest) return smallest
normal
{ "blob_id": "1cc696410a5d2eaf294d032c04a96974d5ef5db0", "index": 2831, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef smallest_divisible(nmax=20):\n smallest = 1\n for i in range(1, nmax + 1):\n if smallest % i:\n smallest *= i / gcd(i, smallest)\n return smallest\n", "step-3": "<mask token>\nfrom fractions import gcd\n\n\ndef smallest_divisible(nmax=20):\n smallest = 1\n for i in range(1, nmax + 1):\n if smallest % i:\n smallest *= i / gcd(i, smallest)\n return smallest\n", "step-4": "\"\"\"2520 is the smallest number that can be divided by each of the\nnumbers from 1 to 10 without any remainder.\n\nWhat is the smallest positive number that is evenly divisible by all\nof the numbers from 1 to 20?\n\"\"\"\nfrom fractions import gcd\n\ndef smallest_divisible(nmax=20):\n smallest = 1\n for i in range(1, nmax+1):\n if smallest % i:\n smallest *= i/gcd(i, smallest)\n\n return smallest\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from django.conf.urls import url from django.contrib.auth.views import login,logout from appPortas.views import * urlpatterns = [ url(r'^porta/list$', porta_list, name='porta_list'), url(r'^porta/detail/(?P<pk>\d+)$',porta_detail, name='porta_detail'), url(r'^porta/new/$', porta_new, name='porta_new'), url(r'^porta/update/(?P<pk>\d+)$',porta_update, name='porta_update'), url(r'^porta/delete/(?P<pk>\d+)$',porta_delete, name='porta_delete'), url(r'^porta/usuarios/(?P<pk>\d+)$', porta_delete, name='porta_delete'), url(r'^grupo/list$', grupo_list, name='grupo_list'), url(r'^grupo/detail/(?P<pk>\d+)$',grupo_detail, name='grupo_detail'), url(r'^grupo/new/$', grupo_new, name='grupo_new'), url(r'^grupo/update/(?P<pk>\d+)$',grupo_update, name='grupo_update'), url(r'^grupo/delete/(?P<pk>\d+)$',grupo_delete, name='grupo_delete'), url(r'^edit/grupo/$', edit_grupo, name='edit_grupo'), url(r'^usuario/acesso/grupo/(?P<pk>\d+)$', usuario_acesso_grupo, name='usuario_acesso_grupo'), url(r'^usuario/sem_acesso/grupo/(?P<pk>\d+)$', usuario_sem_acesso_grupo, name='usuario_sem_acesso_grupo'), url(r'^porta/no_grupo/(?P<pk>\d+)$', porta_no_grupo, name='porta_no_grupo'), url(r'^porta/nao_grupo/(?P<pk>\d+)$', porta_nao_grupo, name='porta_nao_grupo'), url(r'^portas/$', portas, name='portas'), url(r'^porta/busca/(?P<pk>\d+)$', busca_porta, name='busca_porta'), url(r'^busca/porta_frequencia/$', busca_porta_frequencia, name='busca_frequencia_porta'), url(r'^frequencia_porta_acesso/$', frequencia_porta_acesso, name='frequencia_porta_acesso'), url(r'^porta/frequencia_acesso/(?P<pk>\d+)$', porta_frequencias, name='porta_frequencias'), ]
normal
{ "blob_id": "5e355732f07029aa644617ac9b5e9ad50ee9397f", "index": 1161, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = [url('^porta/list$', porta_list, name='porta_list'), url(\n '^porta/detail/(?P<pk>\\\\d+)$', porta_detail, name='porta_detail'), url(\n '^porta/new/$', porta_new, name='porta_new'), url(\n '^porta/update/(?P<pk>\\\\d+)$', porta_update, name='porta_update'), url(\n '^porta/delete/(?P<pk>\\\\d+)$', porta_delete, name='porta_delete'), url(\n '^porta/usuarios/(?P<pk>\\\\d+)$', porta_delete, name='porta_delete'),\n url('^grupo/list$', grupo_list, name='grupo_list'), url(\n '^grupo/detail/(?P<pk>\\\\d+)$', grupo_detail, name='grupo_detail'), url(\n '^grupo/new/$', grupo_new, name='grupo_new'), url(\n '^grupo/update/(?P<pk>\\\\d+)$', grupo_update, name='grupo_update'), url(\n '^grupo/delete/(?P<pk>\\\\d+)$', grupo_delete, name='grupo_delete'), url(\n '^edit/grupo/$', edit_grupo, name='edit_grupo'), url(\n '^usuario/acesso/grupo/(?P<pk>\\\\d+)$', usuario_acesso_grupo, name=\n 'usuario_acesso_grupo'), url('^usuario/sem_acesso/grupo/(?P<pk>\\\\d+)$',\n usuario_sem_acesso_grupo, name='usuario_sem_acesso_grupo'), url(\n '^porta/no_grupo/(?P<pk>\\\\d+)$', porta_no_grupo, name='porta_no_grupo'),\n url('^porta/nao_grupo/(?P<pk>\\\\d+)$', porta_nao_grupo, name=\n 'porta_nao_grupo'), url('^portas/$', portas, name='portas'), url(\n '^porta/busca/(?P<pk>\\\\d+)$', busca_porta, name='busca_porta'), url(\n '^busca/porta_frequencia/$', busca_porta_frequencia, name=\n 'busca_frequencia_porta'), url('^frequencia_porta_acesso/$',\n frequencia_porta_acesso, name='frequencia_porta_acesso'), url(\n '^porta/frequencia_acesso/(?P<pk>\\\\d+)$', porta_frequencias, name=\n 'porta_frequencias')]\n", "step-3": "from django.conf.urls import url\nfrom django.contrib.auth.views import login, logout\nfrom appPortas.views import *\nurlpatterns = [url('^porta/list$', porta_list, name='porta_list'), url(\n '^porta/detail/(?P<pk>\\\\d+)$', porta_detail, 
name='porta_detail'), url(\n '^porta/new/$', porta_new, name='porta_new'), url(\n '^porta/update/(?P<pk>\\\\d+)$', porta_update, name='porta_update'), url(\n '^porta/delete/(?P<pk>\\\\d+)$', porta_delete, name='porta_delete'), url(\n '^porta/usuarios/(?P<pk>\\\\d+)$', porta_delete, name='porta_delete'),\n url('^grupo/list$', grupo_list, name='grupo_list'), url(\n '^grupo/detail/(?P<pk>\\\\d+)$', grupo_detail, name='grupo_detail'), url(\n '^grupo/new/$', grupo_new, name='grupo_new'), url(\n '^grupo/update/(?P<pk>\\\\d+)$', grupo_update, name='grupo_update'), url(\n '^grupo/delete/(?P<pk>\\\\d+)$', grupo_delete, name='grupo_delete'), url(\n '^edit/grupo/$', edit_grupo, name='edit_grupo'), url(\n '^usuario/acesso/grupo/(?P<pk>\\\\d+)$', usuario_acesso_grupo, name=\n 'usuario_acesso_grupo'), url('^usuario/sem_acesso/grupo/(?P<pk>\\\\d+)$',\n usuario_sem_acesso_grupo, name='usuario_sem_acesso_grupo'), url(\n '^porta/no_grupo/(?P<pk>\\\\d+)$', porta_no_grupo, name='porta_no_grupo'),\n url('^porta/nao_grupo/(?P<pk>\\\\d+)$', porta_nao_grupo, name=\n 'porta_nao_grupo'), url('^portas/$', portas, name='portas'), url(\n '^porta/busca/(?P<pk>\\\\d+)$', busca_porta, name='busca_porta'), url(\n '^busca/porta_frequencia/$', busca_porta_frequencia, name=\n 'busca_frequencia_porta'), url('^frequencia_porta_acesso/$',\n frequencia_porta_acesso, name='frequencia_porta_acesso'), url(\n '^porta/frequencia_acesso/(?P<pk>\\\\d+)$', porta_frequencias, name=\n 'porta_frequencias')]\n", "step-4": "from django.conf.urls import url\nfrom django.contrib.auth.views import login,logout\n\nfrom appPortas.views import *\n\nurlpatterns = [\n url(r'^porta/list$', porta_list, name='porta_list'),\n url(r'^porta/detail/(?P<pk>\\d+)$',porta_detail, name='porta_detail'),\n url(r'^porta/new/$', porta_new, name='porta_new'),\n url(r'^porta/update/(?P<pk>\\d+)$',porta_update, name='porta_update'),\n url(r'^porta/delete/(?P<pk>\\d+)$',porta_delete, name='porta_delete'),\n 
url(r'^porta/usuarios/(?P<pk>\\d+)$', porta_delete, name='porta_delete'),\n\n url(r'^grupo/list$', grupo_list, name='grupo_list'),\n url(r'^grupo/detail/(?P<pk>\\d+)$',grupo_detail, name='grupo_detail'),\n url(r'^grupo/new/$', grupo_new, name='grupo_new'),\n url(r'^grupo/update/(?P<pk>\\d+)$',grupo_update, name='grupo_update'),\n url(r'^grupo/delete/(?P<pk>\\d+)$',grupo_delete, name='grupo_delete'),\n\n url(r'^edit/grupo/$', edit_grupo, name='edit_grupo'),\n\n url(r'^usuario/acesso/grupo/(?P<pk>\\d+)$', usuario_acesso_grupo, name='usuario_acesso_grupo'),\n url(r'^usuario/sem_acesso/grupo/(?P<pk>\\d+)$', usuario_sem_acesso_grupo, name='usuario_sem_acesso_grupo'),\n\n url(r'^porta/no_grupo/(?P<pk>\\d+)$', porta_no_grupo, name='porta_no_grupo'),\n url(r'^porta/nao_grupo/(?P<pk>\\d+)$', porta_nao_grupo, name='porta_nao_grupo'),\n\n url(r'^portas/$', portas, name='portas'),\n url(r'^porta/busca/(?P<pk>\\d+)$', busca_porta, name='busca_porta'),\n url(r'^busca/porta_frequencia/$', busca_porta_frequencia, name='busca_frequencia_porta'),\n url(r'^frequencia_porta_acesso/$', frequencia_porta_acesso, name='frequencia_porta_acesso'),\n url(r'^porta/frequencia_acesso/(?P<pk>\\d+)$', porta_frequencias, name='porta_frequencias'),\n\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#!/usr/bin/python2 # # Author: Victor Ananjevsky, 2007 - 2010 # based on xdg-menu.py, written by Piotr Zielinski (http://www.cl.cam.ac.uk/~pz215/) # License: GPL # # This script takes names of menu files conforming to the XDG Desktop # Menu Specification, and outputs their FVWM equivalents to the # standard output. # # http://standards.freedesktop.org/menu-spec/latest/ # # Requirements: # pyxdg, pygtk, gnome-menus # # Syntax: # fvwm-xdg-menu.py [-d Menu] menufile1 menufile2 menufile3 ... # # Each menufile is an XDG menu description file. # Icons of menu entries cached in $XDG_CACHE_HOME/fvwm/icons/menu # # For menufile name `recent' will be generated menu of recently opened files # # -d mean not print headers for toplevel menu (useful in DynamicPopupAction) # # Example: # fvwm-xdg-menu.py /etc/xdg/menus/applications.menu # fvwm-xdg-menu.py applications # import sys import os from optparse import OptionParser import xdg.Menu from xdg.DesktopEntry import * from xdg.RecentFiles import * from xdg.BaseDirectory import xdg_config_dirs, xdg_cache_home import gtk # fix for correct output of unicode chars without terminal sys.stdout = codecs.getwriter('utf-8')(sys.stdout) def cache_icon (icon): ''' cache an icon ''' icon_file = "%s/%s.png" % (cache_path, os.path.basename(icon)) if os.path.exists(icon_file): return full_icon = "%s.png" % icon if os.path.exists(full_icon): gtk.gdk.pixbuf_new_from_file_at_size(full_icon, options.icon_size, options.icon_size).save(icon_file, 'png') return try: icon_theme.load_icon(icon, options.icon_size, gtk.ICON_LOOKUP_NO_SVG).save(icon_file, "png") except: pass def parse_menu (menu, fvwm_menu = None): ''' parse menu file ''' prefix = "+" if fvwm_menu == None: print '' print 'DestroyMenu "%s"' % menu print 'AddToMenu "%s"' % menu else: print 'DestroyMenu recreate %s' % fvwm_menu prefix = "AddToMenu %s" % fvwm_menu for entry in menu.getEntries(): if isinstance(entry, xdg.Menu.Menu): icon = entry.getIcon() print u'%s "%s%%menu/folder.png%%" 
Popup "%s"' % (prefix, entry.getName(), entry) elif isinstance(entry, xdg.Menu.MenuEntry): desktop = DesktopEntry(entry.DesktopEntry.getFileName()) icon = desktop.getIcon() ind = icon.rfind('.') if ind != -1: icon = icon[0:ind] cmd = desktop.getExec().rstrip('%FUfu') cache_icon(icon) print u'%s "%s%%menu/%s.png%%" Exec exec %s' % (prefix, desktop.getName(), os.path.basename(icon), cmd) else: pass for entry in menu.getEntries(): if isinstance(entry, xdg.Menu.Menu): parse_menu(entry) def parse_recent (fvwm_menu = None): ''' parse recently opened files ''' prefix = "+" if fvwm_menu == None: print '' print 'DestroyMenu "Recent"' print 'AddToMenu "Recent"' else: print 'DestroyMenu recreate %s' % fvwm_menu prefix="AddToMenu %s" % fvwm_menu rm = gtk.RecentManager() for rf in rm.get_items(): print '%s "%s" Exec exec xdg-open "%s"' % (prefix, rf.get_display_name(), rf.get_uri()) # Start cache_path = "%s/fvwm/menu" % xdg_cache_home icon_theme = gtk.icon_theme_get_default() if not os.path.exists(cache_path): os.makedirs(cache_path) # Parse commandline parser = OptionParser() parser.add_option("-d", "--dynamic", dest="fvwm_menu", default=None, help="Use in DynamicPopupAction", metavar="MENU") parser.add_option("-i", "--icons", dest="icon_size", default=16, help="Set icons size", metavar="SIZE") (options, args) = parser.parse_args() for arg in args: filename = "" if os.path.exists(arg) or arg == "recent": filename = arg else: tmpfile = "%s/menus/%s.menu" % (xdg_config_home, arg) if os.path.exists(tmpfile): filename = tmpfile else: for dir in xdg_config_dirs: tmpfile = "%s/menus/%s.menu" % (dir, arg) if os.path.exists(tmpfile): filename = tmpfile break if filename == "": continue elif filename == "recent": parse_recent (options.fvwm_menu) else: parse_menu(xdg.Menu.parse(filename), options.fvwm_menu)
normal
{ "blob_id": "214aadb7b3fc125da12f098bde87fce295349fdf", "index": 1917, "step-1": "#!/usr/bin/python2\n#\n# Author: Victor Ananjevsky, 2007 - 2010\n# based on xdg-menu.py, written by Piotr Zielinski (http://www.cl.cam.ac.uk/~pz215/)\n# License: GPL\n#\n# This script takes names of menu files conforming to the XDG Desktop\n# Menu Specification, and outputs their FVWM equivalents to the\n# standard output.\n#\n# http://standards.freedesktop.org/menu-spec/latest/\n#\n# Requirements:\n# pyxdg, pygtk, gnome-menus\n#\n# Syntax:\n# fvwm-xdg-menu.py [-d Menu] menufile1 menufile2 menufile3 ...\n#\n# Each menufile is an XDG menu description file.\n# Icons of menu entries cached in $XDG_CACHE_HOME/fvwm/icons/menu\n#\n# For menufile name `recent' will be generated menu of recently opened files\n#\n# -d mean not print headers for toplevel menu (useful in DynamicPopupAction)\n#\n# Example:\n# fvwm-xdg-menu.py /etc/xdg/menus/applications.menu\n# fvwm-xdg-menu.py applications\n#\n\n\nimport sys\nimport os\nfrom optparse import OptionParser\n\nimport xdg.Menu\nfrom xdg.DesktopEntry import *\nfrom xdg.RecentFiles import *\nfrom xdg.BaseDirectory import xdg_config_dirs, xdg_cache_home\n\nimport gtk\n\n# fix for correct output of unicode chars without terminal\nsys.stdout = codecs.getwriter('utf-8')(sys.stdout)\n\ndef cache_icon (icon):\n ''' cache an icon '''\n icon_file = \"%s/%s.png\" % (cache_path, os.path.basename(icon))\n if os.path.exists(icon_file):\n return\n full_icon = \"%s.png\" % icon\n if os.path.exists(full_icon):\n gtk.gdk.pixbuf_new_from_file_at_size(full_icon, options.icon_size, options.icon_size).save(icon_file, 'png')\n return\n try:\n icon_theme.load_icon(icon, options.icon_size, gtk.ICON_LOOKUP_NO_SVG).save(icon_file, \"png\")\n except:\n pass\n\ndef parse_menu (menu, fvwm_menu = None):\n ''' parse menu file '''\n prefix = \"+\"\n if fvwm_menu == None:\n print ''\n print 'DestroyMenu \"%s\"' % menu\n print 'AddToMenu \"%s\"' % menu\n else:\n print 'DestroyMenu 
recreate %s' % fvwm_menu\n prefix = \"AddToMenu %s\" % fvwm_menu\n\n for entry in menu.getEntries():\n\tif isinstance(entry, xdg.Menu.Menu):\n icon = entry.getIcon()\n print u'%s \"%s%%menu/folder.png%%\" Popup \"%s\"' % (prefix, entry.getName(), entry)\n\telif isinstance(entry, xdg.Menu.MenuEntry):\n desktop = DesktopEntry(entry.DesktopEntry.getFileName())\n icon = desktop.getIcon()\n ind = icon.rfind('.')\n if ind != -1:\n icon = icon[0:ind]\n cmd = desktop.getExec().rstrip('%FUfu')\n cache_icon(icon)\n print u'%s \"%s%%menu/%s.png%%\" Exec exec %s' % (prefix, desktop.getName(), os.path.basename(icon), cmd)\n\telse:\n\t pass\n\n for entry in menu.getEntries():\n\tif isinstance(entry, xdg.Menu.Menu):\n\t parse_menu(entry)\n\ndef parse_recent (fvwm_menu = None):\n ''' parse recently opened files '''\n prefix = \"+\"\n if fvwm_menu == None:\n print ''\n print 'DestroyMenu \"Recent\"'\n print 'AddToMenu \"Recent\"'\n else:\n print 'DestroyMenu recreate %s' % fvwm_menu\n prefix=\"AddToMenu %s\" % fvwm_menu\n \n rm = gtk.RecentManager()\n for rf in rm.get_items():\n print '%s \"%s\" Exec exec xdg-open \"%s\"' % (prefix, rf.get_display_name(), rf.get_uri())\n\n# Start\n\ncache_path = \"%s/fvwm/menu\" % xdg_cache_home\nicon_theme = gtk.icon_theme_get_default()\n\nif not os.path.exists(cache_path):\n os.makedirs(cache_path)\n\n# Parse commandline\n\nparser = OptionParser()\nparser.add_option(\"-d\", \"--dynamic\", dest=\"fvwm_menu\", default=None, help=\"Use in DynamicPopupAction\", metavar=\"MENU\")\nparser.add_option(\"-i\", \"--icons\", dest=\"icon_size\", default=16, help=\"Set icons size\", metavar=\"SIZE\")\n(options, args) = parser.parse_args()\n\nfor arg in args:\n filename = \"\"\n if os.path.exists(arg) or arg == \"recent\":\n filename = arg\n else:\n tmpfile = \"%s/menus/%s.menu\" % (xdg_config_home, arg)\n if os.path.exists(tmpfile):\n filename = tmpfile\n else:\n for dir in xdg_config_dirs:\n tmpfile = \"%s/menus/%s.menu\" % (dir, arg)\n if 
os.path.exists(tmpfile):\n filename = tmpfile\n break\n if filename == \"\":\n continue\n elif filename == \"recent\":\n parse_recent (options.fvwm_menu)\n else:\n parse_menu(xdg.Menu.parse(filename), options.fvwm_menu)\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
# encoding=utf-8 from lib.calculate_time import tic,toc import scipy as sp import numpy as np from lib.make_A import make_A from lib.make_distance import make_distance from lib.lambda_sum_smallest import lambda_sum_smallest from lib.fiedler import fiedler from lib.make_al import make_al import math from lib.newmatrix import newmatrix from lib.grComp import gr_comp from lib.Divide2 import Divide2 def mainFunctionD2( Ad, vertex_num, edge_num, nodes_G, iter_times, group): s = [] s.append(vertex_num) if (vertex_num == 3 or edge_num < 5 or iter_times > 4 ): print "something strange in mainfuntiond2" return iter=1 tic() #the transposed matrix of Adjacent matrix #邻接矩阵补全下三角! size_of_Ad = Ad.shape transposed_Ad = np.transpose(Ad) for i in range(size_of_Ad[0]): for j in range(size_of_Ad[1]): Ad[i][j] = Ad[i][j] or transposed_Ad[i][j] #得出A 有1和-1的34行78列的矩阵 A = make_A(Ad, vertex_num, edge_num) transposed_A = np.transpose(A) #列求和得出度矩阵B 34行一列 看成一个列表就行 B = sum(Ad) #构造一个78*5的距离矩阵 Distance = make_distance(Ad, A, vertex_num, edge_num, B) #变量POS记录Distance中第五行中最大值所在的位置 max_list = [] for each in Distance[:,4]: max_list.append(each) Pos = max_list.index(max(max_list)) + 1 #把度矩阵展开成全矩阵,并且构造拉普拉斯矩阵 D = np.diag(B) L = np.dot(A, transposed_A) W = Ad L1 = D - W cutIndexSign = 0 #构造x为L的升序特征值矩阵 eig_val,eig_vec = np.linalg.eig(L) eig_val_list = [] for each in eig_val: eig_val_list.append(each) eig_val_list = sorted(eig_val_list) x = np.array(eig_val_list) x = np.diag(x) #构造Q得L的正交规范化矩阵(求矩阵正交基) Q = sp.linalg.orth(L) #求L的费德勒向量:第二小特征值的特征向量 v = fiedler(L) #找特征向量的特征值 lambda2 = lambda_sum_smallest(L,2) print "ECCEM" print "切割第"+str(iter)+"次" #t为算法运行的时间,写入time中 t=toc() with open("/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/time.txt","a") as f: f.write(str(t)+"\n") f.close() #求第三小的lambda lambda3 = lambda_sum_smallest(L,3)-lambda2 aa = (v[int(Distance[Pos - 1][0])-1] - v[int(Distance[Pos - 1][1])-1]) ** 2 b1 = 1 + (2 - aa) / (lambda2 - lambda3) low = lambda2 - aa / b1 
#矩阵U是Q的转置和al的积 al = make_al(vertex_num,Distance[Pos-1][0],Distance[Pos-1][1]) transposed_Q = np.transpose(Q) u = np.dot(transposed_Q,al) with open("/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/out.txt","a") as f: f.write(str(lambda2)+"\n") f.close() while(lambda2>math.exp(-23)): cutIndexSigen = 1 if( vertex_num == 1 or edge_num < 3): break #将矩阵中的信息A,edge_num,B进行刷新 result_list = newmatrix(Distance, A, edge_num, B, Pos) A = result_list[0] edge_num = result_list[1] B = result_list[2] Distance = make_distance(Ad, A, vertex_num, edge_num, B) max_list = [] for each in Distance[:,4]: max_list.append(each) Pos = max_list.index(max(max_list)) + 1 iter = iter + 1 print "切割第" + str(iter) + "次" D = np.diag(B) transposed_A = np.transpose(A) L = np.dot(A, transposed_A) v = fiedler(L) #有结点取为零直接跳出循环 list_B = [] for each in B: list_B.append(each) if(0 in list_B): print "Distance_size[0]有节点度为0的孤立节点跳出了循环" break lambda2 = lambda_sum_smallest(L, 2) #写一次时间 t=toc() with open("/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/time.txt","a") as f: f.write(str(t) + "\n") f.close() lambda3 = lambda_sum_smallest(L,3)-lambda2 a1 = (v[int(Distance[Pos - 1][0])-1] - v[int(Distance[Pos - 1][1])-1]) ** 2 b1 = 1 + (2 - a1) / (lambda2 - lambda3) low = lambda2 - a1 / b1 with open("/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/out.txt","a") as f: f.write(str(lambda2) + "\n") f.close() #构造comMatrix 就是Distance的前两行 Distance_size = Distance.shape compMatrix = np.arange(Distance_size[0]*2).reshape(Distance_size[0],2) i = 0 for each in Distance[:,0]: compMatrix[i][0] = each i = i + 1 j = 0 for each in Distance[:,1]: compMatrix[j][1] = each j = j + 1 ncV = gr_comp(compMatrix,vertex_num) s.append(group) s.append(iter_times) with open("/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_group/out.txt","a") as f: f.write(str(s)+"\n") f.closed nodes_G = np.transpose(nodes_G) result_list_of_Divide2 = D
normal
{ "blob_id": "77d545d1a4fc5f96ae19f654a32ab75707434d46", "index": 7614, "step-1": "# encoding=utf-8\nfrom lib.calculate_time import tic,toc\nimport scipy as sp\nimport numpy as np\nfrom lib.make_A import make_A\nfrom lib.make_distance import make_distance\nfrom lib.lambda_sum_smallest import lambda_sum_smallest\nfrom lib.fiedler import fiedler\nfrom lib.make_al import make_al\nimport math\nfrom lib.newmatrix import newmatrix\nfrom lib.grComp import gr_comp\nfrom lib.Divide2 import Divide2\n\ndef mainFunctionD2( Ad, vertex_num, edge_num, nodes_G, iter_times, group):\n s = []\n s.append(vertex_num)\n if (vertex_num == 3 or edge_num < 5 or iter_times > 4 ):\n print \"something strange in mainfuntiond2\"\n return\n iter=1\n tic()\n #the transposed matrix of Adjacent matrix\n #邻接矩阵补全下三角!\n size_of_Ad = Ad.shape\n transposed_Ad = np.transpose(Ad)\n for i in range(size_of_Ad[0]):\n for j in range(size_of_Ad[1]):\n Ad[i][j] = Ad[i][j] or transposed_Ad[i][j]\n #得出A 有1和-1的34行78列的矩阵\n A = make_A(Ad, vertex_num, edge_num)\n transposed_A = np.transpose(A)\n #列求和得出度矩阵B 34行一列 看成一个列表就行\n B = sum(Ad)\n #构造一个78*5的距离矩阵\n Distance = make_distance(Ad, A, vertex_num, edge_num, B)\n #变量POS记录Distance中第五行中最大值所在的位置\n max_list = []\n for each in Distance[:,4]:\n max_list.append(each)\n Pos = max_list.index(max(max_list)) + 1\n #把度矩阵展开成全矩阵,并且构造拉普拉斯矩阵\n D = np.diag(B)\n L = np.dot(A, transposed_A)\n W = Ad\n L1 = D - W\n cutIndexSign = 0\n #构造x为L的升序特征值矩阵\n eig_val,eig_vec = np.linalg.eig(L)\n eig_val_list = []\n for each in eig_val:\n eig_val_list.append(each)\n eig_val_list = sorted(eig_val_list)\n x = np.array(eig_val_list)\n x = np.diag(x)\n #构造Q得L的正交规范化矩阵(求矩阵正交基)\n Q = sp.linalg.orth(L)\n #求L的费德勒向量:第二小特征值的特征向量\n v = fiedler(L)\n #找特征向量的特征值\n lambda2 = lambda_sum_smallest(L,2)\n print \"ECCEM\"\n print \"切割第\"+str(iter)+\"次\"\n #t为算法运行的时间,写入time中\n t=toc()\n with open(\"/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/time.txt\",\"a\") as f:\n 
f.write(str(t)+\"\\n\")\n f.close()\n #求第三小的lambda\n lambda3 = lambda_sum_smallest(L,3)-lambda2\n aa = (v[int(Distance[Pos - 1][0])-1] - v[int(Distance[Pos - 1][1])-1]) ** 2\n\n b1 = 1 + (2 - aa) / (lambda2 - lambda3)\n low = lambda2 - aa / b1\n #矩阵U是Q的转置和al的积\n al = make_al(vertex_num,Distance[Pos-1][0],Distance[Pos-1][1])\n transposed_Q = np.transpose(Q)\n u = np.dot(transposed_Q,al)\n with open(\"/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/out.txt\",\"a\") as f:\n f.write(str(lambda2)+\"\\n\")\n f.close()\n\n while(lambda2>math.exp(-23)):\n cutIndexSigen = 1\n if( vertex_num == 1 or edge_num < 3):\n break\n #将矩阵中的信息A,edge_num,B进行刷新\n\n result_list = newmatrix(Distance, A, edge_num, B, Pos)\n A = result_list[0]\n edge_num = result_list[1]\n B = result_list[2]\n Distance = make_distance(Ad, A, vertex_num, edge_num, B)\n max_list = []\n for each in Distance[:,4]:\n max_list.append(each)\n Pos = max_list.index(max(max_list)) + 1\n iter = iter + 1\n print \"切割第\" + str(iter) + \"次\"\n D = np.diag(B)\n transposed_A = np.transpose(A)\n L = np.dot(A, transposed_A)\n v = fiedler(L)\n\n #有结点取为零直接跳出循环\n list_B = []\n for each in B:\n list_B.append(each)\n if(0 in list_B):\n print \"Distance_size[0]有节点度为0的孤立节点跳出了循环\"\n break\n lambda2 = lambda_sum_smallest(L, 2)\n #写一次时间\n t=toc()\n\n with open(\"/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/time.txt\",\"a\") as f:\n f.write(str(t) + \"\\n\")\n f.close()\n lambda3 = lambda_sum_smallest(L,3)-lambda2\n a1 = (v[int(Distance[Pos - 1][0])-1] - v[int(Distance[Pos - 1][1])-1]) ** 2\n b1 = 1 + (2 - a1) / (lambda2 - lambda3)\n low = lambda2 - a1 / b1\n with open(\"/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_data/out.txt\",\"a\") as f:\n f.write(str(lambda2) + \"\\n\")\n f.close()\n #构造comMatrix 就是Distance的前两行\n Distance_size = Distance.shape\n compMatrix = np.arange(Distance_size[0]*2).reshape(Distance_size[0],2)\n i = 0\n for each in Distance[:,0]:\n 
compMatrix[i][0] = each\n i = i + 1\n j = 0\n for each in Distance[:,1]:\n compMatrix[j][1] = each\n j = j + 1\n ncV = gr_comp(compMatrix,vertex_num)\n s.append(group)\n s.append(iter_times)\n with open(\"/home/a/PycharmProjects/TestZhu/tjufe_1/Social_Network_Graph/output_group/out.txt\",\"a\") as f:\n f.write(str(s)+\"\\n\")\n f.closed\n nodes_G = np.transpose(nodes_G)\n\n\n\n\n\n\n\n\n result_list_of_Divide2 = D\n\n\n\n\n\n\n\n\n\n\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> class ConfigurationContactForm(forms.ModelForm): class Meta: model = ConfigurationContact <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def clean_phone_number_external(self): phone_number_external = self.cleaned_data['phone_number_external'] if phone_number_external: phone_number_external = mobile_number_validation( phone_number_external) if not phone_number_external: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_external <|reserved_special_token_0|> def clean_phone_number_internal(self): phone_number_internal = self.cleaned_data['phone_number_internal'] if phone_number_internal: phone_number_internal = mobile_number_validation( phone_number_internal) if not phone_number_internal: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_internal class ConfigurationLogoForm(forms.ModelForm): class Meta: model = ConfigurationLogo def __init__(self, *args, **kwargs): super(ConfigurationLogoForm, self).__init__(*args, **kwargs) if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' class ConfigurationHomepageForm(forms.ModelForm): class Meta: model = ConfigurationHomepage def __init__(self, *args, **kwargs): super(ConfigurationHomepageForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['header'].widget.attrs['class'] = 'form-text' self.fields['introduction'].widget.attrs['class'] = 'form-textarea' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' def clean_header(self): header = self.cleaned_data['header'] if header: if len(header) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', header): raise forms.ValidationError(_('Enter a valid name.')) return header def clean_introduction(self): introduction = self.cleaned_data['introduction'] if introduction: if 
len(introduction) < 10: raise forms.ValidationError(_('Enter minimum 10 characters.')) elif re.match('^[\\s]*$', introduction): raise forms.ValidationError(_('Enter a valid address.')) return introduction class ConfigurationLocationForm(forms.ModelForm): class Meta: model = ConfigurationLocation def __init__(self, *args, **kwargs): super(ConfigurationLocationForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['country'].widget.attrs['class'] = 'form-dropdownfield' self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' <|reserved_special_token_1|> <|reserved_special_token_0|> class ConfigurationContactForm(forms.ModelForm): class Meta: model = ConfigurationContact <|reserved_special_token_0|> <|reserved_special_token_0|> def clean_name_of_institution(self): name_of_institution = self.cleaned_data['name_of_institution'] if name_of_institution: if len(name_of_institution) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', name_of_institution): raise forms.ValidationError(_('Enter a valid name.')) return name_of_institution def clean_country_code_external(self): country_code_external = self.cleaned_data['country_code_external'] if country_code_external: if len(str(country_code_external)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_external def clean_phone_number_external(self): phone_number_external = self.cleaned_data['phone_number_external'] if phone_number_external: phone_number_external = mobile_number_validation( phone_number_external) if not phone_number_external: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_external <|reserved_special_token_0|> def clean_phone_number_internal(self): phone_number_internal = self.cleaned_data['phone_number_internal'] if phone_number_internal: 
phone_number_internal = mobile_number_validation( phone_number_internal) if not phone_number_internal: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_internal class ConfigurationLogoForm(forms.ModelForm): class Meta: model = ConfigurationLogo def __init__(self, *args, **kwargs): super(ConfigurationLogoForm, self).__init__(*args, **kwargs) if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' class ConfigurationHomepageForm(forms.ModelForm): class Meta: model = ConfigurationHomepage def __init__(self, *args, **kwargs): super(ConfigurationHomepageForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['header'].widget.attrs['class'] = 'form-text' self.fields['introduction'].widget.attrs['class'] = 'form-textarea' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' def clean_header(self): header = self.cleaned_data['header'] if header: if len(header) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', header): raise forms.ValidationError(_('Enter a valid name.')) return header def clean_introduction(self): introduction = self.cleaned_data['introduction'] if introduction: if len(introduction) < 10: raise forms.ValidationError(_('Enter minimum 10 characters.')) elif re.match('^[\\s]*$', introduction): raise forms.ValidationError(_('Enter a valid address.')) return introduction class ConfigurationLocationForm(forms.ModelForm): class Meta: model = ConfigurationLocation def __init__(self, *args, **kwargs): super(ConfigurationLocationForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['country'].widget.attrs['class'] = 'form-dropdownfield' self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' <|reserved_special_token_1|> 
<|reserved_special_token_0|> class ConfigurationContactForm(forms.ModelForm): class Meta: model = ConfigurationContact <|reserved_special_token_0|> def clean(self): phone_number_external = self.cleaned_data.get('phone_number_external') country_code_external = self.cleaned_data.get('country_code_external') phone_number_internal = self.cleaned_data.get('phone_number_internal') country_code_internal = self.cleaned_data.get('country_code_internal') if phone_number_external and not country_code_external: raise forms.ValidationError(_( 'External Country code Field is required .')) if country_code_external and not phone_number_external: raise forms.ValidationError(_( 'External Phone Number Field is required .')) if phone_number_internal and not country_code_internal: raise forms.ValidationError(_( 'Internal Country code Field is required .')) if country_code_internal and not phone_number_internal: raise forms.ValidationError(_( 'Internal Phone Number Field is required .')) return self.cleaned_data def clean_name_of_institution(self): name_of_institution = self.cleaned_data['name_of_institution'] if name_of_institution: if len(name_of_institution) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', name_of_institution): raise forms.ValidationError(_('Enter a valid name.')) return name_of_institution def clean_country_code_external(self): country_code_external = self.cleaned_data['country_code_external'] if country_code_external: if len(str(country_code_external)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_external def clean_phone_number_external(self): phone_number_external = self.cleaned_data['phone_number_external'] if phone_number_external: phone_number_external = mobile_number_validation( phone_number_external) if not phone_number_external: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_external def clean_country_code_internal(self): country_code_internal 
= self.cleaned_data['country_code_internal'] if country_code_internal: if len(str(country_code_internal)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_internal def clean_phone_number_internal(self): phone_number_internal = self.cleaned_data['phone_number_internal'] if phone_number_internal: phone_number_internal = mobile_number_validation( phone_number_internal) if not phone_number_internal: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_internal class ConfigurationLogoForm(forms.ModelForm): class Meta: model = ConfigurationLogo def __init__(self, *args, **kwargs): super(ConfigurationLogoForm, self).__init__(*args, **kwargs) if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' class ConfigurationHomepageForm(forms.ModelForm): class Meta: model = ConfigurationHomepage def __init__(self, *args, **kwargs): super(ConfigurationHomepageForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['header'].widget.attrs['class'] = 'form-text' self.fields['introduction'].widget.attrs['class'] = 'form-textarea' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' def clean_header(self): header = self.cleaned_data['header'] if header: if len(header) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', header): raise forms.ValidationError(_('Enter a valid name.')) return header def clean_introduction(self): introduction = self.cleaned_data['introduction'] if introduction: if len(introduction) < 10: raise forms.ValidationError(_('Enter minimum 10 characters.')) elif re.match('^[\\s]*$', introduction): raise forms.ValidationError(_('Enter a valid address.')) return introduction class ConfigurationLocationForm(forms.ModelForm): class Meta: model = ConfigurationLocation def __init__(self, *args, **kwargs): super(ConfigurationLocationForm, self).__init__(*args, **kwargs) 
self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['country'].widget.attrs['class'] = 'form-dropdownfield' self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' <|reserved_special_token_1|> <|reserved_special_token_0|> class ConfigurationContactForm(forms.ModelForm): class Meta: model = ConfigurationContact def __init__(self, *args, **kwargs): super(ConfigurationContactForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['name_of_institution'].widget.attrs['class'] = 'form-text' self.fields['email_external'].widget.attrs['class'] = 'form-text' self.fields['country_code_external'].widget.attrs['class' ] = 'form-text-small' self.fields['phone_number_external'].widget.attrs['class' ] = 'form-text-phone' self.fields['email_internal'].widget.attrs['class'] = 'form-text' self.fields['country_code_internal'].widget.attrs['class' ] = 'form-text-small' self.fields['phone_number_internal'].widget.attrs['class' ] = 'form-text-phone' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' def clean(self): phone_number_external = self.cleaned_data.get('phone_number_external') country_code_external = self.cleaned_data.get('country_code_external') phone_number_internal = self.cleaned_data.get('phone_number_internal') country_code_internal = self.cleaned_data.get('country_code_internal') if phone_number_external and not country_code_external: raise forms.ValidationError(_( 'External Country code Field is required .')) if country_code_external and not phone_number_external: raise forms.ValidationError(_( 'External Phone Number Field is required .')) if phone_number_internal and not country_code_internal: raise forms.ValidationError(_( 'Internal Country code Field is required .')) if country_code_internal and not phone_number_internal: raise forms.ValidationError(_( 'Internal 
Phone Number Field is required .')) return self.cleaned_data def clean_name_of_institution(self): name_of_institution = self.cleaned_data['name_of_institution'] if name_of_institution: if len(name_of_institution) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', name_of_institution): raise forms.ValidationError(_('Enter a valid name.')) return name_of_institution def clean_country_code_external(self): country_code_external = self.cleaned_data['country_code_external'] if country_code_external: if len(str(country_code_external)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_external def clean_phone_number_external(self): phone_number_external = self.cleaned_data['phone_number_external'] if phone_number_external: phone_number_external = mobile_number_validation( phone_number_external) if not phone_number_external: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_external def clean_country_code_internal(self): country_code_internal = self.cleaned_data['country_code_internal'] if country_code_internal: if len(str(country_code_internal)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_internal def clean_phone_number_internal(self): phone_number_internal = self.cleaned_data['phone_number_internal'] if phone_number_internal: phone_number_internal = mobile_number_validation( phone_number_internal) if not phone_number_internal: raise forms.ValidationError(_('Enter a valid contact number')) return phone_number_internal class ConfigurationLogoForm(forms.ModelForm): class Meta: model = ConfigurationLogo def __init__(self, *args, **kwargs): super(ConfigurationLogoForm, self).__init__(*args, **kwargs) if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' class ConfigurationHomepageForm(forms.ModelForm): class Meta: model = ConfigurationHomepage def __init__(self, *args, **kwargs): super(ConfigurationHomepageForm, 
self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['header'].widget.attrs['class'] = 'form-text' self.fields['introduction'].widget.attrs['class'] = 'form-textarea' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' def clean_header(self): header = self.cleaned_data['header'] if header: if len(header) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match('^[\\s]*$', header): raise forms.ValidationError(_('Enter a valid name.')) return header def clean_introduction(self): introduction = self.cleaned_data['introduction'] if introduction: if len(introduction) < 10: raise forms.ValidationError(_('Enter minimum 10 characters.')) elif re.match('^[\\s]*$', introduction): raise forms.ValidationError(_('Enter a valid address.')) return introduction class ConfigurationLocationForm(forms.ModelForm): class Meta: model = ConfigurationLocation def __init__(self, *args, **kwargs): super(ConfigurationLocationForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['country'].widget.attrs['class'] = 'form-dropdownfield' self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = '' <|reserved_special_token_1|> import re import datetime from django import forms from django.utils.translation import ugettext as _ from vcg.util.forms import mobile_number_validation from vcg.company_management.models import ConfigurationContact, ConfigurationLogo, ConfigurationHomepage, ConfigurationLocation class ConfigurationContactForm(forms.ModelForm): class Meta: model = ConfigurationContact def __init__(self, *args, **kwargs): super(ConfigurationContactForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['name_of_institution'].widget.attrs['class'] = 'form-text' 
self.fields['email_external'].widget.attrs['class'] = 'form-text' self.fields['country_code_external'].widget.attrs['class'] = 'form-text-small' self.fields['phone_number_external'].widget.attrs['class'] = 'form-text-phone' self.fields['email_internal'].widget.attrs['class'] = 'form-text' self.fields['country_code_internal'].widget.attrs['class'] = 'form-text-small' self.fields['phone_number_internal'].widget.attrs['class'] = 'form-text-phone' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = "" def clean(self): phone_number_external = self.cleaned_data.get("phone_number_external") country_code_external = self.cleaned_data.get("country_code_external") phone_number_internal = self.cleaned_data.get("phone_number_internal") country_code_internal = self.cleaned_data.get("country_code_internal") if phone_number_external and not country_code_external: raise forms.ValidationError(_('External Country code Field is required .')) if country_code_external and not phone_number_external: raise forms.ValidationError(_('External Phone Number Field is required .')) if phone_number_internal and not country_code_internal: raise forms.ValidationError(_('Internal Country code Field is required .')) if country_code_internal and not phone_number_internal: raise forms.ValidationError(_('Internal Phone Number Field is required .')) return self.cleaned_data def clean_name_of_institution(self): name_of_institution = self.cleaned_data['name_of_institution'] if name_of_institution: if len(name_of_institution) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match(r'^[\s]*$', name_of_institution): raise forms.ValidationError(_("Enter a valid name.")) return name_of_institution def clean_country_code_external(self): country_code_external = self.cleaned_data['country_code_external'] if country_code_external: if len(str(country_code_external)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_external def 
clean_phone_number_external(self): phone_number_external = self.cleaned_data['phone_number_external'] if phone_number_external: phone_number_external = mobile_number_validation(phone_number_external) if not phone_number_external: raise forms.ValidationError(_("Enter a valid contact number")) return phone_number_external def clean_country_code_internal(self): country_code_internal = self.cleaned_data['country_code_internal'] if country_code_internal: if len(str(country_code_internal)) > 5: raise forms.ValidationError(_('maximum 5 characters.')) return country_code_internal def clean_phone_number_internal(self): phone_number_internal = self.cleaned_data['phone_number_internal'] if phone_number_internal: phone_number_internal = mobile_number_validation(phone_number_internal) if not phone_number_internal: raise forms.ValidationError(_("Enter a valid contact number")) return phone_number_internal class ConfigurationLogoForm(forms.ModelForm): class Meta: model = ConfigurationLogo def __init__(self, *args, **kwargs): super(ConfigurationLogoForm, self).__init__(*args, **kwargs) if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = "" class ConfigurationHomepageForm(forms.ModelForm): class Meta: model = ConfigurationHomepage def __init__(self, *args, **kwargs): super(ConfigurationHomepageForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['header'].widget.attrs['class'] = 'form-text' self.fields['introduction'].widget.attrs['class'] = 'form-textarea' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = "" def clean_header(self): header = self.cleaned_data['header'] if header: if len(header) < 3: raise forms.ValidationError(_('Enter minimum 3 characters.')) elif re.match(r'^[\s]*$', header): raise forms.ValidationError(_("Enter a valid name.")) return header def clean_introduction(self): introduction = self.cleaned_data['introduction'] if introduction: if 
len(introduction) < 10: raise forms.ValidationError(_('Enter minimum 10 characters.')) elif re.match(r'^[\s]*$', introduction): raise forms.ValidationError(_("Enter a valid address.")) return introduction class ConfigurationLocationForm(forms.ModelForm): class Meta: model = ConfigurationLocation def __init__(self, *args, **kwargs): super(ConfigurationLocationForm, self).__init__(*args, **kwargs) self.fields['company'].widget.attrs['class'] = 'form-dropdownfield' self.fields['country'].widget.attrs['class'] = 'form-dropdownfield' self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield' if 'instance' in kwargs: self.id = kwargs['instance'].id else: self.id = ""
flexible
{ "blob_id": "f6f1cd95e4aaa5e434c3cf3cff0d46b45fc7b830", "index": 6190, "step-1": "<mask token>\n\n\nclass ConfigurationContactForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationContact\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clean_phone_number_external(self):\n phone_number_external = self.cleaned_data['phone_number_external']\n if phone_number_external:\n phone_number_external = mobile_number_validation(\n phone_number_external)\n if not phone_number_external:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_external\n <mask token>\n\n def clean_phone_number_internal(self):\n phone_number_internal = self.cleaned_data['phone_number_internal']\n if phone_number_internal:\n phone_number_internal = mobile_number_validation(\n phone_number_internal)\n if not phone_number_internal:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_internal\n\n\nclass ConfigurationLogoForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLogo\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLogoForm, self).__init__(*args, **kwargs)\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n\nclass ConfigurationHomepageForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationHomepage\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationHomepageForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['header'].widget.attrs['class'] = 'form-text'\n self.fields['introduction'].widget.attrs['class'] = 'form-textarea'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n def clean_header(self):\n header = self.cleaned_data['header']\n if header:\n if len(header) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', header):\n raise forms.ValidationError(_('Enter a valid 
name.'))\n return header\n\n def clean_introduction(self):\n introduction = self.cleaned_data['introduction']\n if introduction:\n if len(introduction) < 10:\n raise forms.ValidationError(_('Enter minimum 10 characters.'))\n elif re.match('^[\\\\s]*$', introduction):\n raise forms.ValidationError(_('Enter a valid address.'))\n return introduction\n\n\nclass ConfigurationLocationForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLocation\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLocationForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['country'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n", "step-2": "<mask token>\n\n\nclass ConfigurationContactForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationContact\n <mask token>\n <mask token>\n\n def clean_name_of_institution(self):\n name_of_institution = self.cleaned_data['name_of_institution']\n if name_of_institution:\n if len(name_of_institution) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', name_of_institution):\n raise forms.ValidationError(_('Enter a valid name.'))\n return name_of_institution\n\n def clean_country_code_external(self):\n country_code_external = self.cleaned_data['country_code_external']\n if country_code_external:\n if len(str(country_code_external)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_external\n\n def clean_phone_number_external(self):\n phone_number_external = self.cleaned_data['phone_number_external']\n if phone_number_external:\n phone_number_external = mobile_number_validation(\n phone_number_external)\n if not phone_number_external:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return 
phone_number_external\n <mask token>\n\n def clean_phone_number_internal(self):\n phone_number_internal = self.cleaned_data['phone_number_internal']\n if phone_number_internal:\n phone_number_internal = mobile_number_validation(\n phone_number_internal)\n if not phone_number_internal:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_internal\n\n\nclass ConfigurationLogoForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLogo\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLogoForm, self).__init__(*args, **kwargs)\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n\nclass ConfigurationHomepageForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationHomepage\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationHomepageForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['header'].widget.attrs['class'] = 'form-text'\n self.fields['introduction'].widget.attrs['class'] = 'form-textarea'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n def clean_header(self):\n header = self.cleaned_data['header']\n if header:\n if len(header) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', header):\n raise forms.ValidationError(_('Enter a valid name.'))\n return header\n\n def clean_introduction(self):\n introduction = self.cleaned_data['introduction']\n if introduction:\n if len(introduction) < 10:\n raise forms.ValidationError(_('Enter minimum 10 characters.'))\n elif re.match('^[\\\\s]*$', introduction):\n raise forms.ValidationError(_('Enter a valid address.'))\n return introduction\n\n\nclass ConfigurationLocationForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLocation\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLocationForm, self).__init__(*args, **kwargs)\n 
self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['country'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n", "step-3": "<mask token>\n\n\nclass ConfigurationContactForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationContact\n <mask token>\n\n def clean(self):\n phone_number_external = self.cleaned_data.get('phone_number_external')\n country_code_external = self.cleaned_data.get('country_code_external')\n phone_number_internal = self.cleaned_data.get('phone_number_internal')\n country_code_internal = self.cleaned_data.get('country_code_internal')\n if phone_number_external and not country_code_external:\n raise forms.ValidationError(_(\n 'External Country code Field is required .'))\n if country_code_external and not phone_number_external:\n raise forms.ValidationError(_(\n 'External Phone Number Field is required .'))\n if phone_number_internal and not country_code_internal:\n raise forms.ValidationError(_(\n 'Internal Country code Field is required .'))\n if country_code_internal and not phone_number_internal:\n raise forms.ValidationError(_(\n 'Internal Phone Number Field is required .'))\n return self.cleaned_data\n\n def clean_name_of_institution(self):\n name_of_institution = self.cleaned_data['name_of_institution']\n if name_of_institution:\n if len(name_of_institution) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', name_of_institution):\n raise forms.ValidationError(_('Enter a valid name.'))\n return name_of_institution\n\n def clean_country_code_external(self):\n country_code_external = self.cleaned_data['country_code_external']\n if country_code_external:\n if len(str(country_code_external)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_external\n\n def 
clean_phone_number_external(self):\n phone_number_external = self.cleaned_data['phone_number_external']\n if phone_number_external:\n phone_number_external = mobile_number_validation(\n phone_number_external)\n if not phone_number_external:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_external\n\n def clean_country_code_internal(self):\n country_code_internal = self.cleaned_data['country_code_internal']\n if country_code_internal:\n if len(str(country_code_internal)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_internal\n\n def clean_phone_number_internal(self):\n phone_number_internal = self.cleaned_data['phone_number_internal']\n if phone_number_internal:\n phone_number_internal = mobile_number_validation(\n phone_number_internal)\n if not phone_number_internal:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_internal\n\n\nclass ConfigurationLogoForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLogo\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLogoForm, self).__init__(*args, **kwargs)\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n\nclass ConfigurationHomepageForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationHomepage\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationHomepageForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['header'].widget.attrs['class'] = 'form-text'\n self.fields['introduction'].widget.attrs['class'] = 'form-textarea'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n def clean_header(self):\n header = self.cleaned_data['header']\n if header:\n if len(header) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', header):\n raise forms.ValidationError(_('Enter a valid name.'))\n 
return header\n\n def clean_introduction(self):\n introduction = self.cleaned_data['introduction']\n if introduction:\n if len(introduction) < 10:\n raise forms.ValidationError(_('Enter minimum 10 characters.'))\n elif re.match('^[\\\\s]*$', introduction):\n raise forms.ValidationError(_('Enter a valid address.'))\n return introduction\n\n\nclass ConfigurationLocationForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLocation\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLocationForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['country'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n", "step-4": "<mask token>\n\n\nclass ConfigurationContactForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationContact\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationContactForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['name_of_institution'].widget.attrs['class'] = 'form-text'\n self.fields['email_external'].widget.attrs['class'] = 'form-text'\n self.fields['country_code_external'].widget.attrs['class'\n ] = 'form-text-small'\n self.fields['phone_number_external'].widget.attrs['class'\n ] = 'form-text-phone'\n self.fields['email_internal'].widget.attrs['class'] = 'form-text'\n self.fields['country_code_internal'].widget.attrs['class'\n ] = 'form-text-small'\n self.fields['phone_number_internal'].widget.attrs['class'\n ] = 'form-text-phone'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n def clean(self):\n phone_number_external = self.cleaned_data.get('phone_number_external')\n country_code_external = self.cleaned_data.get('country_code_external')\n phone_number_internal = 
self.cleaned_data.get('phone_number_internal')\n country_code_internal = self.cleaned_data.get('country_code_internal')\n if phone_number_external and not country_code_external:\n raise forms.ValidationError(_(\n 'External Country code Field is required .'))\n if country_code_external and not phone_number_external:\n raise forms.ValidationError(_(\n 'External Phone Number Field is required .'))\n if phone_number_internal and not country_code_internal:\n raise forms.ValidationError(_(\n 'Internal Country code Field is required .'))\n if country_code_internal and not phone_number_internal:\n raise forms.ValidationError(_(\n 'Internal Phone Number Field is required .'))\n return self.cleaned_data\n\n def clean_name_of_institution(self):\n name_of_institution = self.cleaned_data['name_of_institution']\n if name_of_institution:\n if len(name_of_institution) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', name_of_institution):\n raise forms.ValidationError(_('Enter a valid name.'))\n return name_of_institution\n\n def clean_country_code_external(self):\n country_code_external = self.cleaned_data['country_code_external']\n if country_code_external:\n if len(str(country_code_external)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_external\n\n def clean_phone_number_external(self):\n phone_number_external = self.cleaned_data['phone_number_external']\n if phone_number_external:\n phone_number_external = mobile_number_validation(\n phone_number_external)\n if not phone_number_external:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_external\n\n def clean_country_code_internal(self):\n country_code_internal = self.cleaned_data['country_code_internal']\n if country_code_internal:\n if len(str(country_code_internal)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_internal\n\n def 
clean_phone_number_internal(self):\n phone_number_internal = self.cleaned_data['phone_number_internal']\n if phone_number_internal:\n phone_number_internal = mobile_number_validation(\n phone_number_internal)\n if not phone_number_internal:\n raise forms.ValidationError(_('Enter a valid contact number'))\n return phone_number_internal\n\n\nclass ConfigurationLogoForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLogo\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLogoForm, self).__init__(*args, **kwargs)\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n\nclass ConfigurationHomepageForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationHomepage\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationHomepageForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['header'].widget.attrs['class'] = 'form-text'\n self.fields['introduction'].widget.attrs['class'] = 'form-textarea'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n\n def clean_header(self):\n header = self.cleaned_data['header']\n if header:\n if len(header) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match('^[\\\\s]*$', header):\n raise forms.ValidationError(_('Enter a valid name.'))\n return header\n\n def clean_introduction(self):\n introduction = self.cleaned_data['introduction']\n if introduction:\n if len(introduction) < 10:\n raise forms.ValidationError(_('Enter minimum 10 characters.'))\n elif re.match('^[\\\\s]*$', introduction):\n raise forms.ValidationError(_('Enter a valid address.'))\n return introduction\n\n\nclass ConfigurationLocationForm(forms.ModelForm):\n\n\n class Meta:\n model = ConfigurationLocation\n\n def __init__(self, *args, **kwargs):\n super(ConfigurationLocationForm, self).__init__(*args, **kwargs)\n self.fields['company'].widget.attrs['class'] = 
'form-dropdownfield'\n self.fields['country'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield'\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = ''\n", "step-5": "import re \nimport datetime\n\nfrom django import forms\nfrom django.utils.translation import ugettext as _\n\nfrom vcg.util.forms import mobile_number_validation\nfrom vcg.company_management.models import ConfigurationContact, ConfigurationLogo, ConfigurationHomepage, ConfigurationLocation\n\nclass ConfigurationContactForm(forms.ModelForm):\n class Meta:\n model = ConfigurationContact\n \n def __init__(self, *args, **kwargs):\n super(ConfigurationContactForm, self).__init__(*args, **kwargs)\n \n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['name_of_institution'].widget.attrs['class'] = 'form-text'\n self.fields['email_external'].widget.attrs['class'] = 'form-text'\n self.fields['country_code_external'].widget.attrs['class'] = 'form-text-small'\n self.fields['phone_number_external'].widget.attrs['class'] = 'form-text-phone'\n self.fields['email_internal'].widget.attrs['class'] = 'form-text'\n self.fields['country_code_internal'].widget.attrs['class'] = 'form-text-small'\n self.fields['phone_number_internal'].widget.attrs['class'] = 'form-text-phone'\n\n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = \"\" \n def clean(self):\n phone_number_external = self.cleaned_data.get(\"phone_number_external\")\n country_code_external = self.cleaned_data.get(\"country_code_external\")\n \n phone_number_internal = self.cleaned_data.get(\"phone_number_internal\")\n country_code_internal = self.cleaned_data.get(\"country_code_internal\")\n \n if phone_number_external and not country_code_external:\n raise forms.ValidationError(_('External Country code Field is required .')) \n if country_code_external and not phone_number_external:\n raise 
forms.ValidationError(_('External Phone Number Field is required .')) \n\n if phone_number_internal and not country_code_internal:\n raise forms.ValidationError(_('Internal Country code Field is required .')) \n if country_code_internal and not phone_number_internal:\n raise forms.ValidationError(_('Internal Phone Number Field is required .')) \n \n return self.cleaned_data \n \n def clean_name_of_institution(self):\n name_of_institution = self.cleaned_data['name_of_institution']\n if name_of_institution:\n if len(name_of_institution) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match(r'^[\\s]*$', name_of_institution):\n raise forms.ValidationError(_(\"Enter a valid name.\"))\n return name_of_institution \n\n def clean_country_code_external(self):\n country_code_external = self.cleaned_data['country_code_external']\n if country_code_external:\n if len(str(country_code_external)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_external \n \n def clean_phone_number_external(self):\n phone_number_external = self.cleaned_data['phone_number_external']\n if phone_number_external:\n phone_number_external = mobile_number_validation(phone_number_external)\n if not phone_number_external:\n raise forms.ValidationError(_(\"Enter a valid contact number\"))\n return phone_number_external \n\n def clean_country_code_internal(self):\n country_code_internal = self.cleaned_data['country_code_internal']\n if country_code_internal:\n if len(str(country_code_internal)) > 5:\n raise forms.ValidationError(_('maximum 5 characters.'))\n return country_code_internal \n \n def clean_phone_number_internal(self):\n phone_number_internal = self.cleaned_data['phone_number_internal']\n if phone_number_internal:\n phone_number_internal = mobile_number_validation(phone_number_internal)\n if not phone_number_internal:\n raise forms.ValidationError(_(\"Enter a valid contact number\"))\n return phone_number_internal \n\nclass 
ConfigurationLogoForm(forms.ModelForm):\n class Meta:\n model = ConfigurationLogo\n \n def __init__(self, *args, **kwargs):\n super(ConfigurationLogoForm, self).__init__(*args, **kwargs)\n \n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = \"\" \n \nclass ConfigurationHomepageForm(forms.ModelForm):\n class Meta:\n model = ConfigurationHomepage\n \n def __init__(self, *args, **kwargs):\n super(ConfigurationHomepageForm, self).__init__(*args, **kwargs)\n \n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['header'].widget.attrs['class'] = 'form-text'\n self.fields['introduction'].widget.attrs['class'] = 'form-textarea'\n \n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = \"\"\n\n def clean_header(self):\n header = self.cleaned_data['header']\n if header:\n if len(header) < 3:\n raise forms.ValidationError(_('Enter minimum 3 characters.'))\n elif re.match(r'^[\\s]*$', header):\n raise forms.ValidationError(_(\"Enter a valid name.\"))\n return header\n\n def clean_introduction(self):\n introduction = self.cleaned_data['introduction']\n if introduction:\n if len(introduction) < 10:\n raise forms.ValidationError(_('Enter minimum 10 characters.'))\n elif re.match(r'^[\\s]*$', introduction):\n raise forms.ValidationError(_(\"Enter a valid address.\"))\n return introduction \n \nclass ConfigurationLocationForm(forms.ModelForm):\n class Meta:\n model = ConfigurationLocation\n \n def __init__(self, *args, **kwargs):\n super(ConfigurationLocationForm, self).__init__(*args, **kwargs)\n \n self.fields['company'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['country'].widget.attrs['class'] = 'form-dropdownfield'\n self.fields['continent'].widget.attrs['class'] = 'form-dropdownfield'\n \n if 'instance' in kwargs:\n self.id = kwargs['instance'].id\n else:\n self.id = \"\" ", "step-ids": [ 11, 13, 15, 16, 18 ] }
[ 11, 13, 15, 16, 18 ]
import contextlib
import datetime
import fnmatch
import os
import os.path
import re
import subprocess
import sys

import click
import dataset


def get_cmd_output(cmd):
    """Run `cmd` in a shell and return its combined stdout/stderr as text.

    A failing command is not fatal here: we still want whatever output it
    produced, so CalledProcessError is caught and its captured output used.
    Output is decoded as UTF-8, falling back to Latin-1 (which never raises)
    for repos containing legacy-encoded commit messages.
    """
    try:
        data = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as ex:
        data = ex.output
    try:
        data = data.decode("utf-8")
    except UnicodeDecodeError:
        data = data.decode("latin1")
    return data

def load_commits(db, repo_name):
    """Load the commits from the current directory repo into `db`.

    Parameters
    ----------
    db:
        A `dataset` database handle; entering it as a context manager wraps
        the inserts in one transaction.
    repo_name:
        String stored in the ``repo`` column of every inserted row.
    """
    # Each commit is rendered as five "key: value" header lines (date, hash,
    # auth, name, subj), then the free-form body, then a sentinel line that
    # cannot appear in ordinary commit text, e.g.:
    #
    #   date: 2021-04-21T16:13:23-04:00
    #   hash: efa13ff1d2fb3d8b2ddee8be0868ae60f9bc35a6
    #   auth: julia.eskew@edx.org
    #   name: Julia Eskew
    #   subj: fix: TNL-8233: Change exception raised at problem creation failure ...
    #   <optional body lines>
    #   -=:=-=:=-=:=-=:=-=:=-=:=-=:=-
    SEP = "-=:=-=:=-=:=-=:=-=:=-=:=-=:=-"
    GITLOG = f"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'"
    SHORT_LINES = 5

    with db:
        commit_table = db["commits"]

        log = get_cmd_output(GITLOG)
        # Split on the bare sentinel: with `format:` git emits no newline after
        # the final entry, so splitting on SEP + "\n" would leave the sentinel
        # glued onto the last commit's body.  Entries then carry leading and/or
        # trailing newlines, which we strip before parsing.
        for commit in log.split(SEP):
            commit = commit.strip("\n")
            if not commit:
                continue
            lines = commit.split("\n", maxsplit=SHORT_LINES)
            row = {"repo": repo_name}
            for line in lines[:SHORT_LINES]:
                key, val = line.split(": ", maxsplit=1)
                row[key] = val
            # A commit with an empty body yields exactly SHORT_LINES lines,
            # so guard the index before reading the remainder.
            row["body"] = lines[SHORT_LINES].strip() if len(lines) > SHORT_LINES else ""
            analyze_commit(row)
            commit_table.insert(row)

# Strict Conventional Commits subject: a known label, optional "!" for a
# breaking change, then ": " and the subject text.
STRICT = r"""(?x)
    ^
    (?P<label>build|chore|docs|feat|fix|perf|refactor|revert|style|test|temp)
    (?P<breaking>!?):\s
    (?P<subjtext>.+)
    $
    """

# Lax variant: any word label (case-insensitive), optional "(scope)".
LAX = r"""(?xi)
    ^
    (?P<label>\w+)
    (?:\(\w+\))?
    (?P<breaking>!?):\s
    (?P<subjtext>.+)
    $
    """

def analyze_commit(row):
    """Annotate `row` in place with conventional-commit information.

    Sets boolean ``conventional`` and ``lax`` flags; when the subject parses
    under either pattern, also sets ``label``, ``breaking`` and ``subjtext``.
    Always sets ``bodylines`` to the number of lines in the commit body.
    """
    row["conventional"] = row["lax"] = False
    m = re.search(STRICT, row["subj"])
    if m:
        row["conventional"] = True
    else:
        m = re.search(LAX, row["subj"])
        if m:
            row["lax"] = True
    if m:
        row["label"] = m["label"]
        row["breaking"] = bool(m["breaking"])
        row["subjtext"] = m["subjtext"]
    row["bodylines"] = len(row["body"].splitlines())

@contextlib.contextmanager
def change_dir(new_dir):
    """Change directory, and then change back.

    Use as a context manager, it will give you the new directory, and later
    restore the old one.

    """
    old_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield os.getcwd()
    finally:
        os.chdir(old_dir)

@click.command(help="Collect stats about commits in local git repos")
@click.option("--db", "dbfile", default="commits.db", help="SQLite database file to write to")
@click.option("--ignore", multiple=True, help="Repos to ignore")
@click.option("--require", help="A file that must exist to process the repo")
@click.argument("repos", nargs=-1)
def main(dbfile, ignore, require, repos):
    """For each repo directory given, load its commit history into the SQLite db."""
    db = dataset.connect("sqlite:///" + dbfile)
    for repo in repos:
        if any(fnmatch.fnmatch(repo, pat) for pat in ignore):
            print(f"Ignoring {repo}")
            continue
        if require is not None:
            if not os.path.exists(os.path.join(repo, require)):
                print(f"Skipping {repo}")
                continue
        print(repo)
        with change_dir(repo) as repo_dir:
            # Use the last two path components ("org/repo") as the repo name.
            repo_name = "/".join(repo_dir.split("/")[-2:])
            load_commits(db, repo_name)

if __name__ == "__main__":
    main()

# then:
# gittreeif nedbat/meta/installed python /src/ghorg/commitstats.py /src/ghorg/commits.db
#
# in sqlite:
# select strftime("%Y%W", date, "weekday 0") as yw, count(*) total, sum(conventional) as con from commits group by yw;
# select yw, total, con, cast((con*100.0)/total as integer) pctcon from (select strftime("%Y%W", date, "weekday 0") as yw, count(*) total, sum(conventional) as con from commits group by yw);

"""
    select
        weekend, total, con, cast((con*100.0)/total as integer) pctcon, bod, cast((bod*100.0)/total as integer) pctbod
    from (
        select
            strftime("%Y%m%d", date, "weekday 0") as weekend,
            count(*) total,
            sum(conventional) as con, sum(bodylines > 0) as bod
        from commits where repo = "edx/edx-platform" group by weekend
    )
    where weekend > '202009';
"""
normal
{ "blob_id": "16446c2c5612a14d4364cbefb949da0b473f7454", "index": 7934, "step-1": "<mask token>\n\n\ndef analyze_commit(row):\n row['conventional'] = row['lax'] = False\n m = re.search(STRICT, row['subj'])\n if m:\n row['conventional'] = True\n else:\n m = re.search(LAX, row['subj'])\n if m:\n row['lax'] = True\n if m:\n row['label'] = m['label']\n row['breaking'] = bool(m['breaking'])\n row['subjtext'] = m['subjtext']\n row['bodylines'] = len(row['body'].splitlines())\n\n\n<mask token>\n\n\n@click.command(help='Collect stats about commits in local git repos')\n@click.option('--db', 'dbfile', default='commits.db', help=\n 'SQLite database file to write to')\n@click.option('--ignore', multiple=True, help='Repos to ignore')\n@click.option('--require', help='A file that must exist to process the repo')\n@click.argument('repos', nargs=-1)\ndef main(dbfile, ignore, require, repos):\n db = dataset.connect('sqlite:///' + dbfile)\n for repo in repos:\n if any(fnmatch.fnmatch(repo, pat) for pat in ignore):\n print(f'Ignoring {repo}')\n continue\n if require is not None:\n if not os.path.exists(os.path.join(repo, require)):\n print(f'Skipping {repo}')\n continue\n print(repo)\n with change_dir(repo) as repo_dir:\n repo_name = '/'.join(repo_dir.split('/')[-2:])\n load_commits(db, repo_name)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_cmd_output(cmd):\n \"\"\"Run a command in shell, and return the Unicode output.\"\"\"\n try:\n data = subprocess.check_output(cmd, shell=True, stderr=subprocess.\n STDOUT)\n except subprocess.CalledProcessError as ex:\n data = ex.output\n try:\n data = data.decode('utf-8')\n except UnicodeDecodeError:\n data = data.decode('latin1')\n return data\n\n\ndef load_commits(db, repo_name):\n \"\"\"Load the commits from the current directory repo.\"\"\"\n SEP = '-=:=-=:=-=:=-=:=-=:=-=:=-=:=-'\n GITLOG = (\n f\"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'\"\n )\n SHORT_LINES = 5\n with 
db:\n commit_table = db['commits']\n log = get_cmd_output(GITLOG)\n for i, commit in enumerate(log.split(SEP + '\\n')):\n if commit:\n lines = commit.split('\\n', maxsplit=SHORT_LINES)\n row = {'repo': repo_name}\n for line in lines[:SHORT_LINES]:\n key, val = line.split(': ', maxsplit=1)\n row[key] = val\n row['body'] = lines[SHORT_LINES].strip()\n analyze_commit(row)\n commit_table.insert(row)\n\n\n<mask token>\n\n\ndef analyze_commit(row):\n row['conventional'] = row['lax'] = False\n m = re.search(STRICT, row['subj'])\n if m:\n row['conventional'] = True\n else:\n m = re.search(LAX, row['subj'])\n if m:\n row['lax'] = True\n if m:\n row['label'] = m['label']\n row['breaking'] = bool(m['breaking'])\n row['subjtext'] = m['subjtext']\n row['bodylines'] = len(row['body'].splitlines())\n\n\n@contextlib.contextmanager\ndef change_dir(new_dir):\n \"\"\"Change directory, and then change back.\n\n Use as a context manager, it will give you the new directory, and later\n restore the old one.\n\n \"\"\"\n old_dir = os.getcwd()\n os.chdir(new_dir)\n try:\n yield os.getcwd()\n finally:\n os.chdir(old_dir)\n\n\n@click.command(help='Collect stats about commits in local git repos')\n@click.option('--db', 'dbfile', default='commits.db', help=\n 'SQLite database file to write to')\n@click.option('--ignore', multiple=True, help='Repos to ignore')\n@click.option('--require', help='A file that must exist to process the repo')\n@click.argument('repos', nargs=-1)\ndef main(dbfile, ignore, require, repos):\n db = dataset.connect('sqlite:///' + dbfile)\n for repo in repos:\n if any(fnmatch.fnmatch(repo, pat) for pat in ignore):\n print(f'Ignoring {repo}')\n continue\n if require is not None:\n if not os.path.exists(os.path.join(repo, require)):\n print(f'Skipping {repo}')\n continue\n print(repo)\n with change_dir(repo) as repo_dir:\n repo_name = '/'.join(repo_dir.split('/')[-2:])\n load_commits(db, repo_name)\n\n\nif __name__ == '__main__':\n main()\n<mask token>\n", "step-3": "<mask 
token>\n\n\ndef get_cmd_output(cmd):\n \"\"\"Run a command in shell, and return the Unicode output.\"\"\"\n try:\n data = subprocess.check_output(cmd, shell=True, stderr=subprocess.\n STDOUT)\n except subprocess.CalledProcessError as ex:\n data = ex.output\n try:\n data = data.decode('utf-8')\n except UnicodeDecodeError:\n data = data.decode('latin1')\n return data\n\n\ndef load_commits(db, repo_name):\n \"\"\"Load the commits from the current directory repo.\"\"\"\n SEP = '-=:=-=:=-=:=-=:=-=:=-=:=-=:=-'\n GITLOG = (\n f\"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'\"\n )\n SHORT_LINES = 5\n with db:\n commit_table = db['commits']\n log = get_cmd_output(GITLOG)\n for i, commit in enumerate(log.split(SEP + '\\n')):\n if commit:\n lines = commit.split('\\n', maxsplit=SHORT_LINES)\n row = {'repo': repo_name}\n for line in lines[:SHORT_LINES]:\n key, val = line.split(': ', maxsplit=1)\n row[key] = val\n row['body'] = lines[SHORT_LINES].strip()\n analyze_commit(row)\n commit_table.insert(row)\n\n\nSTRICT = \"\"\"(?x)\n ^\n (?P<label>build|chore|docs|feat|fix|perf|refactor|revert|style|test|temp)\n (?P<breaking>!?):\\\\s\n (?P<subjtext>.+)\n $\n \"\"\"\nLAX = \"\"\"(?xi)\n ^\n (?P<label>\\\\w+)\n (?:\\\\(\\\\w+\\\\))?\n (?P<breaking>!?):\\\\s\n (?P<subjtext>.+)\n $\n \"\"\"\n\n\ndef analyze_commit(row):\n row['conventional'] = row['lax'] = False\n m = re.search(STRICT, row['subj'])\n if m:\n row['conventional'] = True\n else:\n m = re.search(LAX, row['subj'])\n if m:\n row['lax'] = True\n if m:\n row['label'] = m['label']\n row['breaking'] = bool(m['breaking'])\n row['subjtext'] = m['subjtext']\n row['bodylines'] = len(row['body'].splitlines())\n\n\n@contextlib.contextmanager\ndef change_dir(new_dir):\n \"\"\"Change directory, and then change back.\n\n Use as a context manager, it will give you the new directory, and later\n restore the old one.\n\n \"\"\"\n old_dir = os.getcwd()\n os.chdir(new_dir)\n try:\n yield 
os.getcwd()\n finally:\n os.chdir(old_dir)\n\n\n@click.command(help='Collect stats about commits in local git repos')\n@click.option('--db', 'dbfile', default='commits.db', help=\n 'SQLite database file to write to')\n@click.option('--ignore', multiple=True, help='Repos to ignore')\n@click.option('--require', help='A file that must exist to process the repo')\n@click.argument('repos', nargs=-1)\ndef main(dbfile, ignore, require, repos):\n db = dataset.connect('sqlite:///' + dbfile)\n for repo in repos:\n if any(fnmatch.fnmatch(repo, pat) for pat in ignore):\n print(f'Ignoring {repo}')\n continue\n if require is not None:\n if not os.path.exists(os.path.join(repo, require)):\n print(f'Skipping {repo}')\n continue\n print(repo)\n with change_dir(repo) as repo_dir:\n repo_name = '/'.join(repo_dir.split('/')[-2:])\n load_commits(db, repo_name)\n\n\nif __name__ == '__main__':\n main()\n<mask token>\n", "step-4": "import contextlib\nimport datetime\nimport fnmatch\nimport os\nimport os.path\nimport re\nimport subprocess\nimport sys\nimport click\nimport dataset\n\n\ndef get_cmd_output(cmd):\n \"\"\"Run a command in shell, and return the Unicode output.\"\"\"\n try:\n data = subprocess.check_output(cmd, shell=True, stderr=subprocess.\n STDOUT)\n except subprocess.CalledProcessError as ex:\n data = ex.output\n try:\n data = data.decode('utf-8')\n except UnicodeDecodeError:\n data = data.decode('latin1')\n return data\n\n\ndef load_commits(db, repo_name):\n \"\"\"Load the commits from the current directory repo.\"\"\"\n SEP = '-=:=-=:=-=:=-=:=-=:=-=:=-=:=-'\n GITLOG = (\n f\"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'\"\n )\n SHORT_LINES = 5\n with db:\n commit_table = db['commits']\n log = get_cmd_output(GITLOG)\n for i, commit in enumerate(log.split(SEP + '\\n')):\n if commit:\n lines = commit.split('\\n', maxsplit=SHORT_LINES)\n row = {'repo': repo_name}\n for line in lines[:SHORT_LINES]:\n key, val = line.split(': 
', maxsplit=1)\n row[key] = val\n row['body'] = lines[SHORT_LINES].strip()\n analyze_commit(row)\n commit_table.insert(row)\n\n\nSTRICT = \"\"\"(?x)\n ^\n (?P<label>build|chore|docs|feat|fix|perf|refactor|revert|style|test|temp)\n (?P<breaking>!?):\\\\s\n (?P<subjtext>.+)\n $\n \"\"\"\nLAX = \"\"\"(?xi)\n ^\n (?P<label>\\\\w+)\n (?:\\\\(\\\\w+\\\\))?\n (?P<breaking>!?):\\\\s\n (?P<subjtext>.+)\n $\n \"\"\"\n\n\ndef analyze_commit(row):\n row['conventional'] = row['lax'] = False\n m = re.search(STRICT, row['subj'])\n if m:\n row['conventional'] = True\n else:\n m = re.search(LAX, row['subj'])\n if m:\n row['lax'] = True\n if m:\n row['label'] = m['label']\n row['breaking'] = bool(m['breaking'])\n row['subjtext'] = m['subjtext']\n row['bodylines'] = len(row['body'].splitlines())\n\n\n@contextlib.contextmanager\ndef change_dir(new_dir):\n \"\"\"Change directory, and then change back.\n\n Use as a context manager, it will give you the new directory, and later\n restore the old one.\n\n \"\"\"\n old_dir = os.getcwd()\n os.chdir(new_dir)\n try:\n yield os.getcwd()\n finally:\n os.chdir(old_dir)\n\n\n@click.command(help='Collect stats about commits in local git repos')\n@click.option('--db', 'dbfile', default='commits.db', help=\n 'SQLite database file to write to')\n@click.option('--ignore', multiple=True, help='Repos to ignore')\n@click.option('--require', help='A file that must exist to process the repo')\n@click.argument('repos', nargs=-1)\ndef main(dbfile, ignore, require, repos):\n db = dataset.connect('sqlite:///' + dbfile)\n for repo in repos:\n if any(fnmatch.fnmatch(repo, pat) for pat in ignore):\n print(f'Ignoring {repo}')\n continue\n if require is not None:\n if not os.path.exists(os.path.join(repo, require)):\n print(f'Skipping {repo}')\n continue\n print(repo)\n with change_dir(repo) as repo_dir:\n repo_name = '/'.join(repo_dir.split('/')[-2:])\n load_commits(db, repo_name)\n\n\nif __name__ == '__main__':\n main()\n<mask token>\n", "step-5": "import 
contextlib\nimport datetime\nimport fnmatch\nimport os\nimport os.path\nimport re\nimport subprocess\nimport sys\n\nimport click\nimport dataset\n\ndef get_cmd_output(cmd):\n \"\"\"Run a command in shell, and return the Unicode output.\"\"\"\n try:\n data = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as ex:\n data = ex.output\n try:\n data = data.decode(\"utf-8\")\n except UnicodeDecodeError:\n data = data.decode(\"latin1\")\n return data\n\ndef load_commits(db, repo_name):\n \"\"\"Load the commits from the current directory repo.\"\"\"\n\n SEP = \"-=:=-=:=-=:=-=:=-=:=-=:=-=:=-\"\n GITLOG = f\"git log --no-merges --format='format:date: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b%n{SEP}'\"\n SHORT_LINES = 5\n\n # $ git log --format=\"format:---------------------%ndate: %aI%nhash: %H%nauth: %aE%nname: %aN%nsubj: %s%n%b\"\n # ---------------------\n # date: 2021-04-21T16:13:23-04:00\n # hash: efa13ff1d2fb3d8b2ddee8be0868ae60f9bc35a6\n # auth: julia.eskew@edx.org\n # name: Julia Eskew\n # subj: fix: TNL-8233: Change exception raised at problem creation failure from generic exception to LoncapaProblemError. (#27361)\n # Raising this specific exception will cause the failure to be handled more gracefully by problem rescoring code.\n # ---------------------\n # date: 2021-04-15T21:36:47-04:00\n # hash: a1fe3d58dc112bd975f1237baaee787ba22929f1\n # auth: astaubin@edx.org\n # name: Albert (AJ) St. 
Aubin\n # subj: [bug] Corrected issue where program dash showed incorrect completed count\n # [MICROBA-1163]\n # \n # This change will correct an issue in the Program Dashboard where a user\n # would see a course as completed, but not see their Certificate because\n # it was not available to them yet.\n # ---------------------\n\n with db:\n commit_table = db[\"commits\"]\n\n log = get_cmd_output(GITLOG)\n for i, commit in enumerate(log.split(SEP + \"\\n\")):\n if commit:\n lines = commit.split(\"\\n\", maxsplit=SHORT_LINES)\n row = {\"repo\": repo_name}\n for line in lines[:SHORT_LINES]:\n key, val = line.split(\": \", maxsplit=1)\n row[key] = val\n row[\"body\"] = lines[SHORT_LINES].strip()\n analyze_commit(row)\n commit_table.insert(row)\n\nSTRICT = r\"\"\"(?x)\n ^\n (?P<label>build|chore|docs|feat|fix|perf|refactor|revert|style|test|temp)\n (?P<breaking>!?):\\s\n (?P<subjtext>.+)\n $\n \"\"\"\n\nLAX = r\"\"\"(?xi)\n ^\n (?P<label>\\w+)\n (?:\\(\\w+\\))?\n (?P<breaking>!?):\\s\n (?P<subjtext>.+)\n $\n \"\"\"\n\ndef analyze_commit(row):\n row[\"conventional\"] = row[\"lax\"] = False\n m = re.search(STRICT, row[\"subj\"])\n if m:\n row[\"conventional\"] = True\n else:\n m = re.search(LAX, row[\"subj\"])\n if m:\n row[\"lax\"] = True\n if m:\n row[\"label\"] = m[\"label\"]\n row[\"breaking\"] = bool(m[\"breaking\"])\n row[\"subjtext\"] = m[\"subjtext\"]\n row[\"bodylines\"] = len(row[\"body\"].splitlines())\n\n@contextlib.contextmanager\ndef change_dir(new_dir):\n \"\"\"Change directory, and then change back.\n\n Use as a context manager, it will give you the new directory, and later\n restore the old one.\n\n \"\"\"\n old_dir = os.getcwd()\n os.chdir(new_dir)\n try:\n yield os.getcwd()\n finally:\n os.chdir(old_dir)\n\n@click.command(help=\"Collect stats about commits in local git repos\")\n@click.option(\"--db\", \"dbfile\", default=\"commits.db\", help=\"SQLite database file to write to\")\n@click.option(\"--ignore\", multiple=True, help=\"Repos to 
ignore\")\n@click.option(\"--require\", help=\"A file that must exist to process the repo\")\n@click.argument(\"repos\", nargs=-1)\ndef main(dbfile, ignore, require, repos):\n db = dataset.connect(\"sqlite:///\" + dbfile)\n for repo in repos:\n if any(fnmatch.fnmatch(repo, pat) for pat in ignore):\n print(f\"Ignoring {repo}\")\n continue\n if require is not None:\n if not os.path.exists(os.path.join(repo, require)):\n print(f\"Skipping {repo}\")\n continue\n print(repo)\n with change_dir(repo) as repo_dir:\n repo_name = \"/\".join(repo_dir.split(\"/\")[-2:])\n load_commits(db, repo_name)\n\nif __name__ == \"__main__\":\n main()\n\n# then:\n# gittreeif nedbat/meta/installed python /src/ghorg/commitstats.py /src/ghorg/commits.db\n#\n# in sqlite:\n# select strftime(\"%Y%W\", date, \"weekday 0\") as yw, count(*) total, sum(conventional) as con from commits group by yw;\n# select yw, total, con, cast((con*100.0)/total as integer) pctcon from (select strftime(\"%Y%W\", date, \"weekday 0\") as yw, count(*) total, sum(conventional) as con from commits group by yw);\n\n\"\"\"\n select\n weekend, total, con, cast((con*100.0)/total as integer) pctcon, bod, cast((bod*100.0)/total as integer) pctbod\n from (\n select\n strftime(\"%Y%m%d\", date, \"weekday 0\") as weekend,\n count(*) total,\n sum(conventional) as con, sum(bodylines > 0) as bod\n from commits where repo = \"edx/edx-platform\" group by weekend\n )\n where weekend > '202009';\n\"\"\"\n", "step-ids": [ 2, 6, 7, 8, 9 ] }
[ 2, 6, 7, 8, 9 ]
"""This module defines simple utilities for making toy datasets to be used in testing/examples""" ################################################## # Import Miscellaneous Assets ################################################## import pandas as pd ############################################### # Import Learning Assets ############################################### from sklearn.datasets import load_breast_cancer, make_classification, load_diabetes ################################################## # Dataset Utilities ################################################## def get_breast_cancer_data(target="diagnosis"): """Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame Parameters ---------- target: String, default='diagnosis' What to name the column in `df` that contains the target output values Returns ------- df: `pandas.DataFrame` The breast cancer dataset, with friendly column names""" data = load_breast_cancer() df = pd.DataFrame(data=data.data, columns=[_.replace(" ", "_") for _ in data.feature_names]) df[target] = data.target return df def get_diabetes_data(target="progression"): """Get the SKLearn Diabetes regression dataset, formatted as a DataFrame Parameters ---------- target: String, default='progression' What to name the column in `df` that contains the target output values Returns ------- df: `pandas.DataFrame` The diabetes dataset, with friendly column names""" data = load_diabetes() df = pd.DataFrame(data=data.data, columns=[_.replace(" ", "_") for _ in data.feature_names]) df[target] = data.target return df def get_toy_classification_data( target="target", n_samples=300, n_classes=2, shuffle=True, random_state=32, **kwargs ): """Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`""" x, y = make_classification( n_samples=n_samples, n_classes=n_classes, shuffle=shuffle, random_state=random_state, **kwargs ) train_df = pd.DataFrame(data=x, columns=range(x.shape[1])) train_df[target] = y 
return train_df
normal
{ "blob_id": "285ca945696b32160175f15c4e89b3938f41ebf4", "index": 2172, "step-1": "<mask token>\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n", "step-3": "<mask token>\n\n\ndef get_breast_cancer_data(target='diagnosis'):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n 
df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n", "step-4": "<mask token>\nimport pandas as pd\nfrom sklearn.datasets import load_breast_cancer, make_classification, load_diabetes\n\n\ndef get_breast_cancer_data(target='diagnosis'):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the 
SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n", "step-5": "\"\"\"This module defines simple utilities for making toy datasets to be used in testing/examples\"\"\"\n##################################################\n# Import Miscellaneous Assets\n##################################################\nimport pandas as pd\n\n###############################################\n# Import Learning Assets\n###############################################\nfrom sklearn.datasets import load_breast_cancer, make_classification, load_diabetes\n\n\n##################################################\n# Dataset Utilities\n##################################################\ndef get_breast_cancer_data(target=\"diagnosis\"):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, 
columns=[_.replace(\" \", \"_\") for _ in data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target=\"progression\"):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(\" \", \"_\") for _ in data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(\n target=\"target\", n_samples=300, n_classes=2, shuffle=True, random_state=32, **kwargs\n):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(\n n_samples=n_samples,\n n_classes=n_classes,\n shuffle=shuffle,\n random_state=random_state,\n **kwargs\n )\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
#!/usr/bin/python # -*- coding: utf-8 -*- import sys import Common.Common.GeneralSet as GeneralSet import TestExample.Test as Test from Common.Common.ProcessDefine import * def MainRun(): Cmd() Test.TestGo() def Cmd(): if (len(sys.argv) != 3): print('error cmdargument count!') return cmd = sys.argv[1] if cmd != '-serverid': print('error cmdargument!') return cmdvalue = sys.argv[2] if not cmdvalue.isdigit(): print('error cmdargument type!') return GeneralSet.gServerId = int(cmdvalue) print(GeneralSet.gServerId)
normal
{ "blob_id": "734561c2f127418bdc612f84b3b1ba125b6a2723", "index": 3784, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef MainRun():\n Cmd()\n Test.TestGo()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef MainRun():\n Cmd()\n Test.TestGo()\n\n\ndef Cmd():\n if len(sys.argv) != 3:\n print('error cmdargument count!')\n return\n cmd = sys.argv[1]\n if cmd != '-serverid':\n print('error cmdargument!')\n return\n cmdvalue = sys.argv[2]\n if not cmdvalue.isdigit():\n print('error cmdargument type!')\n return\n GeneralSet.gServerId = int(cmdvalue)\n print(GeneralSet.gServerId)\n", "step-4": "import sys\nimport Common.Common.GeneralSet as GeneralSet\nimport TestExample.Test as Test\nfrom Common.Common.ProcessDefine import *\n\n\ndef MainRun():\n Cmd()\n Test.TestGo()\n\n\ndef Cmd():\n if len(sys.argv) != 3:\n print('error cmdargument count!')\n return\n cmd = sys.argv[1]\n if cmd != '-serverid':\n print('error cmdargument!')\n return\n cmdvalue = sys.argv[2]\n if not cmdvalue.isdigit():\n print('error cmdargument type!')\n return\n GeneralSet.gServerId = int(cmdvalue)\n print(GeneralSet.gServerId)\n", "step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nimport sys\nimport Common.Common.GeneralSet as GeneralSet\nimport TestExample.Test as Test\nfrom Common.Common.ProcessDefine import *\n \ndef MainRun():\n Cmd()\n Test.TestGo()\n \ndef Cmd():\n if (len(sys.argv) != 3):\n print('error cmdargument count!')\n return\n\n cmd = sys.argv[1]\n if cmd != '-serverid':\n print('error cmdargument!')\n return\n cmdvalue = sys.argv[2]\n if not cmdvalue.isdigit():\n print('error cmdargument type!')\n return\n GeneralSet.gServerId = int(cmdvalue)\n print(GeneralSet.gServerId)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> plt.plot(data.std(axis=0)) plt.show() plt.plot(data.max(axis=0)) plt.plot(data.mean(axis=0)) plt.plot(data.min(axis=0)) <|reserved_special_token_1|> <|reserved_special_token_0|> data = np.loadtxt(fname='inflammation-01.csv', delimiter=',') <|reserved_special_token_0|> plt.plot(data.std(axis=0)) plt.show() plt.plot(data.max(axis=0)) plt.plot(data.mean(axis=0)) plt.plot(data.min(axis=0)) <|reserved_special_token_1|> import numpy as np data = np.loadtxt(fname='inflammation-01.csv', delimiter=',') import matplotlib.pyplot as plt plt.plot(data.std(axis=0)) plt.show() plt.plot(data.max(axis=0)) plt.plot(data.mean(axis=0)) plt.plot(data.min(axis=0)) <|reserved_special_token_1|> # 1.- Crear una grafica que muestre la desviacion tipica de los datos cada dia para todos los pacientes # 2.- Crear una grafica que muestre a la vez la inflamacion maxima, media y minima para cada dia import numpy as np data = np.loadtxt(fname='inflammation-01.csv', delimiter=',') import matplotlib.pyplot as plt plt.plot(data.std(axis=0)) # Desviacion tipica por dia plt.show() plt.plot(data.max(axis=0)) # Inflamacion maxima, media y minima para cada dia plt.plot(data.mean(axis=0)) plt.plot(data.min(axis=0))
flexible
{ "blob_id": "52064b518ad067c9906e7de8542d9a399076a0b5", "index": 4214, "step-1": "<mask token>\n", "step-2": "<mask token>\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n", "step-3": "<mask token>\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\n<mask token>\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n", "step-4": "import numpy as np\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\nimport matplotlib.pyplot as plt\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n", "step-5": "# 1.- Crear una grafica que muestre la desviacion tipica de los datos cada dia para todos los pacientes\r\n# 2.- Crear una grafica que muestre a la vez la inflamacion maxima, media y minima para cada dia\r\n\r\nimport numpy as np\r\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\r\n\r\nimport matplotlib.pyplot as plt\r\n\r\nplt.plot(data.std(axis=0)) # Desviacion tipica por dia\r\nplt.show()\r\n\r\nplt.plot(data.max(axis=0)) # Inflamacion maxima, media y minima para cada dia\r\nplt.plot(data.mean(axis=0))\r\nplt.plot(data.min(axis=0))\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> print('ABC' if input() == '1' else 'chokudai') <|reserved_special_token_1|> #ABC114 A - クイズ print("ABC" if input()=="1" else "chokudai")
flexible
{ "blob_id": "14d31a4b7491a7f7a64cd151e79c23546e4a3cd2", "index": 7683, "step-1": "<mask token>\n", "step-2": "print('ABC' if input() == '1' else 'chokudai')\n", "step-3": "#ABC114 A - クイズ\nprint(\"ABC\" if input()==\"1\" else \"chokudai\")\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> print('{:>5}\t{:>5}'.format('raw', 'v')) while True: print('{:>5}\t{:>5.3f}'.format(chan.value, chan.voltage)) time.sleep(0.5) <|reserved_special_token_1|> <|reserved_special_token_0|> i2c = busio.I2C(board.SCL, board.SDA) ads = ADS.ADS1015(i2c) chan = AnalogIn(ads, ADS.P0) print('{:>5}\t{:>5}'.format('raw', 'v')) while True: print('{:>5}\t{:>5.3f}'.format(chan.value, chan.voltage)) time.sleep(0.5) <|reserved_special_token_1|> import time import busio import board from adafruit_ads1x15 import ads1015 as ADS from adafruit_ads1x15.analog_in import AnalogIn i2c = busio.I2C(board.SCL, board.SDA) ads = ADS.ADS1015(i2c) chan = AnalogIn(ads, ADS.P0) print('{:>5}\t{:>5}'.format('raw', 'v')) while True: print('{:>5}\t{:>5.3f}'.format(chan.value, chan.voltage)) time.sleep(0.5) <|reserved_special_token_1|> # import adafruit_ads1x15 as adс # from adafruit_ads1x15 import ads1x15 as adc # from adafruit_ads1x15 import analog_in import time import busio import board from adafruit_ads1x15 import ads1015 as ADS from adafruit_ads1x15.analog_in import AnalogIn i2c = busio.I2C(board.SCL, board.SDA) ads = ADS.ADS1015(i2c) chan = AnalogIn(ads, ADS.P0) print("{:>5}\t{:>5}".format('raw', 'v')) while True: print("{:>5}\t{:>5.3f}".format(chan.value, chan.voltage)) time.sleep(0.5) # print(dir(analog_in.AnalogIn())) # analog_in.AnalogIn() # GAIN = 1 # a = adc # # print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*range(4))) # print('-' * 37) # # Main loop. # while True: # # Read all the ADC channel values in a list. # values = [0]*4 # for i in range(4): # # Read the specified ADC channel using the previously set gain value. # values[i] = a.read_adc(i, gain=GAIN) # # print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*values)) # # Pause for half a second. # time.sleep(0.5)
flexible
{ "blob_id": "388904b6b826a1c718b85f2951a3189bb5abea2a", "index": 9755, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n", "step-3": "<mask token>\ni2c = busio.I2C(board.SCL, board.SDA)\nads = ADS.ADS1015(i2c)\nchan = AnalogIn(ads, ADS.P0)\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n", "step-4": "import time\nimport busio\nimport board\nfrom adafruit_ads1x15 import ads1015 as ADS\nfrom adafruit_ads1x15.analog_in import AnalogIn\ni2c = busio.I2C(board.SCL, board.SDA)\nads = ADS.ADS1015(i2c)\nchan = AnalogIn(ads, ADS.P0)\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n", "step-5": "# import adafruit_ads1x15 as adс\r\n# from adafruit_ads1x15 import ads1x15 as adc\r\n# from adafruit_ads1x15 import analog_in\r\nimport time\r\nimport busio\r\nimport board\r\nfrom adafruit_ads1x15 import ads1015 as ADS\r\nfrom adafruit_ads1x15.analog_in import AnalogIn\r\n\r\ni2c = busio.I2C(board.SCL, board.SDA)\r\nads = ADS.ADS1015(i2c)\r\nchan = AnalogIn(ads, ADS.P0)\r\n\r\nprint(\"{:>5}\\t{:>5}\".format('raw', 'v'))\r\n\r\nwhile True:\r\n print(\"{:>5}\\t{:>5.3f}\".format(chan.value, chan.voltage))\r\n time.sleep(0.5)\r\n\r\n# print(dir(analog_in.AnalogIn()))\r\n\r\n# analog_in.AnalogIn()\r\n\r\n\r\n# GAIN = 1\r\n# a = adc\r\n#\r\n# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*range(4)))\r\n# print('-' * 37)\r\n# # Main loop.\r\n# while True:\r\n# # Read all the ADC channel values in a list.\r\n# values = [0]*4\r\n# for i in range(4):\r\n# # Read the specified ADC channel using the previously set gain value.\r\n# values[i] = a.read_adc(i, gain=GAIN)\r\n#\r\n# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*values))\r\n# # Pause for half a second.\r\n# 
time.sleep(0.5)\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# # @lc app=leetcode.cn id=15 lang=python3 # # [15] 三数之和 # # https://leetcode-cn.com/problems/3sum/description/ # # algorithms # Medium (25.76%) # Likes: 1904 # Dislikes: 0 # Total Accepted: 176.6K # Total Submissions: 679K # Testcase Example: '[-1,0,1,2,-1,-4]' # # 给你一个包含 n 个整数的数组 nums,判断 nums 中是否存在三个元素 a,b,c ,使得 a + b + c = 0 # ?请你找出所有满足条件且不重复的三元组。 # # 注意:答案中不可以包含重复的三元组。 # # # # 示例: # # 给定数组 nums = [-1, 0, 1, 2, -1, -4], # # 满足要求的三元组集合为: # [ # ⁠ [-1, 0, 1], # ⁠ [-1, -1, 2] # ] # # 1. 三层循环暴力求解 # 2. 双指针求解 # 3. hashmap 求解 # @lc code=start class Solution: def threeSum(self, nums: List[int]) -> List[List[int]]: res = [] nums.sort() for k in range(len(nums) - 2): if k > 0 and nums[k] == nums[k-1]: continuere if nums[k] > 0: break L, R = k+1, len(nums) - 1 while L < R: s = nums[k] + nums[L] + nums[R] if s < 0: L += 1 elif s > 0: R -= 1 else: res.append((nums[k], nums[L], nums[R])) while L < R and nums[L] == nums[L+1]: L += 1 while L < R and nums[R] == nums[R-1]: R -= 1 L += 1 R -= 1 return res # @lc code=end
normal
{ "blob_id": "ccf3ada9a2bedf29820170f2e8184fc16f1b7aea", "index": 9580, "step-1": "<mask token>\n", "step-2": "class Solution:\n <mask token>\n", "step-3": "class Solution:\n\n def threeSum(self, nums: List[int]) ->List[List[int]]:\n res = []\n nums.sort()\n for k in range(len(nums) - 2):\n if k > 0 and nums[k] == nums[k - 1]:\n continuere\n if nums[k] > 0:\n break\n L, R = k + 1, len(nums) - 1\n while L < R:\n s = nums[k] + nums[L] + nums[R]\n if s < 0:\n L += 1\n elif s > 0:\n R -= 1\n else:\n res.append((nums[k], nums[L], nums[R]))\n while L < R and nums[L] == nums[L + 1]:\n L += 1\n while L < R and nums[R] == nums[R - 1]:\n R -= 1\n L += 1\n R -= 1\n return res\n", "step-4": "#\n# @lc app=leetcode.cn id=15 lang=python3\n#\n# [15] 三数之和\n#\n# https://leetcode-cn.com/problems/3sum/description/\n#\n# algorithms\n# Medium (25.76%)\n# Likes: 1904\n# Dislikes: 0\n# Total Accepted: 176.6K\n# Total Submissions: 679K\n# Testcase Example: '[-1,0,1,2,-1,-4]'\n#\n# 给你一个包含 n 个整数的数组 nums,判断 nums 中是否存在三个元素 a,b,c ,使得 a + b + c = 0\n# ?请你找出所有满足条件且不重复的三元组。\n#\n# 注意:答案中不可以包含重复的三元组。\n#\n#\n#\n# 示例:\n#\n# 给定数组 nums = [-1, 0, 1, 2, -1, -4],\n#\n# 满足要求的三元组集合为:\n# [\n# ⁠ [-1, 0, 1],\n# ⁠ [-1, -1, 2]\n# ]\n#\n# 1. 三层循环暴力求解\n# 2. 双指针求解\n# 3. hashmap 求解\n\n# @lc code=start\n\n\nclass Solution:\n def threeSum(self, nums: List[int]) -> List[List[int]]:\n res = []\n nums.sort()\n for k in range(len(nums) - 2):\n if k > 0 and nums[k] == nums[k-1]:\n continuere\n if nums[k] > 0:\n break\n L, R = k+1, len(nums) - 1\n while L < R:\n s = nums[k] + nums[L] + nums[R]\n if s < 0:\n L += 1\n elif s > 0:\n R -= 1\n else:\n res.append((nums[k], nums[L], nums[R]))\n while L < R and nums[L] == nums[L+1]:\n L += 1\n while L < R and nums[R] == nums[R-1]:\n R -= 1\n L += 1\n R -= 1\n return res\n# @lc code=end\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from application.identifier import Identifier if __name__ == '__main__': idf = Identifier() while raw_input('Hello!, to start listening press enter, to exit press q\n' ) != 'q': idf.guess()
normal
{ "blob_id": "d8da01433b2e6adb403fdadc713d4ee30e92c787", "index": 4829, "step-1": "<mask token>\n", "step-2": "<mask token>\nif __name__ == '__main__':\n idf = Identifier()\n while raw_input('Hello!, to start listening press enter, to exit press q\\n'\n ) != 'q':\n idf.guess()\n", "step-3": "from application.identifier import Identifier\nif __name__ == '__main__':\n idf = Identifier()\n while raw_input('Hello!, to start listening press enter, to exit press q\\n'\n ) != 'q':\n idf.guess()\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
from colorama import init, Fore, Style import tempConv #============================================================================# # TEMP CONVERSION PROGRAM: # #============================================================================# #----------------------------------------------------------------------------- def menu(x): """ Takes a list as argument and displays as a menu """ for i in range(len(x)): print("{0:>4s} {1:<3s}{2:^5s}{3:<15}" .format(str(i + 1) + ")", x[i][1], "-->", x[i][0])) #----------------------------------------------------------------------------- def primary_message(): """ A message for the main switch """ print(Fore.CYAN + "\n Select the unit you want to convert from:\n" + Fore.RESET) menu([ ["Celsius", "\u00b0C"], ["Fahrenheit", "\u00b0F"], ["Kelvin", "\u00b0K"], ["Rankin", "\u00b0R"], ["Delisle", "\u00b0De"], ["Newton", "\u00b0N"], ["R\u00e9aumur", "\u00b0R\u00e9"], ["R\u00f8mer", "\u00b0R\u00f8"], [Fore.RED + "Exit\n" + Fore.RESET,""] ]) #----------------------------------------------------------------------------- def secondary_message(t, unit): """ A message for the secondary switch """ print(Fore.CYAN + "\n Select the unit you would you like to convert " + str(t) + "\u00b0" + unit + " into:\n" + Fore.RESET) menu([ ["Celsius", "\u00b0C"], ["Fahrenheit", "\u00b0F"], ["Kelvin", "\u00b0K"], ["Rankin", "\u00b0R"], ["Delisle", "\u00b0De"], ["Newton", "\u00b0N"], ["R\u00e9aumur", "\u00b0R\u00e9"], ["R\u00f8mer", "\u00b0R\u00f8"], [Fore.RED + "Back\n" + Fore.RESET,""] ]) #----------------------------------------------------------------------------- def result_message(t, t2, unit, unit2): from os import system """ Prints the result to the screen """ print(Fore.GREEN + "\n " + str(round(t, 2)) + "\u00b0" + unit + Fore.YELLOW +" --> " + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + "\u00b0" + unit2 + "\n" + Style.RESET_ALL) print(system('pause')) #----------------------------------------------------------------------------- def choice(x, 
y = 0): """ Checks user input """ while True: try: choice = int(input()) # <=== Check if it's an int if choice <= x and choice > 0 and choice != y: # <=== If choice in return choice # range and not the same; return choice break elif choice == y: print(Fore.RED + "\n Can't convert to the same unit!\n" + Fore.RESET) else: print(Fore.RED + "\n Invalid choice!\n" + Fore.RESET) except ValueError: # <=== If choice is invalid prompt message print(Fore.RED + "\n Invalid input!\n" + Fore.RESET) #----------------------------------------------------------------------------- def value_input(unit): """ Asks user for temp. value, then checks it. """ print(Fore.CYAN + "\n Enter the temperature in \u00b0" + unit + ":\n" + Fore.RESET) while True: try: value = float(input()) # <=== Make sure input is a float return value break except ValueError: print(Fore.RED + "\n Input must be an integer!\n" + Fore.RESET) #----------------------------------------------------------------------------- def value_check(unit, value): """ Check for value below absolute zero """ while True: try: # <=== Checks that value isn't below abs 0 t = value_input(unit) # Returns value if okay if value(t) != None: return t break except ValueError: tempConv(t) #----------------------------------------------------------------------------- def main(): """" This is the main function """ while True: primary_message() # <=== Display menu and take input x = choice(9) z = tempConv if x == 1: # This is the From Celsius options t = value_check("C", tempConv.cel_ran) secondary_message(t, "C") y = choice(9, 1) while True: if y == 2: t2 = z.cel_fah(t) # <=== Fahrenheit result_message(t, t2, "C", "F") break elif y == 3: t2 = z.cel_kel(t) # <=== Kelvin result_message(t, t2, "C", "K") break elif y == 4: t2 = z.cel_ran(t) # <=== Rankin result_message(t, t2, "C", "R") break elif y == 5: t2 = z.cel_del(t) # <=== Delisle result_message(t, t2, "C", "De") break elif y == 6: t2 = z.cel_new(t) # <=== Newton result_message(t, t2, "C", 
"N") break elif y == 7: t2 = z.cel_rea(t) # <=== Reaumur result_message(t, t2, "C", "R\u00e9") break elif y == 8: t2 = z.cel_rom(t) # <=== Romer result_message(t, t2, "C", "R\u00f8") break elif y == 9: break elif x == 2: t = value_check("F", tempConv.fah_ran) secondary_message(t, "F") y = choice(9, 2) while True: if y == 1: t2 = z.fah_cel(t) result_message(t, t2, "F", "C") break elif y == 3: t2 = z.fah_kel(t) result_message(t, t2, "F", "K") break elif y == 4: t2 = z.fah_ran(t) result_message(t, t2, "F", "R") break elif y == 5: t2 = z.fah_del(t) result_message(t, t2, "F", "De") break elif y == 6: t2 = z.fah_new(t) result_message(t, t2, "F", "N") break elif y == 7: t2 = z.fah_rea(t) result_message(t, t2, "F", "R\u00e9") break elif y == 8: t2 = z.fah_rom(t) result_message(t, t2, "F", "R\u00f8") break elif y == 9: break elif x == 3: t = value_check("K", tempConv.kel_ran) secondary_message(t, "K") y = choice(9, 3) while True: if y == 1: t2 = z.kel_cel(t) result_message(t, t2, "K", "C") break elif y == 2: t2 = z.kel_fah(t) result_message(t, t2, "K", "F") break elif y == 4: t2 = z.kel_ran(t) result_message(t, t2, "K", "R") break elif y == 5: t2 = z.kel_del(t) result_message(t, t2, "K", "De") break elif y == 6: t2 = z.kel_new(t) result_message(t, t2, "K", "N") break elif y == 7: t2 = z.kel_rea(t) result_message(t, t2, "K", "R\u00e9") break elif y == 8: t2 = z.kel_rom(t) result_message(t, t2, "K", "R\u00f8") break elif y == 9: break elif x == 4: t = value_check("R", tempConv.ran_rea) secondary_message(t, "R") y = choice(9, 4) while True: if y == 1: t2 = z.ran_cel(t) result_message(t, t2, "R", "C") break elif y == 2: t2 = z.ran_fah(t) result_message(t, t2, "R", "F") break elif y == 3: t2 = z.ran_kel(t) result_message(t, t2, "R", "K") break elif y == 5: t2 = z.ran_del(t) result_message(t, t2, "R", "De") break elif y == 6: t2 = z.ran_new(t) result_message(t, t2, "R", "N") break elif y == 7: t2 = z.ran_rea(t) result_message(t, t2, "R", "R\u00e9") break elif y == 8: t2 = 
z.ran_rom(t) result_message(t, t2, "R", "R\u00f8") break elif y == 9: break elif x == 5: t = value_check("De", tempConv.del_ran) secondary_message(t, "De") y = choice(9, 5) while True: if y == 1: t2 = z.del_cel(t) result_message(t, t2, "De", "C") break elif y == 2: t2 = z.del_fah(t) result_message(t, t2, "De", "F") break elif y == 3: t2 = z.del_kel(t) result_message(t, t2, "De", "K") break elif y == 4: t2 = z.del_ran(t) result_message(t, t2, "De", "R") break elif y == 6: t2 = z.del_new(t) result_message(t, t2, "De", "N") break elif y == 7: t2 = z.del_rea(t) result_message(t, t2, "De", "R\u00e9") break elif y == 8: t2 = z.del_rom(t) result_message(t, t2, "De", "R\u00f8") break elif y == 9: break elif x == 6: t = value_check("N", tempConv.new_ran) secondary_message(t, "N") y = choice(9, 6) while True: if y == 1: t2 = z.new_cel(t) result_message(t, t2, "N", "C") break elif y == 2: t2 = z.new_fah(t) result_message(t, t2, "N", "F") break elif y == 3: t2 = z.new_kel(t) result_message(t, t2, "N", "K") break elif y == 4: t2 = z.new_ran(t) result_message(t, t2, "N", "R") break elif y == 5: t2 = z.new_del(t) result_message(t, t2, "N", "N") break elif y == 7: t2 = z.new_rea(t) result_message(t, t2, "N", "R\u00e9") break elif y == 8: t2 = z.new_rom(t) result_message(t, t2, "N", "R\u00f8") break elif y == 9: break elif x == 7: t = value_check("R\u00e9", tempConv.rea_ran) secondary_message(t, "R\u00e9") y = choice(9, 7) while True: if y == 1: t2 = z.rea_cel(t) result_message(t, t2, "R\u00e9", "C") break elif y == 2: t2 = z.rea_fah(t) result_message(t, t2, "R\u00e9", "F") break elif y == 3: t2 = z.rea_kel(t) result_message(t, t2, "R\u00e9", "K") break elif y == 4: t2 = z.rea_ran(t) result_message(t, t2, "R\u00e9", "R") break elif y == 5: t2 = z.rea_del(t) result_message(t, t2, "R\u00e9", "De") break elif y == 6: t2 = z.rea_new(t) result_message(t, t2, "R\u00e9", "N") break elif y == 8: t2 = z.rea_rom(t) result_message(t, t2, "R\u00e9", "R\u00f8") break elif y == 9: break elif x 
== 8: t = value_check("R\u00f8", tempConv.rom_ran) secondary_message(t, "R\u00f8") y = choice(9, 8) while True: if y == 1: t2 = z.rom_cel(t) result_message(t, t2, "R\u00f8", "C") break elif y == 2: t2 = z.rom_fah(t) result_message(t, t2, "R\u00f8", "F") break elif y == 3: t2 = z.rom_kel(t) result_message(t, t2, "R\u00f8", "K") break elif y == 4: t2 = z.rom_ran(t) result_message(t, t2, "R\u00f8", "R") break elif y == 5: t2 = z.rom_del(t) result_message(t, t2, "R\u00f8", "De") break elif y == 6: t2 = z.rom_new(t) result_message(t, t2, "R\u00f8", "N") break elif y == 7: t2 = z.rom_rea(t) result_message(t, t2, "R\u00f8", "R\u00e9") break elif y == 9: break elif x == 9: print(Fore.CYAN + "\n Goodbye!" + Fore.RESET) i = 0 break #----------------------------------------------------------------------------- if __name__ == "__main__": init() main()
normal
{ "blob_id": "235bb1b9d4c41c12d7667a6bac48737464c685c7", "index": 568, "step-1": "<mask token>\n\n\ndef menu(x):\n \"\"\" Takes a list as argument and displays as a menu \"\"\"\n for i in range(len(x)):\n print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]\n [1], '-->', x[i][0]))\n\n\n<mask token>\n\n\ndef secondary_message(t, unit):\n \"\"\" A message for the secondary switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you would you like to convert ' +\n str(t) + '°' + unit + ' into:\\n' + Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\\n' + Fore.RESET, '']])\n\n\n<mask token>\n\n\ndef main():\n \"\"\"\" This is the main function \"\"\"\n while True:\n primary_message()\n x = choice(9)\n z = tempConv\n if x == 1:\n t = value_check('C', tempConv.cel_ran)\n secondary_message(t, 'C')\n y = choice(9, 1)\n while True:\n if y == 2:\n t2 = z.cel_fah(t)\n result_message(t, t2, 'C', 'F')\n break\n elif y == 3:\n t2 = z.cel_kel(t)\n result_message(t, t2, 'C', 'K')\n break\n elif y == 4:\n t2 = z.cel_ran(t)\n result_message(t, t2, 'C', 'R')\n break\n elif y == 5:\n t2 = z.cel_del(t)\n result_message(t, t2, 'C', 'De')\n break\n elif y == 6:\n t2 = z.cel_new(t)\n result_message(t, t2, 'C', 'N')\n break\n elif y == 7:\n t2 = z.cel_rea(t)\n result_message(t, t2, 'C', 'Ré')\n break\n elif y == 8:\n t2 = z.cel_rom(t)\n result_message(t, t2, 'C', 'Rø')\n break\n elif y == 9:\n break\n elif x == 2:\n t = value_check('F', tempConv.fah_ran)\n secondary_message(t, 'F')\n y = choice(9, 2)\n while True:\n if y == 1:\n t2 = z.fah_cel(t)\n result_message(t, t2, 'F', 'C')\n break\n elif y == 3:\n t2 = z.fah_kel(t)\n result_message(t, t2, 'F', 'K')\n break\n elif y == 4:\n t2 = z.fah_ran(t)\n result_message(t, t2, 'F', 'R')\n break\n elif y == 5:\n t2 = z.fah_del(t)\n result_message(t, t2, 'F', 'De')\n break\n elif y == 6:\n 
t2 = z.fah_new(t)\n result_message(t, t2, 'F', 'N')\n break\n elif y == 7:\n t2 = z.fah_rea(t)\n result_message(t, t2, 'F', 'Ré')\n break\n elif y == 8:\n t2 = z.fah_rom(t)\n result_message(t, t2, 'F', 'Rø')\n break\n elif y == 9:\n break\n elif x == 3:\n t = value_check('K', tempConv.kel_ran)\n secondary_message(t, 'K')\n y = choice(9, 3)\n while True:\n if y == 1:\n t2 = z.kel_cel(t)\n result_message(t, t2, 'K', 'C')\n break\n elif y == 2:\n t2 = z.kel_fah(t)\n result_message(t, t2, 'K', 'F')\n break\n elif y == 4:\n t2 = z.kel_ran(t)\n result_message(t, t2, 'K', 'R')\n break\n elif y == 5:\n t2 = z.kel_del(t)\n result_message(t, t2, 'K', 'De')\n break\n elif y == 6:\n t2 = z.kel_new(t)\n result_message(t, t2, 'K', 'N')\n break\n elif y == 7:\n t2 = z.kel_rea(t)\n result_message(t, t2, 'K', 'Ré')\n break\n elif y == 8:\n t2 = z.kel_rom(t)\n result_message(t, t2, 'K', 'Rø')\n break\n elif y == 9:\n break\n elif x == 4:\n t = value_check('R', tempConv.ran_rea)\n secondary_message(t, 'R')\n y = choice(9, 4)\n while True:\n if y == 1:\n t2 = z.ran_cel(t)\n result_message(t, t2, 'R', 'C')\n break\n elif y == 2:\n t2 = z.ran_fah(t)\n result_message(t, t2, 'R', 'F')\n break\n elif y == 3:\n t2 = z.ran_kel(t)\n result_message(t, t2, 'R', 'K')\n break\n elif y == 5:\n t2 = z.ran_del(t)\n result_message(t, t2, 'R', 'De')\n break\n elif y == 6:\n t2 = z.ran_new(t)\n result_message(t, t2, 'R', 'N')\n break\n elif y == 7:\n t2 = z.ran_rea(t)\n result_message(t, t2, 'R', 'Ré')\n break\n elif y == 8:\n t2 = z.ran_rom(t)\n result_message(t, t2, 'R', 'Rø')\n break\n elif y == 9:\n break\n elif x == 5:\n t = value_check('De', tempConv.del_ran)\n secondary_message(t, 'De')\n y = choice(9, 5)\n while True:\n if y == 1:\n t2 = z.del_cel(t)\n result_message(t, t2, 'De', 'C')\n break\n elif y == 2:\n t2 = z.del_fah(t)\n result_message(t, t2, 'De', 'F')\n break\n elif y == 3:\n t2 = z.del_kel(t)\n result_message(t, t2, 'De', 'K')\n break\n elif y == 4:\n t2 = z.del_ran(t)\n 
result_message(t, t2, 'De', 'R')\n break\n elif y == 6:\n t2 = z.del_new(t)\n result_message(t, t2, 'De', 'N')\n break\n elif y == 7:\n t2 = z.del_rea(t)\n result_message(t, t2, 'De', 'Ré')\n break\n elif y == 8:\n t2 = z.del_rom(t)\n result_message(t, t2, 'De', 'Rø')\n break\n elif y == 9:\n break\n elif x == 6:\n t = value_check('N', tempConv.new_ran)\n secondary_message(t, 'N')\n y = choice(9, 6)\n while True:\n if y == 1:\n t2 = z.new_cel(t)\n result_message(t, t2, 'N', 'C')\n break\n elif y == 2:\n t2 = z.new_fah(t)\n result_message(t, t2, 'N', 'F')\n break\n elif y == 3:\n t2 = z.new_kel(t)\n result_message(t, t2, 'N', 'K')\n break\n elif y == 4:\n t2 = z.new_ran(t)\n result_message(t, t2, 'N', 'R')\n break\n elif y == 5:\n t2 = z.new_del(t)\n result_message(t, t2, 'N', 'N')\n break\n elif y == 7:\n t2 = z.new_rea(t)\n result_message(t, t2, 'N', 'Ré')\n break\n elif y == 8:\n t2 = z.new_rom(t)\n result_message(t, t2, 'N', 'Rø')\n break\n elif y == 9:\n break\n elif x == 7:\n t = value_check('Ré', tempConv.rea_ran)\n secondary_message(t, 'Ré')\n y = choice(9, 7)\n while True:\n if y == 1:\n t2 = z.rea_cel(t)\n result_message(t, t2, 'Ré', 'C')\n break\n elif y == 2:\n t2 = z.rea_fah(t)\n result_message(t, t2, 'Ré', 'F')\n break\n elif y == 3:\n t2 = z.rea_kel(t)\n result_message(t, t2, 'Ré', 'K')\n break\n elif y == 4:\n t2 = z.rea_ran(t)\n result_message(t, t2, 'Ré', 'R')\n break\n elif y == 5:\n t2 = z.rea_del(t)\n result_message(t, t2, 'Ré', 'De')\n break\n elif y == 6:\n t2 = z.rea_new(t)\n result_message(t, t2, 'Ré', 'N')\n break\n elif y == 8:\n t2 = z.rea_rom(t)\n result_message(t, t2, 'Ré', 'Rø')\n break\n elif y == 9:\n break\n elif x == 8:\n t = value_check('Rø', tempConv.rom_ran)\n secondary_message(t, 'Rø')\n y = choice(9, 8)\n while True:\n if y == 1:\n t2 = z.rom_cel(t)\n result_message(t, t2, 'Rø', 'C')\n break\n elif y == 2:\n t2 = z.rom_fah(t)\n result_message(t, t2, 'Rø', 'F')\n break\n elif y == 3:\n t2 = z.rom_kel(t)\n result_message(t, t2, 
'Rø', 'K')\n break\n elif y == 4:\n t2 = z.rom_ran(t)\n result_message(t, t2, 'Rø', 'R')\n break\n elif y == 5:\n t2 = z.rom_del(t)\n result_message(t, t2, 'Rø', 'De')\n break\n elif y == 6:\n t2 = z.rom_new(t)\n result_message(t, t2, 'Rø', 'N')\n break\n elif y == 7:\n t2 = z.rom_rea(t)\n result_message(t, t2, 'Rø', 'Ré')\n break\n elif y == 9:\n break\n elif x == 9:\n print(Fore.CYAN + '\\n Goodbye!' + Fore.RESET)\n i = 0\n break\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef menu(x):\n \"\"\" Takes a list as argument and displays as a menu \"\"\"\n for i in range(len(x)):\n print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]\n [1], '-->', x[i][0]))\n\n\n<mask token>\n\n\ndef secondary_message(t, unit):\n \"\"\" A message for the secondary switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you would you like to convert ' +\n str(t) + '°' + unit + ' into:\\n' + Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\\n' + Fore.RESET, '']])\n\n\ndef result_message(t, t2, unit, unit2):\n from os import system\n \"\"\" Prints the result to the screen \"\"\"\n print(Fore.GREEN + '\\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +\n ' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +\n unit2 + '\\n' + Style.RESET_ALL)\n print(system('pause'))\n\n\ndef choice(x, y=0):\n \"\"\" Checks user input \"\"\"\n while True:\n try:\n choice = int(input())\n if choice <= x and choice > 0 and choice != y:\n return choice\n break\n elif choice == y:\n print(Fore.RED + \"\\n Can't convert to the same unit!\\n\" +\n Fore.RESET)\n else:\n print(Fore.RED + '\\n Invalid choice!\\n' + Fore.RESET)\n except ValueError:\n print(Fore.RED + '\\n Invalid input!\\n' + Fore.RESET)\n\n\n<mask token>\n\n\ndef main():\n \"\"\"\" This is the main function \"\"\"\n while True:\n primary_message()\n x = choice(9)\n z = 
tempConv\n if x == 1:\n t = value_check('C', tempConv.cel_ran)\n secondary_message(t, 'C')\n y = choice(9, 1)\n while True:\n if y == 2:\n t2 = z.cel_fah(t)\n result_message(t, t2, 'C', 'F')\n break\n elif y == 3:\n t2 = z.cel_kel(t)\n result_message(t, t2, 'C', 'K')\n break\n elif y == 4:\n t2 = z.cel_ran(t)\n result_message(t, t2, 'C', 'R')\n break\n elif y == 5:\n t2 = z.cel_del(t)\n result_message(t, t2, 'C', 'De')\n break\n elif y == 6:\n t2 = z.cel_new(t)\n result_message(t, t2, 'C', 'N')\n break\n elif y == 7:\n t2 = z.cel_rea(t)\n result_message(t, t2, 'C', 'Ré')\n break\n elif y == 8:\n t2 = z.cel_rom(t)\n result_message(t, t2, 'C', 'Rø')\n break\n elif y == 9:\n break\n elif x == 2:\n t = value_check('F', tempConv.fah_ran)\n secondary_message(t, 'F')\n y = choice(9, 2)\n while True:\n if y == 1:\n t2 = z.fah_cel(t)\n result_message(t, t2, 'F', 'C')\n break\n elif y == 3:\n t2 = z.fah_kel(t)\n result_message(t, t2, 'F', 'K')\n break\n elif y == 4:\n t2 = z.fah_ran(t)\n result_message(t, t2, 'F', 'R')\n break\n elif y == 5:\n t2 = z.fah_del(t)\n result_message(t, t2, 'F', 'De')\n break\n elif y == 6:\n t2 = z.fah_new(t)\n result_message(t, t2, 'F', 'N')\n break\n elif y == 7:\n t2 = z.fah_rea(t)\n result_message(t, t2, 'F', 'Ré')\n break\n elif y == 8:\n t2 = z.fah_rom(t)\n result_message(t, t2, 'F', 'Rø')\n break\n elif y == 9:\n break\n elif x == 3:\n t = value_check('K', tempConv.kel_ran)\n secondary_message(t, 'K')\n y = choice(9, 3)\n while True:\n if y == 1:\n t2 = z.kel_cel(t)\n result_message(t, t2, 'K', 'C')\n break\n elif y == 2:\n t2 = z.kel_fah(t)\n result_message(t, t2, 'K', 'F')\n break\n elif y == 4:\n t2 = z.kel_ran(t)\n result_message(t, t2, 'K', 'R')\n break\n elif y == 5:\n t2 = z.kel_del(t)\n result_message(t, t2, 'K', 'De')\n break\n elif y == 6:\n t2 = z.kel_new(t)\n result_message(t, t2, 'K', 'N')\n break\n elif y == 7:\n t2 = z.kel_rea(t)\n result_message(t, t2, 'K', 'Ré')\n break\n elif y == 8:\n t2 = z.kel_rom(t)\n 
result_message(t, t2, 'K', 'Rø')\n break\n elif y == 9:\n break\n elif x == 4:\n t = value_check('R', tempConv.ran_rea)\n secondary_message(t, 'R')\n y = choice(9, 4)\n while True:\n if y == 1:\n t2 = z.ran_cel(t)\n result_message(t, t2, 'R', 'C')\n break\n elif y == 2:\n t2 = z.ran_fah(t)\n result_message(t, t2, 'R', 'F')\n break\n elif y == 3:\n t2 = z.ran_kel(t)\n result_message(t, t2, 'R', 'K')\n break\n elif y == 5:\n t2 = z.ran_del(t)\n result_message(t, t2, 'R', 'De')\n break\n elif y == 6:\n t2 = z.ran_new(t)\n result_message(t, t2, 'R', 'N')\n break\n elif y == 7:\n t2 = z.ran_rea(t)\n result_message(t, t2, 'R', 'Ré')\n break\n elif y == 8:\n t2 = z.ran_rom(t)\n result_message(t, t2, 'R', 'Rø')\n break\n elif y == 9:\n break\n elif x == 5:\n t = value_check('De', tempConv.del_ran)\n secondary_message(t, 'De')\n y = choice(9, 5)\n while True:\n if y == 1:\n t2 = z.del_cel(t)\n result_message(t, t2, 'De', 'C')\n break\n elif y == 2:\n t2 = z.del_fah(t)\n result_message(t, t2, 'De', 'F')\n break\n elif y == 3:\n t2 = z.del_kel(t)\n result_message(t, t2, 'De', 'K')\n break\n elif y == 4:\n t2 = z.del_ran(t)\n result_message(t, t2, 'De', 'R')\n break\n elif y == 6:\n t2 = z.del_new(t)\n result_message(t, t2, 'De', 'N')\n break\n elif y == 7:\n t2 = z.del_rea(t)\n result_message(t, t2, 'De', 'Ré')\n break\n elif y == 8:\n t2 = z.del_rom(t)\n result_message(t, t2, 'De', 'Rø')\n break\n elif y == 9:\n break\n elif x == 6:\n t = value_check('N', tempConv.new_ran)\n secondary_message(t, 'N')\n y = choice(9, 6)\n while True:\n if y == 1:\n t2 = z.new_cel(t)\n result_message(t, t2, 'N', 'C')\n break\n elif y == 2:\n t2 = z.new_fah(t)\n result_message(t, t2, 'N', 'F')\n break\n elif y == 3:\n t2 = z.new_kel(t)\n result_message(t, t2, 'N', 'K')\n break\n elif y == 4:\n t2 = z.new_ran(t)\n result_message(t, t2, 'N', 'R')\n break\n elif y == 5:\n t2 = z.new_del(t)\n result_message(t, t2, 'N', 'N')\n break\n elif y == 7:\n t2 = z.new_rea(t)\n result_message(t, t2, 'N', 
'Ré')\n break\n elif y == 8:\n t2 = z.new_rom(t)\n result_message(t, t2, 'N', 'Rø')\n break\n elif y == 9:\n break\n elif x == 7:\n t = value_check('Ré', tempConv.rea_ran)\n secondary_message(t, 'Ré')\n y = choice(9, 7)\n while True:\n if y == 1:\n t2 = z.rea_cel(t)\n result_message(t, t2, 'Ré', 'C')\n break\n elif y == 2:\n t2 = z.rea_fah(t)\n result_message(t, t2, 'Ré', 'F')\n break\n elif y == 3:\n t2 = z.rea_kel(t)\n result_message(t, t2, 'Ré', 'K')\n break\n elif y == 4:\n t2 = z.rea_ran(t)\n result_message(t, t2, 'Ré', 'R')\n break\n elif y == 5:\n t2 = z.rea_del(t)\n result_message(t, t2, 'Ré', 'De')\n break\n elif y == 6:\n t2 = z.rea_new(t)\n result_message(t, t2, 'Ré', 'N')\n break\n elif y == 8:\n t2 = z.rea_rom(t)\n result_message(t, t2, 'Ré', 'Rø')\n break\n elif y == 9:\n break\n elif x == 8:\n t = value_check('Rø', tempConv.rom_ran)\n secondary_message(t, 'Rø')\n y = choice(9, 8)\n while True:\n if y == 1:\n t2 = z.rom_cel(t)\n result_message(t, t2, 'Rø', 'C')\n break\n elif y == 2:\n t2 = z.rom_fah(t)\n result_message(t, t2, 'Rø', 'F')\n break\n elif y == 3:\n t2 = z.rom_kel(t)\n result_message(t, t2, 'Rø', 'K')\n break\n elif y == 4:\n t2 = z.rom_ran(t)\n result_message(t, t2, 'Rø', 'R')\n break\n elif y == 5:\n t2 = z.rom_del(t)\n result_message(t, t2, 'Rø', 'De')\n break\n elif y == 6:\n t2 = z.rom_new(t)\n result_message(t, t2, 'Rø', 'N')\n break\n elif y == 7:\n t2 = z.rom_rea(t)\n result_message(t, t2, 'Rø', 'Ré')\n break\n elif y == 9:\n break\n elif x == 9:\n print(Fore.CYAN + '\\n Goodbye!' 
+ Fore.RESET)\n i = 0\n break\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef menu(x):\n \"\"\" Takes a list as argument and displays as a menu \"\"\"\n for i in range(len(x)):\n print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]\n [1], '-->', x[i][0]))\n\n\ndef primary_message():\n \"\"\" A message for the main switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you want to convert from:\\n' +\n Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Exit\\n' + Fore.RESET, '']])\n\n\ndef secondary_message(t, unit):\n \"\"\" A message for the secondary switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you would you like to convert ' +\n str(t) + '°' + unit + ' into:\\n' + Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\\n' + Fore.RESET, '']])\n\n\ndef result_message(t, t2, unit, unit2):\n from os import system\n \"\"\" Prints the result to the screen \"\"\"\n print(Fore.GREEN + '\\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +\n ' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +\n unit2 + '\\n' + Style.RESET_ALL)\n print(system('pause'))\n\n\ndef choice(x, y=0):\n \"\"\" Checks user input \"\"\"\n while True:\n try:\n choice = int(input())\n if choice <= x and choice > 0 and choice != y:\n return choice\n break\n elif choice == y:\n print(Fore.RED + \"\\n Can't convert to the same unit!\\n\" +\n Fore.RESET)\n else:\n print(Fore.RED + '\\n Invalid choice!\\n' + Fore.RESET)\n except ValueError:\n print(Fore.RED + '\\n Invalid input!\\n' + Fore.RESET)\n\n\ndef value_input(unit):\n \"\"\" Asks user for temp. value, then checks it. 
\"\"\"\n print(Fore.CYAN + '\\n Enter the temperature in °' + unit + ':\\n' + Fore\n .RESET)\n while True:\n try:\n value = float(input())\n return value\n break\n except ValueError:\n print(Fore.RED + '\\n Input must be an integer!\\n' + Fore.RESET)\n\n\n<mask token>\n\n\ndef main():\n \"\"\"\" This is the main function \"\"\"\n while True:\n primary_message()\n x = choice(9)\n z = tempConv\n if x == 1:\n t = value_check('C', tempConv.cel_ran)\n secondary_message(t, 'C')\n y = choice(9, 1)\n while True:\n if y == 2:\n t2 = z.cel_fah(t)\n result_message(t, t2, 'C', 'F')\n break\n elif y == 3:\n t2 = z.cel_kel(t)\n result_message(t, t2, 'C', 'K')\n break\n elif y == 4:\n t2 = z.cel_ran(t)\n result_message(t, t2, 'C', 'R')\n break\n elif y == 5:\n t2 = z.cel_del(t)\n result_message(t, t2, 'C', 'De')\n break\n elif y == 6:\n t2 = z.cel_new(t)\n result_message(t, t2, 'C', 'N')\n break\n elif y == 7:\n t2 = z.cel_rea(t)\n result_message(t, t2, 'C', 'Ré')\n break\n elif y == 8:\n t2 = z.cel_rom(t)\n result_message(t, t2, 'C', 'Rø')\n break\n elif y == 9:\n break\n elif x == 2:\n t = value_check('F', tempConv.fah_ran)\n secondary_message(t, 'F')\n y = choice(9, 2)\n while True:\n if y == 1:\n t2 = z.fah_cel(t)\n result_message(t, t2, 'F', 'C')\n break\n elif y == 3:\n t2 = z.fah_kel(t)\n result_message(t, t2, 'F', 'K')\n break\n elif y == 4:\n t2 = z.fah_ran(t)\n result_message(t, t2, 'F', 'R')\n break\n elif y == 5:\n t2 = z.fah_del(t)\n result_message(t, t2, 'F', 'De')\n break\n elif y == 6:\n t2 = z.fah_new(t)\n result_message(t, t2, 'F', 'N')\n break\n elif y == 7:\n t2 = z.fah_rea(t)\n result_message(t, t2, 'F', 'Ré')\n break\n elif y == 8:\n t2 = z.fah_rom(t)\n result_message(t, t2, 'F', 'Rø')\n break\n elif y == 9:\n break\n elif x == 3:\n t = value_check('K', tempConv.kel_ran)\n secondary_message(t, 'K')\n y = choice(9, 3)\n while True:\n if y == 1:\n t2 = z.kel_cel(t)\n result_message(t, t2, 'K', 'C')\n break\n elif y == 2:\n t2 = z.kel_fah(t)\n result_message(t, 
t2, 'K', 'F')\n break\n elif y == 4:\n t2 = z.kel_ran(t)\n result_message(t, t2, 'K', 'R')\n break\n elif y == 5:\n t2 = z.kel_del(t)\n result_message(t, t2, 'K', 'De')\n break\n elif y == 6:\n t2 = z.kel_new(t)\n result_message(t, t2, 'K', 'N')\n break\n elif y == 7:\n t2 = z.kel_rea(t)\n result_message(t, t2, 'K', 'Ré')\n break\n elif y == 8:\n t2 = z.kel_rom(t)\n result_message(t, t2, 'K', 'Rø')\n break\n elif y == 9:\n break\n elif x == 4:\n t = value_check('R', tempConv.ran_rea)\n secondary_message(t, 'R')\n y = choice(9, 4)\n while True:\n if y == 1:\n t2 = z.ran_cel(t)\n result_message(t, t2, 'R', 'C')\n break\n elif y == 2:\n t2 = z.ran_fah(t)\n result_message(t, t2, 'R', 'F')\n break\n elif y == 3:\n t2 = z.ran_kel(t)\n result_message(t, t2, 'R', 'K')\n break\n elif y == 5:\n t2 = z.ran_del(t)\n result_message(t, t2, 'R', 'De')\n break\n elif y == 6:\n t2 = z.ran_new(t)\n result_message(t, t2, 'R', 'N')\n break\n elif y == 7:\n t2 = z.ran_rea(t)\n result_message(t, t2, 'R', 'Ré')\n break\n elif y == 8:\n t2 = z.ran_rom(t)\n result_message(t, t2, 'R', 'Rø')\n break\n elif y == 9:\n break\n elif x == 5:\n t = value_check('De', tempConv.del_ran)\n secondary_message(t, 'De')\n y = choice(9, 5)\n while True:\n if y == 1:\n t2 = z.del_cel(t)\n result_message(t, t2, 'De', 'C')\n break\n elif y == 2:\n t2 = z.del_fah(t)\n result_message(t, t2, 'De', 'F')\n break\n elif y == 3:\n t2 = z.del_kel(t)\n result_message(t, t2, 'De', 'K')\n break\n elif y == 4:\n t2 = z.del_ran(t)\n result_message(t, t2, 'De', 'R')\n break\n elif y == 6:\n t2 = z.del_new(t)\n result_message(t, t2, 'De', 'N')\n break\n elif y == 7:\n t2 = z.del_rea(t)\n result_message(t, t2, 'De', 'Ré')\n break\n elif y == 8:\n t2 = z.del_rom(t)\n result_message(t, t2, 'De', 'Rø')\n break\n elif y == 9:\n break\n elif x == 6:\n t = value_check('N', tempConv.new_ran)\n secondary_message(t, 'N')\n y = choice(9, 6)\n while True:\n if y == 1:\n t2 = z.new_cel(t)\n result_message(t, t2, 'N', 'C')\n break\n elif 
y == 2:\n t2 = z.new_fah(t)\n result_message(t, t2, 'N', 'F')\n break\n elif y == 3:\n t2 = z.new_kel(t)\n result_message(t, t2, 'N', 'K')\n break\n elif y == 4:\n t2 = z.new_ran(t)\n result_message(t, t2, 'N', 'R')\n break\n elif y == 5:\n t2 = z.new_del(t)\n result_message(t, t2, 'N', 'N')\n break\n elif y == 7:\n t2 = z.new_rea(t)\n result_message(t, t2, 'N', 'Ré')\n break\n elif y == 8:\n t2 = z.new_rom(t)\n result_message(t, t2, 'N', 'Rø')\n break\n elif y == 9:\n break\n elif x == 7:\n t = value_check('Ré', tempConv.rea_ran)\n secondary_message(t, 'Ré')\n y = choice(9, 7)\n while True:\n if y == 1:\n t2 = z.rea_cel(t)\n result_message(t, t2, 'Ré', 'C')\n break\n elif y == 2:\n t2 = z.rea_fah(t)\n result_message(t, t2, 'Ré', 'F')\n break\n elif y == 3:\n t2 = z.rea_kel(t)\n result_message(t, t2, 'Ré', 'K')\n break\n elif y == 4:\n t2 = z.rea_ran(t)\n result_message(t, t2, 'Ré', 'R')\n break\n elif y == 5:\n t2 = z.rea_del(t)\n result_message(t, t2, 'Ré', 'De')\n break\n elif y == 6:\n t2 = z.rea_new(t)\n result_message(t, t2, 'Ré', 'N')\n break\n elif y == 8:\n t2 = z.rea_rom(t)\n result_message(t, t2, 'Ré', 'Rø')\n break\n elif y == 9:\n break\n elif x == 8:\n t = value_check('Rø', tempConv.rom_ran)\n secondary_message(t, 'Rø')\n y = choice(9, 8)\n while True:\n if y == 1:\n t2 = z.rom_cel(t)\n result_message(t, t2, 'Rø', 'C')\n break\n elif y == 2:\n t2 = z.rom_fah(t)\n result_message(t, t2, 'Rø', 'F')\n break\n elif y == 3:\n t2 = z.rom_kel(t)\n result_message(t, t2, 'Rø', 'K')\n break\n elif y == 4:\n t2 = z.rom_ran(t)\n result_message(t, t2, 'Rø', 'R')\n break\n elif y == 5:\n t2 = z.rom_del(t)\n result_message(t, t2, 'Rø', 'De')\n break\n elif y == 6:\n t2 = z.rom_new(t)\n result_message(t, t2, 'Rø', 'N')\n break\n elif y == 7:\n t2 = z.rom_rea(t)\n result_message(t, t2, 'Rø', 'Ré')\n break\n elif y == 9:\n break\n elif x == 9:\n print(Fore.CYAN + '\\n Goodbye!' 
+ Fore.RESET)\n i = 0\n break\n\n\n<mask token>\n", "step-4": "from colorama import init, Fore, Style\nimport tempConv\n\n\ndef menu(x):\n \"\"\" Takes a list as argument and displays as a menu \"\"\"\n for i in range(len(x)):\n print('{0:>4s} {1:<3s}{2:^5s}{3:<15}'.format(str(i + 1) + ')', x[i]\n [1], '-->', x[i][0]))\n\n\ndef primary_message():\n \"\"\" A message for the main switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you want to convert from:\\n' +\n Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Exit\\n' + Fore.RESET, '']])\n\n\ndef secondary_message(t, unit):\n \"\"\" A message for the secondary switch \"\"\"\n print(Fore.CYAN + '\\n Select the unit you would you like to convert ' +\n str(t) + '°' + unit + ' into:\\n' + Fore.RESET)\n menu([['Celsius', '°C'], ['Fahrenheit', '°F'], ['Kelvin', '°K'], [\n 'Rankin', '°R'], ['Delisle', '°De'], ['Newton', '°N'], ['Réaumur',\n '°Ré'], ['Rømer', '°Rø'], [Fore.RED + 'Back\\n' + Fore.RESET, '']])\n\n\ndef result_message(t, t2, unit, unit2):\n from os import system\n \"\"\" Prints the result to the screen \"\"\"\n print(Fore.GREEN + '\\n ' + str(round(t, 2)) + '°' + unit + Fore.YELLOW +\n ' --> ' + Fore.GREEN + Style.BRIGHT + str(round(t2, 2)) + '°' +\n unit2 + '\\n' + Style.RESET_ALL)\n print(system('pause'))\n\n\ndef choice(x, y=0):\n \"\"\" Checks user input \"\"\"\n while True:\n try:\n choice = int(input())\n if choice <= x and choice > 0 and choice != y:\n return choice\n break\n elif choice == y:\n print(Fore.RED + \"\\n Can't convert to the same unit!\\n\" +\n Fore.RESET)\n else:\n print(Fore.RED + '\\n Invalid choice!\\n' + Fore.RESET)\n except ValueError:\n print(Fore.RED + '\\n Invalid input!\\n' + Fore.RESET)\n\n\ndef value_input(unit):\n \"\"\" Asks user for temp. value, then checks it. 
\"\"\"\n print(Fore.CYAN + '\\n Enter the temperature in °' + unit + ':\\n' + Fore\n .RESET)\n while True:\n try:\n value = float(input())\n return value\n break\n except ValueError:\n print(Fore.RED + '\\n Input must be an integer!\\n' + Fore.RESET)\n\n\ndef value_check(unit, value):\n \"\"\" Check for value below absolute zero \"\"\"\n while True:\n try:\n t = value_input(unit)\n if value(t) != None:\n return t\n break\n except ValueError:\n tempConv(t)\n\n\ndef main():\n \"\"\"\" This is the main function \"\"\"\n while True:\n primary_message()\n x = choice(9)\n z = tempConv\n if x == 1:\n t = value_check('C', tempConv.cel_ran)\n secondary_message(t, 'C')\n y = choice(9, 1)\n while True:\n if y == 2:\n t2 = z.cel_fah(t)\n result_message(t, t2, 'C', 'F')\n break\n elif y == 3:\n t2 = z.cel_kel(t)\n result_message(t, t2, 'C', 'K')\n break\n elif y == 4:\n t2 = z.cel_ran(t)\n result_message(t, t2, 'C', 'R')\n break\n elif y == 5:\n t2 = z.cel_del(t)\n result_message(t, t2, 'C', 'De')\n break\n elif y == 6:\n t2 = z.cel_new(t)\n result_message(t, t2, 'C', 'N')\n break\n elif y == 7:\n t2 = z.cel_rea(t)\n result_message(t, t2, 'C', 'Ré')\n break\n elif y == 8:\n t2 = z.cel_rom(t)\n result_message(t, t2, 'C', 'Rø')\n break\n elif y == 9:\n break\n elif x == 2:\n t = value_check('F', tempConv.fah_ran)\n secondary_message(t, 'F')\n y = choice(9, 2)\n while True:\n if y == 1:\n t2 = z.fah_cel(t)\n result_message(t, t2, 'F', 'C')\n break\n elif y == 3:\n t2 = z.fah_kel(t)\n result_message(t, t2, 'F', 'K')\n break\n elif y == 4:\n t2 = z.fah_ran(t)\n result_message(t, t2, 'F', 'R')\n break\n elif y == 5:\n t2 = z.fah_del(t)\n result_message(t, t2, 'F', 'De')\n break\n elif y == 6:\n t2 = z.fah_new(t)\n result_message(t, t2, 'F', 'N')\n break\n elif y == 7:\n t2 = z.fah_rea(t)\n result_message(t, t2, 'F', 'Ré')\n break\n elif y == 8:\n t2 = z.fah_rom(t)\n result_message(t, t2, 'F', 'Rø')\n break\n elif y == 9:\n break\n elif x == 3:\n t = value_check('K', 
tempConv.kel_ran)\n secondary_message(t, 'K')\n y = choice(9, 3)\n while True:\n if y == 1:\n t2 = z.kel_cel(t)\n result_message(t, t2, 'K', 'C')\n break\n elif y == 2:\n t2 = z.kel_fah(t)\n result_message(t, t2, 'K', 'F')\n break\n elif y == 4:\n t2 = z.kel_ran(t)\n result_message(t, t2, 'K', 'R')\n break\n elif y == 5:\n t2 = z.kel_del(t)\n result_message(t, t2, 'K', 'De')\n break\n elif y == 6:\n t2 = z.kel_new(t)\n result_message(t, t2, 'K', 'N')\n break\n elif y == 7:\n t2 = z.kel_rea(t)\n result_message(t, t2, 'K', 'Ré')\n break\n elif y == 8:\n t2 = z.kel_rom(t)\n result_message(t, t2, 'K', 'Rø')\n break\n elif y == 9:\n break\n elif x == 4:\n t = value_check('R', tempConv.ran_rea)\n secondary_message(t, 'R')\n y = choice(9, 4)\n while True:\n if y == 1:\n t2 = z.ran_cel(t)\n result_message(t, t2, 'R', 'C')\n break\n elif y == 2:\n t2 = z.ran_fah(t)\n result_message(t, t2, 'R', 'F')\n break\n elif y == 3:\n t2 = z.ran_kel(t)\n result_message(t, t2, 'R', 'K')\n break\n elif y == 5:\n t2 = z.ran_del(t)\n result_message(t, t2, 'R', 'De')\n break\n elif y == 6:\n t2 = z.ran_new(t)\n result_message(t, t2, 'R', 'N')\n break\n elif y == 7:\n t2 = z.ran_rea(t)\n result_message(t, t2, 'R', 'Ré')\n break\n elif y == 8:\n t2 = z.ran_rom(t)\n result_message(t, t2, 'R', 'Rø')\n break\n elif y == 9:\n break\n elif x == 5:\n t = value_check('De', tempConv.del_ran)\n secondary_message(t, 'De')\n y = choice(9, 5)\n while True:\n if y == 1:\n t2 = z.del_cel(t)\n result_message(t, t2, 'De', 'C')\n break\n elif y == 2:\n t2 = z.del_fah(t)\n result_message(t, t2, 'De', 'F')\n break\n elif y == 3:\n t2 = z.del_kel(t)\n result_message(t, t2, 'De', 'K')\n break\n elif y == 4:\n t2 = z.del_ran(t)\n result_message(t, t2, 'De', 'R')\n break\n elif y == 6:\n t2 = z.del_new(t)\n result_message(t, t2, 'De', 'N')\n break\n elif y == 7:\n t2 = z.del_rea(t)\n result_message(t, t2, 'De', 'Ré')\n break\n elif y == 8:\n t2 = z.del_rom(t)\n result_message(t, t2, 'De', 'Rø')\n break\n elif y == 
9:\n break\n elif x == 6:\n t = value_check('N', tempConv.new_ran)\n secondary_message(t, 'N')\n y = choice(9, 6)\n while True:\n if y == 1:\n t2 = z.new_cel(t)\n result_message(t, t2, 'N', 'C')\n break\n elif y == 2:\n t2 = z.new_fah(t)\n result_message(t, t2, 'N', 'F')\n break\n elif y == 3:\n t2 = z.new_kel(t)\n result_message(t, t2, 'N', 'K')\n break\n elif y == 4:\n t2 = z.new_ran(t)\n result_message(t, t2, 'N', 'R')\n break\n elif y == 5:\n t2 = z.new_del(t)\n result_message(t, t2, 'N', 'N')\n break\n elif y == 7:\n t2 = z.new_rea(t)\n result_message(t, t2, 'N', 'Ré')\n break\n elif y == 8:\n t2 = z.new_rom(t)\n result_message(t, t2, 'N', 'Rø')\n break\n elif y == 9:\n break\n elif x == 7:\n t = value_check('Ré', tempConv.rea_ran)\n secondary_message(t, 'Ré')\n y = choice(9, 7)\n while True:\n if y == 1:\n t2 = z.rea_cel(t)\n result_message(t, t2, 'Ré', 'C')\n break\n elif y == 2:\n t2 = z.rea_fah(t)\n result_message(t, t2, 'Ré', 'F')\n break\n elif y == 3:\n t2 = z.rea_kel(t)\n result_message(t, t2, 'Ré', 'K')\n break\n elif y == 4:\n t2 = z.rea_ran(t)\n result_message(t, t2, 'Ré', 'R')\n break\n elif y == 5:\n t2 = z.rea_del(t)\n result_message(t, t2, 'Ré', 'De')\n break\n elif y == 6:\n t2 = z.rea_new(t)\n result_message(t, t2, 'Ré', 'N')\n break\n elif y == 8:\n t2 = z.rea_rom(t)\n result_message(t, t2, 'Ré', 'Rø')\n break\n elif y == 9:\n break\n elif x == 8:\n t = value_check('Rø', tempConv.rom_ran)\n secondary_message(t, 'Rø')\n y = choice(9, 8)\n while True:\n if y == 1:\n t2 = z.rom_cel(t)\n result_message(t, t2, 'Rø', 'C')\n break\n elif y == 2:\n t2 = z.rom_fah(t)\n result_message(t, t2, 'Rø', 'F')\n break\n elif y == 3:\n t2 = z.rom_kel(t)\n result_message(t, t2, 'Rø', 'K')\n break\n elif y == 4:\n t2 = z.rom_ran(t)\n result_message(t, t2, 'Rø', 'R')\n break\n elif y == 5:\n t2 = z.rom_del(t)\n result_message(t, t2, 'Rø', 'De')\n break\n elif y == 6:\n t2 = z.rom_new(t)\n result_message(t, t2, 'Rø', 'N')\n break\n elif y == 7:\n t2 = 
z.rom_rea(t)\n result_message(t, t2, 'Rø', 'Ré')\n break\n elif y == 9:\n break\n elif x == 9:\n print(Fore.CYAN + '\\n Goodbye!' + Fore.RESET)\n i = 0\n break\n\n\nif __name__ == '__main__':\n init()\n main()\n", "step-5": "\nfrom colorama import init, Fore, Style\nimport tempConv\n\n#============================================================================#\n# TEMP CONVERSION PROGRAM: #\n#============================================================================#\n\n#-----------------------------------------------------------------------------\n\ndef menu(x):\n \"\"\" Takes a list as argument and displays as a menu \"\"\"\n for i in range(len(x)):\n print(\"{0:>4s} {1:<3s}{2:^5s}{3:<15}\"\n .format(str(i + 1) + \")\", x[i][1], \"-->\", x[i][0]))\n\n#-----------------------------------------------------------------------------\n\ndef primary_message():\n \"\"\" A message for the main switch \"\"\"\n print(Fore.CYAN + \"\\n Select the unit you want to convert from:\\n\" +\n Fore.RESET)\n menu([\n [\"Celsius\", \"\\u00b0C\"],\n [\"Fahrenheit\", \"\\u00b0F\"],\n [\"Kelvin\", \"\\u00b0K\"],\n [\"Rankin\", \"\\u00b0R\"],\n [\"Delisle\", \"\\u00b0De\"],\n [\"Newton\", \"\\u00b0N\"],\n [\"R\\u00e9aumur\", \"\\u00b0R\\u00e9\"],\n [\"R\\u00f8mer\", \"\\u00b0R\\u00f8\"],\n [Fore.RED + \"Exit\\n\" + Fore.RESET,\"\"]\n ])\n\n#-----------------------------------------------------------------------------\n\ndef secondary_message(t, unit):\n \"\"\" A message for the secondary switch \"\"\"\n print(Fore.CYAN + \"\\n Select the unit you would you like to convert \" +\n str(t) + \"\\u00b0\" + unit + \" into:\\n\" + Fore.RESET)\n menu([\n [\"Celsius\", \"\\u00b0C\"],\n [\"Fahrenheit\", \"\\u00b0F\"],\n [\"Kelvin\", \"\\u00b0K\"],\n [\"Rankin\", \"\\u00b0R\"],\n [\"Delisle\", \"\\u00b0De\"],\n [\"Newton\", \"\\u00b0N\"],\n [\"R\\u00e9aumur\", \"\\u00b0R\\u00e9\"],\n [\"R\\u00f8mer\", \"\\u00b0R\\u00f8\"],\n [Fore.RED + \"Back\\n\" + Fore.RESET,\"\"]\n 
])\n\n#-----------------------------------------------------------------------------\n\ndef result_message(t, t2, unit, unit2):\n from os import system\n \"\"\" Prints the result to the screen \"\"\"\n print(Fore.GREEN + \"\\n \" + str(round(t, 2)) + \"\\u00b0\" + unit +\n Fore.YELLOW +\" --> \" + Fore.GREEN + Style.BRIGHT +\n str(round(t2, 2)) + \"\\u00b0\" + unit2 + \"\\n\" + Style.RESET_ALL)\n print(system('pause'))\n#-----------------------------------------------------------------------------\n\ndef choice(x, y = 0):\n \"\"\" Checks user input \"\"\"\n while True:\n try:\n choice = int(input()) # <=== Check if it's an int\n if choice <= x and choice > 0 and choice != y: # <=== If choice in\n return choice # range and not the same; return choice\n break\n elif choice == y:\n print(Fore.RED + \"\\n Can't convert to the same unit!\\n\" +\n Fore.RESET)\n else:\n print(Fore.RED + \"\\n Invalid choice!\\n\" + Fore.RESET)\n except ValueError: # <=== If choice is invalid prompt message\n print(Fore.RED + \"\\n Invalid input!\\n\" + Fore.RESET)\n\n#-----------------------------------------------------------------------------\n\ndef value_input(unit):\n \"\"\" Asks user for temp. value, then checks it. 
\"\"\"\n print(Fore.CYAN + \"\\n Enter the temperature in \\u00b0\" + unit + \":\\n\" +\n Fore.RESET)\n while True:\n try:\n value = float(input()) # <=== Make sure input is a float\n return value\n break\n except ValueError:\n print(Fore.RED + \"\\n Input must be an integer!\\n\" + Fore.RESET)\n\n#-----------------------------------------------------------------------------\n\ndef value_check(unit, value):\n \"\"\" Check for value below absolute zero \"\"\"\n while True:\n try: # <=== Checks that value isn't below abs 0\n t = value_input(unit) # Returns value if okay\n if value(t) != None:\n return t\n break\n except ValueError:\n tempConv(t)\n\n#-----------------------------------------------------------------------------\n\ndef main():\n \"\"\"\" This is the main function \"\"\"\n while True:\n primary_message() # <=== Display menu and take input\n x = choice(9)\n z = tempConv\n\n if x == 1:\n # This is the From Celsius options\n t = value_check(\"C\", tempConv.cel_ran)\n secondary_message(t, \"C\")\n y = choice(9, 1)\n\n while True:\n if y == 2:\n t2 = z.cel_fah(t) # <=== Fahrenheit\n result_message(t, t2, \"C\", \"F\")\n break\n elif y == 3:\n t2 = z.cel_kel(t) # <=== Kelvin\n result_message(t, t2, \"C\", \"K\")\n break\n elif y == 4:\n t2 = z.cel_ran(t) # <=== Rankin\n result_message(t, t2, \"C\", \"R\")\n break\n elif y == 5:\n t2 = z.cel_del(t) # <=== Delisle\n result_message(t, t2, \"C\", \"De\")\n break\n elif y == 6:\n t2 = z.cel_new(t) # <=== Newton\n result_message(t, t2, \"C\", \"N\")\n break\n elif y == 7:\n t2 = z.cel_rea(t) # <=== Reaumur\n result_message(t, t2, \"C\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.cel_rom(t) # <=== Romer\n result_message(t, t2, \"C\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 2:\n t = value_check(\"F\", tempConv.fah_ran)\n secondary_message(t, \"F\")\n y = choice(9, 2)\n\n while True:\n if y == 1:\n t2 = z.fah_cel(t)\n result_message(t, t2, \"F\", \"C\")\n break\n elif y == 3:\n t2 = z.fah_kel(t)\n 
result_message(t, t2, \"F\", \"K\")\n break\n elif y == 4:\n t2 = z.fah_ran(t)\n result_message(t, t2, \"F\", \"R\")\n break\n elif y == 5:\n t2 = z.fah_del(t)\n result_message(t, t2, \"F\", \"De\")\n break\n elif y == 6:\n t2 = z.fah_new(t)\n result_message(t, t2, \"F\", \"N\")\n break\n elif y == 7:\n t2 = z.fah_rea(t)\n result_message(t, t2, \"F\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.fah_rom(t)\n result_message(t, t2, \"F\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 3:\n t = value_check(\"K\", tempConv.kel_ran)\n secondary_message(t, \"K\")\n y = choice(9, 3)\n\n while True:\n if y == 1:\n t2 = z.kel_cel(t)\n result_message(t, t2, \"K\", \"C\")\n break\n elif y == 2:\n t2 = z.kel_fah(t)\n result_message(t, t2, \"K\", \"F\")\n break\n elif y == 4:\n t2 = z.kel_ran(t)\n result_message(t, t2, \"K\", \"R\")\n break\n elif y == 5:\n t2 = z.kel_del(t)\n result_message(t, t2, \"K\", \"De\")\n break\n elif y == 6:\n t2 = z.kel_new(t)\n result_message(t, t2, \"K\", \"N\")\n break\n elif y == 7:\n t2 = z.kel_rea(t)\n result_message(t, t2, \"K\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.kel_rom(t)\n result_message(t, t2, \"K\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 4:\n t = value_check(\"R\", tempConv.ran_rea)\n secondary_message(t, \"R\")\n y = choice(9, 4)\n\n while True:\n if y == 1:\n t2 = z.ran_cel(t)\n result_message(t, t2, \"R\", \"C\")\n break\n elif y == 2:\n t2 = z.ran_fah(t)\n result_message(t, t2, \"R\", \"F\")\n break\n elif y == 3:\n t2 = z.ran_kel(t)\n result_message(t, t2, \"R\", \"K\")\n break\n elif y == 5:\n t2 = z.ran_del(t)\n result_message(t, t2, \"R\", \"De\")\n break\n elif y == 6:\n t2 = z.ran_new(t)\n result_message(t, t2, \"R\", \"N\")\n break\n elif y == 7:\n t2 = z.ran_rea(t)\n result_message(t, t2, \"R\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.ran_rom(t)\n result_message(t, t2, \"R\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 5:\n t = value_check(\"De\", 
tempConv.del_ran)\n secondary_message(t, \"De\")\n y = choice(9, 5)\n\n while True:\n\n if y == 1:\n t2 = z.del_cel(t)\n result_message(t, t2, \"De\", \"C\")\n break\n elif y == 2:\n t2 = z.del_fah(t)\n result_message(t, t2, \"De\", \"F\")\n break\n elif y == 3:\n t2 = z.del_kel(t)\n result_message(t, t2, \"De\", \"K\")\n break\n elif y == 4:\n t2 = z.del_ran(t)\n result_message(t, t2, \"De\", \"R\")\n break\n elif y == 6:\n t2 = z.del_new(t)\n result_message(t, t2, \"De\", \"N\")\n break\n elif y == 7:\n t2 = z.del_rea(t)\n result_message(t, t2, \"De\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.del_rom(t)\n result_message(t, t2, \"De\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 6:\n t = value_check(\"N\", tempConv.new_ran)\n secondary_message(t, \"N\")\n y = choice(9, 6)\n\n while True:\n\n if y == 1:\n t2 = z.new_cel(t)\n result_message(t, t2, \"N\", \"C\")\n break\n elif y == 2:\n t2 = z.new_fah(t)\n result_message(t, t2, \"N\", \"F\")\n break\n elif y == 3:\n t2 = z.new_kel(t)\n result_message(t, t2, \"N\", \"K\")\n break\n elif y == 4:\n t2 = z.new_ran(t)\n result_message(t, t2, \"N\", \"R\")\n break\n elif y == 5:\n t2 = z.new_del(t)\n result_message(t, t2, \"N\", \"N\")\n break\n elif y == 7:\n t2 = z.new_rea(t)\n result_message(t, t2, \"N\", \"R\\u00e9\")\n break\n elif y == 8:\n t2 = z.new_rom(t)\n result_message(t, t2, \"N\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 7:\n t = value_check(\"R\\u00e9\", tempConv.rea_ran)\n secondary_message(t, \"R\\u00e9\")\n y = choice(9, 7)\n\n while True:\n\n if y == 1:\n t2 = z.rea_cel(t)\n result_message(t, t2, \"R\\u00e9\", \"C\")\n break\n elif y == 2:\n t2 = z.rea_fah(t)\n result_message(t, t2, \"R\\u00e9\", \"F\")\n break\n elif y == 3:\n t2 = z.rea_kel(t)\n result_message(t, t2, \"R\\u00e9\", \"K\")\n break\n elif y == 4:\n t2 = z.rea_ran(t)\n result_message(t, t2, \"R\\u00e9\", \"R\")\n break\n elif y == 5:\n t2 = z.rea_del(t)\n result_message(t, t2, \"R\\u00e9\", \"De\")\n 
break\n elif y == 6:\n t2 = z.rea_new(t)\n result_message(t, t2, \"R\\u00e9\", \"N\")\n break\n elif y == 8:\n t2 = z.rea_rom(t)\n result_message(t, t2, \"R\\u00e9\", \"R\\u00f8\")\n break\n elif y == 9:\n break\n\n elif x == 8:\n t = value_check(\"R\\u00f8\", tempConv.rom_ran)\n secondary_message(t, \"R\\u00f8\")\n y = choice(9, 8)\n\n while True:\n\n if y == 1:\n t2 = z.rom_cel(t)\n result_message(t, t2, \"R\\u00f8\", \"C\")\n break\n elif y == 2:\n t2 = z.rom_fah(t)\n result_message(t, t2, \"R\\u00f8\", \"F\")\n break\n elif y == 3:\n t2 = z.rom_kel(t)\n result_message(t, t2, \"R\\u00f8\", \"K\")\n break\n elif y == 4:\n t2 = z.rom_ran(t)\n result_message(t, t2, \"R\\u00f8\", \"R\")\n break\n elif y == 5:\n t2 = z.rom_del(t)\n result_message(t, t2, \"R\\u00f8\", \"De\")\n break\n elif y == 6:\n t2 = z.rom_new(t)\n result_message(t, t2, \"R\\u00f8\", \"N\")\n break\n elif y == 7:\n t2 = z.rom_rea(t)\n result_message(t, t2, \"R\\u00f8\", \"R\\u00e9\")\n break\n elif y == 9:\n break\n\n elif x == 9:\n print(Fore.CYAN + \"\\n Goodbye!\" + Fore.RESET)\n i = 0\n break\n\n#-----------------------------------------------------------------------------\n\nif __name__ == \"__main__\":\n init()\n main()\n", "step-ids": [ 3, 5, 7, 10, 11 ] }
[ 3, 5, 7, 10, 11 ]
import numpy as np from collections import Counter import matplotlib.pyplot as plt # 1. sepal length in cm # 2. sepal width in cm # 3. petal length in cm # 4. petal width in cm TrainingData = np.loadtxt("Data2",delimiter = ',',skiprows = 1,dtype = str) class Knn(object): """docstring for data""" def __init__(self, TrainingData): self.TrainingData = TrainingData self.nFeatures = self.TrainingData.shape[1]-1 self.data = TrainingData[:,0:self.nFeatures].astype(float) self.FeatureRange = [] self.normalize() def normalize(self,weights = None): if weights == None: weights = np.ones(self.nFeatures) for i in range(self.nFeatures): mn = np.min(self.data[:,i]) self.data[:,i] -= mn mx = np.max(self.data[:,i]) self.data[:,i] /= mx self.FeatureRange.append([mn,mx]) def Check(self,pnt): for i in range(self.nFeatures): pnt[i] -= self.FeatureRange[i][0] pnt[i] /= self.FeatureRange[i][1] distances = [] for i in range(len(self.data)): dist = np.linalg.norm(pnt-self.data[i]) distances.append(dist) order = np.argsort(distances) c = Counter(self.TrainingData[:,self.nFeatures][order][0:7]) ans = c.most_common(3) print(ans[0][0]) boop = Knn(TrainingData) pnt = np.array([7.0,3.2,4.7,1.85]) boop.Check(pnt)
normal
{ "blob_id": "5e0affbd295d7237784cd8e72926afeda6456500", "index": 7080, "step-1": "<mask token>\n\n\nclass Knn(object):\n <mask token>\n\n def __init__(self, TrainingData):\n self.TrainingData = TrainingData\n self.nFeatures = self.TrainingData.shape[1] - 1\n self.data = TrainingData[:, 0:self.nFeatures].astype(float)\n self.FeatureRange = []\n self.normalize()\n\n def normalize(self, weights=None):\n if weights == None:\n weights = np.ones(self.nFeatures)\n for i in range(self.nFeatures):\n mn = np.min(self.data[:, i])\n self.data[:, i] -= mn\n mx = np.max(self.data[:, i])\n self.data[:, i] /= mx\n self.FeatureRange.append([mn, mx])\n\n def Check(self, pnt):\n for i in range(self.nFeatures):\n pnt[i] -= self.FeatureRange[i][0]\n pnt[i] /= self.FeatureRange[i][1]\n distances = []\n for i in range(len(self.data)):\n dist = np.linalg.norm(pnt - self.data[i])\n distances.append(dist)\n order = np.argsort(distances)\n c = Counter(self.TrainingData[:, self.nFeatures][order][0:7])\n ans = c.most_common(3)\n print(ans[0][0])\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Knn(object):\n \"\"\"docstring for data\"\"\"\n\n def __init__(self, TrainingData):\n self.TrainingData = TrainingData\n self.nFeatures = self.TrainingData.shape[1] - 1\n self.data = TrainingData[:, 0:self.nFeatures].astype(float)\n self.FeatureRange = []\n self.normalize()\n\n def normalize(self, weights=None):\n if weights == None:\n weights = np.ones(self.nFeatures)\n for i in range(self.nFeatures):\n mn = np.min(self.data[:, i])\n self.data[:, i] -= mn\n mx = np.max(self.data[:, i])\n self.data[:, i] /= mx\n self.FeatureRange.append([mn, mx])\n\n def Check(self, pnt):\n for i in range(self.nFeatures):\n pnt[i] -= self.FeatureRange[i][0]\n pnt[i] /= self.FeatureRange[i][1]\n distances = []\n for i in range(len(self.data)):\n dist = np.linalg.norm(pnt - self.data[i])\n distances.append(dist)\n order = np.argsort(distances)\n c = Counter(self.TrainingData[:, self.nFeatures][order][0:7])\n ans = 
c.most_common(3)\n print(ans[0][0])\n\n\n<mask token>\n", "step-3": "<mask token>\nTrainingData = np.loadtxt('Data2', delimiter=',', skiprows=1, dtype=str)\n\n\nclass Knn(object):\n \"\"\"docstring for data\"\"\"\n\n def __init__(self, TrainingData):\n self.TrainingData = TrainingData\n self.nFeatures = self.TrainingData.shape[1] - 1\n self.data = TrainingData[:, 0:self.nFeatures].astype(float)\n self.FeatureRange = []\n self.normalize()\n\n def normalize(self, weights=None):\n if weights == None:\n weights = np.ones(self.nFeatures)\n for i in range(self.nFeatures):\n mn = np.min(self.data[:, i])\n self.data[:, i] -= mn\n mx = np.max(self.data[:, i])\n self.data[:, i] /= mx\n self.FeatureRange.append([mn, mx])\n\n def Check(self, pnt):\n for i in range(self.nFeatures):\n pnt[i] -= self.FeatureRange[i][0]\n pnt[i] /= self.FeatureRange[i][1]\n distances = []\n for i in range(len(self.data)):\n dist = np.linalg.norm(pnt - self.data[i])\n distances.append(dist)\n order = np.argsort(distances)\n c = Counter(self.TrainingData[:, self.nFeatures][order][0:7])\n ans = c.most_common(3)\n print(ans[0][0])\n\n\nboop = Knn(TrainingData)\npnt = np.array([7.0, 3.2, 4.7, 1.85])\nboop.Check(pnt)\n", "step-4": "import numpy as np\nfrom collections import Counter\nimport matplotlib.pyplot as plt\nTrainingData = np.loadtxt('Data2', delimiter=',', skiprows=1, dtype=str)\n\n\nclass Knn(object):\n \"\"\"docstring for data\"\"\"\n\n def __init__(self, TrainingData):\n self.TrainingData = TrainingData\n self.nFeatures = self.TrainingData.shape[1] - 1\n self.data = TrainingData[:, 0:self.nFeatures].astype(float)\n self.FeatureRange = []\n self.normalize()\n\n def normalize(self, weights=None):\n if weights == None:\n weights = np.ones(self.nFeatures)\n for i in range(self.nFeatures):\n mn = np.min(self.data[:, i])\n self.data[:, i] -= mn\n mx = np.max(self.data[:, i])\n self.data[:, i] /= mx\n self.FeatureRange.append([mn, mx])\n\n def Check(self, pnt):\n for i in range(self.nFeatures):\n 
pnt[i] -= self.FeatureRange[i][0]\n pnt[i] /= self.FeatureRange[i][1]\n distances = []\n for i in range(len(self.data)):\n dist = np.linalg.norm(pnt - self.data[i])\n distances.append(dist)\n order = np.argsort(distances)\n c = Counter(self.TrainingData[:, self.nFeatures][order][0:7])\n ans = c.most_common(3)\n print(ans[0][0])\n\n\nboop = Knn(TrainingData)\npnt = np.array([7.0, 3.2, 4.7, 1.85])\nboop.Check(pnt)\n", "step-5": "import numpy as np\nfrom collections import Counter\nimport matplotlib.pyplot as plt\n\n\n # 1. sepal length in cm\n # 2. sepal width in cm\n # 3. petal length in cm\n # 4. petal width in cm\nTrainingData = np.loadtxt(\"Data2\",delimiter = ',',skiprows = 1,dtype = str)\n\n\nclass Knn(object):\n\t\"\"\"docstring for data\"\"\"\n\tdef __init__(self, TrainingData):\n\t\tself.TrainingData = TrainingData\n\n\n\t\tself.nFeatures = self.TrainingData.shape[1]-1\n\t\tself.data = TrainingData[:,0:self.nFeatures].astype(float)\n\t\tself.FeatureRange = []\n\n\t\tself.normalize()\n\tdef normalize(self,weights = None):\n\t\tif weights == None:\n\t\t\tweights = np.ones(self.nFeatures)\n\t\tfor i in range(self.nFeatures):\n\n\t\t\tmn = np.min(self.data[:,i])\n\t\t\tself.data[:,i] -= mn\n\t\t\tmx = np.max(self.data[:,i])\n\t\t\tself.data[:,i] /= mx\n\n\t\t\tself.FeatureRange.append([mn,mx])\n\tdef Check(self,pnt):\n\t\tfor i in range(self.nFeatures):\n\t\t\tpnt[i] -= self.FeatureRange[i][0]\n\t\t\tpnt[i] /= self.FeatureRange[i][1]\n\n\t\tdistances = []\n\t\tfor i in range(len(self.data)):\n\t\t\tdist = np.linalg.norm(pnt-self.data[i])\n\t\t\tdistances.append(dist)\n\t\torder = np.argsort(distances)\n\t\tc = Counter(self.TrainingData[:,self.nFeatures][order][0:7])\n\t\tans = c.most_common(3)\n\t\tprint(ans[0][0])\n\n\n\n\n\nboop = Knn(TrainingData)\n\npnt = np.array([7.0,3.2,4.7,1.85])\nboop.Check(pnt)\n", "step-ids": [ 4, 5, 7, 8, 9 ] }
[ 4, 5, 7, 8, 9 ]
import math import numpy as np import torch import torch.nn as nn from torch.utils.data import DataLoader import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt def value(energy, noise, x, gen): logp_x = energy(x) logq_x = noise.log_prob(x).unsqueeze(1) logp_gen = energy(gen) logq_gen = noise.log_prob(gen).unsqueeze(1) ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1), dim=1, keepdim=True) ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim=1), dim=1, keepdim=True) v = ll_data.mean() + ll_gen.mean() r_x = torch.sigmoid(logp_x - logq_x) r_gen = torch.sigmoid(logq_gen - logp_gen) acc = ((r_x > 1/2).sum() + (r_gen > 1/2).sum()).cpu().numpy() / (len(x) + len(gen)) return -v, acc #------------------------------------------- # DATA #------------------------------------------- def get_data(args): dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples) dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True) return dataset, dataloader def sample_2d_data(dataset='8gaussians', n_samples=50000): z = torch.randn(n_samples, 2) if dataset == '8gaussians': scale = 4 sq2 = 1/math.sqrt(2) centers = [(1,0), (-1,0), (0,1), (0,-1), (sq2,sq2), (-sq2,sq2), (sq2,-sq2), (-sq2,-sq2)] centers = torch.tensor([(scale * x, scale * y) for x,y in centers]) return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(n_samples,))]) elif dataset == '2spirals': n = torch.sqrt(torch.rand(n_samples // 2)) * 540 * (2 * math.pi) / 360 d1x = - torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5 d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5 x = torch.cat([torch.stack([ d1x, d1y], dim=1), torch.stack([-d1x, -d1y], dim=1)], dim=0) / 3 return x + 0.1*z elif dataset == 'checkerboard': x1 = torch.rand(n_samples) * 4 - 2 x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,), dtype=torch.float) * 2 x2 = x2_ + x1.floor() % 2 return torch.stack([x1, x2], dim=1) * 2 elif dataset == 'rings': n_samples4 = 
n_samples3 = n_samples2 = n_samples // 4 n_samples1 = n_samples - n_samples4 - n_samples3 - n_samples2 # so as not to have the first point = last point, set endpoint=False in np; here shifted by one linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1] linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1] linspace2 = torch.linspace(0, 2 * math.pi, n_samples2 + 1)[:-1] linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1] circ4_x = torch.cos(linspace4) circ4_y = torch.sin(linspace4) circ3_x = torch.cos(linspace4) * 0.75 circ3_y = torch.sin(linspace3) * 0.75 circ2_x = torch.cos(linspace2) * 0.5 circ2_y = torch.sin(linspace2) * 0.5 circ1_x = torch.cos(linspace1) * 0.25 circ1_y = torch.sin(linspace1) * 0.25 x = torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]), torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0 # random sample x = x[torch.randint(0, n_samples, size=(n_samples,))] # Add noise return x + torch.normal(mean=torch.zeros_like(x), std=0.08*torch.ones_like(x)) elif dataset == "pinwheel": rng = np.random.RandomState() radial_std = 0.3 tangential_std = 0.1 num_classes = 5 num_per_class = n_samples // 5 rate = 0.25 rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False) features = rng.randn(num_classes*num_per_class, 2) \ * np.array([radial_std, tangential_std]) features[:, 0] += 1. 
labels = np.repeat(np.arange(num_classes), num_per_class) angles = rads[labels] + rate * np.exp(features[:, 0]) rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(angles), np.cos(angles)]) rotations = np.reshape(rotations.T, (-1, 2, 2)) data = 2 * rng.permutation(np.einsum("ti,tij->tj", features, rotations)) return torch.as_tensor(data, dtype=torch.float32) else: raise RuntimeError('Invalid `dataset` to sample from.') # -------------------- # Plotting # -------------------- @torch.no_grad() def plot(dataset, energy, noise, epoch, device): n_pts = 1000 range_lim = 4 # construct test points test_grid = setup_grid(range_lim, n_pts, device) # plot fig, axs = plt.subplots(1, 3, figsize=(12,4.3), subplot_kw={'aspect': 'equal'}) plot_samples(dataset, axs[0], range_lim, n_pts) plot_noise(noise, axs[1], test_grid, n_pts) plot_energy(energy, axs[2], test_grid, n_pts) # format for ax in plt.gcf().axes: format_ax(ax, range_lim) plt.tight_layout() # save print('Saving image to images/....') plt.savefig('images/epoch_{}.png'.format(epoch)) plt.close() def setup_grid(range_lim, n_pts, device): x = torch.linspace(-range_lim, range_lim, n_pts) xx, yy = torch.meshgrid((x, x)) zz = torch.stack((xx.flatten(), yy.flatten()), dim=1) return xx, yy, zz.to(device) def plot_samples(dataset, ax, range_lim, n_pts): samples = dataset.numpy() ax.hist2d(samples[:,0], samples[:,1], range=[[-range_lim, range_lim], [-range_lim, range_lim]], bins=n_pts, cmap=plt.cm.jet) ax.set_title('Target samples') def plot_energy(energy, ax, test_grid, n_pts): xx, yy, zz = test_grid log_prob = energy(zz) prob = log_prob.exp().cpu() # plot ax.pcolormesh(xx, yy, prob.view(n_pts,n_pts), cmap=plt.cm.jet) ax.set_facecolor(plt.cm.jet(0.)) ax.set_title('Energy density') def plot_noise(noise, ax, test_grid, n_pts): xx, yy, zz = test_grid log_prob = noise.log_prob(zz) prob = log_prob.exp().cpu() # plot ax.pcolormesh(xx, yy, prob.view(n_pts,n_pts), cmap=plt.cm.jet) ax.set_facecolor(plt.cm.jet(0.)) 
ax.set_title('Noise density') def format_ax(ax, range_lim): ax.set_xlim(-range_lim, range_lim) ax.set_ylim(-range_lim, range_lim) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.invert_yaxis()
normal
{ "blob_id": "010a132645883915eff605ae15696a1fac42d570", "index": 8276, "step-1": "<mask token>\n\n\ndef value(energy, noise, x, gen):\n logp_x = energy(x)\n logq_x = noise.log_prob(x).unsqueeze(1)\n logp_gen = energy(gen)\n logq_gen = noise.log_prob(gen).unsqueeze(1)\n ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1),\n dim=1, keepdim=True)\n ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim\n =1), dim=1, keepdim=True)\n v = ll_data.mean() + ll_gen.mean()\n r_x = torch.sigmoid(logp_x - logq_x)\n r_gen = torch.sigmoid(logq_gen - logp_gen)\n acc = ((r_x > 1 / 2).sum() + (r_gen > 1 / 2).sum()).cpu().numpy() / (len\n (x) + len(gen))\n return -v, acc\n\n\ndef get_data(args):\n dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples)\n dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True)\n return dataset, dataloader\n\n\ndef sample_2d_data(dataset='8gaussians', n_samples=50000):\n z = torch.randn(n_samples, 2)\n if dataset == '8gaussians':\n scale = 4\n sq2 = 1 / math.sqrt(2)\n centers = [(1, 0), (-1, 0), (0, 1), (0, -1), (sq2, sq2), (-sq2, sq2\n ), (sq2, -sq2), (-sq2, -sq2)]\n centers = torch.tensor([(scale * x, scale * y) for x, y in centers])\n return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(\n n_samples,))])\n elif dataset == '2spirals':\n n = torch.sqrt(torch.rand(n_samples // 2)) * 540 * (2 * math.pi) / 360\n d1x = -torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5\n d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5\n x = torch.cat([torch.stack([d1x, d1y], dim=1), torch.stack([-d1x, -\n d1y], dim=1)], dim=0) / 3\n return x + 0.1 * z\n elif dataset == 'checkerboard':\n x1 = torch.rand(n_samples) * 4 - 2\n x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,),\n dtype=torch.float) * 2\n x2 = x2_ + x1.floor() % 2\n return torch.stack([x1, x2], dim=1) * 2\n elif dataset == 'rings':\n n_samples4 = n_samples3 = n_samples2 = n_samples // 4\n n_samples1 = 
n_samples - n_samples4 - n_samples3 - n_samples2\n linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1]\n linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1]\n linspace2 = torch.linspace(0, 2 * math.pi, n_samples2 + 1)[:-1]\n linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1]\n circ4_x = torch.cos(linspace4)\n circ4_y = torch.sin(linspace4)\n circ3_x = torch.cos(linspace4) * 0.75\n circ3_y = torch.sin(linspace3) * 0.75\n circ2_x = torch.cos(linspace2) * 0.5\n circ2_y = torch.sin(linspace2) * 0.5\n circ1_x = torch.cos(linspace1) * 0.25\n circ1_y = torch.sin(linspace1) * 0.25\n x = torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]),\n torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0\n x = x[torch.randint(0, n_samples, size=(n_samples,))]\n return x + torch.normal(mean=torch.zeros_like(x), std=0.08 * torch.\n ones_like(x))\n elif dataset == 'pinwheel':\n rng = np.random.RandomState()\n radial_std = 0.3\n tangential_std = 0.1\n num_classes = 5\n num_per_class = n_samples // 5\n rate = 0.25\n rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False)\n features = rng.randn(num_classes * num_per_class, 2) * np.array([\n radial_std, tangential_std])\n features[:, 0] += 1.0\n labels = np.repeat(np.arange(num_classes), num_per_class)\n angles = rads[labels] + rate * np.exp(features[:, 0])\n rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(\n angles), np.cos(angles)])\n rotations = np.reshape(rotations.T, (-1, 2, 2))\n data = 2 * rng.permutation(np.einsum('ti,tij->tj', features, rotations)\n )\n return torch.as_tensor(data, dtype=torch.float32)\n else:\n raise RuntimeError('Invalid `dataset` to sample from.')\n\n\n<mask token>\n\n\ndef setup_grid(range_lim, n_pts, device):\n x = torch.linspace(-range_lim, range_lim, n_pts)\n xx, yy = torch.meshgrid((x, x))\n zz = torch.stack((xx.flatten(), yy.flatten()), dim=1)\n return xx, yy, zz.to(device)\n\n\ndef plot_samples(dataset, ax, range_lim, n_pts):\n 
samples = dataset.numpy()\n ax.hist2d(samples[:, 0], samples[:, 1], range=[[-range_lim, range_lim],\n [-range_lim, range_lim]], bins=n_pts, cmap=plt.cm.jet)\n ax.set_title('Target samples')\n\n\ndef plot_energy(energy, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = energy(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Energy density')\n\n\n<mask token>\n\n\ndef format_ax(ax, range_lim):\n ax.set_xlim(-range_lim, range_lim)\n ax.set_ylim(-range_lim, range_lim)\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ax.invert_yaxis()\n", "step-2": "<mask token>\n\n\ndef value(energy, noise, x, gen):\n logp_x = energy(x)\n logq_x = noise.log_prob(x).unsqueeze(1)\n logp_gen = energy(gen)\n logq_gen = noise.log_prob(gen).unsqueeze(1)\n ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1),\n dim=1, keepdim=True)\n ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim\n =1), dim=1, keepdim=True)\n v = ll_data.mean() + ll_gen.mean()\n r_x = torch.sigmoid(logp_x - logq_x)\n r_gen = torch.sigmoid(logq_gen - logp_gen)\n acc = ((r_x > 1 / 2).sum() + (r_gen > 1 / 2).sum()).cpu().numpy() / (len\n (x) + len(gen))\n return -v, acc\n\n\ndef get_data(args):\n dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples)\n dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True)\n return dataset, dataloader\n\n\ndef sample_2d_data(dataset='8gaussians', n_samples=50000):\n z = torch.randn(n_samples, 2)\n if dataset == '8gaussians':\n scale = 4\n sq2 = 1 / math.sqrt(2)\n centers = [(1, 0), (-1, 0), (0, 1), (0, -1), (sq2, sq2), (-sq2, sq2\n ), (sq2, -sq2), (-sq2, -sq2)]\n centers = torch.tensor([(scale * x, scale * y) for x, y in centers])\n return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(\n n_samples,))])\n elif dataset == '2spirals':\n n = torch.sqrt(torch.rand(n_samples // 
2)) * 540 * (2 * math.pi) / 360\n d1x = -torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5\n d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5\n x = torch.cat([torch.stack([d1x, d1y], dim=1), torch.stack([-d1x, -\n d1y], dim=1)], dim=0) / 3\n return x + 0.1 * z\n elif dataset == 'checkerboard':\n x1 = torch.rand(n_samples) * 4 - 2\n x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,),\n dtype=torch.float) * 2\n x2 = x2_ + x1.floor() % 2\n return torch.stack([x1, x2], dim=1) * 2\n elif dataset == 'rings':\n n_samples4 = n_samples3 = n_samples2 = n_samples // 4\n n_samples1 = n_samples - n_samples4 - n_samples3 - n_samples2\n linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1]\n linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1]\n linspace2 = torch.linspace(0, 2 * math.pi, n_samples2 + 1)[:-1]\n linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1]\n circ4_x = torch.cos(linspace4)\n circ4_y = torch.sin(linspace4)\n circ3_x = torch.cos(linspace4) * 0.75\n circ3_y = torch.sin(linspace3) * 0.75\n circ2_x = torch.cos(linspace2) * 0.5\n circ2_y = torch.sin(linspace2) * 0.5\n circ1_x = torch.cos(linspace1) * 0.25\n circ1_y = torch.sin(linspace1) * 0.25\n x = torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]),\n torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0\n x = x[torch.randint(0, n_samples, size=(n_samples,))]\n return x + torch.normal(mean=torch.zeros_like(x), std=0.08 * torch.\n ones_like(x))\n elif dataset == 'pinwheel':\n rng = np.random.RandomState()\n radial_std = 0.3\n tangential_std = 0.1\n num_classes = 5\n num_per_class = n_samples // 5\n rate = 0.25\n rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False)\n features = rng.randn(num_classes * num_per_class, 2) * np.array([\n radial_std, tangential_std])\n features[:, 0] += 1.0\n labels = np.repeat(np.arange(num_classes), num_per_class)\n angles = rads[labels] + rate * np.exp(features[:, 0])\n rotations = 
np.stack([np.cos(angles), -np.sin(angles), np.sin(\n angles), np.cos(angles)])\n rotations = np.reshape(rotations.T, (-1, 2, 2))\n data = 2 * rng.permutation(np.einsum('ti,tij->tj', features, rotations)\n )\n return torch.as_tensor(data, dtype=torch.float32)\n else:\n raise RuntimeError('Invalid `dataset` to sample from.')\n\n\n@torch.no_grad()\ndef plot(dataset, energy, noise, epoch, device):\n n_pts = 1000\n range_lim = 4\n test_grid = setup_grid(range_lim, n_pts, device)\n fig, axs = plt.subplots(1, 3, figsize=(12, 4.3), subplot_kw={'aspect':\n 'equal'})\n plot_samples(dataset, axs[0], range_lim, n_pts)\n plot_noise(noise, axs[1], test_grid, n_pts)\n plot_energy(energy, axs[2], test_grid, n_pts)\n for ax in plt.gcf().axes:\n format_ax(ax, range_lim)\n plt.tight_layout()\n print('Saving image to images/....')\n plt.savefig('images/epoch_{}.png'.format(epoch))\n plt.close()\n\n\ndef setup_grid(range_lim, n_pts, device):\n x = torch.linspace(-range_lim, range_lim, n_pts)\n xx, yy = torch.meshgrid((x, x))\n zz = torch.stack((xx.flatten(), yy.flatten()), dim=1)\n return xx, yy, zz.to(device)\n\n\ndef plot_samples(dataset, ax, range_lim, n_pts):\n samples = dataset.numpy()\n ax.hist2d(samples[:, 0], samples[:, 1], range=[[-range_lim, range_lim],\n [-range_lim, range_lim]], bins=n_pts, cmap=plt.cm.jet)\n ax.set_title('Target samples')\n\n\ndef plot_energy(energy, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = energy(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Energy density')\n\n\n<mask token>\n\n\ndef format_ax(ax, range_lim):\n ax.set_xlim(-range_lim, range_lim)\n ax.set_ylim(-range_lim, range_lim)\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ax.invert_yaxis()\n", "step-3": "<mask token>\n\n\ndef value(energy, noise, x, gen):\n logp_x = energy(x)\n logq_x = noise.log_prob(x).unsqueeze(1)\n logp_gen = energy(gen)\n 
logq_gen = noise.log_prob(gen).unsqueeze(1)\n ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1),\n dim=1, keepdim=True)\n ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim\n =1), dim=1, keepdim=True)\n v = ll_data.mean() + ll_gen.mean()\n r_x = torch.sigmoid(logp_x - logq_x)\n r_gen = torch.sigmoid(logq_gen - logp_gen)\n acc = ((r_x > 1 / 2).sum() + (r_gen > 1 / 2).sum()).cpu().numpy() / (len\n (x) + len(gen))\n return -v, acc\n\n\ndef get_data(args):\n dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples)\n dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True)\n return dataset, dataloader\n\n\ndef sample_2d_data(dataset='8gaussians', n_samples=50000):\n z = torch.randn(n_samples, 2)\n if dataset == '8gaussians':\n scale = 4\n sq2 = 1 / math.sqrt(2)\n centers = [(1, 0), (-1, 0), (0, 1), (0, -1), (sq2, sq2), (-sq2, sq2\n ), (sq2, -sq2), (-sq2, -sq2)]\n centers = torch.tensor([(scale * x, scale * y) for x, y in centers])\n return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(\n n_samples,))])\n elif dataset == '2spirals':\n n = torch.sqrt(torch.rand(n_samples // 2)) * 540 * (2 * math.pi) / 360\n d1x = -torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5\n d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5\n x = torch.cat([torch.stack([d1x, d1y], dim=1), torch.stack([-d1x, -\n d1y], dim=1)], dim=0) / 3\n return x + 0.1 * z\n elif dataset == 'checkerboard':\n x1 = torch.rand(n_samples) * 4 - 2\n x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,),\n dtype=torch.float) * 2\n x2 = x2_ + x1.floor() % 2\n return torch.stack([x1, x2], dim=1) * 2\n elif dataset == 'rings':\n n_samples4 = n_samples3 = n_samples2 = n_samples // 4\n n_samples1 = n_samples - n_samples4 - n_samples3 - n_samples2\n linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1]\n linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1]\n linspace2 = torch.linspace(0, 2 * math.pi, 
n_samples2 + 1)[:-1]\n linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1]\n circ4_x = torch.cos(linspace4)\n circ4_y = torch.sin(linspace4)\n circ3_x = torch.cos(linspace4) * 0.75\n circ3_y = torch.sin(linspace3) * 0.75\n circ2_x = torch.cos(linspace2) * 0.5\n circ2_y = torch.sin(linspace2) * 0.5\n circ1_x = torch.cos(linspace1) * 0.25\n circ1_y = torch.sin(linspace1) * 0.25\n x = torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]),\n torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0\n x = x[torch.randint(0, n_samples, size=(n_samples,))]\n return x + torch.normal(mean=torch.zeros_like(x), std=0.08 * torch.\n ones_like(x))\n elif dataset == 'pinwheel':\n rng = np.random.RandomState()\n radial_std = 0.3\n tangential_std = 0.1\n num_classes = 5\n num_per_class = n_samples // 5\n rate = 0.25\n rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False)\n features = rng.randn(num_classes * num_per_class, 2) * np.array([\n radial_std, tangential_std])\n features[:, 0] += 1.0\n labels = np.repeat(np.arange(num_classes), num_per_class)\n angles = rads[labels] + rate * np.exp(features[:, 0])\n rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(\n angles), np.cos(angles)])\n rotations = np.reshape(rotations.T, (-1, 2, 2))\n data = 2 * rng.permutation(np.einsum('ti,tij->tj', features, rotations)\n )\n return torch.as_tensor(data, dtype=torch.float32)\n else:\n raise RuntimeError('Invalid `dataset` to sample from.')\n\n\n@torch.no_grad()\ndef plot(dataset, energy, noise, epoch, device):\n n_pts = 1000\n range_lim = 4\n test_grid = setup_grid(range_lim, n_pts, device)\n fig, axs = plt.subplots(1, 3, figsize=(12, 4.3), subplot_kw={'aspect':\n 'equal'})\n plot_samples(dataset, axs[0], range_lim, n_pts)\n plot_noise(noise, axs[1], test_grid, n_pts)\n plot_energy(energy, axs[2], test_grid, n_pts)\n for ax in plt.gcf().axes:\n format_ax(ax, range_lim)\n plt.tight_layout()\n print('Saving image to images/....')\n 
plt.savefig('images/epoch_{}.png'.format(epoch))\n plt.close()\n\n\ndef setup_grid(range_lim, n_pts, device):\n x = torch.linspace(-range_lim, range_lim, n_pts)\n xx, yy = torch.meshgrid((x, x))\n zz = torch.stack((xx.flatten(), yy.flatten()), dim=1)\n return xx, yy, zz.to(device)\n\n\ndef plot_samples(dataset, ax, range_lim, n_pts):\n samples = dataset.numpy()\n ax.hist2d(samples[:, 0], samples[:, 1], range=[[-range_lim, range_lim],\n [-range_lim, range_lim]], bins=n_pts, cmap=plt.cm.jet)\n ax.set_title('Target samples')\n\n\ndef plot_energy(energy, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = energy(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Energy density')\n\n\ndef plot_noise(noise, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = noise.log_prob(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Noise density')\n\n\ndef format_ax(ax, range_lim):\n ax.set_xlim(-range_lim, range_lim)\n ax.set_ylim(-range_lim, range_lim)\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ax.invert_yaxis()\n", "step-4": "import math\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.utils.data import DataLoader\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\n\n\ndef value(energy, noise, x, gen):\n logp_x = energy(x)\n logq_x = noise.log_prob(x).unsqueeze(1)\n logp_gen = energy(gen)\n logq_gen = noise.log_prob(gen).unsqueeze(1)\n ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1),\n dim=1, keepdim=True)\n ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim\n =1), dim=1, keepdim=True)\n v = ll_data.mean() + ll_gen.mean()\n r_x = torch.sigmoid(logp_x - logq_x)\n r_gen = torch.sigmoid(logq_gen - logp_gen)\n acc = ((r_x > 1 / 2).sum() + (r_gen > 1 
/ 2).sum()).cpu().numpy() / (len\n (x) + len(gen))\n return -v, acc\n\n\ndef get_data(args):\n dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples)\n dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True)\n return dataset, dataloader\n\n\ndef sample_2d_data(dataset='8gaussians', n_samples=50000):\n z = torch.randn(n_samples, 2)\n if dataset == '8gaussians':\n scale = 4\n sq2 = 1 / math.sqrt(2)\n centers = [(1, 0), (-1, 0), (0, 1), (0, -1), (sq2, sq2), (-sq2, sq2\n ), (sq2, -sq2), (-sq2, -sq2)]\n centers = torch.tensor([(scale * x, scale * y) for x, y in centers])\n return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(\n n_samples,))])\n elif dataset == '2spirals':\n n = torch.sqrt(torch.rand(n_samples // 2)) * 540 * (2 * math.pi) / 360\n d1x = -torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5\n d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5\n x = torch.cat([torch.stack([d1x, d1y], dim=1), torch.stack([-d1x, -\n d1y], dim=1)], dim=0) / 3\n return x + 0.1 * z\n elif dataset == 'checkerboard':\n x1 = torch.rand(n_samples) * 4 - 2\n x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,),\n dtype=torch.float) * 2\n x2 = x2_ + x1.floor() % 2\n return torch.stack([x1, x2], dim=1) * 2\n elif dataset == 'rings':\n n_samples4 = n_samples3 = n_samples2 = n_samples // 4\n n_samples1 = n_samples - n_samples4 - n_samples3 - n_samples2\n linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1]\n linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1]\n linspace2 = torch.linspace(0, 2 * math.pi, n_samples2 + 1)[:-1]\n linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1]\n circ4_x = torch.cos(linspace4)\n circ4_y = torch.sin(linspace4)\n circ3_x = torch.cos(linspace4) * 0.75\n circ3_y = torch.sin(linspace3) * 0.75\n circ2_x = torch.cos(linspace2) * 0.5\n circ2_y = torch.sin(linspace2) * 0.5\n circ1_x = torch.cos(linspace1) * 0.25\n circ1_y = torch.sin(linspace1) * 0.25\n x = 
torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]),\n torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0\n x = x[torch.randint(0, n_samples, size=(n_samples,))]\n return x + torch.normal(mean=torch.zeros_like(x), std=0.08 * torch.\n ones_like(x))\n elif dataset == 'pinwheel':\n rng = np.random.RandomState()\n radial_std = 0.3\n tangential_std = 0.1\n num_classes = 5\n num_per_class = n_samples // 5\n rate = 0.25\n rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False)\n features = rng.randn(num_classes * num_per_class, 2) * np.array([\n radial_std, tangential_std])\n features[:, 0] += 1.0\n labels = np.repeat(np.arange(num_classes), num_per_class)\n angles = rads[labels] + rate * np.exp(features[:, 0])\n rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(\n angles), np.cos(angles)])\n rotations = np.reshape(rotations.T, (-1, 2, 2))\n data = 2 * rng.permutation(np.einsum('ti,tij->tj', features, rotations)\n )\n return torch.as_tensor(data, dtype=torch.float32)\n else:\n raise RuntimeError('Invalid `dataset` to sample from.')\n\n\n@torch.no_grad()\ndef plot(dataset, energy, noise, epoch, device):\n n_pts = 1000\n range_lim = 4\n test_grid = setup_grid(range_lim, n_pts, device)\n fig, axs = plt.subplots(1, 3, figsize=(12, 4.3), subplot_kw={'aspect':\n 'equal'})\n plot_samples(dataset, axs[0], range_lim, n_pts)\n plot_noise(noise, axs[1], test_grid, n_pts)\n plot_energy(energy, axs[2], test_grid, n_pts)\n for ax in plt.gcf().axes:\n format_ax(ax, range_lim)\n plt.tight_layout()\n print('Saving image to images/....')\n plt.savefig('images/epoch_{}.png'.format(epoch))\n plt.close()\n\n\ndef setup_grid(range_lim, n_pts, device):\n x = torch.linspace(-range_lim, range_lim, n_pts)\n xx, yy = torch.meshgrid((x, x))\n zz = torch.stack((xx.flatten(), yy.flatten()), dim=1)\n return xx, yy, zz.to(device)\n\n\ndef plot_samples(dataset, ax, range_lim, n_pts):\n samples = dataset.numpy()\n ax.hist2d(samples[:, 0], samples[:, 1], 
range=[[-range_lim, range_lim],\n [-range_lim, range_lim]], bins=n_pts, cmap=plt.cm.jet)\n ax.set_title('Target samples')\n\n\ndef plot_energy(energy, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = energy(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Energy density')\n\n\ndef plot_noise(noise, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = noise.log_prob(zz)\n prob = log_prob.exp().cpu()\n ax.pcolormesh(xx, yy, prob.view(n_pts, n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.0))\n ax.set_title('Noise density')\n\n\ndef format_ax(ax, range_lim):\n ax.set_xlim(-range_lim, range_lim)\n ax.set_ylim(-range_lim, range_lim)\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ax.invert_yaxis()\n", "step-5": "import math\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.utils.data import DataLoader\n\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\n\n\ndef value(energy, noise, x, gen):\n logp_x = energy(x)\n logq_x = noise.log_prob(x).unsqueeze(1)\n logp_gen = energy(gen)\n logq_gen = noise.log_prob(gen).unsqueeze(1)\n\n ll_data = logp_x - torch.logsumexp(torch.cat([logp_x, logq_x], dim=1), dim=1, keepdim=True)\n ll_gen = logq_gen - torch.logsumexp(torch.cat([logp_gen, logq_gen], dim=1), dim=1, keepdim=True)\n\n v = ll_data.mean() + ll_gen.mean()\n\n r_x = torch.sigmoid(logp_x - logq_x)\n r_gen = torch.sigmoid(logq_gen - logp_gen)\n\n acc = ((r_x > 1/2).sum() + (r_gen > 1/2).sum()).cpu().numpy() / (len(x) + len(gen))\n\n return -v, acc\n\n\n#-------------------------------------------\n# DATA\n#-------------------------------------------\ndef get_data(args):\n dataset = sample_2d_data(dataset=args.dataset, n_samples=args.samples)\n dataloader = DataLoader(dataset, batch_size=args.batch, shuffle=True)\n return dataset, dataloader\n\ndef 
sample_2d_data(dataset='8gaussians', n_samples=50000):\n \n z = torch.randn(n_samples, 2)\n\n if dataset == '8gaussians':\n scale = 4\n sq2 = 1/math.sqrt(2)\n centers = [(1,0), (-1,0), (0,1), (0,-1), (sq2,sq2), (-sq2,sq2), (sq2,-sq2), (-sq2,-sq2)]\n centers = torch.tensor([(scale * x, scale * y) for x,y in centers])\n return sq2 * (0.5 * z + centers[torch.randint(len(centers), size=(n_samples,))])\n\n elif dataset == '2spirals':\n n = torch.sqrt(torch.rand(n_samples // 2)) * 540 * (2 * math.pi) / 360\n d1x = - torch.cos(n) * n + torch.rand(n_samples // 2) * 0.5\n d1y = torch.sin(n) * n + torch.rand(n_samples // 2) * 0.5\n x = torch.cat([torch.stack([ d1x, d1y], dim=1),\n torch.stack([-d1x, -d1y], dim=1)], dim=0) / 3\n return x + 0.1*z\n\n elif dataset == 'checkerboard':\n x1 = torch.rand(n_samples) * 4 - 2\n x2_ = torch.rand(n_samples) - torch.randint(0, 2, (n_samples,), dtype=torch.float) * 2\n x2 = x2_ + x1.floor() % 2\n return torch.stack([x1, x2], dim=1) * 2\n\n elif dataset == 'rings':\n n_samples4 = n_samples3 = n_samples2 = n_samples // 4\n n_samples1 = n_samples - n_samples4 - n_samples3 - n_samples2\n\n # so as not to have the first point = last point, set endpoint=False in np; here shifted by one\n linspace4 = torch.linspace(0, 2 * math.pi, n_samples4 + 1)[:-1]\n linspace3 = torch.linspace(0, 2 * math.pi, n_samples3 + 1)[:-1]\n linspace2 = torch.linspace(0, 2 * math.pi, n_samples2 + 1)[:-1]\n linspace1 = torch.linspace(0, 2 * math.pi, n_samples1 + 1)[:-1]\n\n circ4_x = torch.cos(linspace4)\n circ4_y = torch.sin(linspace4)\n circ3_x = torch.cos(linspace4) * 0.75\n circ3_y = torch.sin(linspace3) * 0.75\n circ2_x = torch.cos(linspace2) * 0.5\n circ2_y = torch.sin(linspace2) * 0.5\n circ1_x = torch.cos(linspace1) * 0.25\n circ1_y = torch.sin(linspace1) * 0.25\n\n x = torch.stack([torch.cat([circ4_x, circ3_x, circ2_x, circ1_x]),\n torch.cat([circ4_y, circ3_y, circ2_y, circ1_y])], dim=1) * 3.0\n\n # random sample\n x = x[torch.randint(0, n_samples, 
size=(n_samples,))]\n\n # Add noise\n return x + torch.normal(mean=torch.zeros_like(x), std=0.08*torch.ones_like(x))\n\n elif dataset == \"pinwheel\":\n rng = np.random.RandomState()\n radial_std = 0.3\n tangential_std = 0.1\n num_classes = 5\n num_per_class = n_samples // 5\n rate = 0.25\n rads = np.linspace(0, 2 * np.pi, num_classes, endpoint=False)\n\n features = rng.randn(num_classes*num_per_class, 2) \\\n * np.array([radial_std, tangential_std])\n features[:, 0] += 1.\n labels = np.repeat(np.arange(num_classes), num_per_class)\n\n angles = rads[labels] + rate * np.exp(features[:, 0])\n rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(angles), np.cos(angles)])\n rotations = np.reshape(rotations.T, (-1, 2, 2))\n \n data = 2 * rng.permutation(np.einsum(\"ti,tij->tj\", features, rotations))\n return torch.as_tensor(data, dtype=torch.float32)\n\n else:\n raise RuntimeError('Invalid `dataset` to sample from.')\n\n# --------------------\n# Plotting\n# --------------------\n\n@torch.no_grad()\ndef plot(dataset, energy, noise, epoch, device):\n n_pts = 1000\n range_lim = 4\n\n # construct test points\n test_grid = setup_grid(range_lim, n_pts, device)\n\n # plot\n fig, axs = plt.subplots(1, 3, figsize=(12,4.3), subplot_kw={'aspect': 'equal'})\n plot_samples(dataset, axs[0], range_lim, n_pts)\n plot_noise(noise, axs[1], test_grid, n_pts)\n plot_energy(energy, axs[2], test_grid, n_pts)\n\n # format\n for ax in plt.gcf().axes: format_ax(ax, range_lim)\n plt.tight_layout()\n\n # save\n print('Saving image to images/....')\n plt.savefig('images/epoch_{}.png'.format(epoch))\n plt.close()\n\ndef setup_grid(range_lim, n_pts, device):\n x = torch.linspace(-range_lim, range_lim, n_pts)\n xx, yy = torch.meshgrid((x, x))\n zz = torch.stack((xx.flatten(), yy.flatten()), dim=1)\n return xx, yy, zz.to(device)\n\ndef plot_samples(dataset, ax, range_lim, n_pts):\n samples = dataset.numpy()\n ax.hist2d(samples[:,0], samples[:,1], range=[[-range_lim, range_lim], [-range_lim, 
range_lim]], bins=n_pts, cmap=plt.cm.jet)\n ax.set_title('Target samples')\n\ndef plot_energy(energy, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = energy(zz)\n prob = log_prob.exp().cpu()\n # plot\n ax.pcolormesh(xx, yy, prob.view(n_pts,n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.))\n ax.set_title('Energy density')\n\ndef plot_noise(noise, ax, test_grid, n_pts):\n xx, yy, zz = test_grid\n log_prob = noise.log_prob(zz)\n prob = log_prob.exp().cpu()\n # plot\n ax.pcolormesh(xx, yy, prob.view(n_pts,n_pts), cmap=plt.cm.jet)\n ax.set_facecolor(plt.cm.jet(0.))\n ax.set_title('Noise density')\n\ndef format_ax(ax, range_lim):\n ax.set_xlim(-range_lim, range_lim)\n ax.set_ylim(-range_lim, range_lim)\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ax.invert_yaxis()", "step-ids": [ 7, 8, 9, 11, 12 ] }
[ 7, 8, 9, 11, 12 ]
import os, subprocess os.environ['FLASK_APP'] = "app/app.py" os.environ['FLASK_DEBUG'] = "1" # for LSTM instead: https://storage.googleapis.com/jacobdanovitch/twtc/lstm.tar.gz # Will have to change app.py to accept only attention_weights subprocess.call('./serve_model.sh') subprocess.call(['flask', 'run'])
normal
{ "blob_id": "cbad5d6f381e788a2f064aac0a5d468f40b39c93", "index": 3696, "step-1": "<mask token>\n", "step-2": "<mask token>\nsubprocess.call('./serve_model.sh')\nsubprocess.call(['flask', 'run'])\n", "step-3": "<mask token>\nos.environ['FLASK_APP'] = 'app/app.py'\nos.environ['FLASK_DEBUG'] = '1'\nsubprocess.call('./serve_model.sh')\nsubprocess.call(['flask', 'run'])\n", "step-4": "import os, subprocess\nos.environ['FLASK_APP'] = 'app/app.py'\nos.environ['FLASK_DEBUG'] = '1'\nsubprocess.call('./serve_model.sh')\nsubprocess.call(['flask', 'run'])\n", "step-5": "import os, subprocess\n\nos.environ['FLASK_APP'] = \"app/app.py\"\nos.environ['FLASK_DEBUG'] = \"1\"\n\n# for LSTM instead: https://storage.googleapis.com/jacobdanovitch/twtc/lstm.tar.gz\n# Will have to change app.py to accept only attention_weights\n\n\nsubprocess.call('./serve_model.sh')\nsubprocess.call(['flask', 'run'])\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> s.connect((host, port)) <|reserved_special_token_0|> s.sendall(cmd.encode()) <|reserved_special_token_0|> print(data.decode()) s.close() <|reserved_special_token_1|> <|reserved_special_token_0|> s = socket.socket() host = socket.gethostname() port = 3456 s.connect((host, port)) cmd = input('>>>') s.sendall(cmd.encode()) data = s.recv(1024) print(data.decode()) s.close() <|reserved_special_token_1|> import socket s = socket.socket() host = socket.gethostname() port = 3456 s.connect((host, port)) cmd = input('>>>') s.sendall(cmd.encode()) data = s.recv(1024) print(data.decode()) s.close() <|reserved_special_token_1|> # -*- coding: utf-8 -*- # @Time : 2019/3/21 20:12 # @Author : for # @File : test01.py # @Software: PyCharm import socket s=socket.socket() host=socket.gethostname() port=3456 s.connect((host,port)) cmd=input(">>>") s.sendall(cmd.encode()) data=s.recv(1024) print(data.decode()) s.close()
flexible
{ "blob_id": "596814032218c3db746f67e54e4f1863753aea06", "index": 6299, "step-1": "<mask token>\n", "step-2": "<mask token>\ns.connect((host, port))\n<mask token>\ns.sendall(cmd.encode())\n<mask token>\nprint(data.decode())\ns.close()\n", "step-3": "<mask token>\ns = socket.socket()\nhost = socket.gethostname()\nport = 3456\ns.connect((host, port))\ncmd = input('>>>')\ns.sendall(cmd.encode())\ndata = s.recv(1024)\nprint(data.decode())\ns.close()\n", "step-4": "import socket\ns = socket.socket()\nhost = socket.gethostname()\nport = 3456\ns.connect((host, port))\ncmd = input('>>>')\ns.sendall(cmd.encode())\ndata = s.recv(1024)\nprint(data.decode())\ns.close()\n", "step-5": "# -*- coding: utf-8 -*-\r\n# @Time : 2019/3/21 20:12\r\n# @Author : for \r\n# @File : test01.py\r\n# @Software: PyCharm\r\nimport socket\r\n\r\ns=socket.socket()\r\n\r\nhost=socket.gethostname()\r\nport=3456\r\ns.connect((host,port))\r\n\r\ncmd=input(\">>>\")\r\ns.sendall(cmd.encode())\r\ndata=s.recv(1024)\r\nprint(data.decode())\r\n\r\ns.close()\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in range(N): a, b = map(int, readline().split()) if a == 0 and b == 0: zeropair += 1 continue if a == 0: zeroa += 1 continue if b == 0: zerob += 1 continue absa = abs(a) absb = abs(b) g = math.gcd(absa, absb) absa, absb = absa // g, absb // g if a * b > 0: pluspair[absa, absb] += 1 else: minuspair[absa, absb] += 1 <|reserved_special_token_0|> ans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV ans %= DIV <|reserved_special_token_0|> for item in pluspair.items(): a, b = item[0] cnt = item[1] if (b, a) in minuspair: ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV ans %= DIV del minuspair[b, a] else: allcnt += cnt for val in minuspair.values(): allcnt += val <|reserved_special_token_0|> ans += zeropair print((ans - 1) % DIV) <|reserved_special_token_1|> <|reserved_special_token_0|> readline = sys.stdin.readline N = int(readline()) <|reserved_special_token_0|> zeropair = 0 zeroa = 0 zerob = 0 <|reserved_special_token_0|> pluspair = defaultdict(int) minuspair = defaultdict(int) for i in range(N): a, b = map(int, readline().split()) if a == 0 and b == 0: zeropair += 1 continue if a == 0: zeroa += 1 continue if b == 0: zerob += 1 continue absa = abs(a) absb = abs(b) g = math.gcd(absa, absb) absa, absb = absa // g, absb // g if a * b > 0: pluspair[absa, absb] += 1 else: minuspair[absa, absb] += 1 DIV = 1000000007 ans = 1 ans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV ans %= DIV allcnt = 0 for item in pluspair.items(): a, b = item[0] cnt = item[1] if (b, a) in minuspair: ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV ans %= DIV del minuspair[b, a] else: allcnt += cnt for val in minuspair.values(): allcnt += val ans = ans * pow(2, allcnt, DIV) % DIV ans += zeropair print((ans - 1) % DIV) <|reserved_special_token_1|> import sys readline = sys.stdin.readline N = int(readline()) import math zeropair = 0 zeroa = 0 zerob = 0 from collections 
import defaultdict pluspair = defaultdict(int) minuspair = defaultdict(int) for i in range(N): a, b = map(int, readline().split()) if a == 0 and b == 0: zeropair += 1 continue if a == 0: zeroa += 1 continue if b == 0: zerob += 1 continue absa = abs(a) absb = abs(b) g = math.gcd(absa, absb) absa, absb = absa // g, absb // g if a * b > 0: pluspair[absa, absb] += 1 else: minuspair[absa, absb] += 1 DIV = 1000000007 ans = 1 ans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV ans %= DIV allcnt = 0 for item in pluspair.items(): a, b = item[0] cnt = item[1] if (b, a) in minuspair: ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV ans %= DIV del minuspair[b, a] else: allcnt += cnt for val in minuspair.values(): allcnt += val ans = ans * pow(2, allcnt, DIV) % DIV ans += zeropair print((ans - 1) % DIV) <|reserved_special_token_1|> # 約分して、互いに素な(1,3) (3,1)のようなペアを作りカウントする # 正のグループと負のグループを別々に管理 # 正のグループの相手が負のグループに存在した場合、 # どちらかのグループから好きなだけ選ぶか、どちらも選ばないかしかない # 誰ともペアにならなかったグループの個数を全て足してP個だとして、2^P通りを掛ける # (0,0)については、その中から1つ選ぶか、選ばないかしかない import sys readline = sys.stdin.readline N = int(readline()) import math zeropair = 0 zeroa = 0 zerob = 0 from collections import defaultdict pluspair = defaultdict(int) minuspair = defaultdict(int) for i in range(N): a,b = map(int,readline().split()) if a == 0 and b == 0: zeropair += 1 continue if a == 0: zeroa += 1 continue if b == 0: zerob += 1 continue absa = abs(a) absb = abs(b) g = math.gcd(absa,absb) absa,absb = absa//g,absb//g if a * b > 0: pluspair[(absa,absb)] += 1 else: minuspair[(absa,absb)] += 1 DIV = 1000000007 ans = 1 # zeroa,zerobから選ぶパターンは、どちらから好きなだけ選ぶか、どちらも選ばないか ans *= (pow(2,zeroa,DIV) + pow(2,zerob,DIV) - 1) % DIV ans %= DIV # 誰とでもペアになれるものをカウント allcnt = 0 # plusから選ぶパターンで、minusにある対応ペアを探す for item in pluspair.items(): a,b = item[0] cnt = item[1] if (b,a) in minuspair: ans *= (pow(2,cnt,DIV) + pow(2,minuspair[(b,a)]) - 1) % DIV ans %= DIV del minuspair[(b,a)] else: allcnt += cnt for val in minuspair.values(): allcnt 
+= val ans = (ans * pow(2,allcnt,DIV)) % DIV # zeropairから選んだ場合、今までのパターンとは独立 ans += zeropair print((ans - 1) % DIV)
flexible
{ "blob_id": "098488fd10bcf81c4efa198a44d2ff87e4f8c130", "index": 3225, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in range(N):\n a, b = map(int, readline().split())\n if a == 0 and b == 0:\n zeropair += 1\n continue\n if a == 0:\n zeroa += 1\n continue\n if b == 0:\n zerob += 1\n continue\n absa = abs(a)\n absb = abs(b)\n g = math.gcd(absa, absb)\n absa, absb = absa // g, absb // g\n if a * b > 0:\n pluspair[absa, absb] += 1\n else:\n minuspair[absa, absb] += 1\n<mask token>\nans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV\nans %= DIV\n<mask token>\nfor item in pluspair.items():\n a, b = item[0]\n cnt = item[1]\n if (b, a) in minuspair:\n ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV\n ans %= DIV\n del minuspair[b, a]\n else:\n allcnt += cnt\nfor val in minuspair.values():\n allcnt += val\n<mask token>\nans += zeropair\nprint((ans - 1) % DIV)\n", "step-3": "<mask token>\nreadline = sys.stdin.readline\nN = int(readline())\n<mask token>\nzeropair = 0\nzeroa = 0\nzerob = 0\n<mask token>\npluspair = defaultdict(int)\nminuspair = defaultdict(int)\nfor i in range(N):\n a, b = map(int, readline().split())\n if a == 0 and b == 0:\n zeropair += 1\n continue\n if a == 0:\n zeroa += 1\n continue\n if b == 0:\n zerob += 1\n continue\n absa = abs(a)\n absb = abs(b)\n g = math.gcd(absa, absb)\n absa, absb = absa // g, absb // g\n if a * b > 0:\n pluspair[absa, absb] += 1\n else:\n minuspair[absa, absb] += 1\nDIV = 1000000007\nans = 1\nans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV\nans %= DIV\nallcnt = 0\nfor item in pluspair.items():\n a, b = item[0]\n cnt = item[1]\n if (b, a) in minuspair:\n ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV\n ans %= DIV\n del minuspair[b, a]\n else:\n allcnt += cnt\nfor val in minuspair.values():\n allcnt += val\nans = ans * pow(2, allcnt, DIV) % DIV\nans += zeropair\nprint((ans - 1) % DIV)\n", "step-4": "import sys\nreadline = sys.stdin.readline\nN = int(readline())\nimport 
math\nzeropair = 0\nzeroa = 0\nzerob = 0\nfrom collections import defaultdict\npluspair = defaultdict(int)\nminuspair = defaultdict(int)\nfor i in range(N):\n a, b = map(int, readline().split())\n if a == 0 and b == 0:\n zeropair += 1\n continue\n if a == 0:\n zeroa += 1\n continue\n if b == 0:\n zerob += 1\n continue\n absa = abs(a)\n absb = abs(b)\n g = math.gcd(absa, absb)\n absa, absb = absa // g, absb // g\n if a * b > 0:\n pluspair[absa, absb] += 1\n else:\n minuspair[absa, absb] += 1\nDIV = 1000000007\nans = 1\nans *= (pow(2, zeroa, DIV) + pow(2, zerob, DIV) - 1) % DIV\nans %= DIV\nallcnt = 0\nfor item in pluspair.items():\n a, b = item[0]\n cnt = item[1]\n if (b, a) in minuspair:\n ans *= (pow(2, cnt, DIV) + pow(2, minuspair[b, a]) - 1) % DIV\n ans %= DIV\n del minuspair[b, a]\n else:\n allcnt += cnt\nfor val in minuspair.values():\n allcnt += val\nans = ans * pow(2, allcnt, DIV) % DIV\nans += zeropair\nprint((ans - 1) % DIV)\n", "step-5": "# 約分して、互いに素な(1,3) (3,1)のようなペアを作りカウントする\n# 正のグループと負のグループを別々に管理\n# 正のグループの相手が負のグループに存在した場合、\n# どちらかのグループから好きなだけ選ぶか、どちらも選ばないかしかない\n# 誰ともペアにならなかったグループの個数を全て足してP個だとして、2^P通りを掛ける\n# (0,0)については、その中から1つ選ぶか、選ばないかしかない\n\nimport sys\nreadline = sys.stdin.readline\n\nN = int(readline())\nimport math\n\nzeropair = 0\nzeroa = 0\nzerob = 0\nfrom collections import defaultdict\npluspair = defaultdict(int)\nminuspair = defaultdict(int)\nfor i in range(N):\n a,b = map(int,readline().split())\n if a == 0 and b == 0:\n zeropair += 1\n continue\n if a == 0:\n zeroa += 1\n continue\n if b == 0:\n zerob += 1\n continue\n absa = abs(a)\n absb = abs(b)\n g = math.gcd(absa,absb)\n absa,absb = absa//g,absb//g\n if a * b > 0:\n pluspair[(absa,absb)] += 1\n else:\n minuspair[(absa,absb)] += 1\n\nDIV = 1000000007\nans = 1\n# zeroa,zerobから選ぶパターンは、どちらから好きなだけ選ぶか、どちらも選ばないか\nans *= (pow(2,zeroa,DIV) + pow(2,zerob,DIV) - 1) % DIV\nans %= DIV\n\n# 誰とでもペアになれるものをカウント\nallcnt = 0\n\n# plusから選ぶパターンで、minusにある対応ペアを探す\nfor item in pluspair.items():\n a,b = 
item[0]\n cnt = item[1]\n if (b,a) in minuspair:\n ans *= (pow(2,cnt,DIV) + pow(2,minuspair[(b,a)]) - 1) % DIV\n ans %= DIV\n del minuspair[(b,a)]\n else:\n allcnt += cnt\n\nfor val in minuspair.values():\n allcnt += val\n\nans = (ans * pow(2,allcnt,DIV)) % DIV\n# zeropairから選んだ場合、今までのパターンとは独立\nans += zeropair\nprint((ans - 1) % DIV)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class BBCCrawler(AbstractWebCrawler): <|reserved_special_token_0|> name = 'web_bbc' resource_link = ( 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security') resource_label = 'bbc' custom_settings = {'ITEM_PIPELINES': { 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}} links_to_articles_query = 'article > header > div > h3 > a::attr(href)' links_to_pages_query = 'dummy' extract_title_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text' ) extract_datetime_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text' ) extract_content_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner' ) <|reserved_special_token_1|> <|reserved_special_token_0|> class BBCCrawler(AbstractWebCrawler): """ [BBC] Web Scraper """ name = 'web_bbc' resource_link = ( 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security') resource_label = 'bbc' custom_settings = {'ITEM_PIPELINES': { 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}} links_to_articles_query = 'article > header > div > h3 > a::attr(href)' links_to_pages_query = 'dummy' extract_title_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text' ) extract_datetime_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text' ) extract_content_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner' ) <|reserved_special_token_1|> 
<|reserved_special_token_0|> import os from .abstract_crawler import AbstractWebCrawler class BBCCrawler(AbstractWebCrawler): """ [BBC] Web Scraper """ name = 'web_bbc' resource_link = ( 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security') resource_label = 'bbc' custom_settings = {'ITEM_PIPELINES': { 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}} links_to_articles_query = 'article > header > div > h3 > a::attr(href)' links_to_pages_query = 'dummy' extract_title_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text' ) extract_datetime_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text' ) extract_content_query = ( '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner' ) <|reserved_special_token_1|> """ [BBC] Web Scraper """ import os from .abstract_crawler import AbstractWebCrawler class BBCCrawler(AbstractWebCrawler): """ [BBC] Web Scraper """ # Spider Properties name = "web_bbc" # Crawler Properties resource_link = 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security' resource_label = 'bbc' # TODO Move it to the super class custom_settings = { 'ITEM_PIPELINES': { 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500 } } links_to_articles_query = 'article > header > div > h3 > a::attr(href)' links_to_pages_query = 'dummy' # dynamic ajax pagination extract_title_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text' extract_datetime_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text' extract_content_query = '#page > 
div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner'
flexible
{ "blob_id": "3c22fbfd7d83ff3ecacabc3c88af2169fa5906b9", "index": 5190, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass BBCCrawler(AbstractWebCrawler):\n <mask token>\n name = 'web_bbc'\n resource_link = (\n 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security')\n resource_label = 'bbc'\n custom_settings = {'ITEM_PIPELINES': {\n 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}}\n links_to_articles_query = 'article > header > div > h3 > a::attr(href)'\n links_to_pages_query = 'dummy'\n extract_title_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text'\n )\n extract_datetime_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text'\n )\n extract_content_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner'\n )\n", "step-3": "<mask token>\n\n\nclass BBCCrawler(AbstractWebCrawler):\n \"\"\" [BBC] Web Scraper \"\"\"\n name = 'web_bbc'\n resource_link = (\n 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security')\n resource_label = 'bbc'\n custom_settings = {'ITEM_PIPELINES': {\n 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}}\n links_to_articles_query = 'article > header > div > h3 > a::attr(href)'\n links_to_pages_query = 'dummy'\n extract_title_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text'\n )\n extract_datetime_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text'\n )\n extract_content_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > 
div.story-body__inner'\n )\n", "step-4": "<mask token>\nimport os\nfrom .abstract_crawler import AbstractWebCrawler\n\n\nclass BBCCrawler(AbstractWebCrawler):\n \"\"\" [BBC] Web Scraper \"\"\"\n name = 'web_bbc'\n resource_link = (\n 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security')\n resource_label = 'bbc'\n custom_settings = {'ITEM_PIPELINES': {\n 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500}}\n links_to_articles_query = 'article > header > div > h3 > a::attr(href)'\n links_to_pages_query = 'dummy'\n extract_title_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text'\n )\n extract_datetime_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text'\n )\n extract_content_query = (\n '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner'\n )\n", "step-5": "\"\"\" [BBC] Web Scraper \"\"\"\n\nimport os\nfrom .abstract_crawler import AbstractWebCrawler\n\n\nclass BBCCrawler(AbstractWebCrawler):\n \"\"\" [BBC] Web Scraper \"\"\"\n\n # Spider Properties\n name = \"web_bbc\"\n\n # Crawler Properties\n resource_link = 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security'\n resource_label = 'bbc'\n\n # TODO Move it to the super class\n custom_settings = {\n 'ITEM_PIPELINES': {\n 'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500\n }\n }\n\n links_to_articles_query = 'article > header > div > h3 > a::attr(href)'\n links_to_pages_query = 'dummy' # dynamic ajax pagination\n extract_title_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text'\n extract_datetime_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > 
div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text'\n extract_content_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner'\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]
class Error(Exception): pass class TunnelInstanceError(Error): def __init__(self, expression, message): self.expression = expression self.message = message class TunnelManagerError(Error): def __init__(self, expression, message): self.expression = expression self.message = message
normal
{ "blob_id": "661b622708692bd9cd1b3399835f332c86e39bf6", "index": 8835, "step-1": "<mask token>\n\n\nclass TunnelManagerError(Error):\n <mask token>\n", "step-2": "<mask token>\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n", "step-3": "<mask token>\n\n\nclass TunnelInstanceError(Error):\n <mask token>\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n", "step-4": "<mask token>\n\n\nclass TunnelInstanceError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n", "step-5": "class Error(Exception):\n pass\n\n\nclass TunnelInstanceError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
card = int(input()) last4 = card % 10000 print(last4)
normal
{ "blob_id": "7b920545a0241b30b66ff99f330dbb361f747f13", "index": 8297, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(last4)\n", "step-3": "card = int(input())\nlast4 = card % 10000\nprint(last4)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# TODO - let user input file name on command line level_file = 'level.txt' # read characters in level.txt into # terrain map # which is array of columns f = open(level_file) terrain_map = [] for row in f: col_index = 0 row_index = 0 for tile in row.rstrip(): if col_index == len(terrain_map): terrain_map.append([]) terrain_map[col_index].append(tile) col_index += 1 row_index += 1 f.close() # print(terrain_map) def map_tile_char_to_terrain(tile): if tile == 'M': return "dirt" if tile == 'R': return "rock" if tile == 'D': return "data" if tile == 'B': return "empty" if tile == "P": return "solar" return "dirt" def output_terrain_column(column): n = len(column) print('[') for i, tile in enumerate(column): print(' {') print(' "tex": "' + map_tile_char_to_terrain(tile) + '"') if i + 1 < n: print(' },') else: print(' }') print(']') def print_entities(): print """\ "entities": [ { "x": 0, "y": 0, "rot": 0, "tex": "rover", "name": "rover", "inherits": { "Accessible": [ "Bots" ], "Rover": { "moveSFX": "move" } } } ], """ def print_footer(): # TODO add other textures print """ "tex": { "rover": "/images/rover.png", "dirt": "/images/mars.png", "rock": "/images/mars_rock.png", "blank": "/images/blank.png", "solar": "/images/panel.png", "data": "/images/data_drive.png" }, "sfx": { "botMove": "/audio/" }, "meta": { "title": "Getting started", "desc": "Learn the basics of javascript and how to control a bot" } """ # output terrain map by columns print("{") print(' "tests": [') print(' {') print_entities() print(' "terrain": [') num_cols = len(terrain_map) for i, column in enumerate(terrain_map): output_terrain_column(column) if i + 1 < num_cols: print(',') print(' ]') print(' }') print(' ]') print(',') print_footer() print("}")
normal
{ "blob_id": "fe1cc7660396071172c1ec65ba685e677e497646", "index": 6354, "step-1": "# TODO - let user input file name on command line\n\nlevel_file = 'level.txt'\n\n# read characters in level.txt into\n# terrain map\n# which is array of columns\nf = open(level_file)\nterrain_map = []\nfor row in f:\n\tcol_index = 0\n\trow_index = 0\n\tfor tile in row.rstrip():\n\t\tif col_index == len(terrain_map):\n\t\t\tterrain_map.append([])\n\t\tterrain_map[col_index].append(tile)\n\t\tcol_index += 1\n\trow_index += 1\nf.close()\n\n# print(terrain_map)\n\ndef map_tile_char_to_terrain(tile):\n\tif tile == 'M':\n\t\treturn \"dirt\"\n\tif tile == 'R':\n\t\treturn \"rock\"\n\tif tile == 'D':\n\t\treturn \"data\"\n\tif tile == 'B':\n\t\treturn \"empty\"\n\tif tile == \"P\":\n\t\treturn \"solar\"\n\treturn \"dirt\"\n\ndef output_terrain_column(column):\n\tn = len(column)\n\tprint('[')\n\tfor i, tile in enumerate(column):\n\t\tprint(' {')\n\t\tprint(' \"tex\": \"' + map_tile_char_to_terrain(tile) + '\"')\n\t\tif i + 1 < n:\n\t\t\tprint(' },')\n\t\telse:\n\t\t\tprint(' }')\n\tprint(']')\n\ndef print_entities():\n\tprint \"\"\"\\\n\t\"entities\": [\n {\n \"x\": 0,\n \"y\": 0,\n \"rot\": 0,\n \"tex\": \"rover\",\n \"name\": \"rover\",\n \"inherits\": {\n\t\t\t\"Accessible\": [\n\t\t\t\t\"Bots\"\n\t\t\t],\n \"Rover\": {\n \"moveSFX\": \"move\"\n }\n }\n }\n ],\n\t \"\"\"\n\ndef print_footer():\n\t# TODO add other textures\n\tprint \"\"\"\n\t\"tex\": {\n\t\t\"rover\": \"/images/rover.png\",\n \"dirt\": \"/images/mars.png\",\n \"rock\": \"/images/mars_rock.png\",\n \"blank\": \"/images/blank.png\",\n \"solar\": \"/images/panel.png\",\n \"data\": \"/images/data_drive.png\"\n\t},\n\t\"sfx\": {\n\t\t\"botMove\": \"/audio/\"\n\t},\n\t\"meta\": {\n\t\t\"title\": \"Getting started\",\n\t\t\"desc\": \"Learn the basics of javascript and how to control a bot\"\n\t}\n\t\"\"\"\n\n\n# output terrain map by columns\nprint(\"{\")\nprint(' \"tests\": [')\nprint(' {')\n\nprint_entities()\n\nprint(' 
\"terrain\": [')\n\nnum_cols = len(terrain_map)\nfor i, column in enumerate(terrain_map):\n\toutput_terrain_column(column)\n\tif i + 1 < num_cols:\n\t\tprint(',')\n\nprint(' ]')\nprint(' }')\nprint(' ]')\n\nprint(',')\n\nprint_footer()\n\nprint(\"}\")", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(db_client: DBClient): sns.set_theme() peer_ids = db_client.get_dangling_peer_ids() arrivals = db_client.get_inter_arrival_time(peer_ids) results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600)) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True) sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df) ax1.set_xlim(0, 48) ax1.set_xticks(np.arange(0, 50, step=4)) ax1.set_xlabel('Time in Hours') ax1.set_ylabel('Number of Peers in %') ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax1.legend(loc='lower right', labels=[ f'dangling ({fmt_thousands(len(results_df))})']) ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers') labels = [] for agent in known_agents: peer_ids = db_client.get_peer_ids_for_agent_versions([agent]) arrivals = db_client.get_inter_arrival_time(peer_ids) data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600)) labels += [f'{agent} ({fmt_thousands(len(data))})'] sns.ecdfplot(ax=ax2, x='diff_in_h', data=data) ax2.set_xlim(0, 48) ax2.set_xticks(np.arange(0, 50, step=4)) ax2.set_xlabel('Time in Hours') ax2.set_ylabel('Number of Peers in %') ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax2.title.set_text(f'CDF of Inter Arrival Times by Agent') ax2.legend(loc='lower right', labels=labels) plt.tight_layout() lib_plot.savefig('cdf-inter-arrival-dangling') plt.show() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(db_client: DBClient): sns.set_theme() peer_ids = db_client.get_dangling_peer_ids() arrivals = db_client.get_inter_arrival_time(peer_ids) results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 
'diff_in_s']) results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600)) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True) sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df) ax1.set_xlim(0, 48) ax1.set_xticks(np.arange(0, 50, step=4)) ax1.set_xlabel('Time in Hours') ax1.set_ylabel('Number of Peers in %') ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax1.legend(loc='lower right', labels=[ f'dangling ({fmt_thousands(len(results_df))})']) ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers') labels = [] for agent in known_agents: peer_ids = db_client.get_peer_ids_for_agent_versions([agent]) arrivals = db_client.get_inter_arrival_time(peer_ids) data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600)) labels += [f'{agent} ({fmt_thousands(len(data))})'] sns.ecdfplot(ax=ax2, x='diff_in_h', data=data) ax2.set_xlim(0, 48) ax2.set_xticks(np.arange(0, 50, step=4)) ax2.set_xlabel('Time in Hours') ax2.set_ylabel('Number of Peers in %') ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax2.title.set_text(f'CDF of Inter Arrival Times by Agent') ax2.legend(loc='lower right', labels=labels) plt.tight_layout() lib_plot.savefig('cdf-inter-arrival-dangling') plt.show() if __name__ == '__main__': client = DBClient() main(client) <|reserved_special_token_1|> import pandas as pd import numpy as np import seaborn as sns from matplotlib import pyplot as plt, ticker from analysis.report import lib_plot from analysis.report.lib_agent import known_agents from analysis.report.lib_fmt import fmt_thousands from lib_db import DBClient def main(db_client: DBClient): sns.set_theme() peer_ids = db_client.get_dangling_peer_ids() arrivals = db_client.get_inter_arrival_time(peer_ids) results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) results_df = 
results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600)) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True) sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df) ax1.set_xlim(0, 48) ax1.set_xticks(np.arange(0, 50, step=4)) ax1.set_xlabel('Time in Hours') ax1.set_ylabel('Number of Peers in %') ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax1.legend(loc='lower right', labels=[ f'dangling ({fmt_thousands(len(results_df))})']) ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers') labels = [] for agent in known_agents: peer_ids = db_client.get_peer_ids_for_agent_versions([agent]) arrivals = db_client.get_inter_arrival_time(peer_ids) data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600)) labels += [f'{agent} ({fmt_thousands(len(data))})'] sns.ecdfplot(ax=ax2, x='diff_in_h', data=data) ax2.set_xlim(0, 48) ax2.set_xticks(np.arange(0, 50, step=4)) ax2.set_xlabel('Time in Hours') ax2.set_ylabel('Number of Peers in %') ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: '%d' % int(x * 100))) ax2.title.set_text(f'CDF of Inter Arrival Times by Agent') ax2.legend(loc='lower right', labels=labels) plt.tight_layout() lib_plot.savefig('cdf-inter-arrival-dangling') plt.show() if __name__ == '__main__': client = DBClient() main(client) <|reserved_special_token_1|> import pandas as pd import numpy as np import seaborn as sns from matplotlib import pyplot as plt, ticker from analysis.report import lib_plot from analysis.report.lib_agent import known_agents from analysis.report.lib_fmt import fmt_thousands from lib_db import DBClient def main(db_client: DBClient): sns.set_theme() peer_ids = db_client.get_dangling_peer_ids() arrivals = db_client.get_inter_arrival_time(peer_ids) results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) results_df = results_df.assign( 
diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600), ) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True) sns.ecdfplot(ax=ax1, x="diff_in_h", data=results_df) ax1.set_xlim(0, 48) ax1.set_xticks(np.arange(0, 50, step=4)) ax1.set_xlabel("Time in Hours") ax1.set_ylabel("Number of Peers in %") ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: "%d" % int(x * 100))) ax1.legend(loc='lower right', labels=[f"dangling ({fmt_thousands(len(results_df))})"]) ax1.title.set_text(f"CDF of Inter Arrival Times of Dangling Peers") labels = [] for agent in known_agents: peer_ids = db_client.get_peer_ids_for_agent_versions([agent]) arrivals = db_client.get_inter_arrival_time(peer_ids) data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s']) data = data.assign( diff_in_h=data.diff_in_s.apply(lambda x: x / 3600), ) labels += [f"{agent} ({fmt_thousands(len(data))})"] sns.ecdfplot(ax=ax2, x="diff_in_h", data=data) ax2.set_xlim(0, 48) ax2.set_xticks(np.arange(0, 50, step=4)) ax2.set_xlabel("Time in Hours") ax2.set_ylabel("Number of Peers in %") ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: "%d" % int(x * 100))) ax2.title.set_text(f"CDF of Inter Arrival Times by Agent") ax2.legend(loc='lower right', labels=labels) plt.tight_layout() lib_plot.savefig("cdf-inter-arrival-dangling") plt.show() if __name__ == '__main__': client = DBClient() main(client)
flexible
{ "blob_id": "51b28650f8ae6cbda3d81695acd27744e9bfebd1", "index": 2528, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef main(db_client: DBClient):\n sns.set_theme()\n peer_ids = db_client.get_dangling_peer_ids()\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda\n x: x / 3600))\n fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)\n sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)\n ax1.set_xlim(0, 48)\n ax1.set_xticks(np.arange(0, 50, step=4))\n ax1.set_xlabel('Time in Hours')\n ax1.set_ylabel('Number of Peers in %')\n ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: \n '%d' % int(x * 100)))\n ax1.legend(loc='lower right', labels=[\n f'dangling ({fmt_thousands(len(results_df))})'])\n ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')\n labels = []\n for agent in known_agents:\n peer_ids = db_client.get_peer_ids_for_agent_versions([agent])\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))\n labels += [f'{agent} ({fmt_thousands(len(data))})']\n sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)\n ax2.set_xlim(0, 48)\n ax2.set_xticks(np.arange(0, 50, step=4))\n ax2.set_xlabel('Time in Hours')\n ax2.set_ylabel('Number of Peers in %')\n ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,\n p: '%d' % int(x * 100)))\n ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')\n ax2.legend(loc='lower right', labels=labels)\n plt.tight_layout()\n lib_plot.savefig('cdf-inter-arrival-dangling')\n plt.show()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef main(db_client: DBClient):\n sns.set_theme()\n peer_ids = db_client.get_dangling_peer_ids()\n arrivals = 
db_client.get_inter_arrival_time(peer_ids)\n results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda\n x: x / 3600))\n fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)\n sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)\n ax1.set_xlim(0, 48)\n ax1.set_xticks(np.arange(0, 50, step=4))\n ax1.set_xlabel('Time in Hours')\n ax1.set_ylabel('Number of Peers in %')\n ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: \n '%d' % int(x * 100)))\n ax1.legend(loc='lower right', labels=[\n f'dangling ({fmt_thousands(len(results_df))})'])\n ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')\n labels = []\n for agent in known_agents:\n peer_ids = db_client.get_peer_ids_for_agent_versions([agent])\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))\n labels += [f'{agent} ({fmt_thousands(len(data))})']\n sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)\n ax2.set_xlim(0, 48)\n ax2.set_xticks(np.arange(0, 50, step=4))\n ax2.set_xlabel('Time in Hours')\n ax2.set_ylabel('Number of Peers in %')\n ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,\n p: '%d' % int(x * 100)))\n ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')\n ax2.legend(loc='lower right', labels=labels)\n plt.tight_layout()\n lib_plot.savefig('cdf-inter-arrival-dangling')\n plt.show()\n\n\nif __name__ == '__main__':\n client = DBClient()\n main(client)\n", "step-4": "import pandas as pd\nimport numpy as np\nimport seaborn as sns\nfrom matplotlib import pyplot as plt, ticker\nfrom analysis.report import lib_plot\nfrom analysis.report.lib_agent import known_agents\nfrom analysis.report.lib_fmt import fmt_thousands\nfrom lib_db import DBClient\n\n\ndef main(db_client: DBClient):\n sns.set_theme()\n 
peer_ids = db_client.get_dangling_peer_ids()\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda\n x: x / 3600))\n fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)\n sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)\n ax1.set_xlim(0, 48)\n ax1.set_xticks(np.arange(0, 50, step=4))\n ax1.set_xlabel('Time in Hours')\n ax1.set_ylabel('Number of Peers in %')\n ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: \n '%d' % int(x * 100)))\n ax1.legend(loc='lower right', labels=[\n f'dangling ({fmt_thousands(len(results_df))})'])\n ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')\n labels = []\n for agent in known_agents:\n peer_ids = db_client.get_peer_ids_for_agent_versions([agent])\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))\n labels += [f'{agent} ({fmt_thousands(len(data))})']\n sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)\n ax2.set_xlim(0, 48)\n ax2.set_xticks(np.arange(0, 50, step=4))\n ax2.set_xlabel('Time in Hours')\n ax2.set_ylabel('Number of Peers in %')\n ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,\n p: '%d' % int(x * 100)))\n ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')\n ax2.legend(loc='lower right', labels=labels)\n plt.tight_layout()\n lib_plot.savefig('cdf-inter-arrival-dangling')\n plt.show()\n\n\nif __name__ == '__main__':\n client = DBClient()\n main(client)\n", "step-5": "import pandas as pd\nimport numpy as np\nimport seaborn as sns\nfrom matplotlib import pyplot as plt, ticker\n\nfrom analysis.report import lib_plot\nfrom analysis.report.lib_agent import known_agents\nfrom analysis.report.lib_fmt import fmt_thousands\nfrom lib_db import 
DBClient\n\n\ndef main(db_client: DBClient):\n sns.set_theme()\n\n peer_ids = db_client.get_dangling_peer_ids()\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n\n results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n results_df = results_df.assign(\n diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600),\n )\n\n fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)\n\n sns.ecdfplot(ax=ax1, x=\"diff_in_h\", data=results_df)\n\n ax1.set_xlim(0, 48)\n ax1.set_xticks(np.arange(0, 50, step=4))\n ax1.set_xlabel(\"Time in Hours\")\n ax1.set_ylabel(\"Number of Peers in %\")\n ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: \"%d\" % int(x * 100)))\n ax1.legend(loc='lower right', labels=[f\"dangling ({fmt_thousands(len(results_df))})\"])\n\n ax1.title.set_text(f\"CDF of Inter Arrival Times of Dangling Peers\")\n\n labels = []\n for agent in known_agents:\n peer_ids = db_client.get_peer_ids_for_agent_versions([agent])\n arrivals = db_client.get_inter_arrival_time(peer_ids)\n data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])\n data = data.assign(\n diff_in_h=data.diff_in_s.apply(lambda x: x / 3600),\n )\n labels += [f\"{agent} ({fmt_thousands(len(data))})\"]\n sns.ecdfplot(ax=ax2, x=\"diff_in_h\", data=data)\n ax2.set_xlim(0, 48)\n ax2.set_xticks(np.arange(0, 50, step=4))\n ax2.set_xlabel(\"Time in Hours\")\n ax2.set_ylabel(\"Number of Peers in %\")\n ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: \"%d\" % int(x * 100)))\n\n ax2.title.set_text(f\"CDF of Inter Arrival Times by Agent\")\n ax2.legend(loc='lower right', labels=labels)\n\n plt.tight_layout()\n lib_plot.savefig(\"cdf-inter-arrival-dangling\")\n plt.show()\n\n\nif __name__ == '__main__':\n client = DBClient()\n main(client)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in range(n): l = list(map(lambda x: x * x, map(int, input().split()))) l.sort() if l[0] + l[1] == l[2]: s += 'YES\n' else: s += 'NO\n' print(s, end='') <|reserved_special_token_1|> n = int(input()) s = '' for i in range(n): l = list(map(lambda x: x * x, map(int, input().split()))) l.sort() if l[0] + l[1] == l[2]: s += 'YES\n' else: s += 'NO\n' print(s, end='') <|reserved_special_token_1|> n = int(input()) s = "" for i in range(n): l = list(map(lambda x:x*x,map(int, input().split()))) l.sort() if l[0] + l[1] == l[2]: s += "YES\n" else: s += "NO\n" print(s,end="")
flexible
{ "blob_id": "f8b473451a15e42319b60f44a527d715c0032614", "index": 3411, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in range(n):\n l = list(map(lambda x: x * x, map(int, input().split())))\n l.sort()\n if l[0] + l[1] == l[2]:\n s += 'YES\\n'\n else:\n s += 'NO\\n'\nprint(s, end='')\n", "step-3": "n = int(input())\ns = ''\nfor i in range(n):\n l = list(map(lambda x: x * x, map(int, input().split())))\n l.sort()\n if l[0] + l[1] == l[2]:\n s += 'YES\\n'\n else:\n s += 'NO\\n'\nprint(s, end='')\n", "step-4": "n = int(input())\ns = \"\"\nfor i in range(n):\n l = list(map(lambda x:x*x,map(int, input().split())))\n l.sort()\n if l[0] + l[1] == l[2]:\n s += \"YES\\n\"\n else:\n s += \"NO\\n\"\n\nprint(s,end=\"\")", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for i in tags: casa = i.find('td', {'class': re.compile('team-home')}).find('a') visitante = i.find('td', {'class': re.compile('team-away')}).find('a') print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text()) <|reserved_special_token_1|> <|reserved_special_token_0|> url = input('Enter - ') html = urlopen(url).read() soup = BeautifulSoup(html, 'html.parser') tags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')}) for i in tags: casa = i.find('td', {'class': re.compile('team-home')}).find('a') visitante = i.find('td', {'class': re.compile('team-away')}).find('a') print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text()) <|reserved_special_token_1|> from urllib.request import urlopen from bs4 import BeautifulSoup import re url = input('Enter - ') html = urlopen(url).read() soup = BeautifulSoup(html, 'html.parser') tags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')}) for i in tags: casa = i.find('td', {'class': re.compile('team-home')}).find('a') visitante = i.find('td', {'class': re.compile('team-away')}).find('a') print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text()) <|reserved_special_token_1|> from urllib.request import urlopen from bs4 import BeautifulSoup import re url = input('Enter - ') html = urlopen(url).read() soup = BeautifulSoup(html, "html.parser") tags = soup.find_all('tr', {'id': re.compile(r'nonplayingnow.*')}) for i in tags: casa = i.find("td", {'class': re.compile(r'team-home')}).find("a") visitante = i.find("td", {'class': re.compile(r'team-away')}).find("a") print ("Partido-> "+casa.get_text()+" vs "+visitante.get_text())
flexible
{ "blob_id": "d07a26a69ccbbccf61402632dd6011315e0d61ed", "index": 2710, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n", "step-3": "<mask token>\nurl = input('Enter - ')\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, 'html.parser')\ntags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')})\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n", "step-4": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport re\nurl = input('Enter - ')\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, 'html.parser')\ntags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')})\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n", "step-5": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport re\n\nurl = input('Enter - ')\n\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, \"html.parser\")\n\ntags = soup.find_all('tr', {'id': re.compile(r'nonplayingnow.*')})\n\nfor i in tags:\n casa = i.find(\"td\", {'class': re.compile(r'team-home')}).find(\"a\")\n visitante = i.find(\"td\", {'class': re.compile(r'team-away')}).find(\"a\")\n print (\"Partido-> \"+casa.get_text()+\" vs \"+visitante.get_text())\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# -*- coding:utf-8 -*- from __future__ import unicode_literals from django.db import models SERVICE_RANGE_CHOISE = {(1, '1年'), (2, '2年'), (3, '3年'), (4, '4年'), (5, '5年'), (6, '6年'), (7, '7年'), (8, '8年'), (0, '长期')} USER_STATUS_CHOISE = {(1, '停用'), (2, '正常'), (3, '锁定')} DBSERVER_POS_CHOISE = {(1, '8层机房'), (2, '11层机房')} FIRM_CHOISE = {(1, 'DELL'), (2, 'IBM'), (3, 'EMC')} class Odbserver(models.Model): name = models.CharField(max_length=30, verbose_name='名称') ip = models.GenericIPAddressField(verbose_name='IP') pos = models.IntegerField(default=1, choices=DBSERVER_POS_CHOISE, verbose_name='位置') sn = models.CharField(null=True, blank=True, max_length=50, verbose_name='序列号') sid = models.CharField(null=True, blank=True, max_length=50, verbose_name='快速服务代码') firm = models.IntegerField(default=1, choices=FIRM_CHOISE, verbose_name='厂商') model = models.CharField(null=True, blank=True, max_length=30, verbose_name='型号') feature = models.TextField(null=True, blank=True, verbose_name='配置') buy_time = models.DateField(null=True, blank=True, verbose_name='购买时间') service_range = models.IntegerField(default=1, choices=SERVICE_RANGE_CHOISE, verbose_name='服务年限') comment = models.TextField(null=True, blank=True, verbose_name='备注') class Meta: ordering = ["name"] verbose_name = '服务器信息' verbose_name_plural = verbose_name def __unicode__(self): return u'%s' % self.name def __str__(self): return u'%s' % self.name class Ousers(models.Model): dbserver = models.ForeignKey(Odbserver, null=True, blank=True, verbose_name='服务器') user = models.CharField(max_length=20, verbose_name='用户名') passwd = models.CharField(max_length=20, verbose_name='密码') tablespace = models.CharField(max_length=20, null=True, blank=True, verbose_name='表空间') status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态') business = models.CharField(null=True, blank=True, max_length=100, verbose_name='业务') created = models.DateField(null=True, blank=True, verbose_name='创建时间') comment = 
models.TextField(null=True, blank=True, verbose_name='备注') class Meta: ordering = ["user"] verbose_name = '数据库用户信息' verbose_name_plural = verbose_name def __unicode__(self): return u'%s' % self.business def __str__(self): return u'%s' % self.business class Osysusers(models.Model): dbserver = models.ForeignKey(Odbserver, null=True, blank=True, verbose_name='服务器') name = models.CharField(max_length=20, verbose_name='名称') user = models.CharField(max_length=20, verbose_name='用户') passwd = models.CharField(max_length=20, verbose_name='密码') class Meta: ordering = ["dbserver"] verbose_name = '系统用户信息' verbose_name_plural = verbose_name def __unicode__(self): return u'%s' % self.name def __str__(self): return u'%s' % self.name class Omysqluser(models.Model): dbserver = models.ForeignKey(Odbserver, verbose_name='服务器') name = models.CharField(max_length=20, verbose_name='用户名') passwd = models.CharField(max_length=20, verbose_name='密码') dbname = models.CharField(max_length=20, verbose_name='数据库名') business = models.CharField(null=True, blank=True, max_length=100, verbose_name='业务') comment = models.TextField(null=True, blank=True, verbose_name='备注') class Meta: ordering = ["dbserver"] verbose_name = 'MYSQL用户信息' verbose_name_plural = verbose_name def __unicode__(self): return u'%s' % self.business def __str__(self): return u'%s' % self.business
normal
{ "blob_id": "c2490c3aacfa3ce22c3f47a69dbc44b695c2a2e5", "index": 9509, "step-1": "<mask token>\n\n\nclass Odbserver(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n ordering = ['name']\n verbose_name = '服务器信息'\n verbose_name_plural = verbose_name\n <mask token>\n <mask token>\n\n\nclass Ousers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n user = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n tablespace = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='表空间')\n status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n created = models.DateField(null=True, blank=True, verbose_name='创建时间')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['user']\n verbose_name = '数据库用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n\n\nclass Osysusers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='名称')\n user = models.CharField(max_length=20, verbose_name='用户')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = '系统用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Omysqluser(models.Model):\n dbserver = models.ForeignKey(Odbserver, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='用户名')\n passwd = 
models.CharField(max_length=20, verbose_name='密码')\n dbname = models.CharField(max_length=20, verbose_name='数据库名')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = 'MYSQL用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n", "step-2": "<mask token>\n\n\nclass Odbserver(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n ordering = ['name']\n verbose_name = '服务器信息'\n verbose_name_plural = verbose_name\n <mask token>\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Ousers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n user = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n tablespace = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='表空间')\n status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n created = models.DateField(null=True, blank=True, verbose_name='创建时间')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['user']\n verbose_name = '数据库用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n\n\nclass Osysusers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='名称')\n user = models.CharField(max_length=20, 
verbose_name='用户')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = '系统用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Omysqluser(models.Model):\n dbserver = models.ForeignKey(Odbserver, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n dbname = models.CharField(max_length=20, verbose_name='数据库名')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = 'MYSQL用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n", "step-3": "<mask token>\n\n\nclass Odbserver(models.Model):\n name = models.CharField(max_length=30, verbose_name='名称')\n ip = models.GenericIPAddressField(verbose_name='IP')\n pos = models.IntegerField(default=1, choices=DBSERVER_POS_CHOISE,\n verbose_name='位置')\n sn = models.CharField(null=True, blank=True, max_length=50,\n verbose_name='序列号')\n sid = models.CharField(null=True, blank=True, max_length=50,\n verbose_name='快速服务代码')\n firm = models.IntegerField(default=1, choices=FIRM_CHOISE, verbose_name\n ='厂商')\n model = models.CharField(null=True, blank=True, max_length=30,\n verbose_name='型号')\n feature = models.TextField(null=True, blank=True, verbose_name='配置')\n buy_time = models.DateField(null=True, blank=True, verbose_name='购买时间')\n service_range = models.IntegerField(default=1, choices=\n SERVICE_RANGE_CHOISE, verbose_name='服务年限')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['name']\n verbose_name = '服务器信息'\n verbose_name_plural = verbose_name\n\n def 
__unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Ousers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n user = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n tablespace = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='表空间')\n status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n created = models.DateField(null=True, blank=True, verbose_name='创建时间')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['user']\n verbose_name = '数据库用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n\n\nclass Osysusers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='名称')\n user = models.CharField(max_length=20, verbose_name='用户')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = '系统用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Omysqluser(models.Model):\n dbserver = models.ForeignKey(Odbserver, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n dbname = models.CharField(max_length=20, verbose_name='数据库名')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = 'MYSQL用户信息'\n 
verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n", "step-4": "<mask token>\nSERVICE_RANGE_CHOISE = {(1, '1年'), (2, '2年'), (3, '3年'), (4, '4年'), (5,\n '5年'), (6, '6年'), (7, '7年'), (8, '8年'), (0, '长期')}\nUSER_STATUS_CHOISE = {(1, '停用'), (2, '正常'), (3, '锁定')}\nDBSERVER_POS_CHOISE = {(1, '8层机房'), (2, '11层机房')}\nFIRM_CHOISE = {(1, 'DELL'), (2, 'IBM'), (3, 'EMC')}\n\n\nclass Odbserver(models.Model):\n name = models.CharField(max_length=30, verbose_name='名称')\n ip = models.GenericIPAddressField(verbose_name='IP')\n pos = models.IntegerField(default=1, choices=DBSERVER_POS_CHOISE,\n verbose_name='位置')\n sn = models.CharField(null=True, blank=True, max_length=50,\n verbose_name='序列号')\n sid = models.CharField(null=True, blank=True, max_length=50,\n verbose_name='快速服务代码')\n firm = models.IntegerField(default=1, choices=FIRM_CHOISE, verbose_name\n ='厂商')\n model = models.CharField(null=True, blank=True, max_length=30,\n verbose_name='型号')\n feature = models.TextField(null=True, blank=True, verbose_name='配置')\n buy_time = models.DateField(null=True, blank=True, verbose_name='购买时间')\n service_range = models.IntegerField(default=1, choices=\n SERVICE_RANGE_CHOISE, verbose_name='服务年限')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['name']\n verbose_name = '服务器信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Ousers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n user = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n tablespace = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='表空间')\n status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态')\n business = 
models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n created = models.DateField(null=True, blank=True, verbose_name='创建时间')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['user']\n verbose_name = '数据库用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n\n\nclass Osysusers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True,\n verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='名称')\n user = models.CharField(max_length=20, verbose_name='用户')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = '系统用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Omysqluser(models.Model):\n dbserver = models.ForeignKey(Odbserver, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n dbname = models.CharField(max_length=20, verbose_name='数据库名')\n business = models.CharField(null=True, blank=True, max_length=100,\n verbose_name='业务')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n\n class Meta:\n ordering = ['dbserver']\n verbose_name = 'MYSQL用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n", "step-5": "# -*- coding:utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\nSERVICE_RANGE_CHOISE = {(1, '1年'), (2, '2年'), (3, '3年'), (4, '4年'), (5, '5年'), (6, '6年'), (7, '7年'), (8, '8年'), (0, '长期')}\nUSER_STATUS_CHOISE = {(1, '停用'), (2, '正常'), (3, '锁定')}\nDBSERVER_POS_CHOISE = {(1, '8层机房'), (2, '11层机房')}\nFIRM_CHOISE = {(1, 'DELL'), (2, 
'IBM'), (3, 'EMC')}\n\n\nclass Odbserver(models.Model):\n name = models.CharField(max_length=30, verbose_name='名称')\n ip = models.GenericIPAddressField(verbose_name='IP')\n pos = models.IntegerField(default=1, choices=DBSERVER_POS_CHOISE, verbose_name='位置')\n sn = models.CharField(null=True, blank=True, max_length=50, verbose_name='序列号')\n sid = models.CharField(null=True, blank=True, max_length=50, verbose_name='快速服务代码')\n firm = models.IntegerField(default=1, choices=FIRM_CHOISE, verbose_name='厂商')\n model = models.CharField(null=True, blank=True, max_length=30, verbose_name='型号')\n feature = models.TextField(null=True, blank=True, verbose_name='配置')\n buy_time = models.DateField(null=True, blank=True, verbose_name='购买时间')\n service_range = models.IntegerField(default=1, choices=SERVICE_RANGE_CHOISE, verbose_name='服务年限')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n class Meta:\n ordering = [\"name\"]\n verbose_name = '服务器信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Ousers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True, verbose_name='服务器')\n user = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n tablespace = models.CharField(max_length=20, null=True, blank=True, verbose_name='表空间')\n status = models.IntegerField(choices=USER_STATUS_CHOISE, verbose_name='状态')\n business = models.CharField(null=True, blank=True, max_length=100, verbose_name='业务')\n created = models.DateField(null=True, blank=True, verbose_name='创建时间')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n class Meta:\n ordering = [\"user\"]\n verbose_name = '数据库用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n\n\nclass 
Osysusers(models.Model):\n dbserver = models.ForeignKey(Odbserver, null=True, blank=True, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='名称')\n user = models.CharField(max_length=20, verbose_name='用户')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n\n class Meta:\n ordering = [\"dbserver\"]\n verbose_name = '系统用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.name\n\n def __str__(self):\n return u'%s' % self.name\n\n\nclass Omysqluser(models.Model):\n dbserver = models.ForeignKey(Odbserver, verbose_name='服务器')\n name = models.CharField(max_length=20, verbose_name='用户名')\n passwd = models.CharField(max_length=20, verbose_name='密码')\n dbname = models.CharField(max_length=20, verbose_name='数据库名')\n business = models.CharField(null=True, blank=True, max_length=100, verbose_name='业务')\n comment = models.TextField(null=True, blank=True, verbose_name='备注')\n\n class Meta:\n ordering = [\"dbserver\"]\n verbose_name = 'MYSQL用户信息'\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return u'%s' % self.business\n\n def __str__(self):\n return u'%s' % self.business\n", "step-ids": [ 13, 14, 16, 17, 19 ] }
[ 13, 14, 16, 17, 19 ]
"""A tiny example binary for the native Python rules of Bazel.""" import unittest from bazel_tutorial.examples.py.lib import GetNumber from bazel_tutorial.examples.py.fibonacci.fib import Fib class TestGetNumber(unittest.TestCase): def test_ok(self): self.assertEqual(GetNumber(), 42) def test_fib(self): self.assertEqual(Fib(5), 8) if __name__ == '__main__': unittest.main()
normal
{ "blob_id": "d126efa91b964a3a374d546bb860b39ae26dfa22", "index": 256, "step-1": "<mask token>\n\n\nclass TestGetNumber(unittest.TestCase):\n <mask token>\n\n def test_fib(self):\n self.assertEqual(Fib(5), 8)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass TestGetNumber(unittest.TestCase):\n\n def test_ok(self):\n self.assertEqual(GetNumber(), 42)\n\n def test_fib(self):\n self.assertEqual(Fib(5), 8)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass TestGetNumber(unittest.TestCase):\n\n def test_ok(self):\n self.assertEqual(GetNumber(), 42)\n\n def test_fib(self):\n self.assertEqual(Fib(5), 8)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-4": "<mask token>\nimport unittest\nfrom bazel_tutorial.examples.py.lib import GetNumber\nfrom bazel_tutorial.examples.py.fibonacci.fib import Fib\n\n\nclass TestGetNumber(unittest.TestCase):\n\n def test_ok(self):\n self.assertEqual(GetNumber(), 42)\n\n def test_fib(self):\n self.assertEqual(Fib(5), 8)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-5": "\"\"\"A tiny example binary for the native Python rules of Bazel.\"\"\"\n\nimport unittest\nfrom bazel_tutorial.examples.py.lib import GetNumber\nfrom bazel_tutorial.examples.py.fibonacci.fib import Fib\n\n\nclass TestGetNumber(unittest.TestCase):\n\n def test_ok(self):\n self.assertEqual(GetNumber(), 42)\n\n def test_fib(self):\n self.assertEqual(Fib(5), 8)\n\nif __name__ == '__main__':\n unittest.main()\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> class Question(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def __str__(self): return self.text class AnswerChoice(models.Model): """Represantation of question's answer's choice. """ question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='choices') text = models.TextField() def __str__(self): return self.text class CompletedSurvey(models.Model): """Representation of survey, completed by the user. """ user_id = models.IntegerField(null=True, blank=True) survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True, related_name='completed_surveys') def __str__(self): return f'{self.user_id} - {self.survey.name}' class Answer(models.Model): """Representations of question's answer. """ completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models. CASCADE, related_name='answers') question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='answers') text_answer = models.TextField(blank=True) answer_choices = models.ManyToManyField(AnswerChoice, blank=True) def __str__(self): return ( f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}" ) <|reserved_special_token_1|> <|reserved_special_token_0|> class Survey(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Question(models.Model): """Survey's question respresentation. 
""" QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), ( 3, 'Multiple choices answer') survey = models.ForeignKey(Survey, on_delete=models.CASCADE, related_name='questions') text = models.TextField() question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES) def __str__(self): return self.text class AnswerChoice(models.Model): """Represantation of question's answer's choice. """ question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='choices') text = models.TextField() def __str__(self): return self.text class CompletedSurvey(models.Model): """Representation of survey, completed by the user. """ user_id = models.IntegerField(null=True, blank=True) survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True, related_name='completed_surveys') def __str__(self): return f'{self.user_id} - {self.survey.name}' class Answer(models.Model): """Representations of question's answer. """ completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models. CASCADE, related_name='answers') question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='answers') text_answer = models.TextField(blank=True) answer_choices = models.ManyToManyField(AnswerChoice, blank=True) def __str__(self): return ( f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}" ) <|reserved_special_token_1|> <|reserved_special_token_0|> class Survey(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def __str__(self): return self.name class Question(models.Model): """Survey's question respresentation. 
""" QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), ( 3, 'Multiple choices answer') survey = models.ForeignKey(Survey, on_delete=models.CASCADE, related_name='questions') text = models.TextField() question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES) def __str__(self): return self.text class AnswerChoice(models.Model): """Represantation of question's answer's choice. """ question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='choices') text = models.TextField() def __str__(self): return self.text class CompletedSurvey(models.Model): """Representation of survey, completed by the user. """ user_id = models.IntegerField(null=True, blank=True) survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True, related_name='completed_surveys') def __str__(self): return f'{self.user_id} - {self.survey.name}' class Answer(models.Model): """Representations of question's answer. """ completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models. CASCADE, related_name='answers') question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='answers') text_answer = models.TextField(blank=True) answer_choices = models.ManyToManyField(AnswerChoice, blank=True) def __str__(self): return ( f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}" ) <|reserved_special_token_1|> <|reserved_special_token_0|> class Survey(models.Model): """Survey representation. """ name = models.CharField(max_length=255) description = models.TextField() start_date = models.DateTimeField() end_date = models.DateTimeField() def __str__(self): return self.name class Question(models.Model): """Survey's question respresentation. 
""" QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), ( 3, 'Multiple choices answer') survey = models.ForeignKey(Survey, on_delete=models.CASCADE, related_name='questions') text = models.TextField() question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES) def __str__(self): return self.text class AnswerChoice(models.Model): """Represantation of question's answer's choice. """ question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='choices') text = models.TextField() def __str__(self): return self.text class CompletedSurvey(models.Model): """Representation of survey, completed by the user. """ user_id = models.IntegerField(null=True, blank=True) survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True, related_name='completed_surveys') def __str__(self): return f'{self.user_id} - {self.survey.name}' class Answer(models.Model): """Representations of question's answer. """ completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models. CASCADE, related_name='answers') question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='answers') text_answer = models.TextField(blank=True) answer_choices = models.ManyToManyField(AnswerChoice, blank=True) def __str__(self): return ( f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}" ) <|reserved_special_token_1|> from django.db import models class Survey(models.Model): """Survey representation. """ name = models.CharField(max_length=255) description = models.TextField() start_date = models.DateTimeField() end_date = models.DateTimeField() def __str__(self): return self.name class Question(models.Model): """Survey's question respresentation. 
""" QUESTION_TYPE_CHOICES = ( (1, 'Text answer'), (2, 'One choice answer'), (3, 'Multiple choices answer') ) survey = models.ForeignKey( Survey, on_delete=models.CASCADE, related_name='questions') text = models.TextField() question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES) def __str__(self): return self.text class AnswerChoice(models.Model): """Represantation of question's answer's choice. """ question = models.ForeignKey( Question, on_delete=models.CASCADE, related_name='choices') text = models.TextField() def __str__(self): return self.text class CompletedSurvey(models.Model): """Representation of survey, completed by the user. """ user_id = models.IntegerField(null=True, blank=True) survey = models.ForeignKey( Survey, on_delete=models.SET_NULL, null=True, related_name='completed_surveys') def __str__(self): return f"{self.user_id} - {self.survey.name}" class Answer(models.Model): """Representations of question's answer. """ completed_survey = models.ForeignKey( CompletedSurvey, on_delete=models.CASCADE, related_name='answers') question = models.ForeignKey( Question, on_delete=models.CASCADE, related_name='answers') text_answer = models.TextField(blank=True) answer_choices = models.ManyToManyField(AnswerChoice, blank=True) def __str__(self): return f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}"
flexible
{ "blob_id": "2c4f27e7d1bfe6d68fd0836094b9e350946913f6", "index": 5480, "step-1": "<mask token>\n\n\nclass Question(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n", "step-2": "<mask token>\n\n\nclass Survey(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass 
AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n", "step-3": "<mask token>\n\n\nclass Survey(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n 
return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n", "step-4": "<mask token>\n\n\nclass Survey(models.Model):\n \"\"\"Survey representation.\n \"\"\"\n name = models.CharField(max_length=255)\n description = models.TextField()\n start_date = models.DateTimeField()\n end_date = models.DateTimeField()\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n 
\"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n", "step-5": "from django.db import models\n\n\nclass Survey(models.Model):\n \"\"\"Survey representation.\n \"\"\"\n\n name = models.CharField(max_length=255)\n description = models.TextField()\n start_date = models.DateTimeField()\n end_date = models.DateTimeField()\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n\n QUESTION_TYPE_CHOICES = (\n (1, 'Text answer'),\n (2, 'One choice answer'),\n (3, 'Multiple choices answer')\n )\n\n survey = models.ForeignKey(\n Survey, \n on_delete=models.CASCADE, \n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n\n question = models.ForeignKey(\n Question, \n on_delete=models.CASCADE, \n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n\n user_id = models.IntegerField(null=True, blank=True)\n survey = 
models.ForeignKey(\n Survey, \n on_delete=models.SET_NULL, \n null=True, \n related_name='completed_surveys')\n\n def __str__(self):\n return f\"{self.user_id} - {self.survey.name}\"\n \n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n\n completed_survey = models.ForeignKey(\n CompletedSurvey,\n on_delete=models.CASCADE,\n related_name='answers')\n question = models.ForeignKey(\n Question,\n on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"", "step-ids": [ 14, 17, 18, 20, 22 ] }
[ 14, 17, 18, 20, 22 ]
<|reserved_special_token_0|> @respond_to('^term\\s+([\\w-]+)$') @respond_to('^term\\s+create\\s+([\\w-]+)$') @respond_to('^term\\s+add\\s+([\\w-]+)$') def term_create(message, command): """ 指定されたコマンドを生成する """ if command in ('list', 'help'): return command = command.lower() if command in RESERVED: botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command)) return creator = message.body['user'] term, created = Term.get_or_create(command=command, creator=creator) if not created: botsend(message, 'コマンド `${}` はすでに登録されています'.format(command)) else: msg = 'コマンド `${}` を作成しました。\n'.format(command) msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command) botsend(message, msg) commands.add(command) <|reserved_special_token_0|> @respond_to('^term\\s+list$') def term_list(message): """ 現在使用可能な用語コマンドの一覧を返す """ pretext = '用語コマンドの一覧です' attachments = _create_attachments_for_list(pretext, sorted(commands)) botwebapi(message, attachments) <|reserved_special_token_0|> def _send_markdown_text(message, text): """ 指定されたtextをmarkdown形式で送信する """ attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}] botwebapi(message, attachments) <|reserved_special_token_0|> def add_response(message, command, text): """ 用語コマンドに応答を追加する """ if _exist_response(command, text): reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text) _send_markdown_text(message, reply) return term = Term.get(command=command) creator = message.body['user'] resp, created = Response.get_or_create(term=term, text=text, creator= creator, created=datetime.now()) resp.save() text = 'コマンド `${}` に「{}」を追加しました'.format(command, text) _send_markdown_text(message, text) def del_response(message, command, text): """ 用語コマンドから応答を削除する """ term = Term.get(command=command) try: response = Response.get(term=term, text=text) except Response.DoesNotExist: reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text) _send_markdown_text(message, reply) return response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) 
_send_markdown_text(message, reply) <|reserved_special_token_0|> def search_responses(message, command, keyword): """ 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す """ term = Term.get(command=command) pat = '%{}%'.format(keyword) responses = Response.select().where(term == term, Response.text ** pat) if len(responses) == 0: botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword)) else: pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\n'.format(command, keyword, len(responses)) data = [x.text for x in responses] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) def get_responses(message, command): """ 用語コマンドに登録されている応答の一覧を返す """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: pretext = 'コマンド `${}` の応答は {} 件あります\n'.format(command, len( response_set)) data = [x.text for x in response_set] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> @respond_to('^term\\s+([\\w-]+)$') @respond_to('^term\\s+create\\s+([\\w-]+)$') @respond_to('^term\\s+add\\s+([\\w-]+)$') def term_create(message, command): """ 指定されたコマンドを生成する """ if command in ('list', 'help'): return command = command.lower() if command in RESERVED: botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command)) return creator = message.body['user'] term, created = Term.get_or_create(command=command, creator=creator) if not created: botsend(message, 'コマンド `${}` はすでに登録されています'.format(command)) else: msg = 'コマンド `${}` を作成しました。\n'.format(command) msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command) botsend(message, msg) commands.add(command) @respond_to('^term\\s+(drop|del|delete)\\s+([\\w-]+)$') def term_drop(message, subcommand, command): """ 指定されたコマンドを消去する """ command 
= command.lower() if not _available_command(message, command): return term = Term.get(command=command) term.delete_instance(recursive=True) term.save() commands.remove(command) botsend(message, 'コマンド `${}` を消去しました'.format(command)) <|reserved_special_token_0|> @respond_to('^term\\s+list$') def term_list(message): """ 現在使用可能な用語コマンドの一覧を返す """ pretext = '用語コマンドの一覧です' attachments = _create_attachments_for_list(pretext, sorted(commands)) botwebapi(message, attachments) <|reserved_special_token_0|> def _send_markdown_text(message, text): """ 指定されたtextをmarkdown形式で送信する """ attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}] botwebapi(message, attachments) <|reserved_special_token_0|> def add_response(message, command, text): """ 用語コマンドに応答を追加する """ if _exist_response(command, text): reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text) _send_markdown_text(message, reply) return term = Term.get(command=command) creator = message.body['user'] resp, created = Response.get_or_create(term=term, text=text, creator= creator, created=datetime.now()) resp.save() text = 'コマンド `${}` に「{}」を追加しました'.format(command, text) _send_markdown_text(message, text) def del_response(message, command, text): """ 用語コマンドから応答を削除する """ term = Term.get(command=command) try: response = Response.get(term=term, text=text) except Response.DoesNotExist: reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text) _send_markdown_text(message, reply) return response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) <|reserved_special_token_0|> def search_responses(message, command, keyword): """ 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す """ term = Term.get(command=command) pat = '%{}%'.format(keyword) responses = Response.select().where(term == term, Response.text ** pat) if len(responses) == 0: botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword)) else: pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\n'.format(command, keyword, 
len(responses)) data = [x.text for x in responses] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) def get_responses(message, command): """ 用語コマンドに登録されている応答の一覧を返す """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: pretext = 'コマンド `${}` の応答は {} 件あります\n'.format(command, len( response_set)) data = [x.text for x in response_set] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) @respond_to('term\\s+help') def term_help(message): """ term pluginのヘルプを返す """ botsend(message, """- `$term (用語)`: 用語コマンドを作成する - `$term create (用語)`: 用語コマンドを作成する - `$term drop (用語)`: 用語コマンドを消去する - `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す - `$term list`: 用語コマンドの一覧を返す - `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す - `$(用語) add (応答)`: 用語コマンドに応答を追加する - `$(用語) del (応答)`: 用語コマンドから応答を削除する - `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する - `$(用語) list`: 用語コマンドの応答一覧を返す - `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す ``` > $term create 酒 コマンド `$酒` を作成しました。 `$酒 add (レスポンス)` でレスポンスを追加できます > $酒 add ビール コマンド `$酒` に `ビール` を追加しました > $酒 add ワイン コマンド `$酒` に `ワイン` を追加しました > $酒 ビール ``` """ ) <|reserved_special_token_1|> <|reserved_special_token_0|> @respond_to('^term\\s+([\\w-]+)$') @respond_to('^term\\s+create\\s+([\\w-]+)$') @respond_to('^term\\s+add\\s+([\\w-]+)$') def term_create(message, command): """ 指定されたコマンドを生成する """ if command in ('list', 'help'): return command = command.lower() if command in RESERVED: botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command)) return creator = message.body['user'] term, created = Term.get_or_create(command=command, creator=creator) if not created: botsend(message, 'コマンド `${}` はすでに登録されています'.format(command)) else: msg = 'コマンド `${}` を作成しました。\n'.format(command) msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command) 
botsend(message, msg) commands.add(command) @respond_to('^term\\s+(drop|del|delete)\\s+([\\w-]+)$') def term_drop(message, subcommand, command): """ 指定されたコマンドを消去する """ command = command.lower() if not _available_command(message, command): return term = Term.get(command=command) term.delete_instance(recursive=True) term.save() commands.remove(command) botsend(message, 'コマンド `${}` を消去しました'.format(command)) def _create_attachments_for_list(pretext, data, command=True): """ 指定されたリストの一覧を message.send_webapi で送信するための attachments を生成する """ if command: list_text = ', '.join(['`${}`'.format(x) for x in data]) else: list_text = '\n'.join([x for x in data]) attachments = [{'pretext': pretext, 'text': list_text, 'mrkdwn_in': [ 'pretext', 'text']}] return attachments <|reserved_special_token_0|> @respond_to('^term\\s+list$') def term_list(message): """ 現在使用可能な用語コマンドの一覧を返す """ pretext = '用語コマンドの一覧です' attachments = _create_attachments_for_list(pretext, sorted(commands)) botwebapi(message, attachments) def _available_command(message, command): """ 指定されたコマンドが有効化どうかを返す """ result = True if command in RESERVED: result = False elif command not in commands: botsend(message, 'コマンド `${}` は登録されていません'.format(command)) result = False return result def _send_markdown_text(message, text): """ 指定されたtextをmarkdown形式で送信する """ attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}] botwebapi(message, attachments) <|reserved_special_token_0|> def add_response(message, command, text): """ 用語コマンドに応答を追加する """ if _exist_response(command, text): reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text) _send_markdown_text(message, reply) return term = Term.get(command=command) creator = message.body['user'] resp, created = Response.get_or_create(term=term, text=text, creator= creator, created=datetime.now()) resp.save() text = 'コマンド `${}` に「{}」を追加しました'.format(command, text) _send_markdown_text(message, text) def del_response(message, command, text): """ 用語コマンドから応答を削除する """ term = 
Term.get(command=command) try: response = Response.get(term=term, text=text) except Response.DoesNotExist: reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text) _send_markdown_text(message, reply) return response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def pop_response(message, command): """ 用語コマンドで最後に登録された応答を削除する """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) return last_response = response_set.order_by(Response.created.desc())[0] text = last_response.text last_response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def search_responses(message, command, keyword): """ 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す """ term = Term.get(command=command) pat = '%{}%'.format(keyword) responses = Response.select().where(term == term, Response.text ** pat) if len(responses) == 0: botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword)) else: pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\n'.format(command, keyword, len(responses)) data = [x.text for x in responses] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) def get_responses(message, command): """ 用語コマンドに登録されている応答の一覧を返す """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: pretext = 'コマンド `${}` の応答は {} 件あります\n'.format(command, len( response_set)) data = [x.text for x in response_set] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) @respond_to('term\\s+help') def term_help(message): """ term pluginのヘルプを返す """ botsend(message, """- `$term (用語)`: 
用語コマンドを作成する - `$term create (用語)`: 用語コマンドを作成する - `$term drop (用語)`: 用語コマンドを消去する - `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す - `$term list`: 用語コマンドの一覧を返す - `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す - `$(用語) add (応答)`: 用語コマンドに応答を追加する - `$(用語) del (応答)`: 用語コマンドから応答を削除する - `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する - `$(用語) list`: 用語コマンドの応答一覧を返す - `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す ``` > $term create 酒 コマンド `$酒` を作成しました。 `$酒 add (レスポンス)` でレスポンスを追加できます > $酒 add ビール コマンド `$酒` に `ビール` を追加しました > $酒 add ワイン コマンド `$酒` に `ワイン` を追加しました > $酒 ビール ``` """ ) <|reserved_special_token_1|> <|reserved_special_token_0|> @respond_to('^term\\s+([\\w-]+)$') @respond_to('^term\\s+create\\s+([\\w-]+)$') @respond_to('^term\\s+add\\s+([\\w-]+)$') def term_create(message, command): """ 指定されたコマンドを生成する """ if command in ('list', 'help'): return command = command.lower() if command in RESERVED: botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command)) return creator = message.body['user'] term, created = Term.get_or_create(command=command, creator=creator) if not created: botsend(message, 'コマンド `${}` はすでに登録されています'.format(command)) else: msg = 'コマンド `${}` を作成しました。\n'.format(command) msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command) botsend(message, msg) commands.add(command) @respond_to('^term\\s+(drop|del|delete)\\s+([\\w-]+)$') def term_drop(message, subcommand, command): """ 指定されたコマンドを消去する """ command = command.lower() if not _available_command(message, command): return term = Term.get(command=command) term.delete_instance(recursive=True) term.save() commands.remove(command) botsend(message, 'コマンド `${}` を消去しました'.format(command)) def _create_attachments_for_list(pretext, data, command=True): """ 指定されたリストの一覧を message.send_webapi で送信するための attachments を生成する """ if command: list_text = ', '.join(['`${}`'.format(x) for x in data]) else: list_text = '\n'.join([x for x in data]) attachments = [{'pretext': pretext, 'text': list_text, 'mrkdwn_in': [ 'pretext', 'text']}] return attachments 
@respond_to('^term\\s+search\\s+([\\w-]+)$') def term_search(message, keyword): """ 指定したキーワードを含む用語コマンドの一覧を返す """ pretext = '`{}` を含む用語コマンドの一覧です'.format(keyword) data = [] for command in sorted(commands): if keyword in command: data.append(command) attachments = _create_attachments_for_list(pretext, data) botwebapi(message, attachments) @respond_to('^term\\s+list$') def term_list(message): """ 現在使用可能な用語コマンドの一覧を返す """ pretext = '用語コマンドの一覧です' attachments = _create_attachments_for_list(pretext, sorted(commands)) botwebapi(message, attachments) def _available_command(message, command): """ 指定されたコマンドが有効化どうかを返す """ result = True if command in RESERVED: result = False elif command not in commands: botsend(message, 'コマンド `${}` は登録されていません'.format(command)) result = False return result def _send_markdown_text(message, text): """ 指定されたtextをmarkdown形式で送信する """ attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}] botwebapi(message, attachments) @respond_to('^([\\w-]+)$') def return_response(message, command): """ 用語コマンドに登録されている応答をランダムに返す """ if not _available_command(message, command): return response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: response = random.choice(response_set) _send_markdown_text(message, response.text) @respond_to('^([\\w-]+)\\s+(.*)') def response(message, command, params): """ 用語コマンドの処理をする """ if not _available_command(message, command): return data = params.split(maxsplit=1) subcommand = data[0] try: if subcommand == 'pop': pop_response(message, command) elif subcommand == 'list': get_responses(message, command) elif subcommand == 'search': search_responses(message, command, data[1]) elif subcommand in ('del', 'delete', 'remove'): del_response(message, command, data[1]) elif subcommand == 'add': add_response(message, command, data[1]) else: add_response(message, command, params) 
except IndexError: term_help(message) pass <|reserved_special_token_0|> def add_response(message, command, text): """ 用語コマンドに応答を追加する """ if _exist_response(command, text): reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text) _send_markdown_text(message, reply) return term = Term.get(command=command) creator = message.body['user'] resp, created = Response.get_or_create(term=term, text=text, creator= creator, created=datetime.now()) resp.save() text = 'コマンド `${}` に「{}」を追加しました'.format(command, text) _send_markdown_text(message, text) def del_response(message, command, text): """ 用語コマンドから応答を削除する """ term = Term.get(command=command) try: response = Response.get(term=term, text=text) except Response.DoesNotExist: reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text) _send_markdown_text(message, reply) return response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def pop_response(message, command): """ 用語コマンドで最後に登録された応答を削除する """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) return last_response = response_set.order_by(Response.created.desc())[0] text = last_response.text last_response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def search_responses(message, command, keyword): """ 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す """ term = Term.get(command=command) pat = '%{}%'.format(keyword) responses = Response.select().where(term == term, Response.text ** pat) if len(responses) == 0: botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword)) else: pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\n'.format(command, keyword, len(responses)) data = [x.text for x in responses] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) 
def get_responses(message, command): """ 用語コマンドに登録されている応答の一覧を返す """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: pretext = 'コマンド `${}` の応答は {} 件あります\n'.format(command, len( response_set)) data = [x.text for x in response_set] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) @respond_to('term\\s+help') def term_help(message): """ term pluginのヘルプを返す """ botsend(message, """- `$term (用語)`: 用語コマンドを作成する - `$term create (用語)`: 用語コマンドを作成する - `$term drop (用語)`: 用語コマンドを消去する - `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す - `$term list`: 用語コマンドの一覧を返す - `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す - `$(用語) add (応答)`: 用語コマンドに応答を追加する - `$(用語) del (応答)`: 用語コマンドから応答を削除する - `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する - `$(用語) list`: 用語コマンドの応答一覧を返す - `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す ``` > $term create 酒 コマンド `$酒` を作成しました。 `$酒 add (レスポンス)` でレスポンスを追加できます > $酒 add ビール コマンド `$酒` に `ビール` を追加しました > $酒 add ワイン コマンド `$酒` に `ワイン` を追加しました > $酒 ビール ``` """ ) <|reserved_special_token_1|> import random from datetime import datetime from slackbot.bot import respond_to from .term_model import Term, Response from ..botmessage import botsend, botwebapi # すでに存在するコマンドは無視する RESERVED = ( 'drive', 'manual', 'jira', 'wikipedia', 'plusplus', 'translate', '翻訳', 'weather', '天気', 'term', 'shuffle', 'help', 'choice', 'ping', 'version', 'random', 'cal', 'google', 'image', 'map', 'gadmin', 'github', 'suddendeath', 'pycamp', 'lgtm', ) # コマンド一覧を初期化 commands = {term.command for term in Term.select()} @respond_to('^term\s+([\w-]+)$') @respond_to('^term\s+create\s+([\w-]+)$') @respond_to('^term\s+add\s+([\w-]+)$') def term_create(message, command): """ 指定されたコマンドを生成する """ if command in ('list', 'help'): return # コマンドは小文字に統一 command = command.lower() # 予約語の場合は実行しない if command in RESERVED: botsend(message, 
'コマンド `${}` は予約語なので登録できません'.format(command)) return creator = message.body['user'] term, created = Term.get_or_create(command=command, creator=creator) if not created: # すでに登録してあるコマンドは登録しない botsend(message, 'コマンド `${}` はすでに登録されています'.format(command)) else: msg = 'コマンド `${}` を作成しました。\n'.format(command) msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command) botsend(message, msg) # コマンド一覧の set に追加 commands.add(command) @respond_to('^term\s+(drop|del|delete)\s+([\w-]+)$') def term_drop(message, subcommand, command): """ 指定されたコマンドを消去する """ # コマンドは小文字に統一 command = command.lower() # コマンドの存在チェック if not _available_command(message, command): return # 用語コマンドと応答をまとめて削除 term = Term.get(command=command) term.delete_instance(recursive=True) term.save() # コマンド一覧の set から削除 commands.remove(command) botsend(message, 'コマンド `${}` を消去しました'.format(command)) def _create_attachments_for_list(pretext, data, command=True): """ 指定されたリストの一覧を message.send_webapi で送信するための attachments を生成する """ if command: # ['foo', 'bar', 'baz'] -> '`$far`, `$bar`, `$baz`' list_text = ', '.join(['`${}`'.format(x) for x in data]) else: list_text = '\n'.join([x for x in data]) attachments = [{ 'pretext': pretext, 'text': list_text, 'mrkdwn_in': ['pretext', 'text'], }] return attachments @respond_to('^term\s+search\s+([\w-]+)$') def term_search(message, keyword): """ 指定したキーワードを含む用語コマンドの一覧を返す """ pretext = '`{}` を含む用語コマンドの一覧です'.format(keyword) data = [] for command in sorted(commands): if keyword in command: data.append(command) attachments = _create_attachments_for_list(pretext, data) botwebapi(message, attachments) @respond_to('^term\s+list$') def term_list(message): """ 現在使用可能な用語コマンドの一覧を返す """ pretext = '用語コマンドの一覧です' attachments = _create_attachments_for_list(pretext, sorted(commands)) botwebapi(message, attachments) def _available_command(message, command): """ 指定されたコマンドが有効化どうかを返す """ result = True if command in RESERVED: result = False elif command not in commands: botsend(message, 'コマンド `${}` 
は登録されていません'.format(command)) result = False return result def _send_markdown_text(message, text): """ 指定されたtextをmarkdown形式で送信する """ attachments = [{ 'pretext': text, 'mrkdwn_in': ['pretext'], }] botwebapi(message, attachments) @respond_to('^([\w-]+)$') def return_response(message, command): """ 用語コマンドに登録されている応答をランダムに返す """ if not _available_command(message, command): return response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: response = random.choice(response_set) _send_markdown_text(message, response.text) @respond_to('^([\w-]+)\s+(.*)') def response(message, command, params): """ 用語コマンドの処理をする """ if not _available_command(message, command): return data = params.split(maxsplit=1) subcommand = data[0] try: if subcommand == 'pop': # 最後に登録された応答を削除 pop_response(message, command) elif subcommand == 'list': # 応答の一覧を返す get_responses(message, command) elif subcommand == 'search': # 応答を検索 search_responses(message, command, data[1]) elif subcommand in ('del', 'delete', 'remove'): # 応答を削除 del_response(message, command, data[1]) elif subcommand == 'add': # 応答を追加 add_response(message, command, data[1]) else: # サブコマンドが存在しない場合も追加 add_response(message, command, params) except IndexError: # ヘルプを返す term_help(message) pass def _exist_response(command, text): """ 指定されたコマンドに応答が登録されているかを調べて返す """ term = Term.get(command=command) count = Response.select().where(Response.term == term, Response.text == text).count() if count == 0: return False else: return True def add_response(message, command, text): """ 用語コマンドに応答を追加する """ # 登録済かどうかを確認する if _exist_response(command, text): reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text) _send_markdown_text(message, reply) return term = Term.get(command=command) creator = message.body['user'] # 用語を登録する resp, created = Response.get_or_create(term=term, text=text, creator=creator, 
created=datetime.now()) resp.save() text = 'コマンド `${}` に「{}」を追加しました'.format(command, text) _send_markdown_text(message, text) def del_response(message, command, text): """ 用語コマンドから応答を削除する """ term = Term.get(command=command) try: response = Response.get(term=term, text=text) except Response.DoesNotExist: reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text) _send_markdown_text(message, reply) return # 応答を削除する response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def pop_response(message, command): """ 用語コマンドで最後に登録された応答を削除する """ response_set = Term.get(command=command).response_set # 応答が登録されていない if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) return last_response = response_set.order_by(Response.created.desc())[0] text = last_response.text last_response.delete_instance() reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text) _send_markdown_text(message, reply) def search_responses(message, command, keyword): """ 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す """ term = Term.get(command=command) pat = '%{}%'.format(keyword) responses = Response.select().where(term == term, Response.text ** pat) if len(responses) == 0: botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword)) else: pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\n'.format( command, keyword, len(responses)) data = [x.text for x in responses] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) def get_responses(message, command): """ 用語コマンドに登録されている応答の一覧を返す """ response_set = Term.get(command=command).response_set if len(response_set) == 0: msg = 'コマンド `${}` には応答が登録されていません\n'.format(command) msg += '`${} add (レスポンス)` で応答を登録してください'.format(command) botsend(message, msg) else: pretext = 'コマンド `${}` の応答は {} 件あります\n'.format( command, len(response_set)) data = [x.text for x in 
response_set] attachments = _create_attachments_for_list(pretext, data, False) botwebapi(message, attachments) @respond_to('term\s+help') def term_help(message): """ term pluginのヘルプを返す """ botsend(message, '''- `$term (用語)`: 用語コマンドを作成する - `$term create (用語)`: 用語コマンドを作成する - `$term drop (用語)`: 用語コマンドを消去する - `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す - `$term list`: 用語コマンドの一覧を返す - `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す - `$(用語) add (応答)`: 用語コマンドに応答を追加する - `$(用語) del (応答)`: 用語コマンドから応答を削除する - `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する - `$(用語) list`: 用語コマンドの応答一覧を返す - `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す ``` > $term create 酒 コマンド `$酒` を作成しました。 `$酒 add (レスポンス)` でレスポンスを追加できます > $酒 add ビール コマンド `$酒` に `ビール` を追加しました > $酒 add ワイン コマンド `$酒` に `ワイン` を追加しました > $酒 ビール ``` ''')
flexible
{ "blob_id": "86e97e7eaf0d23ccf4154b5ffc853c5aee966326", "index": 5769, "step-1": "<mask token>\n\n\n@respond_to('^term\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+create\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+add\\\\s+([\\\\w-]+)$')\ndef term_create(message, command):\n \"\"\"\n 指定されたコマンドを生成する\n \"\"\"\n if command in ('list', 'help'):\n return\n command = command.lower()\n if command in RESERVED:\n botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command))\n return\n creator = message.body['user']\n term, created = Term.get_or_create(command=command, creator=creator)\n if not created:\n botsend(message, 'コマンド `${}` はすでに登録されています'.format(command))\n else:\n msg = 'コマンド `${}` を作成しました。\\n'.format(command)\n msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command)\n botsend(message, msg)\n commands.add(command)\n\n\n<mask token>\n\n\n@respond_to('^term\\\\s+list$')\ndef term_list(message):\n \"\"\"\n 現在使用可能な用語コマンドの一覧を返す\n \"\"\"\n pretext = '用語コマンドの一覧です'\n attachments = _create_attachments_for_list(pretext, sorted(commands))\n botwebapi(message, attachments)\n\n\n<mask token>\n\n\ndef _send_markdown_text(message, text):\n \"\"\"\n 指定されたtextをmarkdown形式で送信する\n \"\"\"\n attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}]\n botwebapi(message, attachments)\n\n\n<mask token>\n\n\ndef add_response(message, command, text):\n \"\"\"\n 用語コマンドに応答を追加する\n \"\"\"\n if _exist_response(command, text):\n reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text)\n _send_markdown_text(message, reply)\n return\n term = Term.get(command=command)\n creator = message.body['user']\n resp, created = Response.get_or_create(term=term, text=text, creator=\n creator, created=datetime.now())\n resp.save()\n text = 'コマンド `${}` に「{}」を追加しました'.format(command, text)\n _send_markdown_text(message, text)\n\n\ndef del_response(message, command, text):\n \"\"\"\n 用語コマンドから応答を削除する\n \"\"\"\n term = Term.get(command=command)\n try:\n response = Response.get(term=term, text=text)\n except 
Response.DoesNotExist:\n reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text)\n _send_markdown_text(message, reply)\n return\n response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\n<mask token>\n\n\ndef search_responses(message, command, keyword):\n \"\"\"\n 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す\n \"\"\"\n term = Term.get(command=command)\n pat = '%{}%'.format(keyword)\n responses = Response.select().where(term == term, Response.text ** pat)\n if len(responses) == 0:\n botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command,\n keyword))\n else:\n pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\\n'.format(command,\n keyword, len(responses))\n data = [x.text for x in responses]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\ndef get_responses(message, command):\n \"\"\"\n 用語コマンドに登録されている応答の一覧を返す\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n pretext = 'コマンド `${}` の応答は {} 件あります\\n'.format(command, len(\n response_set))\n data = [x.text for x in response_set]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\n@respond_to('^term\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+create\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+add\\\\s+([\\\\w-]+)$')\ndef term_create(message, command):\n \"\"\"\n 指定されたコマンドを生成する\n \"\"\"\n if command in ('list', 'help'):\n return\n command = command.lower()\n if command in RESERVED:\n botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command))\n return\n creator = message.body['user']\n term, created = Term.get_or_create(command=command, creator=creator)\n if not created:\n botsend(message, 'コマンド `${}` 
はすでに登録されています'.format(command))\n else:\n msg = 'コマンド `${}` を作成しました。\\n'.format(command)\n msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command)\n botsend(message, msg)\n commands.add(command)\n\n\n@respond_to('^term\\\\s+(drop|del|delete)\\\\s+([\\\\w-]+)$')\ndef term_drop(message, subcommand, command):\n \"\"\"\n 指定されたコマンドを消去する\n \"\"\"\n command = command.lower()\n if not _available_command(message, command):\n return\n term = Term.get(command=command)\n term.delete_instance(recursive=True)\n term.save()\n commands.remove(command)\n botsend(message, 'コマンド `${}` を消去しました'.format(command))\n\n\n<mask token>\n\n\n@respond_to('^term\\\\s+list$')\ndef term_list(message):\n \"\"\"\n 現在使用可能な用語コマンドの一覧を返す\n \"\"\"\n pretext = '用語コマンドの一覧です'\n attachments = _create_attachments_for_list(pretext, sorted(commands))\n botwebapi(message, attachments)\n\n\n<mask token>\n\n\ndef _send_markdown_text(message, text):\n \"\"\"\n 指定されたtextをmarkdown形式で送信する\n \"\"\"\n attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}]\n botwebapi(message, attachments)\n\n\n<mask token>\n\n\ndef add_response(message, command, text):\n \"\"\"\n 用語コマンドに応答を追加する\n \"\"\"\n if _exist_response(command, text):\n reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text)\n _send_markdown_text(message, reply)\n return\n term = Term.get(command=command)\n creator = message.body['user']\n resp, created = Response.get_or_create(term=term, text=text, creator=\n creator, created=datetime.now())\n resp.save()\n text = 'コマンド `${}` に「{}」を追加しました'.format(command, text)\n _send_markdown_text(message, text)\n\n\ndef del_response(message, command, text):\n \"\"\"\n 用語コマンドから応答を削除する\n \"\"\"\n term = Term.get(command=command)\n try:\n response = Response.get(term=term, text=text)\n except Response.DoesNotExist:\n reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text)\n _send_markdown_text(message, reply)\n return\n response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n 
_send_markdown_text(message, reply)\n\n\n<mask token>\n\n\ndef search_responses(message, command, keyword):\n \"\"\"\n 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す\n \"\"\"\n term = Term.get(command=command)\n pat = '%{}%'.format(keyword)\n responses = Response.select().where(term == term, Response.text ** pat)\n if len(responses) == 0:\n botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command,\n keyword))\n else:\n pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\\n'.format(command,\n keyword, len(responses))\n data = [x.text for x in responses]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\ndef get_responses(message, command):\n \"\"\"\n 用語コマンドに登録されている応答の一覧を返す\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n pretext = 'コマンド `${}` の応答は {} 件あります\\n'.format(command, len(\n response_set))\n data = [x.text for x in response_set]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\n@respond_to('term\\\\s+help')\ndef term_help(message):\n \"\"\"\n term pluginのヘルプを返す\n \"\"\"\n botsend(message,\n \"\"\"- `$term (用語)`: 用語コマンドを作成する\n- `$term create (用語)`: 用語コマンドを作成する\n- `$term drop (用語)`: 用語コマンドを消去する\n- `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す\n- `$term list`: 用語コマンドの一覧を返す\n\n- `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す\n- `$(用語) add (応答)`: 用語コマンドに応答を追加する\n- `$(用語) del (応答)`: 用語コマンドから応答を削除する\n- `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する\n- `$(用語) list`: 用語コマンドの応答一覧を返す\n- `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す\n```\n> $term create 酒\nコマンド `$酒` を作成しました。\n`$酒 add (レスポンス)` でレスポンスを追加できます\n> $酒 add ビール\nコマンド `$酒` に `ビール` を追加しました\n> $酒 add ワイン\nコマンド `$酒` に `ワイン` を追加しました\n> $酒\nビール\n```\n\"\"\"\n )\n", "step-3": "<mask 
token>\n\n\n@respond_to('^term\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+create\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+add\\\\s+([\\\\w-]+)$')\ndef term_create(message, command):\n \"\"\"\n 指定されたコマンドを生成する\n \"\"\"\n if command in ('list', 'help'):\n return\n command = command.lower()\n if command in RESERVED:\n botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command))\n return\n creator = message.body['user']\n term, created = Term.get_or_create(command=command, creator=creator)\n if not created:\n botsend(message, 'コマンド `${}` はすでに登録されています'.format(command))\n else:\n msg = 'コマンド `${}` を作成しました。\\n'.format(command)\n msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command)\n botsend(message, msg)\n commands.add(command)\n\n\n@respond_to('^term\\\\s+(drop|del|delete)\\\\s+([\\\\w-]+)$')\ndef term_drop(message, subcommand, command):\n \"\"\"\n 指定されたコマンドを消去する\n \"\"\"\n command = command.lower()\n if not _available_command(message, command):\n return\n term = Term.get(command=command)\n term.delete_instance(recursive=True)\n term.save()\n commands.remove(command)\n botsend(message, 'コマンド `${}` を消去しました'.format(command))\n\n\ndef _create_attachments_for_list(pretext, data, command=True):\n \"\"\"\n 指定されたリストの一覧を message.send_webapi で送信するための\n attachments を生成する\n \"\"\"\n if command:\n list_text = ', '.join(['`${}`'.format(x) for x in data])\n else:\n list_text = '\\n'.join([x for x in data])\n attachments = [{'pretext': pretext, 'text': list_text, 'mrkdwn_in': [\n 'pretext', 'text']}]\n return attachments\n\n\n<mask token>\n\n\n@respond_to('^term\\\\s+list$')\ndef term_list(message):\n \"\"\"\n 現在使用可能な用語コマンドの一覧を返す\n \"\"\"\n pretext = '用語コマンドの一覧です'\n attachments = _create_attachments_for_list(pretext, sorted(commands))\n botwebapi(message, attachments)\n\n\ndef _available_command(message, command):\n \"\"\"\n 指定されたコマンドが有効化どうかを返す\n \"\"\"\n result = True\n if command in RESERVED:\n result = False\n elif command not in commands:\n botsend(message, 'コマンド `${}` 
は登録されていません'.format(command))\n result = False\n return result\n\n\ndef _send_markdown_text(message, text):\n \"\"\"\n 指定されたtextをmarkdown形式で送信する\n \"\"\"\n attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}]\n botwebapi(message, attachments)\n\n\n<mask token>\n\n\ndef add_response(message, command, text):\n \"\"\"\n 用語コマンドに応答を追加する\n \"\"\"\n if _exist_response(command, text):\n reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text)\n _send_markdown_text(message, reply)\n return\n term = Term.get(command=command)\n creator = message.body['user']\n resp, created = Response.get_or_create(term=term, text=text, creator=\n creator, created=datetime.now())\n resp.save()\n text = 'コマンド `${}` に「{}」を追加しました'.format(command, text)\n _send_markdown_text(message, text)\n\n\ndef del_response(message, command, text):\n \"\"\"\n 用語コマンドから応答を削除する\n \"\"\"\n term = Term.get(command=command)\n try:\n response = Response.get(term=term, text=text)\n except Response.DoesNotExist:\n reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text)\n _send_markdown_text(message, reply)\n return\n response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef pop_response(message, command):\n \"\"\"\n 用語コマンドで最後に登録された応答を削除する\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n return\n last_response = response_set.order_by(Response.created.desc())[0]\n text = last_response.text\n last_response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef search_responses(message, command, keyword):\n \"\"\"\n 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す\n \"\"\"\n term = Term.get(command=command)\n pat = '%{}%'.format(keyword)\n responses = Response.select().where(term == term, Response.text 
** pat)\n if len(responses) == 0:\n botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command,\n keyword))\n else:\n pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\\n'.format(command,\n keyword, len(responses))\n data = [x.text for x in responses]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\ndef get_responses(message, command):\n \"\"\"\n 用語コマンドに登録されている応答の一覧を返す\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n pretext = 'コマンド `${}` の応答は {} 件あります\\n'.format(command, len(\n response_set))\n data = [x.text for x in response_set]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\n@respond_to('term\\\\s+help')\ndef term_help(message):\n \"\"\"\n term pluginのヘルプを返す\n \"\"\"\n botsend(message,\n \"\"\"- `$term (用語)`: 用語コマンドを作成する\n- `$term create (用語)`: 用語コマンドを作成する\n- `$term drop (用語)`: 用語コマンドを消去する\n- `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す\n- `$term list`: 用語コマンドの一覧を返す\n\n- `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す\n- `$(用語) add (応答)`: 用語コマンドに応答を追加する\n- `$(用語) del (応答)`: 用語コマンドから応答を削除する\n- `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する\n- `$(用語) list`: 用語コマンドの応答一覧を返す\n- `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す\n```\n> $term create 酒\nコマンド `$酒` を作成しました。\n`$酒 add (レスポンス)` でレスポンスを追加できます\n> $酒 add ビール\nコマンド `$酒` に `ビール` を追加しました\n> $酒 add ワイン\nコマンド `$酒` に `ワイン` を追加しました\n> $酒\nビール\n```\n\"\"\"\n )\n", "step-4": "<mask token>\n\n\n@respond_to('^term\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+create\\\\s+([\\\\w-]+)$')\n@respond_to('^term\\\\s+add\\\\s+([\\\\w-]+)$')\ndef term_create(message, command):\n \"\"\"\n 指定されたコマンドを生成する\n \"\"\"\n if command in ('list', 'help'):\n return\n command = command.lower()\n if command in RESERVED:\n botsend(message, 'コマンド `${}` 
は予約語なので登録できません'.format(command))\n return\n creator = message.body['user']\n term, created = Term.get_or_create(command=command, creator=creator)\n if not created:\n botsend(message, 'コマンド `${}` はすでに登録されています'.format(command))\n else:\n msg = 'コマンド `${}` を作成しました。\\n'.format(command)\n msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command)\n botsend(message, msg)\n commands.add(command)\n\n\n@respond_to('^term\\\\s+(drop|del|delete)\\\\s+([\\\\w-]+)$')\ndef term_drop(message, subcommand, command):\n \"\"\"\n 指定されたコマンドを消去する\n \"\"\"\n command = command.lower()\n if not _available_command(message, command):\n return\n term = Term.get(command=command)\n term.delete_instance(recursive=True)\n term.save()\n commands.remove(command)\n botsend(message, 'コマンド `${}` を消去しました'.format(command))\n\n\ndef _create_attachments_for_list(pretext, data, command=True):\n \"\"\"\n 指定されたリストの一覧を message.send_webapi で送信するための\n attachments を生成する\n \"\"\"\n if command:\n list_text = ', '.join(['`${}`'.format(x) for x in data])\n else:\n list_text = '\\n'.join([x for x in data])\n attachments = [{'pretext': pretext, 'text': list_text, 'mrkdwn_in': [\n 'pretext', 'text']}]\n return attachments\n\n\n@respond_to('^term\\\\s+search\\\\s+([\\\\w-]+)$')\ndef term_search(message, keyword):\n \"\"\"\n 指定したキーワードを含む用語コマンドの一覧を返す\n \"\"\"\n pretext = '`{}` を含む用語コマンドの一覧です'.format(keyword)\n data = []\n for command in sorted(commands):\n if keyword in command:\n data.append(command)\n attachments = _create_attachments_for_list(pretext, data)\n botwebapi(message, attachments)\n\n\n@respond_to('^term\\\\s+list$')\ndef term_list(message):\n \"\"\"\n 現在使用可能な用語コマンドの一覧を返す\n \"\"\"\n pretext = '用語コマンドの一覧です'\n attachments = _create_attachments_for_list(pretext, sorted(commands))\n botwebapi(message, attachments)\n\n\ndef _available_command(message, command):\n \"\"\"\n 指定されたコマンドが有効化どうかを返す\n \"\"\"\n result = True\n if command in RESERVED:\n result = False\n elif command not in commands:\n botsend(message, 'コマンド 
`${}` は登録されていません'.format(command))\n result = False\n return result\n\n\ndef _send_markdown_text(message, text):\n \"\"\"\n 指定されたtextをmarkdown形式で送信する\n \"\"\"\n attachments = [{'pretext': text, 'mrkdwn_in': ['pretext']}]\n botwebapi(message, attachments)\n\n\n@respond_to('^([\\\\w-]+)$')\ndef return_response(message, command):\n \"\"\"\n 用語コマンドに登録されている応答をランダムに返す\n \"\"\"\n if not _available_command(message, command):\n return\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n response = random.choice(response_set)\n _send_markdown_text(message, response.text)\n\n\n@respond_to('^([\\\\w-]+)\\\\s+(.*)')\ndef response(message, command, params):\n \"\"\"\n 用語コマンドの処理をする\n \"\"\"\n if not _available_command(message, command):\n return\n data = params.split(maxsplit=1)\n subcommand = data[0]\n try:\n if subcommand == 'pop':\n pop_response(message, command)\n elif subcommand == 'list':\n get_responses(message, command)\n elif subcommand == 'search':\n search_responses(message, command, data[1])\n elif subcommand in ('del', 'delete', 'remove'):\n del_response(message, command, data[1])\n elif subcommand == 'add':\n add_response(message, command, data[1])\n else:\n add_response(message, command, params)\n except IndexError:\n term_help(message)\n pass\n\n\n<mask token>\n\n\ndef add_response(message, command, text):\n \"\"\"\n 用語コマンドに応答を追加する\n \"\"\"\n if _exist_response(command, text):\n reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text)\n _send_markdown_text(message, reply)\n return\n term = Term.get(command=command)\n creator = message.body['user']\n resp, created = Response.get_or_create(term=term, text=text, creator=\n creator, created=datetime.now())\n resp.save()\n text = 'コマンド `${}` に「{}」を追加しました'.format(command, text)\n _send_markdown_text(message, text)\n\n\ndef del_response(message, 
command, text):\n \"\"\"\n 用語コマンドから応答を削除する\n \"\"\"\n term = Term.get(command=command)\n try:\n response = Response.get(term=term, text=text)\n except Response.DoesNotExist:\n reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text)\n _send_markdown_text(message, reply)\n return\n response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef pop_response(message, command):\n \"\"\"\n 用語コマンドで最後に登録された応答を削除する\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n return\n last_response = response_set.order_by(Response.created.desc())[0]\n text = last_response.text\n last_response.delete_instance()\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef search_responses(message, command, keyword):\n \"\"\"\n 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す\n \"\"\"\n term = Term.get(command=command)\n pat = '%{}%'.format(keyword)\n responses = Response.select().where(term == term, Response.text ** pat)\n if len(responses) == 0:\n botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command,\n keyword))\n else:\n pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\\n'.format(command,\n keyword, len(responses))\n data = [x.text for x in responses]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\ndef get_responses(message, command):\n \"\"\"\n 用語コマンドに登録されている応答の一覧を返す\n \"\"\"\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n pretext = 'コマンド `${}` の応答は {} 件あります\\n'.format(command, len(\n response_set))\n data = [x.text for x in response_set]\n attachments = 
_create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\n@respond_to('term\\\\s+help')\ndef term_help(message):\n \"\"\"\n term pluginのヘルプを返す\n \"\"\"\n botsend(message,\n \"\"\"- `$term (用語)`: 用語コマンドを作成する\n- `$term create (用語)`: 用語コマンドを作成する\n- `$term drop (用語)`: 用語コマンドを消去する\n- `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す\n- `$term list`: 用語コマンドの一覧を返す\n\n- `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す\n- `$(用語) add (応答)`: 用語コマンドに応答を追加する\n- `$(用語) del (応答)`: 用語コマンドから応答を削除する\n- `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する\n- `$(用語) list`: 用語コマンドの応答一覧を返す\n- `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す\n```\n> $term create 酒\nコマンド `$酒` を作成しました。\n`$酒 add (レスポンス)` でレスポンスを追加できます\n> $酒 add ビール\nコマンド `$酒` に `ビール` を追加しました\n> $酒 add ワイン\nコマンド `$酒` に `ワイン` を追加しました\n> $酒\nビール\n```\n\"\"\"\n )\n", "step-5": "import random\nfrom datetime import datetime\n\nfrom slackbot.bot import respond_to\n\nfrom .term_model import Term, Response\nfrom ..botmessage import botsend, botwebapi\n\n# すでに存在するコマンドは無視する\nRESERVED = (\n 'drive', 'manual', 'jira', 'wikipedia', 'plusplus',\n 'translate', '翻訳',\n 'weather', '天気',\n 'term',\n 'shuffle', 'help', 'choice', 'ping', 'version', 'random', 'cal',\n 'google', 'image', 'map', 'gadmin',\n 'github',\n 'suddendeath',\n 'pycamp',\n 'lgtm',\n)\n\n# コマンド一覧を初期化\ncommands = {term.command for term in Term.select()}\n\n\n@respond_to('^term\\s+([\\w-]+)$')\n@respond_to('^term\\s+create\\s+([\\w-]+)$')\n@respond_to('^term\\s+add\\s+([\\w-]+)$')\ndef term_create(message, command):\n \"\"\"\n 指定されたコマンドを生成する\n \"\"\"\n if command in ('list', 'help'):\n return\n \n # コマンドは小文字に統一\n command = command.lower()\n # 予約語の場合は実行しない\n if command in RESERVED:\n botsend(message, 'コマンド `${}` は予約語なので登録できません'.format(command))\n return\n\n creator = message.body['user']\n term, created = Term.get_or_create(command=command, creator=creator)\n if not created:\n # すでに登録してあるコマンドは登録しない\n botsend(message, 'コマンド `${}` はすでに登録されています'.format(command))\n\n else:\n msg = 
'コマンド `${}` を作成しました。\\n'.format(command)\n msg += '`${} add (レスポンス)` でレスポンスを追加できます'.format(command)\n botsend(message, msg)\n\n # コマンド一覧の set に追加\n commands.add(command)\n\n\n@respond_to('^term\\s+(drop|del|delete)\\s+([\\w-]+)$')\ndef term_drop(message, subcommand, command):\n \"\"\"\n 指定されたコマンドを消去する\n \"\"\"\n # コマンドは小文字に統一\n command = command.lower()\n\n # コマンドの存在チェック\n if not _available_command(message, command):\n return\n\n # 用語コマンドと応答をまとめて削除\n term = Term.get(command=command)\n term.delete_instance(recursive=True)\n term.save()\n\n # コマンド一覧の set から削除\n commands.remove(command)\n botsend(message, 'コマンド `${}` を消去しました'.format(command))\n\n\ndef _create_attachments_for_list(pretext, data, command=True):\n \"\"\"\n 指定されたリストの一覧を message.send_webapi で送信するための\n attachments を生成する\n \"\"\"\n if command:\n # ['foo', 'bar', 'baz'] -> '`$far`, `$bar`, `$baz`'\n list_text = ', '.join(['`${}`'.format(x) for x in data])\n else:\n list_text = '\\n'.join([x for x in data])\n attachments = [{\n 'pretext': pretext,\n 'text': list_text,\n 'mrkdwn_in': ['pretext', 'text'],\n }]\n return attachments\n\n\n@respond_to('^term\\s+search\\s+([\\w-]+)$')\ndef term_search(message, keyword):\n \"\"\"\n 指定したキーワードを含む用語コマンドの一覧を返す\n \"\"\"\n pretext = '`{}` を含む用語コマンドの一覧です'.format(keyword)\n data = []\n for command in sorted(commands):\n if keyword in command:\n data.append(command)\n attachments = _create_attachments_for_list(pretext, data)\n botwebapi(message, attachments)\n\n\n@respond_to('^term\\s+list$')\ndef term_list(message):\n \"\"\"\n 現在使用可能な用語コマンドの一覧を返す\n \"\"\"\n pretext = '用語コマンドの一覧です'\n attachments = _create_attachments_for_list(pretext, sorted(commands))\n botwebapi(message, attachments)\n\n\ndef _available_command(message, command):\n \"\"\"\n 指定されたコマンドが有効化どうかを返す\n \"\"\"\n result = True\n\n if command in RESERVED:\n result = False\n elif command not in commands:\n botsend(message, 'コマンド `${}` は登録されていません'.format(command))\n result = False\n\n return result\n\n\ndef 
_send_markdown_text(message, text):\n \"\"\"\n 指定されたtextをmarkdown形式で送信する\n \"\"\"\n attachments = [{\n 'pretext': text,\n 'mrkdwn_in': ['pretext'],\n }]\n botwebapi(message, attachments)\n\n\n@respond_to('^([\\w-]+)$')\ndef return_response(message, command):\n \"\"\"\n 用語コマンドに登録されている応答をランダムに返す\n \"\"\"\n if not _available_command(message, command):\n return\n\n response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n response = random.choice(response_set)\n _send_markdown_text(message, response.text)\n\n\n@respond_to('^([\\w-]+)\\s+(.*)')\ndef response(message, command, params):\n \"\"\"\n 用語コマンドの処理をする\n \"\"\"\n if not _available_command(message, command):\n return\n\n data = params.split(maxsplit=1)\n subcommand = data[0]\n try:\n if subcommand == 'pop':\n # 最後に登録された応答を削除\n pop_response(message, command)\n elif subcommand == 'list':\n # 応答の一覧を返す\n get_responses(message, command)\n elif subcommand == 'search':\n # 応答を検索\n search_responses(message, command, data[1])\n elif subcommand in ('del', 'delete', 'remove'):\n # 応答を削除\n del_response(message, command, data[1])\n elif subcommand == 'add':\n # 応答を追加\n add_response(message, command, data[1])\n else:\n # サブコマンドが存在しない場合も追加\n add_response(message, command, params)\n except IndexError:\n # ヘルプを返す\n term_help(message)\n pass\n\n\ndef _exist_response(command, text):\n \"\"\"\n 指定されたコマンドに応答が登録されているかを調べて返す\n \"\"\"\n term = Term.get(command=command)\n count = Response.select().where(Response.term == term,\n Response.text == text).count()\n if count == 0:\n return False\n else:\n return True\n\n\ndef add_response(message, command, text):\n \"\"\"\n 用語コマンドに応答を追加する\n \"\"\"\n\n # 登録済かどうかを確認する\n if _exist_response(command, text):\n reply = 'コマンド `${}` に「{}」は登録済みです'.format(command, text)\n _send_markdown_text(message, reply)\n return\n\n term = 
Term.get(command=command)\n creator = message.body['user']\n # 用語を登録する\n resp, created = Response.get_or_create(term=term, text=text,\n creator=creator,\n created=datetime.now())\n resp.save()\n text = 'コマンド `${}` に「{}」を追加しました'.format(command, text)\n _send_markdown_text(message, text)\n\n\ndef del_response(message, command, text):\n \"\"\"\n 用語コマンドから応答を削除する\n \"\"\"\n term = Term.get(command=command)\n try:\n response = Response.get(term=term, text=text)\n except Response.DoesNotExist:\n reply = 'コマンド `${}` に「{}」は登録されていません'.format(command, text)\n _send_markdown_text(message, reply)\n return\n\n # 応答を削除する\n response.delete_instance()\n\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef pop_response(message, command):\n \"\"\"\n 用語コマンドで最後に登録された応答を削除する\n \"\"\"\n response_set = Term.get(command=command).response_set\n # 応答が登録されていない\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n return\n \n last_response = response_set.order_by(Response.created.desc())[0]\n text = last_response.text\n last_response.delete_instance()\n\n reply = 'コマンド `${}` から「{}」を削除しました'.format(command, text)\n _send_markdown_text(message, reply)\n\n\ndef search_responses(message, command, keyword):\n \"\"\"\n 用語コマンドに登録されている応答のうち、キーワードにマッチするものを返す\n \"\"\"\n term = Term.get(command=command)\n pat = '%{}%'.format(keyword)\n responses = Response.select().where(term == term, Response.text ** pat)\n\n if len(responses) == 0:\n botsend(message, 'コマンド `${}` に `{}` を含む応答はありません'.format(command, keyword))\n else:\n pretext = 'コマンド `${}` の `{}` を含む応答は {} 件あります\\n'.format(\n command, keyword, len(responses))\n data = [x.text for x in responses]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\ndef get_responses(message, command):\n \"\"\"\n 用語コマンドに登録されている応答の一覧を返す\n \"\"\"\n 
response_set = Term.get(command=command).response_set\n if len(response_set) == 0:\n msg = 'コマンド `${}` には応答が登録されていません\\n'.format(command)\n msg += '`${} add (レスポンス)` で応答を登録してください'.format(command)\n botsend(message, msg)\n else:\n pretext = 'コマンド `${}` の応答は {} 件あります\\n'.format(\n command, len(response_set))\n data = [x.text for x in response_set]\n attachments = _create_attachments_for_list(pretext, data, False)\n botwebapi(message, attachments)\n\n\n@respond_to('term\\s+help')\ndef term_help(message):\n \"\"\"\n term pluginのヘルプを返す\n \"\"\"\n botsend(message, '''- `$term (用語)`: 用語コマンドを作成する\n- `$term create (用語)`: 用語コマンドを作成する\n- `$term drop (用語)`: 用語コマンドを消去する\n- `$term search (キーワード)`: キーワードを含む用語コマンドの一覧を返す\n- `$term list`: 用語コマンドの一覧を返す\n\n- `$(用語)`: 用語コマンドに登録してある応答からランダムに一つ返す\n- `$(用語) add (応答)`: 用語コマンドに応答を追加する\n- `$(用語) del (応答)`: 用語コマンドから応答を削除する\n- `$(用語) pop`: 用語コマンドの最後に登録した応答を削除する\n- `$(用語) list`: 用語コマンドの応答一覧を返す\n- `$(用語) search (キーワード)`: 用語コマンドのうちキーワードを含む応答一覧を返す\n```\n> $term create 酒\nコマンド `$酒` を作成しました。\n`$酒 add (レスポンス)` でレスポンスを追加できます\n> $酒 add ビール\nコマンド `$酒` に `ビール` を追加しました\n> $酒 add ワイン\nコマンド `$酒` に `ワイン` を追加しました\n> $酒\nビール\n```\n''')\n", "step-ids": [ 7, 9, 12, 15, 19 ] }
[ 7, 9, 12, 15, 19 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> try: a = 100 b = a / 0 print(b) except ZeroDivisionError as z: print('Error= ', z) <|reserved_special_token_1|> try: a=100 b=a/0 print(b) except ZeroDivisionError as z: print("Error= ",z)
flexible
{ "blob_id": "9dead39e41fd0f3cff43501c659050885a50fec3", "index": 4521, "step-1": "<mask token>\n", "step-2": "try:\n a = 100\n b = a / 0\n print(b)\nexcept ZeroDivisionError as z:\n print('Error= ', z)\n", "step-3": "try:\r\n a=100\r\n b=a/0\r\n print(b)\r\nexcept ZeroDivisionError as z:\r\n print(\"Error= \",z)", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
try: from setuptools import setup, find_packages except ImportError: import ez_setup ez_setup.use_setuptools() from setuptools import setup, find_packages setup( name = "pip-utils", version = "0.0.1", url = 'https://github.com/mattpaletta/pip-utils', packages = find_packages(), include_package_data = True, install_requires = ["threadlru", "beautifulsoup4"], setup_requires = [], author = "Matthew Paletta", author_email = "mattpaletta@gmail.com", description = "Programatic Utils for pip management", license = "BSD", dependency_links=[ 'git+git://github.com/mattpaletta/pynotstdlib.git@master#egg=pynotstdlib-0' ], )
normal
{ "blob_id": "5fe81a6143642d671686c6623a9ecc93e04a82bf", "index": 5711, "step-1": "<mask token>\n", "step-2": "try:\n from setuptools import setup, find_packages\nexcept ImportError:\n import ez_setup\n ez_setup.use_setuptools()\n from setuptools import setup, find_packages\nsetup(name='pip-utils', version='0.0.1', url=\n 'https://github.com/mattpaletta/pip-utils', packages=find_packages(),\n include_package_data=True, install_requires=['threadlru',\n 'beautifulsoup4'], setup_requires=[], author='Matthew Paletta',\n author_email='mattpaletta@gmail.com', description=\n 'Programatic Utils for pip management', license='BSD', dependency_links\n =[\n 'git+git://github.com/mattpaletta/pynotstdlib.git@master#egg=pynotstdlib-0'\n ])\n", "step-3": "try:\n from setuptools import setup, find_packages\nexcept ImportError:\n import ez_setup\n\n ez_setup.use_setuptools()\n from setuptools import setup, find_packages\n\nsetup(\n name = \"pip-utils\",\n version = \"0.0.1\",\n url = 'https://github.com/mattpaletta/pip-utils',\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\"threadlru\", \"beautifulsoup4\"],\n setup_requires = [],\n author = \"Matthew Paletta\",\n author_email = \"mattpaletta@gmail.com\",\n description = \"Programatic Utils for pip management\",\n license = \"BSD\",\n dependency_links=[\n 'git+git://github.com/mattpaletta/pynotstdlib.git@master#egg=pynotstdlib-0'\n ],\n)", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import json from iamport import Iamport from django.views import View from django.http import JsonResponse from share.decorators import check_auth_decorator class PaymentView(View): @check_auth_decorator def post(self, request): data = json.loads(request.body) try: user = request.user payment = Payment.objects.create( user_id = user, subscribe_day = data['subscribe_day'], expired_day = data['expired_day'], method = data['method'], next_payday = data['next_payday'] ) return JsonResponse({'message':'SUCCESS'}, status=200) return KeyError: return JsonResponse({'message':'KEY_ERROR'}, status=200) @check_auth_decorator def get(self, request): try: user = request.user payment = Payment.objects.get(user_id=user) payment_list = { 'user_id' : payment.user_id, 'subscribe_day' : payment.subscribe_day, 'expired_day' : payment.expired_day, 'method' : payment.method, 'next_payday' : payment.next_payday, 'created_at' : payment.created_at } return JsonResponse({'payment_list':payment_list}, status=200) except KeyError: return JsonResponse({'message':'KEY_ERROR'}, status=400)
normal
{ "blob_id": "c1c6db4dbd1e6719d30905babd6ccf5b1e76e75d", "index": 2824, "step-1": "import json\nfrom iamport import Iamport\n\nfrom django.views import View\nfrom django.http import JsonResponse\n\nfrom share.decorators import check_auth_decorator\n\nclass PaymentView(View):\n @check_auth_decorator\n def post(self, request):\n data = json.loads(request.body)\n try:\n user = request.user\n payment = Payment.objects.create(\n user_id = user,\n subscribe_day = data['subscribe_day'],\n expired_day = data['expired_day'],\n method = data['method'],\n next_payday = data['next_payday']\n )\n return JsonResponse({'message':'SUCCESS'}, status=200)\n return KeyError:\n return JsonResponse({'message':'KEY_ERROR'}, status=200)\n\n @check_auth_decorator\n def get(self, request):\n try:\n user = request.user\n payment = Payment.objects.get(user_id=user)\n\n payment_list = {\n 'user_id' : payment.user_id,\n 'subscribe_day' : payment.subscribe_day,\n 'expired_day' : payment.expired_day,\n 'method' : payment.method,\n 'next_payday' : payment.next_payday,\n 'created_at' : payment.created_at\n }\n return JsonResponse({'payment_list':payment_list}, status=200)\n except KeyError:\n return JsonResponse({'message':'KEY_ERROR'}, status=400)\n\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#!/usr/bin/python import os, sys import csv import glob if len(sys.argv)==3: res_dir = sys.argv[1] info = sys.argv[2] else: print "Incorrect arguments: enter outout directory" sys.exit(0) seg = dict([('PB2','1'), ('PB1','2'), ('PA','3'), ('HA','4'), ('NP','5'), ('NA','6'), ('MP','7'), ('NS','8')]) # Read the summary info file: info_list = [] with open(info, 'r') as csvfile: reader = csv.reader(csvfile) for xi in reader: print xi info_list = xi print info_list # if one samlple or many samples : fixing the list length issue if len(info_list[0]) < 4 : subtypes = list[set([c[-1] for c in info_list])] else: subtypes = [info_list[-1],] # Merge all Annotation file of the consensus genome all_annot = [] assembled_cons = [["Sample Id", "Sample Name", "HA", "NA", "MP", "PB2", "PB1", "PA", "NP", "NS"]] for sub_type in subtypes: for x in glob.glob(res_dir + "/Consensus_genome/" + sub_type + "/*csv"): X = x.split("/") y = X[-1].replace("-annotation.csv", "") with open(x, 'rb') as csvfile: r = csv.reader(csvfile) ha = "-" na = "-" mp = "-" pb2 = "-" pb1 = "-" pa = "-" np = "-" ns = "-" for a in r: if a[0] != "Genome": print X, a seg_nam = a[0].split("|")[1] a.insert(0,y + "." 
+ seg[seg_nam]) all_annot.append(a) if a[1].split("|")[1] == "HA": ha = a[-1] if a[1].split("|")[1] == "NA": na = a[-1] if a[1].split("|")[1] == "MP": mp = a[-1] if a[1].split("|")[1] == "PB2": pb2 = a[-1] if a[1].split("|")[1] == "PB1": pb1 = a[-1] if a[1].split("|")[1] == "PA": pa = a[-1] if a[1].split("|")[1] == "NP": np = a[-1] if a[1].split("|")[1] == "NS": ns = a[-1] else: annot_header = a assembled_cons.append([y, a[1].split("|")[0], ha, na, mp, pb2, pb1, pa, np, ns]) with open(res_dir + '/' + sub_type + '-ConsensusDetail.csv', 'wb') as f: writer = csv.writer(f) annot_header.insert(0,"Sample Id") all_annot.insert(0,annot_header) writer.writerows(all_annot) with open(res_dir + '/' + sub_type + '-ConsensusSummary.csv', 'wb') as f: writer = csv.writer(f) writer.writerows(assembled_cons) # Merge all SNPs called... merge_snps = [] for sub_type in subtypes: for x in glob.glob(res_dir + "/Snps/" + sub_type + "/*.vcf"): X = x.split("/") y = X[-1].replace("-genome-snps.vcf", "") with open(x, 'rb') as csvfile: r = csv.reader(csvfile, delimiter="\t") for s in r: if not s[0].startswith("#"): print s seg_nam = s[0].split("|")[1] s.insert(0, y + "." + seg[seg_nam]) merge_snps.append(s) with open(res_dir + '/' + sub_type + '-SNPs.csv', 'wb') as f: writer = csv.writer(f) merge_snps.insert(0, ["Sample Id", "Sample Name", "POS","ID","REF","ALT", "QUAL", "FILTER", "INFO"]) writer.writerows(merge_snps)
normal
{ "blob_id": "4a2796645f1ab585084be47c8cd984c2945aa38b", "index": 4270, "step-1": "#!/usr/bin/python\n\nimport os, sys\nimport csv\nimport glob\n\nif len(sys.argv)==3:\n res_dir = sys.argv[1]\n info = sys.argv[2]\n\nelse:\n print \"Incorrect arguments: enter outout directory\"\n sys.exit(0)\n\nseg = dict([('PB2','1'), ('PB1','2'), ('PA','3'), ('HA','4'), ('NP','5'), ('NA','6'), ('MP','7'), ('NS','8')])\n# Read the summary info file:\ninfo_list = []\nwith open(info, 'r') as csvfile:\n reader = csv.reader(csvfile)\n for xi in reader:\n print xi\n info_list = xi \nprint info_list\n# if one samlple or many samples : fixing the list length issue\nif len(info_list[0]) < 4 : subtypes = list[set([c[-1] for c in info_list])]\nelse: subtypes = [info_list[-1],]\n\n# Merge all Annotation file of the consensus genome\nall_annot = []\nassembled_cons = [[\"Sample Id\", \"Sample Name\", \"HA\", \"NA\", \"MP\", \"PB2\", \"PB1\", \"PA\", \"NP\", \"NS\"]]\n\nfor sub_type in subtypes:\n for x in glob.glob(res_dir + \"/Consensus_genome/\" + sub_type + \"/*csv\"):\n X = x.split(\"/\")\n y = X[-1].replace(\"-annotation.csv\", \"\")\n with open(x, 'rb') as csvfile:\n r = csv.reader(csvfile)\n ha = \"-\"\n na = \"-\"\n mp = \"-\"\n pb2 = \"-\"\n pb1 = \"-\"\n pa = \"-\"\n np = \"-\"\n ns = \"-\" \n for a in r:\n if a[0] != \"Genome\":\n\t print X, a\n seg_nam = a[0].split(\"|\")[1]\n a.insert(0,y + \".\" + seg[seg_nam]) \n all_annot.append(a) \n\t if a[1].split(\"|\")[1] == \"HA\": ha = a[-1]\n if a[1].split(\"|\")[1] == \"NA\": na = a[-1]\n\t if a[1].split(\"|\")[1] == \"MP\": mp = a[-1]\n if a[1].split(\"|\")[1] == \"PB2\": pb2 = a[-1]\t\n\t if a[1].split(\"|\")[1] == \"PB1\": pb1 = a[-1]\n if a[1].split(\"|\")[1] == \"PA\": pa = a[-1]\n\t if a[1].split(\"|\")[1] == \"NP\": np = a[-1]\n if a[1].split(\"|\")[1] == \"NS\": ns = a[-1] \n else: annot_header = a \n assembled_cons.append([y, a[1].split(\"|\")[0], ha, na, mp, pb2, pb1, pa, np, ns]) \t\n\n with open(res_dir + '/' + sub_type + 
'-ConsensusDetail.csv', 'wb') as f:\n writer = csv.writer(f)\n annot_header.insert(0,\"Sample Id\")\n all_annot.insert(0,annot_header)\n writer.writerows(all_annot) \n \n with open(res_dir + '/' + sub_type + '-ConsensusSummary.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(assembled_cons) \n\n\n# Merge all SNPs called...\nmerge_snps = []\nfor sub_type in subtypes:\n for x in glob.glob(res_dir + \"/Snps/\" + sub_type + \"/*.vcf\"):\n X = x.split(\"/\")\n y = X[-1].replace(\"-genome-snps.vcf\", \"\")\n with open(x, 'rb') as csvfile:\n r = csv.reader(csvfile, delimiter=\"\\t\")\n for s in r:\n\tif not s[0].startswith(\"#\"):\n\t print s\n seg_nam = s[0].split(\"|\")[1]\n s.insert(0, y + \".\" + seg[seg_nam])\n\t merge_snps.append(s)\n\n with open(res_dir + '/' + sub_type + '-SNPs.csv', 'wb') as f:\n writer = csv.writer(f)\n merge_snps.insert(0, [\"Sample Id\", \"Sample Name\", \"POS\",\"ID\",\"REF\",\"ALT\", \"QUAL\", \"FILTER\", \"INFO\"])\n writer.writerows(merge_snps) \n\n\n\n\n\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def get_resnet18(pre_imgnet=False, num_classes=64): model = torchvision.models.resnet18(pretrained=pre_imgnet) model.fc = nn.Linear(512, 64) return model <|reserved_special_token_1|> import torch import torchvision from torch import nn def get_resnet18(pre_imgnet=False, num_classes=64): model = torchvision.models.resnet18(pretrained=pre_imgnet) model.fc = nn.Linear(512, 64) return model
flexible
{ "blob_id": "8e05b2723d8c50354e785b4bc7c5de8860aa706d", "index": 5355, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef get_resnet18(pre_imgnet=False, num_classes=64):\n model = torchvision.models.resnet18(pretrained=pre_imgnet)\n model.fc = nn.Linear(512, 64)\n return model\n", "step-3": "import torch\nimport torchvision\nfrom torch import nn\n\n\ndef get_resnet18(pre_imgnet=False, num_classes=64):\n model = torchvision.models.resnet18(pretrained=pre_imgnet)\n model.fc = nn.Linear(512, 64)\n return model\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
''' A linear regression learning algorithm example using TensorFlow library. Author: Aymeric Damien Project: https://github.com/aymericdamien/TensorFlow-Examples/ ''' from __future__ import print_function import tensorflow as tf import argparse import numpy rng = numpy.random #"python tf_cnn_benchmarks.py --device=cpu --data_format=NHWC --num_warmup_batches=0 --model=lenet --batch_size=32 --num_intra_threads=19 --num_batches=3750" parser = argparse.ArgumentParser() parser.add_argument('--batch_size', help='batch_size', required=False, default=32) parser.add_argument('--data_size', help='data_size', required=False, default=1700) parser.add_argument('--num_intra_threads', help='num_intra_threads', required=False, default=19) parser.add_argument('--num_batches', help='num_batches', required=False, default=5000000) parser.add_argument('--device', help='device', required=False, default='gpu') args = vars(parser.parse_args()) batch_size = int(args['batch_size']) data_size = int(args['data_size']) num_intra_threads =int(args['num_intra_threads']) num_batches =int(args['num_batches']) device =args['device'] # Parameters learning_rate = 0.01 training_epochs = num_batches display_step = 50 # Training Data #train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167, 7.042,10.791,5.313,7.997,5.654,9.27,3.1]) #train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221, 2.827,3.465,1.65,2.904,2.42,2.94,1.3]) #n_samples = train_X.shape[0] n_samples=data_size train_X=rng.rand(1,n_samples) train_Y=rng.rand(1,n_samples) with tf.device('/'+device+':0'): # tf Graph Input X = tf.placeholder("float") Y = tf.placeholder("float") # Set model weights W = tf.Variable(rng.randn(), name="weight") b = tf.Variable(rng.randn(), name="bias") # Construct a linear model pred = tf.add(tf.multiply(X, W), b) # Mean squared error cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples) # Gradient descent # Note, minimize() knows to modify W and b because Variable 
objects are trainable=True by default optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost) # Initializing the variables init = tf.global_variables_initializer() # gpu share #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.2) # Launch the graph newConfig = tf.ConfigProto() newConfig.intra_op_parallelism_threads = num_intra_threads with tf.Session(config=newConfig) as sess: # with tf.Session() as sess: sess.run(init) # Fit all training data for epoch in range(training_epochs): for (x, y) in zip(train_X, train_Y): sess.run(optimizer, feed_dict={X: x, Y: y})
normal
{ "blob_id": "2e8d39d6d72672de8e4eac8295b90d68b1dff938", "index": 9007, "step-1": "<mask token>\n", "step-2": "<mask token>\nparser.add_argument('--batch_size', help='batch_size', required=False,\n default=32)\nparser.add_argument('--data_size', help='data_size', required=False,\n default=1700)\nparser.add_argument('--num_intra_threads', help='num_intra_threads',\n required=False, default=19)\nparser.add_argument('--num_batches', help='num_batches', required=False,\n default=5000000)\nparser.add_argument('--device', help='device', required=False, default='gpu')\n<mask token>\nwith tf.device('/' + device + ':0'):\n X = tf.placeholder('float')\n Y = tf.placeholder('float')\n W = tf.Variable(rng.randn(), name='weight')\n b = tf.Variable(rng.randn(), name='bias')\n pred = tf.add(tf.multiply(X, W), b)\n cost = tf.reduce_sum(tf.pow(pred - Y, 2)) / (2 * n_samples)\n optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n init = tf.global_variables_initializer()\n<mask token>\nwith tf.Session(config=newConfig) as sess:\n sess.run(init)\n for epoch in range(training_epochs):\n for x, y in zip(train_X, train_Y):\n sess.run(optimizer, feed_dict={X: x, Y: y})\n", "step-3": "<mask token>\nrng = numpy.random\nparser = argparse.ArgumentParser()\nparser.add_argument('--batch_size', help='batch_size', required=False,\n default=32)\nparser.add_argument('--data_size', help='data_size', required=False,\n default=1700)\nparser.add_argument('--num_intra_threads', help='num_intra_threads',\n required=False, default=19)\nparser.add_argument('--num_batches', help='num_batches', required=False,\n default=5000000)\nparser.add_argument('--device', help='device', required=False, default='gpu')\nargs = vars(parser.parse_args())\nbatch_size = int(args['batch_size'])\ndata_size = int(args['data_size'])\nnum_intra_threads = int(args['num_intra_threads'])\nnum_batches = int(args['num_batches'])\ndevice = args['device']\nlearning_rate = 0.01\ntraining_epochs = 
num_batches\ndisplay_step = 50\nn_samples = data_size\ntrain_X = rng.rand(1, n_samples)\ntrain_Y = rng.rand(1, n_samples)\nwith tf.device('/' + device + ':0'):\n X = tf.placeholder('float')\n Y = tf.placeholder('float')\n W = tf.Variable(rng.randn(), name='weight')\n b = tf.Variable(rng.randn(), name='bias')\n pred = tf.add(tf.multiply(X, W), b)\n cost = tf.reduce_sum(tf.pow(pred - Y, 2)) / (2 * n_samples)\n optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n init = tf.global_variables_initializer()\nnewConfig = tf.ConfigProto()\nnewConfig.intra_op_parallelism_threads = num_intra_threads\nwith tf.Session(config=newConfig) as sess:\n sess.run(init)\n for epoch in range(training_epochs):\n for x, y in zip(train_X, train_Y):\n sess.run(optimizer, feed_dict={X: x, Y: y})\n", "step-4": "<mask token>\nfrom __future__ import print_function\nimport tensorflow as tf\nimport argparse\nimport numpy\nrng = numpy.random\nparser = argparse.ArgumentParser()\nparser.add_argument('--batch_size', help='batch_size', required=False,\n default=32)\nparser.add_argument('--data_size', help='data_size', required=False,\n default=1700)\nparser.add_argument('--num_intra_threads', help='num_intra_threads',\n required=False, default=19)\nparser.add_argument('--num_batches', help='num_batches', required=False,\n default=5000000)\nparser.add_argument('--device', help='device', required=False, default='gpu')\nargs = vars(parser.parse_args())\nbatch_size = int(args['batch_size'])\ndata_size = int(args['data_size'])\nnum_intra_threads = int(args['num_intra_threads'])\nnum_batches = int(args['num_batches'])\ndevice = args['device']\nlearning_rate = 0.01\ntraining_epochs = num_batches\ndisplay_step = 50\nn_samples = data_size\ntrain_X = rng.rand(1, n_samples)\ntrain_Y = rng.rand(1, n_samples)\nwith tf.device('/' + device + ':0'):\n X = tf.placeholder('float')\n Y = tf.placeholder('float')\n W = tf.Variable(rng.randn(), name='weight')\n b = tf.Variable(rng.randn(), 
name='bias')\n pred = tf.add(tf.multiply(X, W), b)\n cost = tf.reduce_sum(tf.pow(pred - Y, 2)) / (2 * n_samples)\n optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n init = tf.global_variables_initializer()\nnewConfig = tf.ConfigProto()\nnewConfig.intra_op_parallelism_threads = num_intra_threads\nwith tf.Session(config=newConfig) as sess:\n sess.run(init)\n for epoch in range(training_epochs):\n for x, y in zip(train_X, train_Y):\n sess.run(optimizer, feed_dict={X: x, Y: y})\n", "step-5": "'''\nA linear regression learning algorithm example using TensorFlow library.\n\nAuthor: Aymeric Damien\nProject: https://github.com/aymericdamien/TensorFlow-Examples/\n'''\n\nfrom __future__ import print_function\n\nimport tensorflow as tf\nimport argparse\n\nimport numpy\nrng = numpy.random\n\n#\"python tf_cnn_benchmarks.py --device=cpu --data_format=NHWC --num_warmup_batches=0 --model=lenet --batch_size=32 --num_intra_threads=19 --num_batches=3750\"\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--batch_size', help='batch_size', required=False, default=32)\nparser.add_argument('--data_size', help='data_size', required=False, default=1700)\nparser.add_argument('--num_intra_threads', help='num_intra_threads', required=False, default=19)\nparser.add_argument('--num_batches', help='num_batches', required=False, default=5000000)\nparser.add_argument('--device', help='device', required=False, default='gpu')\n\nargs = vars(parser.parse_args())\n\nbatch_size = int(args['batch_size'])\ndata_size = int(args['data_size'])\nnum_intra_threads =int(args['num_intra_threads'])\nnum_batches =int(args['num_batches'])\ndevice =args['device']\n\n# Parameters\nlearning_rate = 0.01\ntraining_epochs = num_batches\ndisplay_step = 50\n\n# Training Data\n#train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167, 7.042,10.791,5.313,7.997,5.654,9.27,3.1]) \n#train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221, 
2.827,3.465,1.65,2.904,2.42,2.94,1.3])\n#n_samples = train_X.shape[0]\n\nn_samples=data_size\ntrain_X=rng.rand(1,n_samples)\ntrain_Y=rng.rand(1,n_samples)\n\n\nwith tf.device('/'+device+':0'):\n # tf Graph Input\n X = tf.placeholder(\"float\")\n Y = tf.placeholder(\"float\")\n\n # Set model weights\n W = tf.Variable(rng.randn(), name=\"weight\")\n b = tf.Variable(rng.randn(), name=\"bias\")\n\n # Construct a linear model\n pred = tf.add(tf.multiply(X, W), b)\n\n # Mean squared error\n cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)\n # Gradient descent\n # Note, minimize() knows to modify W and b because Variable objects are trainable=True by default\n optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n\n # Initializing the variables\n init = tf.global_variables_initializer()\n\n # gpu share\n#gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.2)\n\n# Launch the graph\nnewConfig = tf.ConfigProto()\nnewConfig.intra_op_parallelism_threads = num_intra_threads\nwith tf.Session(config=newConfig) as sess:\n# with tf.Session() as sess:\n sess.run(init)\n # Fit all training data\n for epoch in range(training_epochs):\n for (x, y) in zip(train_X, train_Y):\n sess.run(optimizer, feed_dict={X: x, Y: y})", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from flask_restful import Api, Resource, reqparse class HelloApiHandler(Resource): def get(self): return { 'resultStatus': 'SUCCESS', 'message': "Hello Api Handler" } def post(self): print(self) parser = reqparse.RequestParser() parser.add_argument('type', type=str) parser.add_argument('message', type=str) args = parser.parse_args() print(args) # note, the post req from frontend needs to match the strings here (e.g. 'type and 'message') request_type = args['type'] request_json = args['message'] # ret_status, ret_msg = ReturnData(request_type, request_json) # currently just returning the req straight ret_status = request_type ret_msg = request_json if ret_msg: message = "Your Message Requested: {}".format(ret_msg) else: message = "No Msg" final_ret = {"status": "Success", "message": message} return final_ret
normal
{ "blob_id": "80c3d9165c1b592122fabf6382e265465604989c", "index": 1450, "step-1": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n <mask token>\n", "step-3": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', type=str)\n args = parser.parse_args()\n print(args)\n request_type = args['type']\n request_json = args['message']\n ret_status = request_type\n ret_msg = request_json\n if ret_msg:\n message = 'Your Message Requested: {}'.format(ret_msg)\n else:\n message = 'No Msg'\n final_ret = {'status': 'Success', 'message': message}\n return final_ret\n", "step-4": "from flask_restful import Api, Resource, reqparse\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', type=str)\n args = parser.parse_args()\n print(args)\n request_type = args['type']\n request_json = args['message']\n ret_status = request_type\n ret_msg = request_json\n if ret_msg:\n message = 'Your Message Requested: {}'.format(ret_msg)\n else:\n message = 'No Msg'\n final_ret = {'status': 'Success', 'message': message}\n return final_ret\n", "step-5": "from flask_restful import Api, Resource, reqparse\n\nclass HelloApiHandler(Resource):\n def get(self):\n return {\n 'resultStatus': 'SUCCESS',\n 'message': \"Hello Api Handler\"\n }\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', 
type=str)\n\n args = parser.parse_args()\n\n print(args)\n # note, the post req from frontend needs to match the strings here (e.g. 'type and 'message')\n\n request_type = args['type']\n request_json = args['message']\n # ret_status, ret_msg = ReturnData(request_type, request_json)\n # currently just returning the req straight\n ret_status = request_type\n ret_msg = request_json\n\n if ret_msg:\n message = \"Your Message Requested: {}\".format(ret_msg)\n else:\n message = \"No Msg\"\n \n final_ret = {\"status\": \"Success\", \"message\": message}\n\n return final_ret", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
# from django.contrib.auth import forms # class UserRegister(froms.M): # class Meta: # fields = []
normal
{ "blob_id": "c1f432ff70b21064f36cf9651f8cff9c69361d5c", "index": 9073, "step-1": "# from django.contrib.auth import forms\n\n\n\n# class UserRegister(froms.M):\n# class Meta:\n# fields = []\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 1 ] }
[ 1 ]
""" Find two distinct numbers in values whose sum is equal to 100. Assign one of them to value1 and the other one to value2. If there are several solutions, any one will be marked as correct. Optional step to check your answer: Print the value of value1 and value2. """ values = [72, 50, 48, 50, 7, 66, 62, 32, 33, 75, 30, 85, 6, 85, 82, 88, 30, 32, 78, 39, 57, 96, 45, 57, 61, 10, 62, 48, 32, 96, 75, 15, 50, 50] value1 = None value2 = None for x in values: for y in values: if x + y == 100 and x != y: value1 = x value2 = y print(value1) print(value2)
normal
{ "blob_id": "c0ebf10b8c0cb4af11608cafcdb85dbff4abdf90", "index": 4755, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor x in values:\n for y in values:\n if x + y == 100 and x != y:\n value1 = x\n value2 = y\nprint(value1)\nprint(value2)\n", "step-3": "<mask token>\nvalues = [72, 50, 48, 50, 7, 66, 62, 32, 33, 75, 30, 85, 6, 85, 82, 88, 30,\n 32, 78, 39, 57, 96, 45, 57, 61, 10, 62, 48, 32, 96, 75, 15, 50, 50]\nvalue1 = None\nvalue2 = None\nfor x in values:\n for y in values:\n if x + y == 100 and x != y:\n value1 = x\n value2 = y\nprint(value1)\nprint(value2)\n", "step-4": "\"\"\"\r\nFind two distinct numbers in values whose sum is equal to 100.\r\nAssign one of them to value1 and the other one to value2.\r\nIf there are several solutions, any one will be marked as correct.\r\n\r\nOptional step to check your answer:\r\n\r\nPrint the value of value1 and value2.\r\n\"\"\"\r\n\r\n\r\nvalues = [72, 50, 48, 50, 7, 66, 62, 32, 33, 75, 30, 85, 6, 85, 82, 88, 30, 32, 78, 39, 57, 96, 45, 57, 61, 10, 62, 48, 32, 96, 75, 15, 50, 50]\r\n\r\nvalue1 = None\r\nvalue2 = None\r\nfor x in values:\r\n for y in values:\r\n if x + y == 100 and x != y:\r\n value1 = x\r\n value2 = y\r\n \r\nprint(value1)\r\nprint(value2)", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import json # No llego a solucionarlo entero. #Aparcamientos que estan cubiertos en el centro de deportes . from pprint import pprint with open('Aparcamientos.json') as data_file: data = json.load(data_file) for x in data['docs']: if x['TIPOLOGIA'] == 'Cubierto': print(x['NOMBRE']) elif x['TIPOLOGIA'] == 'Pabellón de deportes': print(x['NOMBRE']) print(x['TIPOLOGIA'])
normal
{ "blob_id": "d111f93144a1d2790470365d0ca31bcea17713d7", "index": 8766, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('Aparcamientos.json') as data_file:\n data = json.load(data_file)\nfor x in data['docs']:\n if x['TIPOLOGIA'] == 'Cubierto':\n print(x['NOMBRE'])\n elif x['TIPOLOGIA'] == 'Pabellón de deportes':\n print(x['NOMBRE'])\n print(x['TIPOLOGIA'])\n", "step-3": "import json\nfrom pprint import pprint\nwith open('Aparcamientos.json') as data_file:\n data = json.load(data_file)\nfor x in data['docs']:\n if x['TIPOLOGIA'] == 'Cubierto':\n print(x['NOMBRE'])\n elif x['TIPOLOGIA'] == 'Pabellón de deportes':\n print(x['NOMBRE'])\n print(x['TIPOLOGIA'])\n", "step-4": "import json\n# No llego a solucionarlo entero.\n#Aparcamientos que estan cubiertos en el centro de deportes .\nfrom pprint import pprint\n\nwith open('Aparcamientos.json') as data_file: \n data = json.load(data_file)\nfor x in data['docs']:\n\tif x['TIPOLOGIA'] == 'Cubierto':\n\t\tprint(x['NOMBRE'])\n\telif x['TIPOLOGIA'] == 'Pabellón de deportes':\n\t\tprint(x['NOMBRE'])\n\t\tprint(x['TIPOLOGIA'])\n\n\n\n\t\t\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
num=int(input("Enter the number: ")) table=[num*i for i in range(1,11)] print(table) with open("table.txt","a") as f: f.write(f"{num} table is: {str(table)}") f.write('\n')
normal
{ "blob_id": "657ac500c40ddbd29f5e3736a78ed43e7d105478", "index": 9417, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(table)\nwith open('table.txt', 'a') as f:\n f.write(f'{num} table is: {str(table)}')\n f.write('\\n')\n", "step-3": "num = int(input('Enter the number: '))\ntable = [(num * i) for i in range(1, 11)]\nprint(table)\nwith open('table.txt', 'a') as f:\n f.write(f'{num} table is: {str(table)}')\n f.write('\\n')\n", "step-4": "num=int(input(\"Enter the number: \"))\n\ntable=[num*i for i in range(1,11)]\nprint(table)\nwith open(\"table.txt\",\"a\") as f:\n f.write(f\"{num} table is: {str(table)}\")\n f.write('\\n')", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class BuyerSellerAppConfig(AppConfig): <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class BuyerSellerAppConfig(AppConfig): name = 'buyer_seller_app' <|reserved_special_token_1|> from django.apps import AppConfig class BuyerSellerAppConfig(AppConfig): name = 'buyer_seller_app'
flexible
{ "blob_id": "0b730314fef31e7304a8f5d8bb998581b021a610", "index": 1798, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass BuyerSellerAppConfig(AppConfig):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass BuyerSellerAppConfig(AppConfig):\n name = 'buyer_seller_app'\n", "step-4": "from django.apps import AppConfig\n\n\nclass BuyerSellerAppConfig(AppConfig):\n name = 'buyer_seller_app'\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> while 1: successFlag, frame = cap.read() if not successFlag: cv2.waitKey(0) break lower_hsv_thresholdcr = np.array([0, 250, 250]) upper_hsv_thresholdcr = np.array([10, 255, 255]) gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)) dst = cv2.cornerHarris(gray, 2, 3, 0.04) dst = cv2.dilate(dst, None) frameWithRedCorners = np.copy(frame) frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255] hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV) crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr) cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2. CHAIN_APPROX_SIMPLE)[-2] cv2.imshow('Frame', frameWithRedCorners) k = cv2.waitKey(10000) & 255 if k == 27: break cv2.destroyAllWindows() cap.release() <|reserved_special_token_1|> <|reserved_special_token_0|> frameFileName = ( 'H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4' ) cap = cv2.VideoCapture( 'H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4' ) while 1: successFlag, frame = cap.read() if not successFlag: cv2.waitKey(0) break lower_hsv_thresholdcr = np.array([0, 250, 250]) upper_hsv_thresholdcr = np.array([10, 255, 255]) gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)) dst = cv2.cornerHarris(gray, 2, 3, 0.04) dst = cv2.dilate(dst, None) frameWithRedCorners = np.copy(frame) frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255] hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV) crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr) cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2. 
CHAIN_APPROX_SIMPLE)[-2] cv2.imshow('Frame', frameWithRedCorners) k = cv2.waitKey(10000) & 255 if k == 27: break cv2.destroyAllWindows() cap.release() <|reserved_special_token_1|> <|reserved_special_token_0|> import cv2 import numpy as np from collections import deque import imutils import misc_image_tools frameFileName = ( 'H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4' ) cap = cv2.VideoCapture( 'H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4' ) while 1: successFlag, frame = cap.read() if not successFlag: cv2.waitKey(0) break lower_hsv_thresholdcr = np.array([0, 250, 250]) upper_hsv_thresholdcr = np.array([10, 255, 255]) gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)) dst = cv2.cornerHarris(gray, 2, 3, 0.04) dst = cv2.dilate(dst, None) frameWithRedCorners = np.copy(frame) frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255] hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV) crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr) cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2. 
CHAIN_APPROX_SIMPLE)[-2] cv2.imshow('Frame', frameWithRedCorners) k = cv2.waitKey(10000) & 255 if k == 27: break cv2.destroyAllWindows() cap.release() <|reserved_special_token_1|> # -*- coding: utf-8 -*- """ Created on Tue Jul 18 13:39:05 2017 @author: jaredhaeme15 """ import cv2 import numpy as np from collections import deque import imutils import misc_image_tools frameFileName = r"H:\Summer Research 2017\Whirligig Beetle pictures and videos\large1.mp4" cap = cv2.VideoCapture(r"H:\Summer Research 2017\Whirligig Beetle pictures and videos\large1.mp4") while(1): successFlag, frame = cap.read() if not successFlag: cv2.waitKey(0) break lower_hsv_thresholdcr = np.array([0,250,250]) upper_hsv_thresholdcr = np.array([10,255,255]) gray = np.float32(cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)) dst = cv2.cornerHarris(gray,2,3,0.04) #result is dilated for marking the corners, not important dst = cv2.dilate(dst,None) frameWithRedCorners = np.copy(frame) # Threshold for an optimal value, it may vary depending on the image. frameWithRedCorners[dst>0.005*dst.max()]=[0,0,255] hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV) #construct a mask for the color "green", then perform # a series of dilations and erosions to remove any small # blobs left in the mask crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr) cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2] cv2.imshow("Frame", frameWithRedCorners) k = cv2.waitKey(10000) & 0xFF if k == 27: # esc key break cv2.destroyAllWindows() cap.release()
flexible
{ "blob_id": "5ccfad17ede9f685ea9ef9c514c0108a61c2dfd6", "index": 8699, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile 1:\n successFlag, frame = cap.read()\n if not successFlag:\n cv2.waitKey(0)\n break\n lower_hsv_thresholdcr = np.array([0, 250, 250])\n upper_hsv_thresholdcr = np.array([10, 255, 255])\n gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))\n dst = cv2.cornerHarris(gray, 2, 3, 0.04)\n dst = cv2.dilate(dst, None)\n frameWithRedCorners = np.copy(frame)\n frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255]\n hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV)\n crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr)\n cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n cv2.imshow('Frame', frameWithRedCorners)\n k = cv2.waitKey(10000) & 255\n if k == 27:\n break\ncv2.destroyAllWindows()\ncap.release()\n", "step-3": "<mask token>\nframeFileName = (\n 'H:\\\\Summer Research 2017\\\\Whirligig Beetle pictures and videos\\\\large1.mp4'\n )\ncap = cv2.VideoCapture(\n 'H:\\\\Summer Research 2017\\\\Whirligig Beetle pictures and videos\\\\large1.mp4'\n )\nwhile 1:\n successFlag, frame = cap.read()\n if not successFlag:\n cv2.waitKey(0)\n break\n lower_hsv_thresholdcr = np.array([0, 250, 250])\n upper_hsv_thresholdcr = np.array([10, 255, 255])\n gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))\n dst = cv2.cornerHarris(gray, 2, 3, 0.04)\n dst = cv2.dilate(dst, None)\n frameWithRedCorners = np.copy(frame)\n frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255]\n hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV)\n crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr)\n cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n cv2.imshow('Frame', frameWithRedCorners)\n k = cv2.waitKey(10000) & 255\n if k == 27:\n break\ncv2.destroyAllWindows()\ncap.release()\n", "step-4": "<mask token>\nimport 
cv2\nimport numpy as np\nfrom collections import deque\nimport imutils\nimport misc_image_tools\nframeFileName = (\n 'H:\\\\Summer Research 2017\\\\Whirligig Beetle pictures and videos\\\\large1.mp4'\n )\ncap = cv2.VideoCapture(\n 'H:\\\\Summer Research 2017\\\\Whirligig Beetle pictures and videos\\\\large1.mp4'\n )\nwhile 1:\n successFlag, frame = cap.read()\n if not successFlag:\n cv2.waitKey(0)\n break\n lower_hsv_thresholdcr = np.array([0, 250, 250])\n upper_hsv_thresholdcr = np.array([10, 255, 255])\n gray = np.float32(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))\n dst = cv2.cornerHarris(gray, 2, 3, 0.04)\n dst = cv2.dilate(dst, None)\n frameWithRedCorners = np.copy(frame)\n frameWithRedCorners[dst > 0.005 * dst.max()] = [0, 0, 255]\n hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV)\n crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr)\n cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n cv2.imshow('Frame', frameWithRedCorners)\n k = cv2.waitKey(10000) & 255\n if k == 27:\n break\ncv2.destroyAllWindows()\ncap.release()\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Jul 18 13:39:05 2017\n\n@author: jaredhaeme15\n\"\"\"\n\n\nimport cv2\nimport numpy as np\nfrom collections import deque\nimport imutils\nimport misc_image_tools \n\nframeFileName = r\"H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4\"\ncap = cv2.VideoCapture(r\"H:\\Summer Research 2017\\Whirligig Beetle pictures and videos\\large1.mp4\")\n \nwhile(1): \n \n successFlag, frame = cap.read()\n if not successFlag:\n cv2.waitKey(0)\n break \n lower_hsv_thresholdcr = np.array([0,250,250])\n upper_hsv_thresholdcr = np.array([10,255,255])\n gray = np.float32(cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY))\n dst = cv2.cornerHarris(gray,2,3,0.04)\n #result is dilated for marking the corners, not important\n dst = cv2.dilate(dst,None)\n frameWithRedCorners = np.copy(frame)\n # Threshold for an optimal value, 
it may vary depending on the image.\n frameWithRedCorners[dst>0.005*dst.max()]=[0,0,255]\n hsv = cv2.cvtColor(frameWithRedCorners, cv2.COLOR_BGR2HSV)\n #construct a mask for the color \"green\", then perform\n # a series of dilations and erosions to remove any small\n # blobs left in the mask\n crmask = cv2.inRange(hsv, lower_hsv_thresholdcr, upper_hsv_thresholdcr)\n cntscr = cv2.findContours(crmask.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)[-2]\n cv2.imshow(\"Frame\", frameWithRedCorners)\n k = cv2.waitKey(10000) & 0xFF\n if k == 27: # esc key\n break\ncv2.destroyAllWindows()\ncap.release()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def distribution_plot(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) print('Enter the number of bins between 0 and 100') n_of_bins = input(str()) print('Enter the number of xticks between 0 and 4') xticks = input(str()) plt.figure(figsize=(15, 8)) plt.title( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) plots = sns.distplot(confirmed_results[trial]['age'], bins=int( n_of_bins), kde=True, rug=True) print('The highest age of all COVID-19 patients is: ' + str( confirmed_results[trial]['age'].max())) return plots def other_distributions(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) plt.figure(figsize=(15, 8)) plt.title( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu') plt.figure(figsize=(15, 8)) plt.title('Gender difference of the COVID-19 in South Africa') sns.countplot(confirmed_results[trial]['gender']) print('Number of rows and columns in the dataframe: ' + str( confirmed_results[trial].shape)) print('Number of rows: ' + str(confirmed_results[trial].shape[0])) confirmed_results[trial][['date', 'country']].groupby('date').count() confirmed_results[trial][['date', 'country']].groupby('date').count( ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) plt.figure(figsize=(25, 8)) plt.title( 'The Number of patients infected with the COVID-19 in South Africa') cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 
'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax.text(x, y, f'{m:.0f}') return plt.show() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def distribution_plot(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) print('Enter the number of bins between 0 and 100') n_of_bins = input(str()) print('Enter the number of xticks between 0 and 4') xticks = input(str()) plt.figure(figsize=(15, 8)) plt.title( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) plots = sns.distplot(confirmed_results[trial]['age'], bins=int( n_of_bins), kde=True, rug=True) print('The highest age of all COVID-19 patients is: ' + str( confirmed_results[trial]['age'].max())) return plots def other_distributions(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) plt.figure(figsize=(15, 8)) plt.title( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu') plt.figure(figsize=(15, 8)) plt.title('Gender difference of the COVID-19 in South Africa') sns.countplot(confirmed_results[trial]['gender']) print('Number of rows and columns in the dataframe: ' + str( confirmed_results[trial].shape)) print('Number of rows: ' + str(confirmed_results[trial].shape[0])) 
confirmed_results[trial][['date', 'country']].groupby('date').count() confirmed_results[trial][['date', 'country']].groupby('date').count( ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) plt.figure(figsize=(25, 8)) plt.title( 'The Number of patients infected with the COVID-19 in South Africa') cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax.text(x, y, f'{m:.0f}') return plt.show() def overall_data(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) attempt = pd.isnull(confirmed_results['age']) cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10)) graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde= True, rug=True, ax=ax[0, 0]) ax[0, 0].title.set_text( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') graph2 = sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu', ax=ax[0, 1]) ax[0, 1].title.set_text( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0]) ax[1, 0].title.set_text( 'Gender difference of the patients infected with COVID-19 in South Africa' ) graph4 = sns.lineplot(data=cumulative_cases, x='date', y= 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1]) for i in cumulative_cases.groupby('date'): for x, y, m in 
i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax[1, 1].text(x, y, f'{m:.0f}') ax[1, 1].title.set_text( 'The Number of patients infected with the COVID-19 in South Africa') ax[1, 1].tick_params(labelrotation=45) print('Total Number of Cases without Null Values: ' + str( confirmed_results[trial].shape[0])) print('Total Number of Cases with Null Values: ' + str( confirmed_results[attempt].shape[0])) print('Total Number of Cases: ' + str(confirmed_results.shape[0])) return graph1, graph2, graph3, graph4 <|reserved_special_token_1|> <|reserved_special_token_0|> def distribution(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) return confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD', 'geo_subdivision']) def distribution_plot(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) print('Enter the number of bins between 0 and 100') n_of_bins = input(str()) print('Enter the number of xticks between 0 and 4') xticks = input(str()) plt.figure(figsize=(15, 8)) plt.title( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) plots = sns.distplot(confirmed_results[trial]['age'], bins=int( n_of_bins), kde=True, rug=True) print('The highest age of all COVID-19 patients is: ' + str( confirmed_results[trial]['age'].max())) return plots def other_distributions(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) plt.figure(figsize=(15, 8)) plt.title( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) 
sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu') plt.figure(figsize=(15, 8)) plt.title('Gender difference of the COVID-19 in South Africa') sns.countplot(confirmed_results[trial]['gender']) print('Number of rows and columns in the dataframe: ' + str( confirmed_results[trial].shape)) print('Number of rows: ' + str(confirmed_results[trial].shape[0])) confirmed_results[trial][['date', 'country']].groupby('date').count() confirmed_results[trial][['date', 'country']].groupby('date').count( ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) plt.figure(figsize=(25, 8)) plt.title( 'The Number of patients infected with the COVID-19 in South Africa') cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax.text(x, y, f'{m:.0f}') return plt.show() def overall_data(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) attempt = pd.isnull(confirmed_results['age']) cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10)) graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde= True, rug=True, ax=ax[0, 0]) ax[0, 0].title.set_text( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') graph2 = sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu', ax=ax[0, 1]) ax[0, 
1].title.set_text( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0]) ax[1, 0].title.set_text( 'Gender difference of the patients infected with COVID-19 in South Africa' ) graph4 = sns.lineplot(data=cumulative_cases, x='date', y= 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1]) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax[1, 1].text(x, y, f'{m:.0f}') ax[1, 1].title.set_text( 'The Number of patients infected with the COVID-19 in South Africa') ax[1, 1].tick_params(labelrotation=45) print('Total Number of Cases without Null Values: ' + str( confirmed_results[trial].shape[0])) print('Total Number of Cases with Null Values: ' + str( confirmed_results[attempt].shape[0])) print('Total Number of Cases: ' + str(confirmed_results.shape[0])) return graph1, graph2, graph3, graph4 <|reserved_special_token_1|> import numpy as np import pandas as pd import matplotlib.pyplot as plt import seaborn as sns def distribution(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) return confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD', 'geo_subdivision']) def distribution_plot(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) print('Enter the number of bins between 0 and 100') n_of_bins = input(str()) print('Enter the number of xticks between 0 and 4') xticks = input(str()) plt.figure(figsize=(15, 8)) plt.title( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) plots = sns.distplot(confirmed_results[trial]['age'], 
bins=int( n_of_bins), kde=True, rug=True) print('The highest age of all COVID-19 patients is: ' + str( confirmed_results[trial]['age'].max())) return plots def other_distributions(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) plt.figure(figsize=(15, 8)) plt.title( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu') plt.figure(figsize=(15, 8)) plt.title('Gender difference of the COVID-19 in South Africa') sns.countplot(confirmed_results[trial]['gender']) print('Number of rows and columns in the dataframe: ' + str( confirmed_results[trial].shape)) print('Number of rows: ' + str(confirmed_results[trial].shape[0])) confirmed_results[trial][['date', 'country']].groupby('date').count() confirmed_results[trial][['date', 'country']].groupby('date').count( ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) plt.figure(figsize=(25, 8)) plt.title( 'The Number of patients infected with the COVID-19 in South Africa') cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax.text(x, y, f'{m:.0f}') return plt.show() def overall_data(): confirmed_results = pd.read_csv( 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv' ) trial = pd.notnull(confirmed_results['age']) attempt = pd.isnull(confirmed_results['age']) cumulative_cases = confirmed_results[trial][['date', 'country']].groupby( 
'date').count().cumsum().reset_index().rename(columns={'country': 'cumulative sum'}) fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10)) graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde= True, rug=True, ax=ax[0, 0]) ax[0, 0].title.set_text( 'Distribution of Age of the COVID-19 Positive Cases in South Africa') graph2 = sns.countplot(confirmed_results[trial]['province'], order= confirmed_results[trial]['province'].value_counts().index, palette= 'RdBu', ax=ax[0, 1]) ax[0, 1].title.set_text( 'Countplot of the COVID-19 Positive Cases in each South African Province' ) graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0]) ax[1, 0].title.set_text( 'Gender difference of the patients infected with COVID-19 in South Africa' ) graph4 = sns.lineplot(data=cumulative_cases, x='date', y= 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1]) for i in cumulative_cases.groupby('date'): for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum'] ].values: ax[1, 1].text(x, y, f'{m:.0f}') ax[1, 1].title.set_text( 'The Number of patients infected with the COVID-19 in South Africa') ax[1, 1].tick_params(labelrotation=45) print('Total Number of Cases without Null Values: ' + str( confirmed_results[trial].shape[0])) print('Total Number of Cases with Null Values: ' + str( confirmed_results[attempt].shape[0])) print('Total Number of Cases: ' + str(confirmed_results.shape[0])) return graph1, graph2, graph3, graph4 <|reserved_special_token_1|> import numpy as np import pandas as pd import matplotlib.pyplot as plt import seaborn as sns def distribution(): ##testing_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_testing.csv') confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv') trial = pd.notnull(confirmed_results["age"]) ##attempt = pd.isnull(confirmed_results["age"]) 
return(confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD','geo_subdivision'])) def distribution_plot(): confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv') trial = pd.notnull(confirmed_results["age"]) ##attempt = pd.isnull(confirmed_results["age"]) print('Enter the number of bins between 0 and 100') n_of_bins = input(str()) print('Enter the number of xticks between 0 and 4') xticks = input(str()) plt.figure(figsize=(15,8)) #Set figure size plt.title('Distribution of Age of the COVID-19 Positive Cases in South Africa') #Set axis title plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) # Set label locations. plots = sns.distplot(confirmed_results[trial]['age'], bins=int(n_of_bins), kde=True, rug=True) #"rug" will give the ticks on the x-axis print('The highest age of all COVID-19 patients is: ' + str(confirmed_results[trial]['age'].max())) return(plots) def other_distributions(): confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv') trial = pd.notnull(confirmed_results["age"]) ##attempt = pd.isnull(confirmed_results["age"]) plt.figure(figsize=(15,8)) #Set figure size plt.title('Countplot of the COVID-19 Positive Cases in each South African Province') sns.countplot(confirmed_results[trial]['province'], order = confirmed_results[trial]['province'].value_counts().index, palette='RdBu') plt.figure(figsize=(15,8)) #Set figure size plt.title('Gender difference of the COVID-19 in South Africa') sns.countplot(confirmed_results[trial]['gender']) print('Number of rows and columns in the dataframe: ' + str(confirmed_results[trial].shape)) #"shape" will give this tupple of rows and columns print('Number of rows: ' + str(confirmed_results[trial].shape[0])) #you can index a tuple like a list! 
confirmed_results[trial][['date', 'country']].groupby('date').count() confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) # "cumsum()" will give the cumulative sum plt.figure(figsize=(25,8)) #Set figure size plt.title('The Number of patients infected with the COVID-19 in South Africa') cumulative_cases = confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) #create cumulative dataframe ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False) for i in cumulative_cases.groupby('date'): #i[1] is a grouped data frame; looping through each data row in the cumulative dataframe for x,y,m in i[1][['date','cumulative sum','cumulative sum']].values: # x = x value; y = y_value ; m = marker value ax.text(x,y,f'{m:.0f}') #ax.text will return(plt.show()) def overall_data(): confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv') trial = pd.notnull(confirmed_results["age"]) attempt = pd.isnull(confirmed_results["age"]) cumulative_cases = confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) #create cumulative dataframe fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35,10)) graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde=True, rug=True, ax=ax[0,0]) ax[0,0].title.set_text('Distribution of Age of the COVID-19 Positive Cases in South Africa') graph2 = sns.countplot(confirmed_results[trial]['province'], order = confirmed_results[trial]['province'].value_counts().index, palette='RdBu', ax=ax[0,1]) ax[0,1].title.set_text('Countplot of the COVID-19 Positive Cases in each South African Province') graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1,0]) ax[1,0].title.set_text('Gender 
difference of the patients infected with COVID-19 in South Africa') graph4 = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', marker='o', dashes=False, ax=ax[1,1]) for i in cumulative_cases.groupby('date'): #i[1] is a grouped data frame; looping through each data row in the cumulative dataframe for x,y,m in i[1][['date','cumulative sum','cumulative sum']].values: # x = x value; y = y_value ; m = marker value ax[1,1].text(x,y,f'{m:.0f}') #ax.text will ax[1,1].title.set_text('The Number of patients infected with the COVID-19 in South Africa') ax[1,1].tick_params(labelrotation=45) print('Total Number of Cases without Null Values: ' + str(confirmed_results[trial].shape[0])) print('Total Number of Cases with Null Values: ' + str(confirmed_results[attempt].shape[0])) print('Total Number of Cases: ' + str(confirmed_results.shape[0])) return(graph1,graph2,graph3,graph4)
flexible
{ "blob_id": "38be4e75c2311a1e5a443d39a414058dc4d1879b", "index": 2320, "step-1": "<mask token>\n\n\ndef distribution_plot():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n print('Enter the number of bins between 0 and 100')\n n_of_bins = input(str())\n print('Enter the number of xticks between 0 and 4')\n xticks = input(str())\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n plt.xticks(np.arange(confirmed_results[trial]['age'].min(),\n confirmed_results[trial]['age'].max(), step=4))\n plots = sns.distplot(confirmed_results[trial]['age'], bins=int(\n n_of_bins), kde=True, rug=True)\n print('The highest age of all COVID-19 patients is: ' + str(\n confirmed_results[trial]['age'].max()))\n return plots\n\n\ndef other_distributions():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu')\n plt.figure(figsize=(15, 8))\n plt.title('Gender difference of the COVID-19 in South Africa')\n sns.countplot(confirmed_results[trial]['gender'])\n print('Number of rows and columns in the dataframe: ' + str(\n confirmed_results[trial].shape))\n print('Number of rows: ' + str(confirmed_results[trial].shape[0]))\n confirmed_results[trial][['date', 'country']].groupby('date').count()\n confirmed_results[trial][['date', 'country']].groupby('date').count(\n ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'})\n plt.figure(figsize=(25, 8))\n plt.title(\n 'The Number of patients 
infected with the COVID-19 in South Africa')\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum',\n marker='o', dashes=False)\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax.text(x, y, f'{m:.0f}')\n return plt.show()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef distribution_plot():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n print('Enter the number of bins between 0 and 100')\n n_of_bins = input(str())\n print('Enter the number of xticks between 0 and 4')\n xticks = input(str())\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n plt.xticks(np.arange(confirmed_results[trial]['age'].min(),\n confirmed_results[trial]['age'].max(), step=4))\n plots = sns.distplot(confirmed_results[trial]['age'], bins=int(\n n_of_bins), kde=True, rug=True)\n print('The highest age of all COVID-19 patients is: ' + str(\n confirmed_results[trial]['age'].max()))\n return plots\n\n\ndef other_distributions():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu')\n plt.figure(figsize=(15, 8))\n plt.title('Gender difference of the COVID-19 in South Africa')\n sns.countplot(confirmed_results[trial]['gender'])\n print('Number of 
rows and columns in the dataframe: ' + str(\n confirmed_results[trial].shape))\n print('Number of rows: ' + str(confirmed_results[trial].shape[0]))\n confirmed_results[trial][['date', 'country']].groupby('date').count()\n confirmed_results[trial][['date', 'country']].groupby('date').count(\n ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'})\n plt.figure(figsize=(25, 8))\n plt.title(\n 'The Number of patients infected with the COVID-19 in South Africa')\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum',\n marker='o', dashes=False)\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax.text(x, y, f'{m:.0f}')\n return plt.show()\n\n\ndef overall_data():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n attempt = pd.isnull(confirmed_results['age'])\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10))\n graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde=\n True, rug=True, ax=ax[0, 0])\n ax[0, 0].title.set_text(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n graph2 = sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu', ax=ax[0, 1])\n ax[0, 1].title.set_text(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0])\n ax[1, 0].title.set_text(\n 'Gender difference of the 
patients infected with COVID-19 in South Africa'\n )\n graph4 = sns.lineplot(data=cumulative_cases, x='date', y=\n 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1])\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax[1, 1].text(x, y, f'{m:.0f}')\n ax[1, 1].title.set_text(\n 'The Number of patients infected with the COVID-19 in South Africa')\n ax[1, 1].tick_params(labelrotation=45)\n print('Total Number of Cases without Null Values: ' + str(\n confirmed_results[trial].shape[0]))\n print('Total Number of Cases with Null Values: ' + str(\n confirmed_results[attempt].shape[0]))\n print('Total Number of Cases: ' + str(confirmed_results.shape[0]))\n return graph1, graph2, graph3, graph4\n", "step-3": "<mask token>\n\n\ndef distribution():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n return confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD',\n 'geo_subdivision'])\n\n\ndef distribution_plot():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n print('Enter the number of bins between 0 and 100')\n n_of_bins = input(str())\n print('Enter the number of xticks between 0 and 4')\n xticks = input(str())\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n plt.xticks(np.arange(confirmed_results[trial]['age'].min(),\n confirmed_results[trial]['age'].max(), step=4))\n plots = sns.distplot(confirmed_results[trial]['age'], bins=int(\n n_of_bins), kde=True, rug=True)\n print('The highest age of all COVID-19 patients is: ' + str(\n confirmed_results[trial]['age'].max()))\n return plots\n\n\ndef other_distributions():\n confirmed_results = pd.read_csv(\n 
'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu')\n plt.figure(figsize=(15, 8))\n plt.title('Gender difference of the COVID-19 in South Africa')\n sns.countplot(confirmed_results[trial]['gender'])\n print('Number of rows and columns in the dataframe: ' + str(\n confirmed_results[trial].shape))\n print('Number of rows: ' + str(confirmed_results[trial].shape[0]))\n confirmed_results[trial][['date', 'country']].groupby('date').count()\n confirmed_results[trial][['date', 'country']].groupby('date').count(\n ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'})\n plt.figure(figsize=(25, 8))\n plt.title(\n 'The Number of patients infected with the COVID-19 in South Africa')\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum',\n marker='o', dashes=False)\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax.text(x, y, f'{m:.0f}')\n return plt.show()\n\n\ndef overall_data():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n attempt = pd.isnull(confirmed_results['age'])\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10))\n graph1 = 
sns.distplot(confirmed_results[trial]['age'], bins=20, kde=\n True, rug=True, ax=ax[0, 0])\n ax[0, 0].title.set_text(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n graph2 = sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu', ax=ax[0, 1])\n ax[0, 1].title.set_text(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0])\n ax[1, 0].title.set_text(\n 'Gender difference of the patients infected with COVID-19 in South Africa'\n )\n graph4 = sns.lineplot(data=cumulative_cases, x='date', y=\n 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1])\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax[1, 1].text(x, y, f'{m:.0f}')\n ax[1, 1].title.set_text(\n 'The Number of patients infected with the COVID-19 in South Africa')\n ax[1, 1].tick_params(labelrotation=45)\n print('Total Number of Cases without Null Values: ' + str(\n confirmed_results[trial].shape[0]))\n print('Total Number of Cases with Null Values: ' + str(\n confirmed_results[attempt].shape[0]))\n print('Total Number of Cases: ' + str(confirmed_results.shape[0]))\n return graph1, graph2, graph3, graph4\n", "step-4": "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\n\ndef distribution():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n return confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD',\n 'geo_subdivision'])\n\n\ndef distribution_plot():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n 
print('Enter the number of bins between 0 and 100')\n n_of_bins = input(str())\n print('Enter the number of xticks between 0 and 4')\n xticks = input(str())\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n plt.xticks(np.arange(confirmed_results[trial]['age'].min(),\n confirmed_results[trial]['age'].max(), step=4))\n plots = sns.distplot(confirmed_results[trial]['age'], bins=int(\n n_of_bins), kde=True, rug=True)\n print('The highest age of all COVID-19 patients is: ' + str(\n confirmed_results[trial]['age'].max()))\n return plots\n\n\ndef other_distributions():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n plt.figure(figsize=(15, 8))\n plt.title(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu')\n plt.figure(figsize=(15, 8))\n plt.title('Gender difference of the COVID-19 in South Africa')\n sns.countplot(confirmed_results[trial]['gender'])\n print('Number of rows and columns in the dataframe: ' + str(\n confirmed_results[trial].shape))\n print('Number of rows: ' + str(confirmed_results[trial].shape[0]))\n confirmed_results[trial][['date', 'country']].groupby('date').count()\n confirmed_results[trial][['date', 'country']].groupby('date').count(\n ).cumsum().reset_index().rename(columns={'country': 'cumulative sum'})\n plt.figure(figsize=(25, 8))\n plt.title(\n 'The Number of patients infected with the COVID-19 in South Africa')\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum',\n marker='o', dashes=False)\n 
for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax.text(x, y, f'{m:.0f}')\n return plt.show()\n\n\ndef overall_data():\n confirmed_results = pd.read_csv(\n 'https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv'\n )\n trial = pd.notnull(confirmed_results['age'])\n attempt = pd.isnull(confirmed_results['age'])\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby(\n 'date').count().cumsum().reset_index().rename(columns={'country':\n 'cumulative sum'})\n fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35, 10))\n graph1 = sns.distplot(confirmed_results[trial]['age'], bins=20, kde=\n True, rug=True, ax=ax[0, 0])\n ax[0, 0].title.set_text(\n 'Distribution of Age of the COVID-19 Positive Cases in South Africa')\n graph2 = sns.countplot(confirmed_results[trial]['province'], order=\n confirmed_results[trial]['province'].value_counts().index, palette=\n 'RdBu', ax=ax[0, 1])\n ax[0, 1].title.set_text(\n 'Countplot of the COVID-19 Positive Cases in each South African Province'\n )\n graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1, 0])\n ax[1, 0].title.set_text(\n 'Gender difference of the patients infected with COVID-19 in South Africa'\n )\n graph4 = sns.lineplot(data=cumulative_cases, x='date', y=\n 'cumulative sum', marker='o', dashes=False, ax=ax[1, 1])\n for i in cumulative_cases.groupby('date'):\n for x, y, m in i[1][['date', 'cumulative sum', 'cumulative sum']\n ].values:\n ax[1, 1].text(x, y, f'{m:.0f}')\n ax[1, 1].title.set_text(\n 'The Number of patients infected with the COVID-19 in South Africa')\n ax[1, 1].tick_params(labelrotation=45)\n print('Total Number of Cases without Null Values: ' + str(\n confirmed_results[trial].shape[0]))\n print('Total Number of Cases with Null Values: ' + str(\n confirmed_results[attempt].shape[0]))\n print('Total Number of Cases: ' + str(confirmed_results.shape[0]))\n return 
graph1, graph2, graph3, graph4\n", "step-5": "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\ndef distribution():\n ##testing_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_testing.csv')\n confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv')\n \n trial = pd.notnull(confirmed_results[\"age\"])\n\n ##attempt = pd.isnull(confirmed_results[\"age\"])\n \n return(confirmed_results[trial].drop(columns=['case_id', 'YYYYMMDD','geo_subdivision']))\n\ndef distribution_plot():\n confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv')\n trial = pd.notnull(confirmed_results[\"age\"])\n ##attempt = pd.isnull(confirmed_results[\"age\"])\n print('Enter the number of bins between 0 and 100')\n n_of_bins = input(str())\n print('Enter the number of xticks between 0 and 4')\n xticks = input(str())\n plt.figure(figsize=(15,8)) #Set figure size\n plt.title('Distribution of Age of the COVID-19 Positive Cases in South Africa') #Set axis title\n plt.xticks(np.arange(confirmed_results[trial]['age'].min(), confirmed_results[trial]['age'].max(), step=4)) # Set label locations.\n\n plots = sns.distplot(confirmed_results[trial]['age'],\n bins=int(n_of_bins),\n kde=True,\n rug=True) #\"rug\" will give the ticks on the x-axis\n print('The highest age of all COVID-19 patients is: ' + str(confirmed_results[trial]['age'].max()))\n \n return(plots)\n \ndef other_distributions():\n confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv')\n trial = pd.notnull(confirmed_results[\"age\"])\n ##attempt = pd.isnull(confirmed_results[\"age\"])\n plt.figure(figsize=(15,8)) #Set figure size\n plt.title('Countplot of the COVID-19 Positive Cases in each South African Province')\n\n 
sns.countplot(confirmed_results[trial]['province'],\n order = confirmed_results[trial]['province'].value_counts().index,\n palette='RdBu')\n plt.figure(figsize=(15,8)) #Set figure size\n plt.title('Gender difference of the COVID-19 in South Africa')\n\n sns.countplot(confirmed_results[trial]['gender'])\n print('Number of rows and columns in the dataframe: ' + str(confirmed_results[trial].shape)) #\"shape\" will give this tupple of rows and columns\n print('Number of rows: ' + str(confirmed_results[trial].shape[0])) #you can index a tuple like a list!\n confirmed_results[trial][['date', 'country']].groupby('date').count()\n confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) # \"cumsum()\" will give the cumulative sum\n plt.figure(figsize=(25,8)) #Set figure size\n plt.title('The Number of patients infected with the COVID-19 in South Africa')\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) #create cumulative dataframe\n\n ax = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', \n marker='o', \n dashes=False)\n\n for i in cumulative_cases.groupby('date'):\n #i[1] is a grouped data frame; looping through each data row in the cumulative dataframe\n for x,y,m in i[1][['date','cumulative sum','cumulative sum']].values: # x = x value; y = y_value ; m = marker value\n ax.text(x,y,f'{m:.0f}') #ax.text will \n\n return(plt.show())\n\ndef overall_data():\n confirmed_results = pd.read_csv('https://raw.githubusercontent.com/dsfsi/covid19za/master/data/covid19za_timeline_confirmed.csv')\n trial = pd.notnull(confirmed_results[\"age\"])\n attempt = pd.isnull(confirmed_results[\"age\"])\n cumulative_cases = confirmed_results[trial][['date', 'country']].groupby('date').count().cumsum().reset_index().rename(columns={'country':'cumulative sum'}) #create cumulative dataframe\n 
fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(35,10))\n\n graph1 = sns.distplot(confirmed_results[trial]['age'],\n bins=20,\n kde=True,\n rug=True,\n ax=ax[0,0])\n ax[0,0].title.set_text('Distribution of Age of the COVID-19 Positive Cases in South Africa')\n\n graph2 = sns.countplot(confirmed_results[trial]['province'],\n order = confirmed_results[trial]['province'].value_counts().index,\n palette='RdBu',\n ax=ax[0,1])\n\n ax[0,1].title.set_text('Countplot of the COVID-19 Positive Cases in each South African Province')\n\n graph3 = sns.countplot(confirmed_results[trial]['gender'], ax=ax[1,0])\n\n ax[1,0].title.set_text('Gender difference of the patients infected with COVID-19 in South Africa')\n\n graph4 = sns.lineplot(data=cumulative_cases, x='date', y='cumulative sum', \n marker='o', \n dashes=False,\n ax=ax[1,1])\n\n\n for i in cumulative_cases.groupby('date'):\n #i[1] is a grouped data frame; looping through each data row in the cumulative dataframe\n for x,y,m in i[1][['date','cumulative sum','cumulative sum']].values: # x = x value; y = y_value ; m = marker value\n ax[1,1].text(x,y,f'{m:.0f}') #ax.text will \n\n ax[1,1].title.set_text('The Number of patients infected with the COVID-19 in South Africa')\n ax[1,1].tick_params(labelrotation=45)\n\n print('Total Number of Cases without Null Values: ' + str(confirmed_results[trial].shape[0]))\n print('Total Number of Cases with Null Values: ' + str(confirmed_results[attempt].shape[0]))\n print('Total Number of Cases: ' + str(confirmed_results.shape[0]))\n \n return(graph1,graph2,graph3,graph4)", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> urlpatterns = [path('contacts', apiviews.ContactsView.as_view(), name= 'contacts'), path('contact/<int:pk>', apiviews.ContactView.as_view(), name='contact'), path('signup', apiviews.create_user_with_token, name= 'signup'), path('signin', apiviews.signin, name='signin'), path( 'signout', apiviews.sign_out, name='signout'), path('api-token-auth/', obtain_auth_token, name='api_token_auth')] <|reserved_special_token_1|> from django.urls import path from . import apiviews from rest_framework.authtoken.views import obtain_auth_token urlpatterns = [path('contacts', apiviews.ContactsView.as_view(), name= 'contacts'), path('contact/<int:pk>', apiviews.ContactView.as_view(), name='contact'), path('signup', apiviews.create_user_with_token, name= 'signup'), path('signin', apiviews.signin, name='signin'), path( 'signout', apiviews.sign_out, name='signout'), path('api-token-auth/', obtain_auth_token, name='api_token_auth')]
flexible
{ "blob_id": "5f56838ad0717c4f7a2da6b53f586a88b0166113", "index": 8629, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = [path('contacts', apiviews.ContactsView.as_view(), name=\n 'contacts'), path('contact/<int:pk>', apiviews.ContactView.as_view(),\n name='contact'), path('signup', apiviews.create_user_with_token, name=\n 'signup'), path('signin', apiviews.signin, name='signin'), path(\n 'signout', apiviews.sign_out, name='signout'), path('api-token-auth/',\n obtain_auth_token, name='api_token_auth')]\n", "step-3": "from django.urls import path\nfrom . import apiviews\nfrom rest_framework.authtoken.views import obtain_auth_token\nurlpatterns = [path('contacts', apiviews.ContactsView.as_view(), name=\n 'contacts'), path('contact/<int:pk>', apiviews.ContactView.as_view(),\n name='contact'), path('signup', apiviews.create_user_with_token, name=\n 'signup'), path('signin', apiviews.signin, name='signin'), path(\n 'signout', apiviews.sign_out, name='signout'), path('api-token-auth/',\n obtain_auth_token, name='api_token_auth')]\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> aq.add_argument('-i', '--input', required=True, help='input image path') aq.add_argument('-o', '--output', help= 'path where you want to download the image') <|reserved_special_token_0|> if args['output']: cv2.imwrite(args['output'], cartoon) cv2.imshow('Cartoon', cartoon) cv2.waitKey(0) cv2.destroyAllWindows() <|reserved_special_token_1|> <|reserved_special_token_0|> aq = argparse.ArgumentParser() aq.add_argument('-i', '--input', required=True, help='input image path') aq.add_argument('-o', '--output', help= 'path where you want to download the image') args = vars(aq.parse_args()) img = cv2.imread(args['input']) gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) gray = cv2.medianBlur(gray, 5) edges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2. THRESH_BINARY, 9, 9) color = cv2.bilateralFilter(img, 2, 250, 250) cartoon = cv2.bitwise_or(color, color, mask=edges) if args['output']: cv2.imwrite(args['output'], cartoon) cv2.imshow('Cartoon', cartoon) cv2.waitKey(0) cv2.destroyAllWindows() <|reserved_special_token_1|> import cv2 import numpy as np import argparse aq = argparse.ArgumentParser() aq.add_argument('-i', '--input', required=True, help='input image path') aq.add_argument('-o', '--output', help= 'path where you want to download the image') args = vars(aq.parse_args()) img = cv2.imread(args['input']) gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) gray = cv2.medianBlur(gray, 5) edges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2. 
THRESH_BINARY, 9, 9) color = cv2.bilateralFilter(img, 2, 250, 250) cartoon = cv2.bitwise_or(color, color, mask=edges) if args['output']: cv2.imwrite(args['output'], cartoon) cv2.imshow('Cartoon', cartoon) cv2.waitKey(0) cv2.destroyAllWindows() <|reserved_special_token_1|> # importing libraries import cv2 import numpy as np import argparse aq = argparse.ArgumentParser() aq.add_argument('-i', '--input', required=True, help="input image path") aq.add_argument('-o', '--output', help="path where you want to download the image") args = vars(aq.parse_args()) # reading image img = cv2.imread(args['input']) # Edges gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) gray = cv2.medianBlur(gray, 5) edges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 9, 9) # Cartoonization color = cv2.bilateralFilter(img, 2, 250, 250) cartoon = cv2.bitwise_or(color, color, mask=edges) if(args['output']): cv2.imwrite(args['output'], cartoon) cv2.imshow("Cartoon", cartoon) cv2.waitKey(0) cv2.destroyAllWindows()
flexible
{ "blob_id": "10cefb1cf2392fdcd368f11d0d69774a9ffa73ec", "index": 2816, "step-1": "<mask token>\n", "step-2": "<mask token>\naq.add_argument('-i', '--input', required=True, help='input image path')\naq.add_argument('-o', '--output', help=\n 'path where you want to download the image')\n<mask token>\nif args['output']:\n cv2.imwrite(args['output'], cartoon)\ncv2.imshow('Cartoon', cartoon)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-3": "<mask token>\naq = argparse.ArgumentParser()\naq.add_argument('-i', '--input', required=True, help='input image path')\naq.add_argument('-o', '--output', help=\n 'path where you want to download the image')\nargs = vars(aq.parse_args())\nimg = cv2.imread(args['input'])\ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\ngray = cv2.medianBlur(gray, 5)\nedges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.\n THRESH_BINARY, 9, 9)\ncolor = cv2.bilateralFilter(img, 2, 250, 250)\ncartoon = cv2.bitwise_or(color, color, mask=edges)\nif args['output']:\n cv2.imwrite(args['output'], cartoon)\ncv2.imshow('Cartoon', cartoon)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-4": "import cv2\nimport numpy as np\nimport argparse\naq = argparse.ArgumentParser()\naq.add_argument('-i', '--input', required=True, help='input image path')\naq.add_argument('-o', '--output', help=\n 'path where you want to download the image')\nargs = vars(aq.parse_args())\nimg = cv2.imread(args['input'])\ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\ngray = cv2.medianBlur(gray, 5)\nedges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.\n THRESH_BINARY, 9, 9)\ncolor = cv2.bilateralFilter(img, 2, 250, 250)\ncartoon = cv2.bitwise_or(color, color, mask=edges)\nif args['output']:\n cv2.imwrite(args['output'], cartoon)\ncv2.imshow('Cartoon', cartoon)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n", "step-5": "# importing libraries \nimport cv2 \nimport numpy as np \nimport argparse\n\naq = argparse.ArgumentParser()\n\naq.add_argument('-i', 
'--input', required=True, help=\"input image path\")\n\naq.add_argument('-o', '--output', help=\"path where you want to download the image\")\n\nargs = vars(aq.parse_args())\n# reading image \nimg = cv2.imread(args['input']) \n \n# Edges \ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) \ngray = cv2.medianBlur(gray, 5) \nedges = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, \n cv2.THRESH_BINARY, 9, 9) \n \n# Cartoonization \ncolor = cv2.bilateralFilter(img, 2, 250, 250) \ncartoon = cv2.bitwise_or(color, color, mask=edges) \n \nif(args['output']):\n\tcv2.imwrite(args['output'], cartoon)\n\n\ncv2.imshow(\"Cartoon\", cartoon) \ncv2.waitKey(0) \ncv2.destroyAllWindows() ", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> pairs = ['usdt', 'btc'] warn_msg = '** WARN ** ' info_msg = '** INFO **'
flexible
{ "blob_id": "26289d88ac51ee359faa81ca70b01879d2b1f840", "index": 9460, "step-1": "<mask token>\n", "step-2": "pairs = ['usdt', 'btc']\nwarn_msg = '** WARN ** '\ninfo_msg = '** INFO **'\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> def nothing(x): pass <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def nothing(x): pass cv.namedWindow('Binary') cv.createTrackbar('threshold', 'Binary', 0, 255, nothing) cv.setTrackbarPos('threshold', 'Binary', 127) <|reserved_special_token_0|> while True: thre = cv.getTrackbarPos('threshold', 'Binary') ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV) img_result = cv.bitwise_and(img_color, img_color, mask=img_binary) cv.imshow('Result', img_result) cv.imshow('Binary', img_binary) if cv.waitKey(1) == 27: break cv.destroyAllWindows() <|reserved_special_token_1|> <|reserved_special_token_0|> def nothing(x): pass cv.namedWindow('Binary') cv.createTrackbar('threshold', 'Binary', 0, 255, nothing) cv.setTrackbarPos('threshold', 'Binary', 127) img_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR) img_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY) while True: thre = cv.getTrackbarPos('threshold', 'Binary') ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV) img_result = cv.bitwise_and(img_color, img_color, mask=img_binary) cv.imshow('Result', img_result) cv.imshow('Binary', img_binary) if cv.waitKey(1) == 27: break cv.destroyAllWindows() <|reserved_special_token_1|> import cv2 as cv def nothing(x): pass cv.namedWindow('Binary') cv.createTrackbar('threshold', 'Binary', 0, 255, nothing) cv.setTrackbarPos('threshold', 'Binary', 127) img_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR) img_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY) while True: thre = cv.getTrackbarPos('threshold', 'Binary') ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV) img_result = cv.bitwise_and(img_color, img_color, mask=img_binary) cv.imshow('Result', img_result) cv.imshow('Binary', img_binary) if cv.waitKey(1) == 27: break cv.destroyAllWindows() <|reserved_special_token_1|> import cv2 as cv def nothing(x): pass cv.namedWindow('Binary') 
cv.createTrackbar('threshold', 'Binary', 0, 255, nothing) cv.setTrackbarPos('threshold', 'Binary', 127) img_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR) img_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY) while(True): thre = cv.getTrackbarPos('threshold', 'Binary') # THRESH_BINARY_INV : 이진화 결과를 반전 시킴 ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV) img_result = cv.bitwise_and(img_color, img_color, mask=img_binary) cv.imshow('Result', img_result) cv.imshow('Binary', img_binary) if cv.waitKey(1) == 27: break cv.destroyAllWindows()
flexible
{ "blob_id": "034d4027ea98bca656178b66c5c6e6e8b13e4b9e", "index": 4219, "step-1": "<mask token>\n\n\ndef nothing(x):\n pass\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef nothing(x):\n pass\n\n\ncv.namedWindow('Binary')\ncv.createTrackbar('threshold', 'Binary', 0, 255, nothing)\ncv.setTrackbarPos('threshold', 'Binary', 127)\n<mask token>\nwhile True:\n thre = cv.getTrackbarPos('threshold', 'Binary')\n ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV)\n img_result = cv.bitwise_and(img_color, img_color, mask=img_binary)\n cv.imshow('Result', img_result)\n cv.imshow('Binary', img_binary)\n if cv.waitKey(1) == 27:\n break\ncv.destroyAllWindows()\n", "step-3": "<mask token>\n\n\ndef nothing(x):\n pass\n\n\ncv.namedWindow('Binary')\ncv.createTrackbar('threshold', 'Binary', 0, 255, nothing)\ncv.setTrackbarPos('threshold', 'Binary', 127)\nimg_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR)\nimg_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY)\nwhile True:\n thre = cv.getTrackbarPos('threshold', 'Binary')\n ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV)\n img_result = cv.bitwise_and(img_color, img_color, mask=img_binary)\n cv.imshow('Result', img_result)\n cv.imshow('Binary', img_binary)\n if cv.waitKey(1) == 27:\n break\ncv.destroyAllWindows()\n", "step-4": "import cv2 as cv\n\n\ndef nothing(x):\n pass\n\n\ncv.namedWindow('Binary')\ncv.createTrackbar('threshold', 'Binary', 0, 255, nothing)\ncv.setTrackbarPos('threshold', 'Binary', 127)\nimg_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR)\nimg_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY)\nwhile True:\n thre = cv.getTrackbarPos('threshold', 'Binary')\n ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV)\n img_result = cv.bitwise_and(img_color, img_color, mask=img_binary)\n cv.imshow('Result', img_result)\n cv.imshow('Binary', img_binary)\n if cv.waitKey(1) == 27:\n break\ncv.destroyAllWindows()\n", "step-5": "import cv2 as 
cv\n\ndef nothing(x):\n pass\n\n\ncv.namedWindow('Binary')\ncv.createTrackbar('threshold', 'Binary', 0, 255, nothing)\ncv.setTrackbarPos('threshold', 'Binary', 127)\n\nimg_color = cv.imread('../sample/ball.png', cv.IMREAD_COLOR)\nimg_gray = cv.cvtColor(img_color, cv.COLOR_BGR2GRAY)\n\nwhile(True):\n thre = cv.getTrackbarPos('threshold', 'Binary')\n # THRESH_BINARY_INV : 이진화 결과를 반전 시킴\n ret, img_binary = cv.threshold(img_gray, thre, 255, cv.THRESH_BINARY_INV)\n\n img_result = cv.bitwise_and(img_color, img_color, mask=img_binary)\n\n cv.imshow('Result', img_result)\n cv.imshow('Binary', img_binary)\n\n if cv.waitKey(1) == 27:\n break\n\n\ncv.destroyAllWindows()", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class Solution: <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Solution: def letterCombinations(self, digits: str) ->List[str]: d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7 ): 'pqrs', (8): 'tuv', (9): 'wxyz'} def merge(body, digits): if len(digits) == 0: ans.append(body) return else: for c in d[int(digits[0])]: merge(body + c, digits[1:]) ans = [] merge('', digits) return ans if len(ans) != 1 else [] <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Solution: def letterCombinations(self, digits: str) ->List[str]: d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7 ): 'pqrs', (8): 'tuv', (9): 'wxyz'} def merge(body, digits): if len(digits) == 0: ans.append(body) return else: for c in d[int(digits[0])]: merge(body + c, digits[1:]) ans = [] merge('', digits) return ans if len(ans) != 1 else [] print(Solution().letterCombinations('')) <|reserved_special_token_1|> <|reserved_special_token_0|> from typing import List class Solution: def letterCombinations(self, digits: str) ->List[str]: d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7 ): 'pqrs', (8): 'tuv', (9): 'wxyz'} def merge(body, digits): if len(digits) == 0: ans.append(body) return else: for c in d[int(digits[0])]: merge(body + c, digits[1:]) ans = [] merge('', digits) return ans if len(ans) != 1 else [] print(Solution().letterCombinations('')) <|reserved_special_token_1|> #!/usr/bin/env python # -*- coding: utf-8 -*- ''' @author: Allen(Zifeng) An @course: @contact: anz8@mcmaster.ca @file: 17. 
Letter Combinations of a Phone Number.py @time: 2020/2/2 21:18 ''' from typing import List class Solution: def letterCombinations(self, digits: str) -> List[str]: d={2:'abc', 3:'def', 4:'ghi', 5:'jkl', 6:'mno', 7:'pqrs', 8:'tuv', 9:'wxyz' } def merge(body,digits): if len(digits)==0: ans.append(body) return else: for c in d[int(digits[0])]: merge(body+c,digits[1:]) # arr=[] ans=[] # for digit in digits: # arr.append(list(d[int(digit)])) # print(arr) merge('',digits) return ans if len(ans)!=1 else [] print(Solution().letterCombinations('')) # # class Solution: # def letterCombinations(self, digits: str) -> List[str]: # d={2:'abc', # 3:'def', # 4:'ghi', # 5:'jkl', # 6:'mno', # 7:'pqrs', # 8:'tuv', # 9:'wxyz' # } # # cmb=[''] if len(digits)!=0 else [] # # for digit in digits: # cmb=[p+q for p in cmb for q in d[int(digit)]] # # return cmb # print(Solution().letterCombinations('23'))
flexible
{ "blob_id": "de925b8f6bd31bfdfd1f04628659847b0761899d", "index": 340, "step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Solution:\n\n def letterCombinations(self, digits: str) ->List[str]:\n d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7\n ): 'pqrs', (8): 'tuv', (9): 'wxyz'}\n\n def merge(body, digits):\n if len(digits) == 0:\n ans.append(body)\n return\n else:\n for c in d[int(digits[0])]:\n merge(body + c, digits[1:])\n ans = []\n merge('', digits)\n return ans if len(ans) != 1 else []\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Solution:\n\n def letterCombinations(self, digits: str) ->List[str]:\n d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7\n ): 'pqrs', (8): 'tuv', (9): 'wxyz'}\n\n def merge(body, digits):\n if len(digits) == 0:\n ans.append(body)\n return\n else:\n for c in d[int(digits[0])]:\n merge(body + c, digits[1:])\n ans = []\n merge('', digits)\n return ans if len(ans) != 1 else []\n\n\nprint(Solution().letterCombinations(''))\n", "step-4": "<mask token>\nfrom typing import List\n\n\nclass Solution:\n\n def letterCombinations(self, digits: str) ->List[str]:\n d = {(2): 'abc', (3): 'def', (4): 'ghi', (5): 'jkl', (6): 'mno', (7\n ): 'pqrs', (8): 'tuv', (9): 'wxyz'}\n\n def merge(body, digits):\n if len(digits) == 0:\n ans.append(body)\n return\n else:\n for c in d[int(digits[0])]:\n merge(body + c, digits[1:])\n ans = []\n merge('', digits)\n return ans if len(ans) != 1 else []\n\n\nprint(Solution().letterCombinations(''))\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n'''\n@author: Allen(Zifeng) An\n@course: \n@contact: anz8@mcmaster.ca\n@file: 17. 
Letter Combinations of a Phone Number.py\n@time: 2020/2/2 21:18\n'''\nfrom typing import List\n\n\nclass Solution:\n def letterCombinations(self, digits: str) -> List[str]:\n d={2:'abc',\n 3:'def',\n 4:'ghi',\n 5:'jkl',\n 6:'mno',\n 7:'pqrs',\n 8:'tuv',\n 9:'wxyz'\n }\n\n def merge(body,digits):\n\n if len(digits)==0:\n ans.append(body)\n return\n else:\n for c in d[int(digits[0])]:\n merge(body+c,digits[1:])\n\n # arr=[]\n ans=[]\n # for digit in digits:\n # arr.append(list(d[int(digit)]))\n # print(arr)\n merge('',digits)\n return ans if len(ans)!=1 else []\n\nprint(Solution().letterCombinations(''))\n\n#\n# class Solution:\n# def letterCombinations(self, digits: str) -> List[str]:\n# d={2:'abc',\n# 3:'def',\n# 4:'ghi',\n# 5:'jkl',\n# 6:'mno',\n# 7:'pqrs',\n# 8:'tuv',\n# 9:'wxyz'\n# }\n#\n# cmb=[''] if len(digits)!=0 else []\n#\n# for digit in digits:\n# cmb=[p+q for p in cmb for q in d[int(digit)]]\n#\n# return cmb\n# print(Solution().letterCombinations('23'))\n\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]