code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
DIM Station Test
~~~~~~~~~~~~~~~~
Unit test for DIM Station
"""
import unittest
from dimp import ID, NetworkID
class StationTestCase(unittest.TestCase):
    """Tests for station identifiers plus two coin-emission simulations."""

    def test_identifier(self):
        """A station ID string must parse to a Station-network address."""
        print('\n---------------- %s' % self)
        text = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
        identifier = ID(text)
        self.assertEqual(identifier.address.network, NetworkID.Station)
        candidates = [text]
        self.assertTrue(identifier in candidates)

    def test_btc(self):
        """Simulate BTC emission: 144 blocks/day, reward halves every 4 years."""
        supply = 2100 * 10000
        reward = 50
        print('total BTC: %d, first package: %d' % (supply, reward))
        minted = 0
        blocks = 0
        days = 0
        years = 0
        while minted + reward <= supply:
            minted += reward
            blocks += 1
            # 6 blocks/hour * 24 hours = one simulated day.
            if blocks % (6 * 24) != 0:
                continue
            days += 1
            if days % 365 != 0:
                continue
            years += 1
            print('year %d, day %d: package=%f, spent=%f' % (years, days, reward, minted))
            if years % 4 == 0:
                reward /= 2.0
        print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (years, days, reward, minted, supply - minted))

    def test_dimt(self):
        """Simulate DIMT emission: one package/day, reward halves every 2 years."""
        supply = 15 * 10000 * 10000
        reward = 2 ** 20
        print('total money: %d, first package: %d' % (supply, reward))
        minted = 0
        days = 0
        years = 0
        while minted + reward <= supply and reward >= 1:
            minted += reward
            days += 1
            if days % 365 != 0:
                continue
            years += 1
            print('year %d, day %d: package=%f, spent=%f' % (years, days, reward, minted))
            if years % 2 == 0:
                reward /= 2.0
        print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (years, days, reward, minted, supply - minted))
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
normal
|
{
"blob_id": "533d0b883a0bbbb148f04826e4c0a2bcc31732e9",
"index": 6702,
"step-1": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n <mask token>\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nimport unittest\nfrom dimp import ID, NetworkID\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\"\"\"\n DIM Station Test\n ~~~~~~~~~~~~~~~~\n\n Unit test for DIM Station\n\"\"\"\n\nimport unittest\n\nfrom dimp import ID, NetworkID\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while (spent + package) <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while (spent + package) <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""
\tSeja bem-vindo ao Admirável Mundo Novo!
\tO objetivo do jogo é dar suporte ao desenvolvimento de Agentes Inteligentes que utilizam Deep Reinforcement Learning
\tpara tarefas de Processamento de Linguagem Natural em língua portuguesa.
\tAutor: Gabriel Pontes (@ograndoptimist)
"""
import random
from source.emulador.textos import ESTADOS
from source.emulador.textos import ACOES
from source.emulador.textos import REFORCOS
from source.emulador.textos import FINALIZADO
from source.emulador.textos import DIMENSOES
print(__doc__)
class AdmiravelMundoNovo(object):
def __init__(self):
self.reforco = 0
self._checa_estado = False
self._estado_texto = None
self._estado_acao = None
self._finalizado = False
self._espaco_acoes = None
self._estados_texto = ESTADOS
self._acao_textos = ACOES
self._acao_dimensoes = DIMENSOES
self._estados_reforcos = REFORCOS
self._estados_finalizado = FINALIZADO
self._valores_estados_iniciais = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
print("\tO objetivo do jogo é coletar a chave preciosa de ouro." +
".\n\tPara tal, você precisa vasculhar a Ilha da Fantasia.")
print()
self._escolha_estado_inicial()
def _escolha_estado_inicial(self):
escolha = random.choice(self._valores_estados_iniciais)
if escolha == 1:
self._estado_1()
elif escolha == 2:
self._estado_2()
elif escolha == 3:
self._estado_3()
elif escolha == 4:
self._estado_4()
elif escolha == 5:
self._estado_5()
elif escolha == 6:
self._estado_6()
elif escolha == 7:
self._estado_7()
elif escolha == 8:
self._estado_8()
elif escolha == 9:
self._estado_9()
elif escolha == 10:
self._estado_10()
elif escolha == 11:
self._estado_11()
elif escolha == 12:
self._estado_12()
elif escolha == 13:
self._estado_13()
elif escolha == 14:
self._estado_14()
def transicao_estado(self, acao):
if self._valor_estado == 2 and acao == 0:
self._estado_6()
elif self._valor_estado == 2 and acao == 1:
self._estado_3()
elif self._valor_estado in [1, 3, 4] and acao == 0:
self._estado_2()
elif self._valor_estado == 3 and acao == 1:
self._estado_5()
elif self._valor_estado == 2 and acao == 2:
self._estado_4()
elif self._valor_estado == 5 and acao == 1:
self._estado_3()
elif self._valor_estado == 6 and acao == 1:
self._estado_7()
elif self._valor_estado in [7, 8] and acao == 0:
self._estado_6()
elif self._valor_estado == 6 and acao == 2:
self._estado_8()
elif self._valor_estado in [6, 10, 11] and acao == 0:
self._estado_9()
elif self._valor_estado == 9 and acao == 1:
self._estado_10()
elif self._valor_estado == 9 and acao == 2:
self._estado_11()
elif self._valor_estado in [5, 9, 13] and acao == 0:
self._estado_12()
elif self._valor_estado == 12 and acao == 0:
self._estado_13()
elif self._valor_estado == 12 and acao == 1:
self._estado_final()
elif self._valor_estado == 9 and acao == 3:
self._estado_6()
elif self._valor_estado == 6 and acao == 3:
self._estado_2()
    def _estado_1(self):
        """State 1: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_1']
        self.reforco += self._reforco_imediato
        self._valor_estado = 1
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_1']
        self._estado_acao = self._acao_textos['estado_1']
        self._espaco_acoes = self._acao_dimensoes['estado_1']
    def _estado_2(self):
        """State 2: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_2']
        self.reforco += self._reforco_imediato
        self._valor_estado = 2
        # NOTE(review): reads the 'estado_1' finalizado flag, not 'estado_2' —
        # looks like copy-paste; confirm all non-final states share one flag.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_2']
        self._estado_acao = self._acao_textos['estado_2']
        self._espaco_acoes = self._acao_dimensoes['estado_2']
    def _estado_3(self):
        """State 3: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_3']
        self.reforco += self._reforco_imediato
        self._valor_estado = 3
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_3']
        self._estado_acao = self._acao_textos['estado_3']
        self._espaco_acoes = self._acao_dimensoes['estado_3']
    def _estado_4(self):
        """State 4: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_4']
        self.reforco += self._reforco_imediato
        self._valor_estado = 4
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_4']
        self._estado_acao = self._acao_textos['estado_4']
        self._espaco_acoes = self._acao_dimensoes['estado_4']
    def _estado_5(self):
        """State 5: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_5']
        self.reforco += self._reforco_imediato
        self._valor_estado = 5
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_5']
        self._estado_acao = self._acao_textos['estado_5']
        self._espaco_acoes = self._acao_dimensoes['estado_5']
    def _estado_6(self):
        """State 6: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_6']
        self.reforco += self._reforco_imediato
        self._valor_estado = 6
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_6']
        self._estado_acao = self._acao_textos['estado_6']
        self._espaco_acoes = self._acao_dimensoes['estado_6']
    def _estado_7(self):
        """State 7: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_7']
        self.reforco += self._reforco_imediato
        self._valor_estado = 7
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_7']
        self._estado_acao = self._acao_textos['estado_7']
        self._espaco_acoes = self._acao_dimensoes['estado_7']
    def _estado_8(self):
        """State 8: accrue a reward and load display data.

        NOTE(review): reward, action text and action-space size are all read
        from the 'estado_7' entries (only the state text uses 'estado_8') —
        confirm whether state 8 is meant to mirror state 7 or this is a
        copy-paste slip.
        """
        self._reforco_imediato = self._estados_reforcos['estado_7']
        self.reforco += self._reforco_imediato
        self._valor_estado = 8
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_8']
        self._estado_acao = self._acao_textos['estado_7']
        self._espaco_acoes = self._acao_dimensoes['estado_7']
    def _estado_9(self):
        """State 9: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_9']
        self.reforco += self._reforco_imediato
        self._valor_estado = 9
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_9']
        self._estado_acao = self._acao_textos['estado_9']
        self._espaco_acoes = self._acao_dimensoes['estado_9']
    def _estado_10(self):
        """State 10: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_10']
        self.reforco += self._reforco_imediato
        self._valor_estado = 10
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_10']
        self._estado_acao = self._acao_textos['estado_10']
        self._espaco_acoes = self._acao_dimensoes['estado_10']
    def _estado_11(self):
        """State 11: accrue a reward and load display data.

        NOTE(review): reward, action text and action-space size come from
        the 'estado_10' entries (only the state text uses 'estado_11') —
        confirm whether state 11 intentionally mirrors state 10.
        """
        self._reforco_imediato = self._estados_reforcos['estado_10']
        self.reforco += self._reforco_imediato
        self._valor_estado = 11
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_11']
        self._estado_acao = self._acao_textos['estado_10']
        self._espaco_acoes = self._acao_dimensoes['estado_10']
    def _estado_12(self):
        """State 12: accrue its reward and load this state's display data."""
        self._reforco_imediato = self._estados_reforcos['estado_12']
        self.reforco += self._reforco_imediato
        self._valor_estado = 12
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_12']
        self._estado_acao = self._acao_textos['estado_12']
        self._espaco_acoes = self._acao_dimensoes['estado_12']
    def _estado_13(self):
        """State 13: apply its reward as a PENALTY and load display data."""
        self._reforco_imediato = self._estados_reforcos['estado_13']
        # Reward is SUBTRACTED here — presumably a penalty state; confirm.
        self.reforco -= self._reforco_imediato
        self._valor_estado = 13
        # NOTE(review): 'estado_1' finalizado flag reused — likely copy-paste; confirm.
        self._finalizado = self._estados_finalizado['estado_1']
        self._estado_texto = self._estados_texto['estado_13']
        self._estado_acao = self._acao_textos['estado_13']
        self._espaco_acoes = self._acao_dimensoes['estado_13']
    def _estado_14(self):
        """State 14: apply its reward as a PENALTY and load display data.

        Unlike states 2-13, this one reads its own 'estado_14' finalizado
        flag. It is never chosen as a start state (see __init__) and no
        transition leads here — presumably dead code; confirm.
        """
        self._reforco_imediato = self._estados_reforcos['estado_14']
        # Reward is SUBTRACTED here — presumably a penalty state; confirm.
        self.reforco -= self._reforco_imediato
        self._valor_estado = 14
        self._finalizado = self._estados_finalizado['estado_14']
        self._estado_texto = self._estados_texto['estado_14']
        self._estado_acao = self._acao_textos['estado_14']
        self._espaco_acoes = self._acao_dimensoes['estado_14']
def _estado_final(self):
self._reforco_imediato = self._estados_reforcos['estado_final']
self.reforco += self._reforco_imediato
self._finalizado = self._estados_finalizado['estado_final']
self._estado_texto = self._estados_texto['estado_final']
print("\tReforço acumulado de {0}".format(self.reforco))
self._estado_acao = ""
def _pacote_acoes(self):
if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:
return [0]
elif self._valor_estado in [2]:
return [0, 1, 2]
elif self._valor_estado in [3, 5, 12]:
return [0, 1]
elif self._valor_estado in [9, 6]:
return [0, 1, 2, 3]
def checa_acao(self, acao):
if acao in self._pacote_acoes():
return True
else:
return False
    def read_1(self):
        """Return the full observation tuple for the current state:
        (state text, action text, action-space size, immediate reward,
        finished flag)."""
        return self._estado_texto, self._estado_acao, self._espaco_acoes, self._reforco_imediato, self._finalizado
    def read(self):
        """Return the reduced observation tuple for the current state:
        (state text, action text, action-space size)."""
        return self._estado_texto, self._estado_acao, self._espaco_acoes
def imprime_acao(self, acoes):
for cont, acao in enumerate(acoes):
print("\t[{0}] {1}".format(cont, acao))
def emulador(self, acao):
if self._valor_estado == 2 and acao == 0: # ok
return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \
self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']
elif self._valor_estado == 2 and acao == 1: # ok
return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], \
self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']
elif self._valor_estado in [1, 3, 4] and acao == 0:
return self._estados_texto['estado_2'], self._acao_textos['estado_2'], self._acao_dimensoes['estado_2'], \
self._estados_reforcos['estado_2'], self._estados_finalizado['estado_1']
elif self._valor_estado == 3 and acao == 1:
return self._estados_texto['estado_5'], self._acao_textos['estado_5'], self._acao_dimensoes['estado_5'], \
self._estados_reforcos['estado_5'], self._estados_finalizado['estado_1']
elif self._valor_estado == 2 and acao == 2: # ok
return self._estados_texto['estado_4'], self._acao_textos['estado_4'], self._acao_dimensoes['estado_4'], \
self._estados_reforcos['estado_4'], self._estados_finalizado['estado_1']
elif self._valor_estado == 5 and acao == 1:
return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], \
self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']
elif self._valor_estado == 6 and acao == 1:
return self._estados_texto['estado_7'], self._acao_textos['estado_7'], self._acao_dimensoes['estado_7'], \
self._estados_reforcos['estado_7'], self._estados_finalizado['estado_1']
elif self._valor_estado in [7, 8] and acao == 0:
return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \
self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']
elif self._valor_estado == 6 and acao == 2:
return self._estados_texto['estado_8'], self._acao_textos['estado_7'], self._acao_dimensoes['estado_7'], \
self._estados_reforcos['estado_7'], self._estados_finalizado['estado_1']
elif self._valor_estado == 9 and acao == 1:
return self._estados_texto['estado_10'], self._acao_textos['estado_10'], self._acao_dimensoes['estado_10'], \
self._estados_reforcos['estado_10'], self._estados_finalizado['estado_1']
elif self._valor_estado in [6, 10, 11] and acao == 0:
return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], \
self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']
elif self._valor_estado == 9 and acao == 2:
return self._estados_texto['estado_11'], self._acao_textos['estado_10'], self._acao_dimensoes['estado_10'], \
self._estados_reforcos['estado_10'], self._estados_finalizado['estado_1']
elif self._valor_estado in [5, 9, 13] and acao == 0:
return self._estados_texto['estado_12'], self._acao_textos['estado_12'], self._acao_dimensoes['estado_12'], \
self._estados_reforcos['estado_12'], self._estados_finalizado['estado_1']
elif self._valor_estado == 12 and acao == 0:
return self._estados_texto['estado_13'], self._acao_textos['estado_13'], self._acao_dimensoes['estado_13'], \
self._estados_reforcos['estado_13'], self._estados_finalizado['estado_1']
elif self._valor_estado == 12 and acao == 1:
return self._estados_texto['estado_final'], self._acao_textos['estado_final'], self._acao_dimensoes[
'estado_final'], self._estados_reforcos['estado_final'], self._estados_finalizado['estado_final']
elif self._valor_estado == 9 and acao == 3:
return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \
self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']
elif self._valor_estado == 6 and acao == 3:
return self._estados_texto['estado_2'], self._acao_textos['estado_2'], self._acao_dimensoes['estado_2'], \
self._estados_reforcos['estado_2'], self._estados_finalizado['estado_1']
|
normal
|
{
"blob_id": "38ffbb6a66837e975a611a57579bb365ab69a32c",
"index": 9504,
"step-1": "<mask token>\n\n\nclass AdmiravelMundoNovo(object):\n <mask token>\n <mask token>\n\n def transicao_estado(self, acao):\n if self._valor_estado == 2 and acao == 0:\n self._estado_6()\n elif self._valor_estado == 2 and acao == 1:\n self._estado_3()\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n self._estado_2()\n elif self._valor_estado == 3 and acao == 1:\n self._estado_5()\n elif self._valor_estado == 2 and acao == 2:\n self._estado_4()\n elif self._valor_estado == 5 and acao == 1:\n self._estado_3()\n elif self._valor_estado == 6 and acao == 1:\n self._estado_7()\n elif self._valor_estado in [7, 8] and acao == 0:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 2:\n self._estado_8()\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n self._estado_9()\n elif self._valor_estado == 9 and acao == 1:\n self._estado_10()\n elif self._valor_estado == 9 and acao == 2:\n self._estado_11()\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n self._estado_12()\n elif self._valor_estado == 12 and acao == 0:\n self._estado_13()\n elif self._valor_estado == 12 and acao == 1:\n self._estado_final()\n elif self._valor_estado == 9 and acao == 3:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 3:\n self._estado_2()\n\n def _estado_1(self):\n self._reforco_imediato = self._estados_reforcos['estado_1']\n self.reforco += self._reforco_imediato\n self._valor_estado = 1\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_1']\n self._estado_acao = self._acao_textos['estado_1']\n self._espaco_acoes = self._acao_dimensoes['estado_1']\n\n def _estado_2(self):\n self._reforco_imediato = self._estados_reforcos['estado_2']\n self.reforco += self._reforco_imediato\n self._valor_estado = 2\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_2']\n self._estado_acao = self._acao_textos['estado_2']\n self._espaco_acoes = 
self._acao_dimensoes['estado_2']\n\n def _estado_3(self):\n self._reforco_imediato = self._estados_reforcos['estado_3']\n self.reforco += self._reforco_imediato\n self._valor_estado = 3\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_3']\n self._estado_acao = self._acao_textos['estado_3']\n self._espaco_acoes = self._acao_dimensoes['estado_3']\n <mask token>\n\n def _estado_5(self):\n self._reforco_imediato = self._estados_reforcos['estado_5']\n self.reforco += self._reforco_imediato\n self._valor_estado = 5\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_5']\n self._estado_acao = self._acao_textos['estado_5']\n self._espaco_acoes = self._acao_dimensoes['estado_5']\n\n def _estado_6(self):\n self._reforco_imediato = self._estados_reforcos['estado_6']\n self.reforco += self._reforco_imediato\n self._valor_estado = 6\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_6']\n self._estado_acao = self._acao_textos['estado_6']\n self._espaco_acoes = self._acao_dimensoes['estado_6']\n <mask token>\n <mask token>\n\n def _estado_9(self):\n self._reforco_imediato = self._estados_reforcos['estado_9']\n self.reforco += self._reforco_imediato\n self._valor_estado = 9\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_9']\n self._estado_acao = self._acao_textos['estado_9']\n self._espaco_acoes = self._acao_dimensoes['estado_9']\n\n def _estado_10(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 10\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_10']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_11(self):\n 
self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 11\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_11']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_12(self):\n self._reforco_imediato = self._estados_reforcos['estado_12']\n self.reforco += self._reforco_imediato\n self._valor_estado = 12\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_12']\n self._estado_acao = self._acao_textos['estado_12']\n self._espaco_acoes = self._acao_dimensoes['estado_12']\n <mask token>\n\n def _estado_14(self):\n self._reforco_imediato = self._estados_reforcos['estado_14']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 14\n self._finalizado = self._estados_finalizado['estado_14']\n self._estado_texto = self._estados_texto['estado_14']\n self._estado_acao = self._acao_textos['estado_14']\n self._espaco_acoes = self._acao_dimensoes['estado_14']\n <mask token>\n\n def _pacote_acoes(self):\n if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:\n return [0]\n elif self._valor_estado in [2]:\n return [0, 1, 2]\n elif self._valor_estado in [3, 5, 12]:\n return [0, 1]\n elif self._valor_estado in [9, 6]:\n return [0, 1, 2, 3]\n\n def checa_acao(self, acao):\n if acao in self._pacote_acoes():\n return True\n else:\n return False\n\n def read_1(self):\n return (self._estado_texto, self._estado_acao, self._espaco_acoes,\n self._reforco_imediato, self._finalizado)\n\n def read(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes\n\n def imprime_acao(self, acoes):\n for cont, acao in enumerate(acoes):\n print('\\t[{0}] {1}'.format(cont, acao))\n\n def emulador(self, acao):\n if self._valor_estado == 2 and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], 
self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 3 and acao == 1:\n return self._estados_texto['estado_5'], self._acao_textos[\n 'estado_5'], self._acao_dimensoes['estado_5'\n ], self._estados_reforcos['estado_5'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 2:\n return self._estados_texto['estado_4'], self._acao_textos[\n 'estado_4'], self._acao_dimensoes['estado_4'\n ], self._estados_reforcos['estado_4'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 5 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 1:\n return self._estados_texto['estado_7'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [7, 8] and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 2:\n return self._estados_texto['estado_8'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], 
self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 1:\n return self._estados_texto['estado_10'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 2:\n return self._estados_texto['estado_11'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n return self._estados_texto['estado_12'], self._acao_textos[\n 'estado_12'], self._acao_dimensoes['estado_12'\n ], self._estados_reforcos['estado_12'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 0:\n return self._estados_texto['estado_13'], self._acao_textos[\n 'estado_13'], self._acao_dimensoes['estado_13'\n ], self._estados_reforcos['estado_13'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 1:\n return self._estados_texto['estado_final'], self._acao_textos[\n 'estado_final'], self._acao_dimensoes['estado_final'\n ], self._estados_reforcos['estado_final'\n ], self._estados_finalizado['estado_final']\n elif self._valor_estado == 9 and acao == 3:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 3:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n",
"step-2": "<mask token>\n\n\nclass AdmiravelMundoNovo(object):\n\n def __init__(self):\n self.reforco = 0\n self._checa_estado = False\n self._estado_texto = None\n self._estado_acao = None\n self._finalizado = False\n self._espaco_acoes = None\n self._estados_texto = ESTADOS\n self._acao_textos = ACOES\n self._acao_dimensoes = DIMENSOES\n self._estados_reforcos = REFORCOS\n self._estados_finalizado = FINALIZADO\n self._valores_estados_iniciais = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,\n 12, 13]\n print('\\tO objetivo do jogo é coletar a chave preciosa de ouro.' +\n \"\"\".\n\tPara tal, você precisa vasculhar a Ilha da Fantasia.\"\"\")\n print()\n self._escolha_estado_inicial()\n <mask token>\n\n def transicao_estado(self, acao):\n if self._valor_estado == 2 and acao == 0:\n self._estado_6()\n elif self._valor_estado == 2 and acao == 1:\n self._estado_3()\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n self._estado_2()\n elif self._valor_estado == 3 and acao == 1:\n self._estado_5()\n elif self._valor_estado == 2 and acao == 2:\n self._estado_4()\n elif self._valor_estado == 5 and acao == 1:\n self._estado_3()\n elif self._valor_estado == 6 and acao == 1:\n self._estado_7()\n elif self._valor_estado in [7, 8] and acao == 0:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 2:\n self._estado_8()\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n self._estado_9()\n elif self._valor_estado == 9 and acao == 1:\n self._estado_10()\n elif self._valor_estado == 9 and acao == 2:\n self._estado_11()\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n self._estado_12()\n elif self._valor_estado == 12 and acao == 0:\n self._estado_13()\n elif self._valor_estado == 12 and acao == 1:\n self._estado_final()\n elif self._valor_estado == 9 and acao == 3:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 3:\n self._estado_2()\n\n def _estado_1(self):\n self._reforco_imediato = self._estados_reforcos['estado_1']\n self.reforco += 
self._reforco_imediato\n self._valor_estado = 1\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_1']\n self._estado_acao = self._acao_textos['estado_1']\n self._espaco_acoes = self._acao_dimensoes['estado_1']\n\n def _estado_2(self):\n self._reforco_imediato = self._estados_reforcos['estado_2']\n self.reforco += self._reforco_imediato\n self._valor_estado = 2\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_2']\n self._estado_acao = self._acao_textos['estado_2']\n self._espaco_acoes = self._acao_dimensoes['estado_2']\n\n def _estado_3(self):\n self._reforco_imediato = self._estados_reforcos['estado_3']\n self.reforco += self._reforco_imediato\n self._valor_estado = 3\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_3']\n self._estado_acao = self._acao_textos['estado_3']\n self._espaco_acoes = self._acao_dimensoes['estado_3']\n <mask token>\n\n def _estado_5(self):\n self._reforco_imediato = self._estados_reforcos['estado_5']\n self.reforco += self._reforco_imediato\n self._valor_estado = 5\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_5']\n self._estado_acao = self._acao_textos['estado_5']\n self._espaco_acoes = self._acao_dimensoes['estado_5']\n\n def _estado_6(self):\n self._reforco_imediato = self._estados_reforcos['estado_6']\n self.reforco += self._reforco_imediato\n self._valor_estado = 6\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_6']\n self._estado_acao = self._acao_textos['estado_6']\n self._espaco_acoes = self._acao_dimensoes['estado_6']\n <mask token>\n\n def _estado_8(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 8\n self._finalizado = 
self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_8']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_9(self):\n self._reforco_imediato = self._estados_reforcos['estado_9']\n self.reforco += self._reforco_imediato\n self._valor_estado = 9\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_9']\n self._estado_acao = self._acao_textos['estado_9']\n self._espaco_acoes = self._acao_dimensoes['estado_9']\n\n def _estado_10(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 10\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_10']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_11(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 11\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_11']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_12(self):\n self._reforco_imediato = self._estados_reforcos['estado_12']\n self.reforco += self._reforco_imediato\n self._valor_estado = 12\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_12']\n self._estado_acao = self._acao_textos['estado_12']\n self._espaco_acoes = self._acao_dimensoes['estado_12']\n <mask token>\n\n def _estado_14(self):\n self._reforco_imediato = self._estados_reforcos['estado_14']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 14\n self._finalizado = self._estados_finalizado['estado_14']\n self._estado_texto = 
self._estados_texto['estado_14']\n self._estado_acao = self._acao_textos['estado_14']\n self._espaco_acoes = self._acao_dimensoes['estado_14']\n\n def _estado_final(self):\n self._reforco_imediato = self._estados_reforcos['estado_final']\n self.reforco += self._reforco_imediato\n self._finalizado = self._estados_finalizado['estado_final']\n self._estado_texto = self._estados_texto['estado_final']\n print('\\tReforço acumulado de {0}'.format(self.reforco))\n self._estado_acao = ''\n\n def _pacote_acoes(self):\n if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:\n return [0]\n elif self._valor_estado in [2]:\n return [0, 1, 2]\n elif self._valor_estado in [3, 5, 12]:\n return [0, 1]\n elif self._valor_estado in [9, 6]:\n return [0, 1, 2, 3]\n\n def checa_acao(self, acao):\n if acao in self._pacote_acoes():\n return True\n else:\n return False\n\n def read_1(self):\n return (self._estado_texto, self._estado_acao, self._espaco_acoes,\n self._reforco_imediato, self._finalizado)\n\n def read(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes\n\n def imprime_acao(self, acoes):\n for cont, acao in enumerate(acoes):\n print('\\t[{0}] {1}'.format(cont, acao))\n\n def emulador(self, acao):\n if self._valor_estado == 2 and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 3 and acao == 1:\n return 
self._estados_texto['estado_5'], self._acao_textos[\n 'estado_5'], self._acao_dimensoes['estado_5'\n ], self._estados_reforcos['estado_5'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 2:\n return self._estados_texto['estado_4'], self._acao_textos[\n 'estado_4'], self._acao_dimensoes['estado_4'\n ], self._estados_reforcos['estado_4'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 5 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 1:\n return self._estados_texto['estado_7'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [7, 8] and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 2:\n return self._estados_texto['estado_8'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 1:\n return self._estados_texto['estado_10'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 2:\n return self._estados_texto['estado_11'], self._acao_textos[\n 'estado_10'], 
self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n return self._estados_texto['estado_12'], self._acao_textos[\n 'estado_12'], self._acao_dimensoes['estado_12'\n ], self._estados_reforcos['estado_12'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 0:\n return self._estados_texto['estado_13'], self._acao_textos[\n 'estado_13'], self._acao_dimensoes['estado_13'\n ], self._estados_reforcos['estado_13'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 1:\n return self._estados_texto['estado_final'], self._acao_textos[\n 'estado_final'], self._acao_dimensoes['estado_final'\n ], self._estados_reforcos['estado_final'\n ], self._estados_finalizado['estado_final']\n elif self._valor_estado == 9 and acao == 3:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 3:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n",
"step-3": "<mask token>\n\n\nclass AdmiravelMundoNovo(object):\n\n def __init__(self):\n self.reforco = 0\n self._checa_estado = False\n self._estado_texto = None\n self._estado_acao = None\n self._finalizado = False\n self._espaco_acoes = None\n self._estados_texto = ESTADOS\n self._acao_textos = ACOES\n self._acao_dimensoes = DIMENSOES\n self._estados_reforcos = REFORCOS\n self._estados_finalizado = FINALIZADO\n self._valores_estados_iniciais = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,\n 12, 13]\n print('\\tO objetivo do jogo é coletar a chave preciosa de ouro.' +\n \"\"\".\n\tPara tal, você precisa vasculhar a Ilha da Fantasia.\"\"\")\n print()\n self._escolha_estado_inicial()\n <mask token>\n\n def transicao_estado(self, acao):\n if self._valor_estado == 2 and acao == 0:\n self._estado_6()\n elif self._valor_estado == 2 and acao == 1:\n self._estado_3()\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n self._estado_2()\n elif self._valor_estado == 3 and acao == 1:\n self._estado_5()\n elif self._valor_estado == 2 and acao == 2:\n self._estado_4()\n elif self._valor_estado == 5 and acao == 1:\n self._estado_3()\n elif self._valor_estado == 6 and acao == 1:\n self._estado_7()\n elif self._valor_estado in [7, 8] and acao == 0:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 2:\n self._estado_8()\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n self._estado_9()\n elif self._valor_estado == 9 and acao == 1:\n self._estado_10()\n elif self._valor_estado == 9 and acao == 2:\n self._estado_11()\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n self._estado_12()\n elif self._valor_estado == 12 and acao == 0:\n self._estado_13()\n elif self._valor_estado == 12 and acao == 1:\n self._estado_final()\n elif self._valor_estado == 9 and acao == 3:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 3:\n self._estado_2()\n\n def _estado_1(self):\n self._reforco_imediato = self._estados_reforcos['estado_1']\n self.reforco += 
self._reforco_imediato\n self._valor_estado = 1\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_1']\n self._estado_acao = self._acao_textos['estado_1']\n self._espaco_acoes = self._acao_dimensoes['estado_1']\n\n def _estado_2(self):\n self._reforco_imediato = self._estados_reforcos['estado_2']\n self.reforco += self._reforco_imediato\n self._valor_estado = 2\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_2']\n self._estado_acao = self._acao_textos['estado_2']\n self._espaco_acoes = self._acao_dimensoes['estado_2']\n\n def _estado_3(self):\n self._reforco_imediato = self._estados_reforcos['estado_3']\n self.reforco += self._reforco_imediato\n self._valor_estado = 3\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_3']\n self._estado_acao = self._acao_textos['estado_3']\n self._espaco_acoes = self._acao_dimensoes['estado_3']\n\n def _estado_4(self):\n self._reforco_imediato = self._estados_reforcos['estado_4']\n self.reforco += self._reforco_imediato\n self._valor_estado = 4\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_4']\n self._estado_acao = self._acao_textos['estado_4']\n self._espaco_acoes = self._acao_dimensoes['estado_4']\n\n def _estado_5(self):\n self._reforco_imediato = self._estados_reforcos['estado_5']\n self.reforco += self._reforco_imediato\n self._valor_estado = 5\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_5']\n self._estado_acao = self._acao_textos['estado_5']\n self._espaco_acoes = self._acao_dimensoes['estado_5']\n\n def _estado_6(self):\n self._reforco_imediato = self._estados_reforcos['estado_6']\n self.reforco += self._reforco_imediato\n self._valor_estado = 6\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = 
self._estados_texto['estado_6']\n self._estado_acao = self._acao_textos['estado_6']\n self._espaco_acoes = self._acao_dimensoes['estado_6']\n\n def _estado_7(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 7\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_7']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_8(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 8\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_8']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_9(self):\n self._reforco_imediato = self._estados_reforcos['estado_9']\n self.reforco += self._reforco_imediato\n self._valor_estado = 9\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_9']\n self._estado_acao = self._acao_textos['estado_9']\n self._espaco_acoes = self._acao_dimensoes['estado_9']\n\n def _estado_10(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 10\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_10']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_11(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 11\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_11']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = 
self._acao_dimensoes['estado_10']\n\n def _estado_12(self):\n self._reforco_imediato = self._estados_reforcos['estado_12']\n self.reforco += self._reforco_imediato\n self._valor_estado = 12\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_12']\n self._estado_acao = self._acao_textos['estado_12']\n self._espaco_acoes = self._acao_dimensoes['estado_12']\n <mask token>\n\n def _estado_14(self):\n self._reforco_imediato = self._estados_reforcos['estado_14']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 14\n self._finalizado = self._estados_finalizado['estado_14']\n self._estado_texto = self._estados_texto['estado_14']\n self._estado_acao = self._acao_textos['estado_14']\n self._espaco_acoes = self._acao_dimensoes['estado_14']\n\n def _estado_final(self):\n self._reforco_imediato = self._estados_reforcos['estado_final']\n self.reforco += self._reforco_imediato\n self._finalizado = self._estados_finalizado['estado_final']\n self._estado_texto = self._estados_texto['estado_final']\n print('\\tReforço acumulado de {0}'.format(self.reforco))\n self._estado_acao = ''\n\n def _pacote_acoes(self):\n if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:\n return [0]\n elif self._valor_estado in [2]:\n return [0, 1, 2]\n elif self._valor_estado in [3, 5, 12]:\n return [0, 1]\n elif self._valor_estado in [9, 6]:\n return [0, 1, 2, 3]\n\n def checa_acao(self, acao):\n if acao in self._pacote_acoes():\n return True\n else:\n return False\n\n def read_1(self):\n return (self._estado_texto, self._estado_acao, self._espaco_acoes,\n self._reforco_imediato, self._finalizado)\n\n def read(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes\n\n def imprime_acao(self, acoes):\n for cont, acao in enumerate(acoes):\n print('\\t[{0}] {1}'.format(cont, acao))\n\n def emulador(self, acao):\n if self._valor_estado == 2 and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 
'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 3 and acao == 1:\n return self._estados_texto['estado_5'], self._acao_textos[\n 'estado_5'], self._acao_dimensoes['estado_5'\n ], self._estados_reforcos['estado_5'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 2:\n return self._estados_texto['estado_4'], self._acao_textos[\n 'estado_4'], self._acao_dimensoes['estado_4'\n ], self._estados_reforcos['estado_4'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 5 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 1:\n return self._estados_texto['estado_7'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [7, 8] and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 2:\n return self._estados_texto['estado_8'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], 
self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 1:\n return self._estados_texto['estado_10'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 2:\n return self._estados_texto['estado_11'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n return self._estados_texto['estado_12'], self._acao_textos[\n 'estado_12'], self._acao_dimensoes['estado_12'\n ], self._estados_reforcos['estado_12'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 0:\n return self._estados_texto['estado_13'], self._acao_textos[\n 'estado_13'], self._acao_dimensoes['estado_13'\n ], self._estados_reforcos['estado_13'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 1:\n return self._estados_texto['estado_final'], self._acao_textos[\n 'estado_final'], self._acao_dimensoes['estado_final'\n ], self._estados_reforcos['estado_final'\n ], self._estados_finalizado['estado_final']\n elif self._valor_estado == 9 and acao == 3:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 3:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n",
"step-4": "<mask token>\n\n\nclass AdmiravelMundoNovo(object):\n\n def __init__(self):\n self.reforco = 0\n self._checa_estado = False\n self._estado_texto = None\n self._estado_acao = None\n self._finalizado = False\n self._espaco_acoes = None\n self._estados_texto = ESTADOS\n self._acao_textos = ACOES\n self._acao_dimensoes = DIMENSOES\n self._estados_reforcos = REFORCOS\n self._estados_finalizado = FINALIZADO\n self._valores_estados_iniciais = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,\n 12, 13]\n print('\\tO objetivo do jogo é coletar a chave preciosa de ouro.' +\n \"\"\".\n\tPara tal, você precisa vasculhar a Ilha da Fantasia.\"\"\")\n print()\n self._escolha_estado_inicial()\n\n def _escolha_estado_inicial(self):\n escolha = random.choice(self._valores_estados_iniciais)\n if escolha == 1:\n self._estado_1()\n elif escolha == 2:\n self._estado_2()\n elif escolha == 3:\n self._estado_3()\n elif escolha == 4:\n self._estado_4()\n elif escolha == 5:\n self._estado_5()\n elif escolha == 6:\n self._estado_6()\n elif escolha == 7:\n self._estado_7()\n elif escolha == 8:\n self._estado_8()\n elif escolha == 9:\n self._estado_9()\n elif escolha == 10:\n self._estado_10()\n elif escolha == 11:\n self._estado_11()\n elif escolha == 12:\n self._estado_12()\n elif escolha == 13:\n self._estado_13()\n elif escolha == 14:\n self._estado_14()\n\n def transicao_estado(self, acao):\n if self._valor_estado == 2 and acao == 0:\n self._estado_6()\n elif self._valor_estado == 2 and acao == 1:\n self._estado_3()\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n self._estado_2()\n elif self._valor_estado == 3 and acao == 1:\n self._estado_5()\n elif self._valor_estado == 2 and acao == 2:\n self._estado_4()\n elif self._valor_estado == 5 and acao == 1:\n self._estado_3()\n elif self._valor_estado == 6 and acao == 1:\n self._estado_7()\n elif self._valor_estado in [7, 8] and acao == 0:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 2:\n self._estado_8()\n elif 
self._valor_estado in [6, 10, 11] and acao == 0:\n self._estado_9()\n elif self._valor_estado == 9 and acao == 1:\n self._estado_10()\n elif self._valor_estado == 9 and acao == 2:\n self._estado_11()\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n self._estado_12()\n elif self._valor_estado == 12 and acao == 0:\n self._estado_13()\n elif self._valor_estado == 12 and acao == 1:\n self._estado_final()\n elif self._valor_estado == 9 and acao == 3:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 3:\n self._estado_2()\n\n def _estado_1(self):\n self._reforco_imediato = self._estados_reforcos['estado_1']\n self.reforco += self._reforco_imediato\n self._valor_estado = 1\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_1']\n self._estado_acao = self._acao_textos['estado_1']\n self._espaco_acoes = self._acao_dimensoes['estado_1']\n\n def _estado_2(self):\n self._reforco_imediato = self._estados_reforcos['estado_2']\n self.reforco += self._reforco_imediato\n self._valor_estado = 2\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_2']\n self._estado_acao = self._acao_textos['estado_2']\n self._espaco_acoes = self._acao_dimensoes['estado_2']\n\n def _estado_3(self):\n self._reforco_imediato = self._estados_reforcos['estado_3']\n self.reforco += self._reforco_imediato\n self._valor_estado = 3\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_3']\n self._estado_acao = self._acao_textos['estado_3']\n self._espaco_acoes = self._acao_dimensoes['estado_3']\n\n def _estado_4(self):\n self._reforco_imediato = self._estados_reforcos['estado_4']\n self.reforco += self._reforco_imediato\n self._valor_estado = 4\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_4']\n self._estado_acao = self._acao_textos['estado_4']\n 
self._espaco_acoes = self._acao_dimensoes['estado_4']\n\n def _estado_5(self):\n self._reforco_imediato = self._estados_reforcos['estado_5']\n self.reforco += self._reforco_imediato\n self._valor_estado = 5\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_5']\n self._estado_acao = self._acao_textos['estado_5']\n self._espaco_acoes = self._acao_dimensoes['estado_5']\n\n def _estado_6(self):\n self._reforco_imediato = self._estados_reforcos['estado_6']\n self.reforco += self._reforco_imediato\n self._valor_estado = 6\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_6']\n self._estado_acao = self._acao_textos['estado_6']\n self._espaco_acoes = self._acao_dimensoes['estado_6']\n\n def _estado_7(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 7\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_7']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_8(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 8\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_8']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_9(self):\n self._reforco_imediato = self._estados_reforcos['estado_9']\n self.reforco += self._reforco_imediato\n self._valor_estado = 9\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_9']\n self._estado_acao = self._acao_textos['estado_9']\n self._espaco_acoes = self._acao_dimensoes['estado_9']\n\n def _estado_10(self):\n self._reforco_imediato = 
self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 10\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_10']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_11(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 11\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_11']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_12(self):\n self._reforco_imediato = self._estados_reforcos['estado_12']\n self.reforco += self._reforco_imediato\n self._valor_estado = 12\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_12']\n self._estado_acao = self._acao_textos['estado_12']\n self._espaco_acoes = self._acao_dimensoes['estado_12']\n\n def _estado_13(self):\n self._reforco_imediato = self._estados_reforcos['estado_13']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 13\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_13']\n self._estado_acao = self._acao_textos['estado_13']\n self._espaco_acoes = self._acao_dimensoes['estado_13']\n\n def _estado_14(self):\n self._reforco_imediato = self._estados_reforcos['estado_14']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 14\n self._finalizado = self._estados_finalizado['estado_14']\n self._estado_texto = self._estados_texto['estado_14']\n self._estado_acao = self._acao_textos['estado_14']\n self._espaco_acoes = self._acao_dimensoes['estado_14']\n\n def _estado_final(self):\n self._reforco_imediato = self._estados_reforcos['estado_final']\n self.reforco += self._reforco_imediato\n 
self._finalizado = self._estados_finalizado['estado_final']\n self._estado_texto = self._estados_texto['estado_final']\n print('\\tReforço acumulado de {0}'.format(self.reforco))\n self._estado_acao = ''\n\n def _pacote_acoes(self):\n if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:\n return [0]\n elif self._valor_estado in [2]:\n return [0, 1, 2]\n elif self._valor_estado in [3, 5, 12]:\n return [0, 1]\n elif self._valor_estado in [9, 6]:\n return [0, 1, 2, 3]\n\n def checa_acao(self, acao):\n if acao in self._pacote_acoes():\n return True\n else:\n return False\n\n def read_1(self):\n return (self._estado_texto, self._estado_acao, self._espaco_acoes,\n self._reforco_imediato, self._finalizado)\n\n def read(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes\n\n def imprime_acao(self, acoes):\n for cont, acao in enumerate(acoes):\n print('\\t[{0}] {1}'.format(cont, acao))\n\n def emulador(self, acao):\n if self._valor_estado == 2 and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 3 and acao == 1:\n return self._estados_texto['estado_5'], self._acao_textos[\n 'estado_5'], self._acao_dimensoes['estado_5'\n ], self._estados_reforcos['estado_5'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 2:\n return self._estados_texto['estado_4'], self._acao_textos[\n 
'estado_4'], self._acao_dimensoes['estado_4'\n ], self._estados_reforcos['estado_4'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 5 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 1:\n return self._estados_texto['estado_7'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [7, 8] and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 2:\n return self._estados_texto['estado_8'], self._acao_textos[\n 'estado_7'], self._acao_dimensoes['estado_7'\n ], self._estados_reforcos['estado_7'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 1:\n return self._estados_texto['estado_10'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n return self._estados_texto['estado_9'], self._acao_textos[\n 'estado_9'], self._acao_dimensoes['estado_9'\n ], self._estados_reforcos['estado_9'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 2:\n return self._estados_texto['estado_11'], self._acao_textos[\n 'estado_10'], self._acao_dimensoes['estado_10'\n ], self._estados_reforcos['estado_10'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n return self._estados_texto['estado_12'], self._acao_textos[\n 'estado_12'], self._acao_dimensoes['estado_12'\n ], 
self._estados_reforcos['estado_12'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 0:\n return self._estados_texto['estado_13'], self._acao_textos[\n 'estado_13'], self._acao_dimensoes['estado_13'\n ], self._estados_reforcos['estado_13'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 1:\n return self._estados_texto['estado_final'], self._acao_textos[\n 'estado_final'], self._acao_dimensoes['estado_final'\n ], self._estados_reforcos['estado_final'\n ], self._estados_finalizado['estado_final']\n elif self._valor_estado == 9 and acao == 3:\n return self._estados_texto['estado_6'], self._acao_textos[\n 'estado_6'], self._acao_dimensoes['estado_6'\n ], self._estados_reforcos['estado_6'\n ], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 3:\n return self._estados_texto['estado_2'], self._acao_textos[\n 'estado_2'], self._acao_dimensoes['estado_2'\n ], self._estados_reforcos['estado_2'\n ], self._estados_finalizado['estado_1']\n",
"step-5": "\"\"\"\n \\tSeja bem-vindo ao Admirável Mundo Novo!\n \\tO objetivo do jogo é dar suporte ao desenvolvimento de Agentes Inteligentes que utilizam Deep Reinforcement Learning\n \\tpara tarefas de Processamento de Linguagem Natural em língua portuguesa.\n \\tAutor: Gabriel Pontes (@ograndoptimist)\n\"\"\"\n\nimport random\n\nfrom source.emulador.textos import ESTADOS\nfrom source.emulador.textos import ACOES\nfrom source.emulador.textos import REFORCOS\nfrom source.emulador.textos import FINALIZADO\nfrom source.emulador.textos import DIMENSOES\n\n\nprint(__doc__)\n\n\nclass AdmiravelMundoNovo(object):\n def __init__(self):\n self.reforco = 0\n self._checa_estado = False\n self._estado_texto = None\n self._estado_acao = None\n self._finalizado = False\n self._espaco_acoes = None\n self._estados_texto = ESTADOS\n self._acao_textos = ACOES\n self._acao_dimensoes = DIMENSOES\n self._estados_reforcos = REFORCOS\n self._estados_finalizado = FINALIZADO\n self._valores_estados_iniciais = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]\n\n print(\"\\tO objetivo do jogo é coletar a chave preciosa de ouro.\" +\n \".\\n\\tPara tal, você precisa vasculhar a Ilha da Fantasia.\")\n print()\n\n self._escolha_estado_inicial()\n\n def _escolha_estado_inicial(self):\n escolha = random.choice(self._valores_estados_iniciais)\n\n if escolha == 1:\n self._estado_1()\n elif escolha == 2:\n self._estado_2()\n elif escolha == 3:\n self._estado_3()\n elif escolha == 4:\n self._estado_4()\n elif escolha == 5:\n self._estado_5()\n elif escolha == 6:\n self._estado_6()\n elif escolha == 7:\n self._estado_7()\n elif escolha == 8:\n self._estado_8()\n elif escolha == 9:\n self._estado_9()\n elif escolha == 10:\n self._estado_10()\n elif escolha == 11:\n self._estado_11()\n elif escolha == 12:\n self._estado_12()\n elif escolha == 13:\n self._estado_13()\n elif escolha == 14:\n self._estado_14()\n\n def transicao_estado(self, acao):\n if self._valor_estado == 2 and acao == 0:\n 
self._estado_6()\n elif self._valor_estado == 2 and acao == 1:\n self._estado_3()\n elif self._valor_estado in [1, 3, 4] and acao == 0:\n self._estado_2()\n elif self._valor_estado == 3 and acao == 1:\n self._estado_5()\n elif self._valor_estado == 2 and acao == 2:\n self._estado_4()\n elif self._valor_estado == 5 and acao == 1:\n self._estado_3()\n elif self._valor_estado == 6 and acao == 1:\n self._estado_7()\n elif self._valor_estado in [7, 8] and acao == 0:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 2:\n self._estado_8()\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n self._estado_9()\n elif self._valor_estado == 9 and acao == 1:\n self._estado_10()\n elif self._valor_estado == 9 and acao == 2:\n self._estado_11()\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n self._estado_12()\n elif self._valor_estado == 12 and acao == 0:\n self._estado_13()\n elif self._valor_estado == 12 and acao == 1:\n self._estado_final()\n elif self._valor_estado == 9 and acao == 3:\n self._estado_6()\n elif self._valor_estado == 6 and acao == 3:\n self._estado_2()\n\n def _estado_1(self):\n self._reforco_imediato = self._estados_reforcos['estado_1']\n self.reforco += self._reforco_imediato\n self._valor_estado = 1\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_1']\n self._estado_acao = self._acao_textos['estado_1']\n self._espaco_acoes = self._acao_dimensoes['estado_1']\n\n def _estado_2(self):\n self._reforco_imediato = self._estados_reforcos['estado_2']\n self.reforco += self._reforco_imediato\n self._valor_estado = 2\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_2']\n self._estado_acao = self._acao_textos['estado_2']\n self._espaco_acoes = self._acao_dimensoes['estado_2']\n\n def _estado_3(self):\n self._reforco_imediato = self._estados_reforcos['estado_3']\n self.reforco += self._reforco_imediato\n self._valor_estado = 
3\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_3']\n self._estado_acao = self._acao_textos['estado_3']\n self._espaco_acoes = self._acao_dimensoes['estado_3']\n\n def _estado_4(self):\n self._reforco_imediato = self._estados_reforcos['estado_4']\n self.reforco += self._reforco_imediato\n self._valor_estado = 4\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_4']\n self._estado_acao = self._acao_textos['estado_4']\n self._espaco_acoes = self._acao_dimensoes['estado_4']\n\n def _estado_5(self):\n self._reforco_imediato = self._estados_reforcos['estado_5']\n self.reforco += self._reforco_imediato\n self._valor_estado = 5\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_5']\n self._estado_acao = self._acao_textos['estado_5']\n self._espaco_acoes = self._acao_dimensoes['estado_5']\n\n def _estado_6(self):\n self._reforco_imediato = self._estados_reforcos['estado_6']\n self.reforco += self._reforco_imediato\n self._valor_estado = 6\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_6']\n self._estado_acao = self._acao_textos['estado_6']\n self._espaco_acoes = self._acao_dimensoes['estado_6']\n\n def _estado_7(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 7\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_7']\n self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_8(self):\n self._reforco_imediato = self._estados_reforcos['estado_7']\n self.reforco += self._reforco_imediato\n self._valor_estado = 8\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_8']\n 
self._estado_acao = self._acao_textos['estado_7']\n self._espaco_acoes = self._acao_dimensoes['estado_7']\n\n def _estado_9(self):\n self._reforco_imediato = self._estados_reforcos['estado_9']\n self.reforco += self._reforco_imediato\n self._valor_estado = 9\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_9']\n self._estado_acao = self._acao_textos['estado_9']\n self._espaco_acoes = self._acao_dimensoes['estado_9']\n\n def _estado_10(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 10\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_10']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_11(self):\n self._reforco_imediato = self._estados_reforcos['estado_10']\n self.reforco += self._reforco_imediato\n self._valor_estado = 11\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_11']\n self._estado_acao = self._acao_textos['estado_10']\n self._espaco_acoes = self._acao_dimensoes['estado_10']\n\n def _estado_12(self):\n self._reforco_imediato = self._estados_reforcos['estado_12']\n self.reforco += self._reforco_imediato\n self._valor_estado = 12\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_12']\n self._estado_acao = self._acao_textos['estado_12']\n self._espaco_acoes = self._acao_dimensoes['estado_12']\n\n def _estado_13(self):\n self._reforco_imediato = self._estados_reforcos['estado_13']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 13\n self._finalizado = self._estados_finalizado['estado_1']\n self._estado_texto = self._estados_texto['estado_13']\n self._estado_acao = self._acao_textos['estado_13']\n self._espaco_acoes = 
self._acao_dimensoes['estado_13']\n\n def _estado_14(self):\n self._reforco_imediato = self._estados_reforcos['estado_14']\n self.reforco -= self._reforco_imediato\n self._valor_estado = 14\n self._finalizado = self._estados_finalizado['estado_14']\n self._estado_texto = self._estados_texto['estado_14']\n self._estado_acao = self._acao_textos['estado_14']\n self._espaco_acoes = self._acao_dimensoes['estado_14']\n\n def _estado_final(self):\n self._reforco_imediato = self._estados_reforcos['estado_final']\n self.reforco += self._reforco_imediato\n self._finalizado = self._estados_finalizado['estado_final']\n self._estado_texto = self._estados_texto['estado_final']\n print(\"\\tReforço acumulado de {0}\".format(self.reforco))\n self._estado_acao = \"\"\n\n def _pacote_acoes(self):\n if self._valor_estado in [1, 4, 7, 8, 10, 11, 13]:\n return [0]\n elif self._valor_estado in [2]:\n return [0, 1, 2]\n elif self._valor_estado in [3, 5, 12]:\n return [0, 1]\n elif self._valor_estado in [9, 6]:\n return [0, 1, 2, 3]\n\n def checa_acao(self, acao):\n if acao in self._pacote_acoes():\n return True\n else:\n return False\n\n def read_1(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes, self._reforco_imediato, self._finalizado\n\n def read(self):\n return self._estado_texto, self._estado_acao, self._espaco_acoes\n\n def imprime_acao(self, acoes):\n for cont, acao in enumerate(acoes):\n print(\"\\t[{0}] {1}\".format(cont, acao))\n\n def emulador(self, acao):\n if self._valor_estado == 2 and acao == 0: # ok\n return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \\\n self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 1: # ok\n return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], \\\n self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']\n elif self._valor_estado in [1, 
3, 4] and acao == 0:\n return self._estados_texto['estado_2'], self._acao_textos['estado_2'], self._acao_dimensoes['estado_2'], \\\n self._estados_reforcos['estado_2'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 3 and acao == 1:\n return self._estados_texto['estado_5'], self._acao_textos['estado_5'], self._acao_dimensoes['estado_5'], \\\n self._estados_reforcos['estado_5'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 2 and acao == 2: # ok\n return self._estados_texto['estado_4'], self._acao_textos['estado_4'], self._acao_dimensoes['estado_4'], \\\n self._estados_reforcos['estado_4'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 5 and acao == 1:\n return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], \\\n self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 1:\n return self._estados_texto['estado_7'], self._acao_textos['estado_7'], self._acao_dimensoes['estado_7'], \\\n self._estados_reforcos['estado_7'], self._estados_finalizado['estado_1']\n elif self._valor_estado in [7, 8] and acao == 0:\n return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \\\n self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 2:\n return self._estados_texto['estado_8'], self._acao_textos['estado_7'], self._acao_dimensoes['estado_7'], \\\n self._estados_reforcos['estado_7'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 1:\n return self._estados_texto['estado_10'], self._acao_textos['estado_10'], self._acao_dimensoes['estado_10'], \\\n self._estados_reforcos['estado_10'], self._estados_finalizado['estado_1']\n elif self._valor_estado in [6, 10, 11] and acao == 0:\n return self._estados_texto['estado_9'], self._acao_textos['estado_9'], self._acao_dimensoes['estado_9'], 
\\\n self._estados_reforcos['estado_9'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 9 and acao == 2:\n return self._estados_texto['estado_11'], self._acao_textos['estado_10'], self._acao_dimensoes['estado_10'], \\\n self._estados_reforcos['estado_10'], self._estados_finalizado['estado_1']\n elif self._valor_estado in [5, 9, 13] and acao == 0:\n return self._estados_texto['estado_12'], self._acao_textos['estado_12'], self._acao_dimensoes['estado_12'], \\\n self._estados_reforcos['estado_12'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 0:\n return self._estados_texto['estado_13'], self._acao_textos['estado_13'], self._acao_dimensoes['estado_13'], \\\n self._estados_reforcos['estado_13'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 12 and acao == 1:\n return self._estados_texto['estado_final'], self._acao_textos['estado_final'], self._acao_dimensoes[\n 'estado_final'], self._estados_reforcos['estado_final'], self._estados_finalizado['estado_final']\n elif self._valor_estado == 9 and acao == 3:\n return self._estados_texto['estado_6'], self._acao_textos['estado_6'], self._acao_dimensoes['estado_6'], \\\n self._estados_reforcos['estado_6'], self._estados_finalizado['estado_1']\n elif self._valor_estado == 6 and acao == 3:\n return self._estados_texto['estado_2'], self._acao_textos['estado_2'], self._acao_dimensoes['estado_2'], \\\n self._estados_reforcos['estado_2'], self._estados_finalizado['estado_1']\n\n",
"step-ids": [
18,
21,
23,
25,
28
]
}
|
[
18,
21,
23,
25,
28
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# Emit the CGI response header line declaring an HTML body, followed by the
# mandatory blank line that terminates the header section. The original text
# ends with '\n\n' and print() appends one more newline, so the exact byte
# output is 'Content-Type:text/html' + three newlines; `end=` reproduces that.
print('Content-Type:text/html', end='\n\n\n')
print(
"""
<html>
<head>
<link href="iconTech.png" rel="icon"/>
<meta name="viewport" content="width=device-width,intial-scale=1.0"/>
<link href="../css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="../css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
<link rel="stylesheet" href="../css/font-awesome.min.css" type="text/css"/>
<script src="../js/jquery.js"></script>
<script src="../js/bootstrap.min.js"></script>
<style>
.outer
{
min-height:100px;
}
.top
{
min-height:50px;
background:gray;
}
.logo
{
height:50px;
width:240px;
margin:5px 5px;
background:white;
font-size:30px;
font-family:Algerian;
border:5px double green;
}
.menu
{
height:50px;
width:1000px;
background:gray;
z-index:10;
}
#menu
{
background:none;
border:none;
box-shadow:none;
padding:1% 0%;
margin:0px;
font-size:15px;
}
#menu ul li a
{
color:white;
text-shadow:none;
font-weight:bold;
font-size:12px;
}
#menu ul li:hover
{
background:transparent;
}
.head
{
height:100px;
background:url('../bimg/d1.jpg');
background-attachment:fixed;
background-size:100% 100%;
}
.head1
{
height:100px;
background-color:rgba(0,0,0,.4);
color:white;
font-size:20px;
padding:2% 0%;
}
.addcake
{
min-height:550px;
margin-left:25%;
background:rgba(0,0,0,.3);
margin-top:20px;
margin-bottom:20px;
}
.footer
{
min-height:50px;
padding:1% 0%;
text-align:center;
color:white;
font-size:20px;
background:black;
}
</style>
</head>
<body>
<div class="col-sm-12 outer">
<div class="row">
<div class="col-sm-12 top">
<div class="row">
<div class="col-sm-3 logo">Bake<span style="color:orange;">-o-</span>logy</div>
<div class="col-sm-9 menu"> <nav class="navbar navbar-default" id="menu">
<div class="container-fluid">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
<span class="sr-only clpbtn">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1" >
<ul class="nav navbar-nav navbar-right">
<li><a href="index.py">Dashboard</a></li>
<li><a href="Addmenu.py">Add Menu</a></li>
<li><a href="Addservices.py">Add Services</a></li>
<li><a href="Addimages.py">Add Images</a></li>
<li><a href="OrderManagement.py">Order Management</a></li>
<li><a href="ContactManagement.py">Contact Management</a></li>
<li><a href="Changepassword.py">Change Password</a></li>
<li><a href="LogOut.py">LogOut</a></li>
</li>
</ul>
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</nav>
</div></div></div>
<div class="col-sm-12 main">
<div class="row">
<div class="col-sm-12 head">
<div class="row">
<div class="col-sm-12 head1">
<div class="text-center"><span class="fa fa-cutlery "></span> Add Cake Menu </div>
</div>
</div></div>
</div></div>
<div class="col-sm-6 addcake">
<div class="h2 text-center">Add Cakes Menu</div>
<form action="../code/cakecode.py" enctype="multipart/form-data" method="post">
<div class="h4">Cake Name</div>
<input type="text" placeholder="Input Your Cake Name" name="cake" class="form-control">
<div class="h4">Cake Size</div>
<input type="text" placeholder="Input Your Cake size" name="size" class="form-control">
<div class="h4">Cake Weight</div>
<input type="text" placeholder="Input Your Cake Flavour" name="flavour" class="form-control">
<div class="h4">Price</div>
<input type="text" placeholder="Input Your Cake Weight" name="weight" class="form-control">
<div class="h4">Cake Flavour</div>
<input type="text" placeholder="Input Your Cake Price" name="price" class="form-control">
<div class="h4">Cake Image</div>
<input type="file" placeholder="Import Your Cake image" name="pic" class="form-control"><br/>
<input type="submit" class="form-control" value="Add">
</div>
<div class="col-sm-12 footer">
<div class="col-sm-6">©copyright:<a target="_blank" href="https://www.techpile.in">Techpile Technology.pvt.Ltd.</a>
</div>
<div class="col-sm-6">
Developed By:-Yash Rastogi</div>
</div>
</div>
</div>
</body>
</html>
"""
)
<|reserved_special_token_1|>
#!C:\Python27\python
# CGI endpoint for the admin "Add Cake Menu" page.
# Emits the HTTP Content-Type header, then the complete HTML document:
# admin navigation bar plus the add-cake form, which posts multipart
# form data (including the cake image) to ../code/cakecode.py.
#
# Fixes vs. previous revision:
#   - viewport meta: "intial-scale" -> "initial-scale" (was ignored by browsers)
#   - removed a stray </li> in the nav list
#   - "Cake Flavour"/"Cake Weight"/"Price" labels were shifted by one input;
#     each label now sits above the field whose name/placeholder it describes
#   - the <form> element was never closed; added </form> around the inputs
print('Content-Type:text/html\n\n')
print("""
<html>
<head>
<link href="iconTech.png" rel="icon"/>
<meta name="viewport" content="width=device-width,initial-scale=1.0"/>
<link href="../css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="../css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
<link rel="stylesheet" href="../css/font-awesome.min.css" type="text/css"/>
<script src="../js/jquery.js"></script>
<script src="../js/bootstrap.min.js"></script>
<style>
.outer
{
min-height:100px;
}
.top
{
    min-height:50px;
    background:gray;
}
.logo
{
    height:50px;
    width:240px;
    margin:5px 5px;
    background:white;
    font-size:30px;
    font-family:Algerian;
    border:5px double green;
}
.menu
{
    height:50px;
    width:1000px;
    background:gray;
    z-index:10;
}
#menu
{
    background:none;
    border:none;
    box-shadow:none;
    padding:1% 0%;
    margin:0px;
    font-size:15px;
}
#menu ul li a
{
    color:white;
    text-shadow:none;
    font-weight:bold;
    font-size:12px;
}
#menu ul li:hover
{
    background:transparent;
}
.head
{
    height:100px;
    background:url('../bimg/d1.jpg');
    background-attachment:fixed;
    background-size:100% 100%;
}
.head1
{
    height:100px;
    background-color:rgba(0,0,0,.4);
    color:white;
    font-size:20px;
    padding:2% 0%;
}
.addcake
{
min-height:550px;
margin-left:25%;
background:rgba(0,0,0,.3);
margin-top:20px;
margin-bottom:20px;
}
.footer
{
    min-height:50px;
    padding:1% 0%;
    text-align:center;
    color:white;
    font-size:20px;
    background:black;
}
</style>
</head>
<body>
<div class="col-sm-12 outer">
  <div class="row">
    <div class="col-sm-12 top">
      <div class="row">
        <div class="col-sm-3 logo">Bake<span style="color:orange;">-o-</span>logy</div>
        <div class="col-sm-9 menu">
          <nav class="navbar navbar-default" id="menu">
            <div class="container-fluid">
              <!-- Brand and toggle get grouped for better mobile display -->
              <div class="navbar-header">
                <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
                  <span class="sr-only clpbtn">Toggle navigation</span>
                  <span class="icon-bar"></span>
                  <span class="icon-bar"></span>
                  <span class="icon-bar"></span>
                </button>
              </div>
              <!-- Collect the nav links, forms, and other content for toggling -->
              <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
                <ul class="nav navbar-nav navbar-right">
                  <li><a href="index.py">Dashboard</a></li>
                  <li><a href="Addmenu.py">Add Menu</a></li>
                  <li><a href="Addservices.py">Add Services</a></li>
                  <li><a href="Addimages.py">Add Images</a></li>
                  <li><a href="OrderManagement.py">Order Management</a></li>
                  <li><a href="ContactManagement.py">Contact Management</a></li>
                  <li><a href="Changepassword.py">Change Password</a></li>
                  <li><a href="LogOut.py">LogOut</a></li>
                </ul>
              </div><!-- /.navbar-collapse -->
            </div><!-- /.container-fluid -->
          </nav>
        </div></div></div>
    <div class="col-sm-12 main">
      <div class="row">
        <div class="col-sm-12 head">
          <div class="row">
            <div class="col-sm-12 head1">
              <div class="text-center"><span class="fa fa-cutlery "></span> Add Cake Menu </div>
            </div>
          </div></div>
      </div></div>
    <div class="col-sm-6 addcake">
      <div class="h2 text-center">Add Cakes Menu</div>
      <form action="../code/cakecode.py" enctype="multipart/form-data" method="post">
        <div class="h4">Cake Name</div>
        <input type="text" placeholder="Input Your Cake Name" name="cake" class="form-control">
        <div class="h4">Cake Size</div>
        <input type="text" placeholder="Input Your Cake size" name="size" class="form-control">
        <div class="h4">Cake Flavour</div>
        <input type="text" placeholder="Input Your Cake Flavour" name="flavour" class="form-control">
        <div class="h4">Cake Weight</div>
        <input type="text" placeholder="Input Your Cake Weight" name="weight" class="form-control">
        <div class="h4">Price</div>
        <input type="text" placeholder="Input Your Cake Price" name="price" class="form-control">
        <div class="h4">Cake Image</div>
        <input type="file" placeholder="Import Your Cake image" name="pic" class="form-control"><br/>
        <input type="submit" class="form-control" value="Add">
      </form>
    </div>
    <div class="col-sm-12 footer">
      <div class="col-sm-6">©copyright:<a target="_blank" href="https://www.techpile.in">Techpile Technology.pvt.Ltd.</a>
      </div>
      <div class="col-sm-6">
        Developed By:-Yash Rastogi</div>
    </div>
  </div>
</div>
</body>
</html>
""")
|
flexible
|
{
"blob_id": "968cfcfe9d31adcd3a67a88a66e5ebe7b719be8d",
"index": 2841,
"step-1": "<mask token>\n",
"step-2": "print('Content-Type:text/html\\n\\n')\nprint(\n \"\"\"\n<html>\n<head>\n<link href=\"iconTech.png\" rel=\"icon\"/>\n<meta name=\"viewport\" content=\"width=device-width,intial-scale=1.0\"/>\n<link href=\"../css/bootstrap.min.css\" rel=\"stylesheet\" type=\"text/css\"/>\n<link href=\"../css/bootstrap-theme.min.css\" rel=\"stylesheet\" type=\"text/css\"/>\n<link rel=\"stylesheet\" href=\"../css/font-awesome.min.css\" type=\"text/css\"/>\n<script src=\"../js/jquery.js\"></script>\n<script src=\"../js/bootstrap.min.js\"></script>\n<style>\n.outer\n{\nmin-height:100px;\n}\n.top\n{\n min-height:50px;\n background:gray;\n}\n.logo\n{\n height:50px;\n width:240px;\n margin:5px 5px;\n background:white;\n font-size:30px;\n font-family:Algerian;\n border:5px double green;\n}\n.menu\n{\n height:50px;\n width:1000px;\n background:gray;\n z-index:10;\n}\n#menu\n{\n background:none;\n border:none;\n box-shadow:none;\n padding:1% 0%;\n margin:0px;\n font-size:15px;\n}\n#menu ul li a\n{\n color:white;\n text-shadow:none;\n font-weight:bold;\n font-size:12px;\n}\n#menu ul li:hover\n{\n background:transparent; \n}\n.head\n{\n height:100px;\n background:url('../bimg/d1.jpg');\n background-attachment:fixed;\n background-size:100% 100%;\n}\n.head1\n{\n height:100px;\n background-color:rgba(0,0,0,.4);\n color:white;\n font-size:20px;\n padding:2% 0%;\n}\n.addcake\n{\nmin-height:550px;\nmargin-left:25%;\nbackground:rgba(0,0,0,.3);\nmargin-top:20px;\nmargin-bottom:20px;\n}\n\n.footer\n{\n min-height:50px;\n padding:1% 0%;\n text-align:center;\n color:white;\n font-size:20px;\n background:black;\n}\n</style>\n</head>\n<body>\n<div class=\"col-sm-12 outer\">\n\t\t\t<div class=\"row\">\n\t\t\t\t<div class=\"col-sm-12 top\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t\t<div class=\"col-sm-3 logo\">Bake<span style=\"color:orange;\">-o-</span>logy</div>\n\t\t\t\t\t<div class=\"col-sm-9 menu\">\t\t\t\t\t<nav class=\"navbar navbar-default\" id=\"menu\">\n <div class=\"container-fluid\">\n 
<!-- Brand and toggle get grouped for better mobile display -->\n <div class=\"navbar-header\">\n <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#bs-example-navbar-collapse-1\" aria-expanded=\"false\">\n <span class=\"sr-only clpbtn\">Toggle navigation</span>\n <span class=\"icon-bar\"></span>\n <span class=\"icon-bar\"></span>\n <span class=\"icon-bar\"></span>\n </button>\n </div>\n<!-- Collect the nav links, forms, and other content for toggling -->\n <div class=\"collapse navbar-collapse\" id=\"bs-example-navbar-collapse-1\" >\n <ul class=\"nav navbar-nav navbar-right\">\n <li><a href=\"index.py\">Dashboard</a></li>\n <li><a href=\"Addmenu.py\">Add Menu</a></li>\n\t\t<li><a href=\"Addservices.py\">Add Services</a></li>\n\t\t<li><a href=\"Addimages.py\">Add Images</a></li>\n\t\t<li><a href=\"OrderManagement.py\">Order Management</a></li>\n\t\t<li><a href=\"ContactManagement.py\">Contact Management</a></li>\n\t\t<li><a href=\"Changepassword.py\">Change Password</a></li>\n\t\t<li><a href=\"LogOut.py\">LogOut</a></li>\n\t\t</li>\n \n\t\t </ul>\n\t\t\t \n </div><!-- /.navbar-collapse -->\n </div><!-- /.container-fluid -->\n</nav>\n \t\t\t\t\t\n\t\t </div></div></div>\n\t\t\t\t<div class=\"col-sm-12 main\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t<div class=\"col-sm-12 head\">\n\t\t\t\t<div class=\"row\">\n\t\t\t\t<div class=\"col-sm-12 head1\">\n\t\t\t\t<div class=\"text-center\"><span class=\"fa fa-cutlery \"></span> Add Cake Menu </div>\n\t\t\t\t</div>\n\t\t\t\t</div></div>\n\t\t\t\t</div></div>\n\t\t\t\t<div class=\"col-sm-6 addcake\">\n\t\t\t\t<div class=\"h2 text-center\">Add Cakes Menu</div>\n\t\t\t\t<form action=\"../code/cakecode.py\" enctype=\"multipart/form-data\" method=\"post\">\n\t\t\t\t<div class=\"h4\">Cake Name</div>\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Name\" name=\"cake\" class=\"form-control\">\n\t\t\t\t<div class=\"h4\">Cake Size</div>\n\t\t\t\t<input type=\"text\" 
placeholder=\"Input Your Cake size\" name=\"size\" class=\"form-control\">\n <div class=\"h4\">Cake Weight</div>\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Flavour\" name=\"flavour\" class=\"form-control\">\n <div class=\"h4\">Price</div>\n\t\t\t\t <input type=\"text\" placeholder=\"Input Your Cake Weight\" name=\"weight\" class=\"form-control\">\n <div class=\"h4\">Cake Flavour</div>\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Price\" name=\"price\" class=\"form-control\">\t\n <div class=\"h4\">Cake Image</div>\n\t\t\t\t<input type=\"file\" placeholder=\"Import Your Cake image\" name=\"pic\" class=\"form-control\"><br/>\n <input type=\"submit\" class=\"form-control\" value=\"Add\">\n\t\t\t\t</div>\n\t\t\t\t<div class=\"col-sm-12 footer\">\n\t\t\t\t<div class=\"col-sm-6\">©copyright:<a target=\"_blank\" href=\"https://www.techpile.in\">Techpile Technology.pvt.Ltd.</a>\n\t\t\t\t</div>\n\t\t\t\t\n\t\t\t\t<div class=\"col-sm-6\">\t\n Developed By:-Yash Rastogi</div>\n\t\t\t\t</div>\n\t\t\t\t\n\t\t\t</div>\n\t\t\t</div>\n\n\n</body>\n</html>\n\n\"\"\"\n )\n",
"step-3": "#!C:\\Python27\\python\r\nprint('Content-Type:text/html\\n\\n')\r\nprint (\"\"\"\r\n<html>\r\n<head>\r\n<link href=\"iconTech.png\" rel=\"icon\"/>\r\n<meta name=\"viewport\" content=\"width=device-width,intial-scale=1.0\"/>\r\n<link href=\"../css/bootstrap.min.css\" rel=\"stylesheet\" type=\"text/css\"/>\r\n<link href=\"../css/bootstrap-theme.min.css\" rel=\"stylesheet\" type=\"text/css\"/>\r\n<link rel=\"stylesheet\" href=\"../css/font-awesome.min.css\" type=\"text/css\"/>\r\n<script src=\"../js/jquery.js\"></script>\r\n<script src=\"../js/bootstrap.min.js\"></script>\r\n<style>\r\n.outer\r\n{\r\nmin-height:100px;\r\n}\r\n.top\r\n{\r\n min-height:50px;\r\n background:gray;\r\n}\r\n.logo\r\n{\r\n height:50px;\r\n width:240px;\r\n margin:5px 5px;\r\n background:white;\r\n font-size:30px;\r\n font-family:Algerian;\r\n border:5px double green;\r\n}\r\n.menu\r\n{\r\n height:50px;\r\n width:1000px;\r\n background:gray;\r\n z-index:10;\r\n}\r\n#menu\r\n{\r\n background:none;\r\n border:none;\r\n box-shadow:none;\r\n padding:1% 0%;\r\n margin:0px;\r\n font-size:15px;\r\n}\r\n#menu ul li a\r\n{\r\n color:white;\r\n text-shadow:none;\r\n font-weight:bold;\r\n font-size:12px;\r\n}\r\n#menu ul li:hover\r\n{\r\n background:transparent; \r\n}\r\n.head\r\n{\r\n height:100px;\r\n background:url('../bimg/d1.jpg');\r\n background-attachment:fixed;\r\n background-size:100% 100%;\r\n}\r\n.head1\r\n{\r\n height:100px;\r\n background-color:rgba(0,0,0,.4);\r\n color:white;\r\n font-size:20px;\r\n padding:2% 0%;\r\n}\r\n.addcake\r\n{\r\nmin-height:550px;\r\nmargin-left:25%;\r\nbackground:rgba(0,0,0,.3);\r\nmargin-top:20px;\r\nmargin-bottom:20px;\r\n}\r\n\r\n.footer\r\n{\r\n min-height:50px;\r\n padding:1% 0%;\r\n text-align:center;\r\n color:white;\r\n font-size:20px;\r\n background:black;\r\n}\r\n</style>\r\n</head>\r\n<body>\r\n<div class=\"col-sm-12 outer\">\r\n\t\t\t<div class=\"row\">\r\n\t\t\t\t<div class=\"col-sm-12 top\">\r\n\t\t\t\t<div 
class=\"row\">\r\n\t\t\t\t\t<div class=\"col-sm-3 logo\">Bake<span style=\"color:orange;\">-o-</span>logy</div>\r\n\t\t\t\t\t<div class=\"col-sm-9 menu\">\t\t\t\t\t<nav class=\"navbar navbar-default\" id=\"menu\">\r\n <div class=\"container-fluid\">\r\n <!-- Brand and toggle get grouped for better mobile display -->\r\n <div class=\"navbar-header\">\r\n <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#bs-example-navbar-collapse-1\" aria-expanded=\"false\">\r\n <span class=\"sr-only clpbtn\">Toggle navigation</span>\r\n <span class=\"icon-bar\"></span>\r\n <span class=\"icon-bar\"></span>\r\n <span class=\"icon-bar\"></span>\r\n </button>\r\n </div>\r\n<!-- Collect the nav links, forms, and other content for toggling -->\r\n <div class=\"collapse navbar-collapse\" id=\"bs-example-navbar-collapse-1\" >\r\n <ul class=\"nav navbar-nav navbar-right\">\r\n <li><a href=\"index.py\">Dashboard</a></li>\r\n <li><a href=\"Addmenu.py\">Add Menu</a></li>\r\n\t\t<li><a href=\"Addservices.py\">Add Services</a></li>\r\n\t\t<li><a href=\"Addimages.py\">Add Images</a></li>\r\n\t\t<li><a href=\"OrderManagement.py\">Order Management</a></li>\r\n\t\t<li><a href=\"ContactManagement.py\">Contact Management</a></li>\r\n\t\t<li><a href=\"Changepassword.py\">Change Password</a></li>\r\n\t\t<li><a href=\"LogOut.py\">LogOut</a></li>\r\n\t\t</li>\r\n \r\n\t\t </ul>\r\n\t\t\t \r\n </div><!-- /.navbar-collapse -->\r\n </div><!-- /.container-fluid -->\r\n</nav>\r\n \t\t\t\t\t\r\n\t\t </div></div></div>\r\n\t\t\t\t<div class=\"col-sm-12 main\">\r\n\t\t\t\t<div class=\"row\">\r\n\t\t\t\t<div class=\"col-sm-12 head\">\r\n\t\t\t\t<div class=\"row\">\r\n\t\t\t\t<div class=\"col-sm-12 head1\">\r\n\t\t\t\t<div class=\"text-center\"><span class=\"fa fa-cutlery \"></span> Add Cake Menu </div>\r\n\t\t\t\t</div>\r\n\t\t\t\t</div></div>\r\n\t\t\t\t</div></div>\r\n\t\t\t\t<div class=\"col-sm-6 addcake\">\r\n\t\t\t\t<div class=\"h2 text-center\">Add Cakes 
Menu</div>\r\n\t\t\t\t<form action=\"../code/cakecode.py\" enctype=\"multipart/form-data\" method=\"post\">\r\n\t\t\t\t<div class=\"h4\">Cake Name</div>\r\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Name\" name=\"cake\" class=\"form-control\">\r\n\t\t\t\t<div class=\"h4\">Cake Size</div>\r\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake size\" name=\"size\" class=\"form-control\">\r\n <div class=\"h4\">Cake Weight</div>\r\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Flavour\" name=\"flavour\" class=\"form-control\">\r\n <div class=\"h4\">Price</div>\r\n\t\t\t\t <input type=\"text\" placeholder=\"Input Your Cake Weight\" name=\"weight\" class=\"form-control\">\r\n <div class=\"h4\">Cake Flavour</div>\r\n\t\t\t\t<input type=\"text\" placeholder=\"Input Your Cake Price\" name=\"price\" class=\"form-control\">\t\r\n <div class=\"h4\">Cake Image</div>\r\n\t\t\t\t<input type=\"file\" placeholder=\"Import Your Cake image\" name=\"pic\" class=\"form-control\"><br/>\r\n <input type=\"submit\" class=\"form-control\" value=\"Add\">\r\n\t\t\t\t</div>\r\n\t\t\t\t<div class=\"col-sm-12 footer\">\r\n\t\t\t\t<div class=\"col-sm-6\">©copyright:<a target=\"_blank\" href=\"https://www.techpile.in\">Techpile Technology.pvt.Ltd.</a>\r\n\t\t\t\t</div>\r\n\t\t\t\t\r\n\t\t\t\t<div class=\"col-sm-6\">\t\r\n Developed By:-Yash Rastogi</div>\r\n\t\t\t\t</div>\r\n\t\t\t\t\r\n\t\t\t</div>\r\n\t\t\t</div>\r\n\r\n\r\n</body>\r\n</html>\r\n\r\n\"\"\")",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Carafe(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Carafe(object):
def __init__(self):
self.level = CarafeLevel()
self.temp = CarafeTemp()
<|reserved_special_token_1|>
from barista.sensor import CarafeLevel, CarafeTemp
class Carafe(object):
def __init__(self):
self.level = CarafeLevel()
self.temp = CarafeTemp()
<|reserved_special_token_1|>
from barista.sensor import CarafeLevel, CarafeTemp


class Carafe(object):
    """Facade for the physical coffee carafe: owns its two sensors.

    Exposes ``level`` (fill-level sensor) and ``temp`` (temperature
    sensor) as public attributes for callers to poll.
    """

    def __init__(self) -> None:
        # One sensor instance of each kind; the zero-argument constructors
        # suggest the sensors are self-configuring (e.g. fixed pins) --
        # NOTE(review): confirm against barista.sensor.
        self.level = CarafeLevel()
        self.temp = CarafeTemp()

    # TODO add callback for when the temperature or level are too low.
|
flexible
|
{
"blob_id": "a0cce8d48f929dd63ba809a1e9bf02b172e8bc1b",
"index": 2192,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Carafe(object):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Carafe(object):\n\n def __init__(self):\n self.level = CarafeLevel()\n self.temp = CarafeTemp()\n",
"step-4": "from barista.sensor import CarafeLevel, CarafeTemp\n\n\nclass Carafe(object):\n\n def __init__(self):\n self.level = CarafeLevel()\n self.temp = CarafeTemp()\n",
"step-5": "from barista.sensor import CarafeLevel, CarafeTemp\n\n\nclass Carafe(object):\n def __init__(self):\n self.level = CarafeLevel()\n self.temp = CarafeTemp()\n\n # TODO add callback for when the temperature or level are too low.",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#--------------------------------------------------------
# File------------project2.py
# Developer-------Paige Weber
# Course----------CS1213-03
# Project---------Project #1
# Due-------------September 26, 2017
#
# This program uses Gregory-Leibniz series to compute
# an approximate value of pi.
#--------------------------------------------------------


def gregory_leibniz_pi(number_of_terms):
    """Return the Gregory-Leibniz approximation of pi.

    Sums the first ``number_of_terms`` terms of the series
    4 * (1 - 1/3 + 1/5 - 1/7 + ...).

    Args:
        number_of_terms: how many series terms to sum (>= 1).

    Returns:
        float: the partial-sum approximation of pi.
    """
    total = 0.0
    for count in range(1, number_of_terms + 1):
        # Alternating sign: +1 for odd count, -1 for even count.
        total += (-1) ** (count + 1) / (2 * count - 1)
    return total * 4


if __name__ == "__main__":
    number_of_terms = int(input("How many terms? "))
    # Validate the raw input BEFORE using it: the previous version
    # incremented first and then tested >= 1, so an input of 0 slipped
    # through and printed "pi = 0.00000" instead of the error message.
    if number_of_terms >= 1:
        # Typo fix: "Approxiation" -> "Approximation".
        print("Approximation of pi: %1.5f" % gregory_leibniz_pi(number_of_terms))
    else:
        print("The number of terms must be greater than zero.")
|
normal
|
{
"blob_id": "466148395a4141793b5f92c84513fd093876db76",
"index": 9964,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif number_of_terms >= 1:\n add_approximation = 0\n for count in range(1, number_of_terms):\n approximation = (-1) ** (count + 1) / (2 * count - 1)\n add_approximation = approximation + add_approximation\n solution = add_approximation * 4\n print('Approxiation of pi: %1.5f' % solution)\nelse:\n print('The number of terms must be greater than zero.')\n",
"step-3": "number_of_terms = int(input('How many terms? '))\nnumber_of_terms = number_of_terms + 1\nif number_of_terms >= 1:\n add_approximation = 0\n for count in range(1, number_of_terms):\n approximation = (-1) ** (count + 1) / (2 * count - 1)\n add_approximation = approximation + add_approximation\n solution = add_approximation * 4\n print('Approxiation of pi: %1.5f' % solution)\nelse:\n print('The number of terms must be greater than zero.')\n",
"step-4": "#--------------------------------------------------------\n# File------------project2.py\n# Developer-------Paige Weber\n# Course----------CS1213-03\n# Project---------Project #1\n# Due-------------September 26, 2017\n#\n# This program uses Gregory-Leibniz series to compute\n# an approximate value of pi.\n#--------------------------------------------------------\nnumber_of_terms = int(input(\"How many terms? \"))\nnumber_of_terms = number_of_terms + 1\nif number_of_terms >= 1:\n\n add_approximation = 0\n\n for count in range (1, number_of_terms):\n approximation = (((-1)**(count + 1))/(2 * count - 1))\n add_approximation = approximation + add_approximation\n solution = add_approximation * 4\n\n print(\"Approxiation of pi: %1.5f\"%solution)\n\nelse:\n print(\"The number of terms must be greater than zero.\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append(PROJ_DIR)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
PROJ_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(PROJ_DIR)
<|reserved_special_token_1|>
import os
import sys

# Directory holding this file, then its parent: the project root.
_this_dir = os.path.dirname(__file__)
PROJ_DIR = os.path.dirname(_this_dir)

# Put the project root on the import path so absolute imports of
# project packages work no matter where the process was launched from.
sys.path.append(PROJ_DIR)
|
flexible
|
{
"blob_id": "54276074d84e63e6418f8738bb7f910424f1c94d",
"index": 9469,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append(PROJ_DIR)\n",
"step-3": "<mask token>\nPROJ_DIR = os.path.dirname(os.path.dirname(__file__))\nsys.path.append(PROJ_DIR)\n",
"step-4": "import sys\nimport os\nPROJ_DIR = os.path.dirname(os.path.dirname(__file__))\nsys.path.append(PROJ_DIR)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import imp
from django.shortcuts import render
# ***************** API ****************
from django.views.decorators.csrf import csrf_exempt
from rest_framework.parsers import JSONParser,FileUploadParser,MultiPartParser,FormParser
from .models import *
from django.http import Http404
from .serializers import *
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status,viewsets,permissions
from rest_framework import generics
from rest_framework.permissions import AllowAny, IsAuthenticated
from django.contrib.auth import get_user_model
from client.models import ClientModel
from adminapp.models import SchoolModel
from adminapp.serializers import SchoolSerializer
from .custompermissions import *
from client.permissions import *
from rest_framework.authentication import SessionAuthentication
from Student.permissions import IsStudent
# Concrete user model for this project (honours settings.AUTH_USER_MODEL).
User = get_user_model()


def get_user_from_token(request):
    """Return the ``User`` that owns the auth token on ``request.user``.

    Looks up the token attached to the authenticated request user and
    re-fetches the matching user row by primary key, e.g. to get a fresh
    instance rather than the cached ``request.user``.

    Args:
        request: an authenticated DRF request (``request.user`` must have
            an ``auth_token`` relation, i.e. TokenAuthentication is in use).

    Returns:
        User: the user identified by the token's ``user_id``.

    Raises:
        User.DoesNotExist: if no user row matches the token's ``user_id``.
    """
    # token.key is the opaque auth string, e.g. 91391f4c12b94b75...
    token = request.user.auth_token
    # Removed leftover debug print() calls; fetch by primary key directly.
    return User.objects.get(id=token.user_id)
# Create your views here.
# class UserListView(generics.ListAPIView):
# parser_classes = (MultiPartParser,FormParser)
# queryset = UserModel.objects.all()
# serializer_class = UserSerializer
# class UserDetailView(generics.RetrieveAPIView):
# parser_classes = (MultiPartParser,FormParser)
# queryset = UserModel.objects.all()
# serializer_class = UserSerializer
class AddArticleView(generics.CreateAPIView):
    """POST: any authenticated user submits an article; they become its author."""
    #All authenticated users can add articles
    permission_classes = (IsAuthenticated, )
    serializer_class = ArticleSerializer
    queryset = ArticleModel.objects.all()
    def perform_create(self, serializer):
        # Stamp the article with the requesting user as its author.
        serializer.save(user=self.request.user)
class ListArticleView(generics.ListAPIView):
    """GET: anyone lists articles whose status is published ("P")."""
    #Anyone can see the published Articles
    permission_classes = (AllowAny, )
    serializer_class = ArticleSerializer
    queryset = ArticleModel.objects.filter(status__exact="P")
class ArticleDetail(generics.RetrieveAPIView):
    """GET: anyone retrieves one published article by its slug."""
    #anyone can see detail of published article
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = ArticleSerializer
    queryset = ArticleModel.objects.filter(status__exact="P")
class ArticleDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):
    '''
    Get: superadmin can see all articles (draft, published)
    PATCH : superadmin can mark article as published by changing status = P
    Delete: superadmin can delete article.
    '''
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = UpdateArticleSerializer
    queryset = ArticleModel.objects.all()
class AddQuestions(generics.CreateAPIView):
    """POST: superadmin creates a question."""
    permission_classes = (IsSuperUser, )
    serializer_class = QuestionSerializer
    queryset = QuestionModel.objects.all()
class ViewQuestion(generics.ListAPIView):
    """GET: clients list all questions."""
    permission_classes = (IsClient, )
    serializer_class = QuestionSerializer
    queryset = QuestionModel.objects.all()
class QuestionDetailView(generics.RetrieveAPIView):
    """GET: clients retrieve one question by slug."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = QuestionSerializer
    queryset = QuestionModel.objects.all()
class QuestionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """GET/PATCH/DELETE: superadmin manages one question by slug."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = QuestionSerializer
    queryset = QuestionModel.objects.all()
class AddSchools(generics.CreateAPIView):
    """POST: superadmin registers a school."""
    permission_classes = (IsSuperUser, )
    serializer_class = SchoolSerializer
    queryset = SchoolModel.objects.all()
class ViewSchool(generics.ListAPIView):
    """GET: clients list all schools."""
    permission_classes = (IsClient, )
    serializer_class = SchoolSerializer
    queryset = SchoolModel.objects.all()
class SchoolDetailView(generics.RetrieveAPIView):
    """GET: clients retrieve one school by slug."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = SchoolSerializer
    queryset = SchoolModel.objects.all()
class SchoolDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """GET/PATCH/DELETE: superadmin manages one school by slug."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = SchoolSerializer
    queryset = SchoolModel.objects.all()
class AddBlogs(generics.CreateAPIView):
    """POST: superadmin creates a blog entry."""
    permission_classes = (IsSuperUser, )
    serializer_class = BlogSerializer
    queryset = BlogModel.objects.all()
class ViewBlog(generics.ListAPIView):
    """GET: clients list all blog entries."""
    permission_classes = (IsClient, )
    serializer_class = BlogSerializer
    queryset = BlogModel.objects.all()
class BlogDetailView(generics.RetrieveAPIView):
    """GET: clients retrieve one blog entry by slug."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = BlogSerializer
    queryset = BlogModel.objects.all()
class BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """GET/PATCH/DELETE: superadmin manages one blog entry by slug."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = BlogSerializer
    queryset = BlogModel.objects.all()
class AddEventView(generics.CreateAPIView):
    """POST: superadmin creates an event."""
    #only super user can add events
    permission_classes = (IsSuperUser, )
    serializer_class = EventSerializer
    queryset = EventModel.objects.all()
class ListEventView(generics.ListAPIView):
    """GET: anyone lists all events."""
    #Anyone can see the events
    permission_classes = (AllowAny, )
    serializer_class = EventSerializer
    queryset = EventModel.objects.all()
class EventDetailView(generics.RetrieveAPIView):
    """GET: anyone retrieves one event by slug."""
    #Anyone can see the detail of events
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = EventSerializer
    queryset = EventModel.objects.all()
class EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """GET/PATCH/DELETE: superadmin manages one event by slug."""
    #only superadmin can delete and update events
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = EventSerializer
    queryset = EventModel.objects.all()
class AddBusinessPartners(generics.CreateAPIView):
    """POST: superadmin adds a business partner."""
    permission_classes = (IsSuperUser, )
    serializer_class = BusinessPartnersSerializer
    queryset = BusinessPartnersModel.objects.all()
class ViewBusinessPartner(generics.ListAPIView):
    """GET: anyone lists business partners."""
    permission_classes = (AllowAny, )
    serializer_class = BusinessPartnersSerializer
    queryset = BusinessPartnersModel.objects.all()
class BusinessPartnerDetailView(generics.RetrieveAPIView):
    """GET: anyone retrieves one business partner by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = BusinessPartnersSerializer
    queryset = BusinessPartnersModel.objects.all()
class BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """GET/PATCH/DELETE: superadmin manages one business partner by slug."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = BusinessPartnersSerializer
    queryset = BusinessPartnersModel.objects.all()
class AddKidStory(generics.CreateAPIView):
    """POST: students submit a kid story; the submitter is recorded as author."""
    #Students can add kidstory
    permission_classes = (IsStudent, )
    serializer_class = KidStorySerializer
    queryset = KidStoryModel.objects.all()
    def perform_create(self, serializer):
        # Stamp the story with the requesting student as its author.
        serializer.save(user=self.request.user)
class ViewKidStory(generics.ListAPIView):
    """GET: anyone lists kid stories marked published ("P")."""
    # anyone can see published kids story
    permission_classes = (AllowAny, )
    serializer_class = KidStorySerializer
    queryset = KidStoryModel.objects.filter(status__exact="P")
class KidStoryDetailView(generics.RetrieveAPIView):
    """GET: anyone retrieves one published kid story by slug."""
    #anyone can see detail of published kids story
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = KidStorySerializer
    queryset = KidStoryModel.objects.filter(status__exact="P")
class KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    '''
    Get: superadmin can see all stories (draft, published)
    PATCH : superadmin can mark stories as published by changing status = P
    Delete: superadmin can delete stories.
    '''
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = UpdateKidsStorySerializer
    queryset = KidStoryModel.objects.all()
class AddKidTalent(generics.CreateAPIView):
#Students or client can add KidsTalent
permission_classes = (IsStudentORClient, )
serializer_class = KidTalentSerializer
queryset = KidTalentModel.objects.all()
def perform_create(self, serializer):
serializer.save(user=self.request.user)
class ViewKidTalent(generics.ListAPIView):
# anyone can see published kids talent
permission_classes = (AllowAny, )
serializer_class = KidTalentSerializer
queryset = KidTalentModel.objects.filter(status__exact="P")
class KidTalentDetailView(generics.RetrieveAPIView):
#anyone can see detail of published kids talent
lookup_field = 'slug'
permission_classes = (AllowAny, )
serializer_class = KidTalentSerializer
queryset = KidTalentModel.objects.filter(status__exact="P")
class KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
'''
Get: superadmin can see all kids talent (draft, published)
PATCH : superadmin can mark kids talent as published by changing status = P
Delete: superadmin can delete kids talent.
'''
lookup_field = 'slug'
permission_classes = (IsSuperUser, )
serializer_class = UpdateKidsTalentSerializer
queryset = KidTalentModel.objects.all()
class AddCourses(generics.CreateAPIView):
    """Create a course; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = CourseSerializer
    queryset = CourseModel.objects.all()
class ViewCourse(generics.ListAPIView):
    """Public list of all courses."""
    permission_classes = (AllowAny, )
    serializer_class = CourseSerializer
    queryset = CourseModel.objects.all()
class CourseDetailView(generics.RetrieveAPIView):
    """Public detail of a single course, looked up by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = CourseSerializer
    queryset = CourseModel.objects.all()
class CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a course by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = CourseSerializer
    queryset = CourseModel.objects.all()
class AddQuizContext(generics.CreateAPIView):
    """Create a quiz context; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = QuizContextSerializer
    queryset = QuizContextModel.objects.all()
class ViewQuizContext(generics.ListAPIView):
    """List all quiz contexts; clients only."""
    permission_classes = (IsClient, )
    serializer_class = QuizContextSerializer
    queryset = QuizContextModel.objects.all()
class QuizContextDetailView(generics.RetrieveAPIView):
    """Detail of a single quiz context by slug; clients only."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = QuizContextSerializer
    queryset = QuizContextModel.objects.all()
class QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a quiz context by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = QuizContextSerializer
    queryset = QuizContextModel.objects.all()
class AddFeedback(generics.CreateAPIView):
    """Create a client-feedback record.

    NOTE(review): creation is restricted to the superadmin — presumably
    feedback is entered on behalf of clients; confirm this permission is
    intentional (clients cannot post their own feedback here).
    """
    permission_classes = (IsSuperUser, )
    serializer_class = ClientFeedbackSerializer
    queryset = ClientFeedBackModel.objects.all()
class ViewFeedback(generics.ListAPIView):
    """List all client feedback; clients only."""
    permission_classes = (IsClient, )
    serializer_class = ClientFeedbackSerializer
    queryset = ClientFeedBackModel.objects.all()
class FeedbackDetailView(generics.RetrieveAPIView):
    """Detail of a single feedback record by slug; clients only."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = ClientFeedbackSerializer
    queryset = ClientFeedBackModel.objects.all()
class FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a feedback record by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = ClientFeedbackSerializer
    queryset = ClientFeedBackModel.objects.all()
class AddWebsiteAd(generics.CreateAPIView):
    """Create a website advertisement; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = WebsiteAdSerializer
    queryset = WebsiteAdModel.objects.all()
class ViewWebsiteAd(generics.ListAPIView):
    """Public list of all website advertisements."""
    permission_classes = (AllowAny, )
    serializer_class = WebsiteAdSerializer
    queryset = WebsiteAdModel.objects.all()
class WebsiteAdDetailView(generics.RetrieveAPIView):
    """Public detail of a single website advertisement by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = WebsiteAdSerializer
    queryset = WebsiteAdModel.objects.all()
class WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a website advertisement by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = WebsiteAdSerializer
    queryset = WebsiteAdModel.objects.all()
# class AddApproval(generics.CreateAPIView):
# permission_classes = (IsSuperUser, )
# serializer_class = ApprovalSerializer
# queryset = ApprovalModel.objects.all()
# class ViewApproval(generics.ListAPIView):
# permission_classes = (IsClient, )
# serializer_class = ApprovalSerializer
# queryset = ApprovalModel.objects.all()
# class ApprovalDetailView(generics.RetrieveAPIView):
# lookup_field = 'slug'
# permission_classes = (IsClient, )
# serializer_class = ApprovalSerializer
# queryset = ApprovalModel.objects.all()
# class ApprovalDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
# lookup_field = 'slug'
# permission_classes = (IsSuperUser, )
# serializer_class = ApprovalSerializer
# queryset = ApprovalModel.objects.all()
class AddBusinessPromotion(generics.CreateAPIView):
    """Create a business-promotion entry; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = BusinessPromotionSerializer
    queryset = BusinessPromotionModel.objects.all()
class ViewBusinessPromotion(generics.ListAPIView):
    """Public list of all business promotions."""
    permission_classes = (AllowAny, )
    serializer_class = BusinessPromotionSerializer
    queryset = BusinessPromotionModel.objects.all()
class BusinessPromotionDetailView(generics.RetrieveAPIView):
    """Public detail of a single business promotion by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = BusinessPromotionSerializer
    queryset = BusinessPromotionModel.objects.all()
class BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a business promotion by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = BusinessPromotionSerializer
    queryset = BusinessPromotionModel.objects.all()
class AddTeam(generics.CreateAPIView):
    """Create a team-member entry; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = TeamSerializer
    queryset = TeamModel.objects.all()
class ViewTeam(generics.ListAPIView):
    """Public list of all team members."""
    permission_classes = (AllowAny, )
    serializer_class = TeamSerializer
    queryset = TeamModel.objects.all()
class TeamDetailView(generics.RetrieveAPIView):
    """Public detail of a single team member by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = TeamSerializer
    queryset = TeamModel.objects.all()
class TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a team member by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = TeamSerializer
    queryset = TeamModel.objects.all()
class AddAdvisoryBoard(generics.CreateAPIView):
    """Create an advisory-board entry; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = AdvisoryBoardSerializer
    queryset = AdvisoryBoardModel.objects.all()
class ViewAdvisoryBoard(generics.ListAPIView):
    """List all advisory-board entries; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = AdvisoryBoardSerializer
    queryset = AdvisoryBoardModel.objects.all()
class AdvisoryBoardDetailView(generics.RetrieveAPIView):
    """Detail of a single advisory-board entry by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = AdvisoryBoardSerializer
    queryset = AdvisoryBoardModel.objects.all()
class AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete an advisory-board entry by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = AdvisoryBoardSerializer
    queryset = AdvisoryBoardModel.objects.all()
class AddAnnouncement(generics.CreateAPIView):
    """Create an announcement; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = AnnouncementSerializer
    queryset = AnnouncementModel.objects.all()
class ListAnnouncement(generics.ListAPIView):
    """Public list of all announcements."""
    permission_classes = (AllowAny, )
    serializer_class = AnnouncementSerializer
    queryset = AnnouncementModel.objects.all()
class AnnouncementDetail(generics.RetrieveAPIView):
    """Public detail of a single announcement by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = AnnouncementSerializer
    queryset = AnnouncementModel.objects.all()
class AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete an announcement by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = AnnouncementSerializer
    queryset = AnnouncementModel.objects.all()
class SuperadminProfileView(APIView):
    """Return the authenticated superadmin's username and e-mail address."""
    permission_classes = (IsSuperUser, )

    def get(self, request, *args, **kwargs):
        # Resolve the account from the request token, then echo back the
        # two profile fields the frontend displays.
        account = get_user_from_token(request)
        return Response({'name': account.username, 'email': account.email})
class AddJobClassified(generics.CreateAPIView):
    """Create a job classified; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = JobClassifiedSerializer
    queryset = JobClassifiedModel.objects.all()
class ViewJobClassified(generics.ListAPIView):
    """Public list of all job classifieds."""
    permission_classes = (AllowAny, )
    serializer_class = JobClassifiedSerializer
    queryset = JobClassifiedModel.objects.all()
class JobClassifiedDetailView(generics.RetrieveAPIView):
    """Public detail of a single job classified by slug."""
    lookup_field = 'slug'
    permission_classes = (AllowAny, )
    serializer_class = JobClassifiedSerializer
    queryset = JobClassifiedModel.objects.all()
class JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a job classified by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = JobClassifiedSerializer
    queryset = JobClassifiedModel.objects.all()
class AddCustomerReviews(generics.CreateAPIView):
    """Create a customer review; superadmin only."""
    permission_classes = (IsSuperUser, )
    serializer_class = CustomerReviewSerializer
    queryset = CustomerReviewModel.objects.all()
class ViewCustomerReview(generics.ListAPIView):
    """List all customer reviews; clients only."""
    permission_classes = (IsClient, )
    serializer_class = CustomerReviewSerializer
    queryset = CustomerReviewModel.objects.all()
class CustomerReviewDetailView(generics.RetrieveAPIView):
    """Detail of a single customer review by slug; clients only."""
    lookup_field = 'slug'
    permission_classes = (IsClient, )
    serializer_class = CustomerReviewSerializer
    queryset = CustomerReviewModel.objects.all()
class CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a customer review by slug; superadmin only."""
    lookup_field = 'slug'
    permission_classes = (IsSuperUser, )
    serializer_class = CustomerReviewSerializer
    queryset = CustomerReviewModel.objects.all()
class ClientComplain(APIView):
    """Superadmin view over client complaints.

    NOTE(review): this class defines no HTTP handler methods, so every
    request to it will receive 405 Method Not Allowed. The class-level
    ``serializer`` attribute instantiates ``ViewComplainSerializer`` at
    import time with no data and is never used by DRF. This view looks
    unfinished — confirm intent and add a ``get`` that queries the
    complaint model.
    """
    permission_classes = (IsSuperUser, )
    serializer = ViewComplainSerializer(many=True)
class clientfeedback(APIView):
    """List client feedback belonging to the requesting superadmin.

    NOTE(review): the lookup ``Class__admin`` suggests feedback rows have a
    ``Class`` relation whose ``admin`` is a user — verify the field name
    against ``ClientFeedBackModel``. Class name is lowercase, which breaks
    PEP 8 naming, but renaming would break the URLconf reference.
    """
    permission_classes = (IsSuperUser, )
    def get(self, request, format=None):
        # Only feedback for classes administered by the current user.
        feeds = ClientFeedBackModel.objects.filter(
            Class__admin = self.request.user
        )
        serializer = ClientFeedbackSerializer(feeds, many=True)
        return Response(serializer.data)
class Enroll_Course(APIView):
    """Enrol a client in a course; superadmin only.

    POST: validates the payload with ``EnrollCourseSerializer`` and saves
    the enrolment. Returns 201 with the saved data on success, or 400
    with the validation errors.
    """
    permission_classes = (IsSuperUser, )

    def post(self, request, format=None):
        serializer = EnrollCourseSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ViewEnroll_Course(APIView):
    """List enrolments matching the ``course_id`` and ``client_id`` URL kwargs; superadmin only."""
    permission_classes = (IsSuperUser, )
    def get(self, request, *args, **kwargs):
        # Both identifiers come from the URL pattern.
        course = self.kwargs['course_id']
        client = self.kwargs['client_id']
        data = Enroll_CourseModel.objects.filter(
            course = course, client = client
        )
        serializer = ViewEnrollCourseSerializer(data, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class DetailEnroll_CourseView(APIView):
    """Retrieve, update or delete a single enrolment by primary key; superadmin only."""
    permission_classes = (IsSuperUser, )

    def get_object(self, pk):
        # Catch only the model's DoesNotExist: the previous bare ``except``
        # converted *every* error (including programming errors) into a 404.
        try:
            return Enroll_CourseModel.objects.get(id=pk)
        except Enroll_CourseModel.DoesNotExist:
            raise Http404

    def get(self, request, pk, format=None):
        """Return the serialized enrolment, or 404 if it does not exist."""
        data = self.get_object(pk)
        serializer = ViewEnrollCourseSerializer(data)
        return Response(serializer.data)

    def put(self, request, pk, format=None):
        """Replace the enrolment; 201 with data on success, 400 with errors."""
        data = self.get_object(pk)
        serializer = ViewEnrollCourseSerializer(data, data=request.data)
        # Plain is_valid() keeps the explicit 400 branch reachable; with
        # raise_exception=True the error-return below was dead code.
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, pk, format=None):
        """Delete the enrolment and return 204 No Content."""
        data = self.get_object(pk)
        data.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class CourseDetail(APIView):
    """Retrieve, update or delete a course identified by slug; superadmin only.

    NOTE(review): ownership checks are inconsistent across methods —
    ``get`` walks ``data.classes.school.admin`` while ``put``/``delete``
    walk ``data.course.client.admin`` on the same ``CourseModel`` instance.
    Confirm against the model which relation chain is correct; one of the
    two paths is very likely wrong.
    """
    permission_classes = (IsSuperUser, )
    def get_object(self, slug):
        # 404 when no course carries this slug.
        try:
            return CourseModel.objects.get(slug=slug)
        except CourseModel.DoesNotExist:
            raise Http404
    def get(self, request, slug, format=None):
        data = self.get_object(slug)
        # Ownership via classes -> school -> admin (see class-level note).
        if data.classes.school.admin == self.request.user:
            serializer = ViewCourseSerializer(data)
            return Response(serializer.data)
        else:
            return Response(
                {'message':'This course does not belong to your school'},
                status=status.HTTP_400_BAD_REQUEST
            )
    def put(self,request,slug,format=None):
        data = self.get_object(slug)
        # Ownership via course -> client -> admin (see class-level note).
        if data.course.client.admin == self.request.user:
            serializer = CourseSerializer(data,data = request.data)
            # NOTE(review): with raise_exception=True, invalid data raises a
            # ValidationError handled by DRF, so the ``else`` branch below is
            # unreachable dead code.
            if serializer.is_valid(raise_exception=True):
                # NOTE(review): default '' means a missing 'course' key would
                # raise AttributeError on ``course.client`` below — confirm
                # the serializer requires this field.
                course = serializer.validated_data.get('course', '')
                if course.client.admin == self.request.user:
                    serializer.save()
                    return Response(serializer.data,status=status.HTTP_201_CREATED)
                return Response(
                    {'message':'This Class does not belong to you'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            else:
                return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response(
                {'message':'This course does not belong to you'},
                status=status.HTTP_400_BAD_REQUEST
            )
    def delete(self,request,slug,format=None):
        data = self.get_object(slug)
        if data.course.client.admin == self.request.user:
            data.delete()
            return Response(status = status.HTTP_204_NO_CONTENT)
        else:
            return Response(
                {'message':'This course does not belong to you'},
                status=status.HTTP_400_BAD_REQUEST
            )
class SchoolRegistrationView(RegisterView):
    """Registration endpoint for school accounts; only the superadmin may create them."""
    serializer_class = RegisterSchoolSerializer
    permission_classes = (IsSuperUser,)
class Add_question(generics.CreateAPIView):
    """Create a quiz question; superadmin only.

    POST: validates the payload with ``QuestionSerializer`` and saves it.
    Returns 201 with the saved data, or 400 with the validation errors.
    """
    permission_classes = (IsSuperUser, )

    def post(self, request, format=None):
        serializer = QuestionSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Viewquestion(generics.ListAPIView):
    """List every question attached to the course given by the ``course_id`` URL kwarg."""
    permission_classes = (IsSuperUser, )

    def get(self, request, *args, **kwargs):
        # Filter directly on the FK column supplied in the URL.
        questions = QuestionModel.objects.filter(course_id=self.kwargs['course_id'])
        payload = QuestionSerializer(questions, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
class QuestionDetail(APIView):
    """Retrieve, update or delete a single quiz question by primary key; superadmin only."""
    permission_classes = (IsSuperUser, )

    def get_object(self, pk):
        # Catch only the model's DoesNotExist: the previous bare ``except``
        # converted *every* error (including programming errors) into a 404.
        try:
            return QuestionModel.objects.get(id=pk)
        except QuestionModel.DoesNotExist:
            raise Http404

    def get(self, request, pk, format=None):
        """Return the serialized question, or 404 if it does not exist."""
        data = self.get_object(pk)
        serializer = QuestionSerializer(data)
        return Response(serializer.data)

    def put(self, request, pk, format=None):
        """Replace the question; 201 with data on success, 400 with errors."""
        data = self.get_object(pk)
        serializer = QuestionSerializer(data, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, pk, format=None):
        """Delete the question and return 204 No Content."""
        data = self.get_object(pk)
        data.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class SubmittedQuestionView(APIView):
    """List a client's submitted quiz answers for a given course; superadmin only.

    ``course_id`` and ``client_id`` come from the URL kwargs; the
    double-underscore lookups traverse the relations on the submission rows.
    """
    permission_classes = (IsSuperUser, )

    def get(self, request, *args, **kwargs):
        course = self.kwargs['course_id']
        client = self.kwargs['client_id']
        data = Client_SubmitquestionModel.objects.filter(
            course__course = course,
            client__client = client
        )
        serializer = Client_submittedquestionSerializer(data, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class AddonlineTest(generics.CreateAPIView):
    """Create an online test; superadmin only.

    POST: validates the payload with ``testSerializer`` and saves it.
    Returns 201 with the saved data, or 400 with the validation errors.
    """
    permission_classes = (IsSuperUser, )

    def post(self, request, format=None):
        serializer = testSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ViewOnlinetest(generics.ListAPIView):
    """List every online test attached to the course given by the ``course_id`` URL kwarg."""
    permission_classes = (IsSuperUser, )

    def get(self, request, *args, **kwargs):
        # Filter directly on the FK column supplied in the URL.
        tests = Client_testModel.objects.filter(course_id=self.kwargs['course_id'])
        payload = testSerializer(tests, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
class onlinetestDetail(APIView):
    """Retrieve, update or delete a single online test by primary key; superadmin only."""
    permission_classes = (IsSuperUser, )

    def get_object(self, pk):
        # Catch only the model's DoesNotExist: the previous bare ``except``
        # converted *every* error (including programming errors) into a 404.
        try:
            return Client_testModel.objects.get(id=pk)
        except Client_testModel.DoesNotExist:
            raise Http404

    def get(self, request, pk, format=None):
        """Return the serialized test, or 404 if it does not exist."""
        data = self.get_object(pk)
        serializer = testSerializer(data)
        return Response(serializer.data)

    def put(self, request, pk, format=None):
        """Replace the test; 201 with data on success, 400 with errors."""
        data = self.get_object(pk)
        serializer = testSerializer(data, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, pk, format=None):
        """Delete the test and return 204 No Content."""
        data = self.get_object(pk)
        data.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class SubmittedonlineTestView(APIView):
    """List a client's submitted online tests for a given course; superadmin only.

    ``course_id`` and ``client_id`` come from the URL kwargs; the
    double-underscore lookups traverse the relations on the submission rows.
    """
    permission_classes = (IsSuperUser, )

    def get(self, request, *args, **kwargs):
        course = self.kwargs['course_id']
        client = self.kwargs['client_id']
        data = Client_SubmittestModel.objects.filter(
            course__course = course,
            client__client = client
        )
        serializer = Client_submittedtestSerializer(data, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
|
normal
|
{
"blob_id": "aec5280869a780bbd93ef24b659d9959f7b81426",
"index": 3545,
"step-1": "<mask token>\n\n\nclass AddBlogs(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass ViewBlog(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass AddEventView(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass ListEventView(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass AddBusinessPartners(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass ViewBusinessPartner(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = 
BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass AddKidStory(generics.CreateAPIView):\n permission_classes = IsStudent,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidStory(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all stories (draft, published)\n PATCH : superadmin can mark stories as published by changing status = P\n Delete: superadmin can delete stories.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsStorySerializer\n queryset = KidStoryModel.objects.all()\n\n\nclass AddKidTalent(generics.CreateAPIView):\n permission_classes = IsStudentORClient,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidTalent(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = 
KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all kids talent (draft, published)\n PATCH : superadmin can mark kids talent as published by changing status = P\n Delete: superadmin can delete kids talent.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n\nclass AddCourses(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass ViewCourse(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass AddQuizContext(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass ViewQuizContext(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass 
AddFeedback(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass ViewFeedback(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass AddWebsiteAd(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass ViewWebsiteAd(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass AddBusinessPromotion(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass ViewBusinessPromotion(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n 
permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass AddTeam(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass ViewTeam(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass AddAdvisoryBoard(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass ViewAdvisoryBoard(generics.ListAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AddAnnouncement(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = 
AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass ListAnnouncement(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass SuperadminProfileView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n user = get_user_from_token(request)\n data = {'name': user.username, 'email': user.email}\n return Response(data)\n\n\nclass AddJobClassified(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass ViewJobClassified(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass AddCustomerReviews(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ViewCustomerReview(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = 
CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ClientComplain(APIView):\n permission_classes = IsSuperUser,\n serializer = ViewComplainSerializer(many=True)\n\n\nclass clientfeedback(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, format=None):\n feeds = ClientFeedBackModel.objects.filter(Class__admin=self.\n request.user)\n serializer = ClientFeedbackSerializer(feeds, many=True)\n return Response(serializer.data)\n\n\nclass Enroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = EnrollCourseSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewEnroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Enroll_CourseModel.objects.filter(course=course, client=client)\n serializer = ViewEnrollCourseSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass DetailEnroll_CourseView(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Enroll_CourseModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data 
= self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass CourseDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, slug):\n try:\n return CourseModel.objects.get(slug=slug)\n except CourseModel.DoesNotExist:\n raise Http404\n\n def get(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.classes.school.admin == self.request.user:\n serializer = ViewCourseSerializer(data)\n return Response(serializer.data)\n else:\n return Response({'message':\n 'This course does not belong to your school'}, status=\n status.HTTP_400_BAD_REQUEST)\n\n def put(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n serializer = CourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n course = serializer.validated_data.get('course', '')\n if course.client.admin == self.request.user:\n serializer.save()\n return Response(serializer.data, status=status.\n HTTP_201_CREATED)\n return Response({'message':\n 'This Class does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin 
== self.request.user:\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass SchoolRegistrationView(RegisterView):\n serializer_class = RegisterSchoolSerializer\n permission_classes = IsSuperUser,\n\n\nclass Add_question(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = QuestionSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass Viewquestion(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = QuestionModel.objects.filter(course_id=course)\n serializer = QuestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass QuestionDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return QuestionModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedQuestionView(APIView):\n permission_classes = 
IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmitquestionModel.objects.filter(course__course=\n course, client__client=client)\n serializer = Client_submittedquestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass AddonlineTest(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = testSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewOnlinetest(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = Client_testModel.objects.filter(course_id=course)\n serializer = testSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass onlinetestDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Client_testModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass 
SubmittedonlineTestView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmittestModel.objects.filter(course__course=course,\n client__client=client)\n serializer = Client_submittedtestSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n",
"step-2": "<mask token>\n\n\nclass SchoolDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass SchoolDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass AddBlogs(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass ViewBlog(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass AddEventView(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass ListEventView(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass AddBusinessPartners(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = 
BusinessPartnersModel.objects.all()\n\n\nclass ViewBusinessPartner(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass AddKidStory(generics.CreateAPIView):\n permission_classes = IsStudent,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidStory(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all stories (draft, published)\n PATCH : superadmin can mark stories as published by changing status = P\n Delete: superadmin can delete stories.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsStorySerializer\n queryset = KidStoryModel.objects.all()\n\n\nclass AddKidTalent(generics.CreateAPIView):\n permission_classes = IsStudentORClient,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n def perform_create(self, serializer):\n 
serializer.save(user=self.request.user)\n\n\nclass ViewKidTalent(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all kids talent (draft, published)\n PATCH : superadmin can mark kids talent as published by changing status = P\n Delete: superadmin can delete kids talent.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n\nclass AddCourses(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass ViewCourse(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass AddQuizContext(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass ViewQuizContext(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDetailView(generics.RetrieveAPIView):\n 
lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass AddFeedback(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass ViewFeedback(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass AddWebsiteAd(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass ViewWebsiteAd(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass AddBusinessPromotion(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n 
serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass ViewBusinessPromotion(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass AddTeam(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass ViewTeam(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass AddAdvisoryBoard(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass ViewAdvisoryBoard(generics.ListAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = 
AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AddAnnouncement(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass ListAnnouncement(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass SuperadminProfileView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n user = get_user_from_token(request)\n data = {'name': user.username, 'email': user.email}\n return Response(data)\n\n\nclass AddJobClassified(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass ViewJobClassified(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n 
serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass AddCustomerReviews(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ViewCustomerReview(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ClientComplain(APIView):\n permission_classes = IsSuperUser,\n serializer = ViewComplainSerializer(many=True)\n\n\nclass clientfeedback(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, format=None):\n feeds = ClientFeedBackModel.objects.filter(Class__admin=self.\n request.user)\n serializer = ClientFeedbackSerializer(feeds, many=True)\n return Response(serializer.data)\n\n\nclass Enroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = EnrollCourseSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewEnroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = 
Enroll_CourseModel.objects.filter(course=course, client=client)\n serializer = ViewEnrollCourseSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass DetailEnroll_CourseView(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Enroll_CourseModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass CourseDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, slug):\n try:\n return CourseModel.objects.get(slug=slug)\n except CourseModel.DoesNotExist:\n raise Http404\n\n def get(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.classes.school.admin == self.request.user:\n serializer = ViewCourseSerializer(data)\n return Response(serializer.data)\n else:\n return Response({'message':\n 'This course does not belong to your school'}, status=\n status.HTTP_400_BAD_REQUEST)\n\n def put(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n serializer = CourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n course = serializer.validated_data.get('course', '')\n if course.client.admin == self.request.user:\n serializer.save()\n return Response(serializer.data, status=status.\n 
HTTP_201_CREATED)\n return Response({'message':\n 'This Class does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass SchoolRegistrationView(RegisterView):\n serializer_class = RegisterSchoolSerializer\n permission_classes = IsSuperUser,\n\n\nclass Add_question(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = QuestionSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass Viewquestion(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = QuestionModel.objects.filter(course_id=course)\n serializer = QuestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass QuestionDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return QuestionModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = 
QuestionSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedQuestionView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmitquestionModel.objects.filter(course__course=\n course, client__client=client)\n serializer = Client_submittedquestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass AddonlineTest(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = testSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewOnlinetest(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = Client_testModel.objects.filter(course_id=course)\n serializer = testSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass onlinetestDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Client_testModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, 
format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedonlineTestView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmittestModel.objects.filter(course__course=course,\n client__client=client)\n serializer = Client_submittedtestSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n",
"step-3": "<mask token>\n\n\nclass AddSchools(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass ViewSchool(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass SchoolDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass SchoolDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass AddBlogs(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass ViewBlog(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass AddEventView(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass ListEventView(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass 
EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass AddBusinessPartners(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass ViewBusinessPartner(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass AddKidStory(generics.CreateAPIView):\n permission_classes = IsStudent,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidStory(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all stories (draft, published)\n PATCH : superadmin can mark stories as published by changing status = P\n Delete: superadmin can delete stories.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n 
serializer_class = UpdateKidsStorySerializer\n queryset = KidStoryModel.objects.all()\n\n\nclass AddKidTalent(generics.CreateAPIView):\n permission_classes = IsStudentORClient,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidTalent(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all kids talent (draft, published)\n PATCH : superadmin can mark kids talent as published by changing status = P\n Delete: superadmin can delete kids talent.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n\nclass AddCourses(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass ViewCourse(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass AddQuizContext(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = 
QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass ViewQuizContext(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass AddFeedback(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass ViewFeedback(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass AddWebsiteAd(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass ViewWebsiteAd(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass 
WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass AddBusinessPromotion(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass ViewBusinessPromotion(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass AddTeam(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass ViewTeam(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass AddAdvisoryBoard(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass ViewAdvisoryBoard(generics.ListAPIView):\n permission_classes = 
IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AddAnnouncement(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass ListAnnouncement(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass SuperadminProfileView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n user = get_user_from_token(request)\n data = {'name': user.username, 'email': user.email}\n return Response(data)\n\n\nclass AddJobClassified(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass ViewJobClassified(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDetailView(generics.RetrieveAPIView):\n lookup_field 
= 'slug'\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass AddCustomerReviews(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ViewCustomerReview(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ClientComplain(APIView):\n permission_classes = IsSuperUser,\n serializer = ViewComplainSerializer(many=True)\n\n\nclass clientfeedback(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, format=None):\n feeds = ClientFeedBackModel.objects.filter(Class__admin=self.\n request.user)\n serializer = ClientFeedbackSerializer(feeds, many=True)\n return Response(serializer.data)\n\n\nclass Enroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = EnrollCourseSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, 
status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewEnroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Enroll_CourseModel.objects.filter(course=course, client=client)\n serializer = ViewEnrollCourseSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass DetailEnroll_CourseView(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Enroll_CourseModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass CourseDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, slug):\n try:\n return CourseModel.objects.get(slug=slug)\n except CourseModel.DoesNotExist:\n raise Http404\n\n def get(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.classes.school.admin == self.request.user:\n serializer = ViewCourseSerializer(data)\n return Response(serializer.data)\n else:\n return Response({'message':\n 'This course does not belong to your school'}, status=\n status.HTTP_400_BAD_REQUEST)\n\n def put(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n serializer = CourseSerializer(data, 
data=request.data)\n if serializer.is_valid(raise_exception=True):\n course = serializer.validated_data.get('course', '')\n if course.client.admin == self.request.user:\n serializer.save()\n return Response(serializer.data, status=status.\n HTTP_201_CREATED)\n return Response({'message':\n 'This Class does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass SchoolRegistrationView(RegisterView):\n serializer_class = RegisterSchoolSerializer\n permission_classes = IsSuperUser,\n\n\nclass Add_question(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = QuestionSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass Viewquestion(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = QuestionModel.objects.filter(course_id=course)\n serializer = QuestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass QuestionDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return QuestionModel.objects.get(id=pk)\n except:\n raise Http404\n\n def 
get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedQuestionView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmitquestionModel.objects.filter(course__course=\n course, client__client=client)\n serializer = Client_submittedquestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass AddonlineTest(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = testSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewOnlinetest(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = Client_testModel.objects.filter(course_id=course)\n serializer = testSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass onlinetestDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return 
Client_testModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedonlineTestView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmittestModel.objects.filter(course__course=course,\n client__client=client)\n serializer = Client_submittedtestSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n",
"step-4": "<mask token>\n\n\nclass AddArticleView(generics.CreateAPIView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ListArticleView(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = ArticleSerializer\n queryset = ArticleModel.objects.filter(status__exact='P')\n\n\nclass ArticleDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = ArticleSerializer\n queryset = ArticleModel.objects.filter(status__exact='P')\n\n\nclass ArticleDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all articles (draft, published)\n PATCH : superadmin can mark article as published by changing status = P\n Delete: superadmin can delete article.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateArticleSerializer\n queryset = ArticleModel.objects.all()\n\n\nclass AddQuestions(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass ViewQuestion(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass QuestionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass QuestionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass AddSchools(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass ViewSchool(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass 
SchoolDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass SchoolDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass AddBlogs(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass ViewBlog(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass AddEventView(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass ListEventView(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass AddBusinessPartners(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass 
ViewBusinessPartner(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass AddKidStory(generics.CreateAPIView):\n permission_classes = IsStudent,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidStory(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact='P')\n\n\nclass KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all stories (draft, published)\n PATCH : superadmin can mark stories as published by changing status = P\n Delete: superadmin can delete stories.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsStorySerializer\n queryset = KidStoryModel.objects.all()\n\n\nclass AddKidTalent(generics.CreateAPIView):\n permission_classes = IsStudentORClient,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ViewKidTalent(generics.ListAPIView):\n 
permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact='P')\n\n\nclass KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n \"\"\"\n Get: superadmin can see all kids talent (draft, published)\n PATCH : superadmin can mark kids talent as published by changing status = P\n Delete: superadmin can delete kids talent.\n \"\"\"\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = UpdateKidsTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n\nclass AddCourses(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass ViewCourse(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass AddQuizContext(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass ViewQuizContext(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = 
QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass AddFeedback(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass ViewFeedback(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass AddWebsiteAd(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass ViewWebsiteAd(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass AddBusinessPromotion(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = 
BusinessPromotionModel.objects.all()\n\n\nclass ViewBusinessPromotion(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass AddTeam(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass ViewTeam(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass AddAdvisoryBoard(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass ViewAdvisoryBoard(generics.ListAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass 
AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AddAnnouncement(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass ListAnnouncement(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass SuperadminProfileView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n user = get_user_from_token(request)\n data = {'name': user.username, 'email': user.email}\n return Response(data)\n\n\nclass AddJobClassified(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass ViewJobClassified(generics.ListAPIView):\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = AllowAny,\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = JobClassifiedSerializer\n 
queryset = JobClassifiedModel.objects.all()\n\n\nclass AddCustomerReviews(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ViewCustomerReview(generics.ListAPIView):\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = IsClient,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = IsSuperUser,\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass ClientComplain(APIView):\n permission_classes = IsSuperUser,\n serializer = ViewComplainSerializer(many=True)\n\n\nclass clientfeedback(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, format=None):\n feeds = ClientFeedBackModel.objects.filter(Class__admin=self.\n request.user)\n serializer = ClientFeedbackSerializer(feeds, many=True)\n return Response(serializer.data)\n\n\nclass Enroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = EnrollCourseSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewEnroll_Course(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Enroll_CourseModel.objects.filter(course=course, client=client)\n serializer = 
ViewEnrollCourseSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass DetailEnroll_CourseView(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Enroll_CourseModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass CourseDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, slug):\n try:\n return CourseModel.objects.get(slug=slug)\n except CourseModel.DoesNotExist:\n raise Http404\n\n def get(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.classes.school.admin == self.request.user:\n serializer = ViewCourseSerializer(data)\n return Response(serializer.data)\n else:\n return Response({'message':\n 'This course does not belong to your school'}, status=\n status.HTTP_400_BAD_REQUEST)\n\n def put(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n serializer = CourseSerializer(data, data=request.data)\n if serializer.is_valid(raise_exception=True):\n course = serializer.validated_data.get('course', '')\n if course.client.admin == self.request.user:\n serializer.save()\n return Response(serializer.data, status=status.\n HTTP_201_CREATED)\n return Response({'message':\n 'This Class does not belong to you'}, 
status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n else:\n return Response({'message':\n 'This course does not belong to you'}, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass SchoolRegistrationView(RegisterView):\n serializer_class = RegisterSchoolSerializer\n permission_classes = IsSuperUser,\n\n\nclass Add_question(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = QuestionSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass Viewquestion(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = QuestionModel.objects.filter(course_id=course)\n serializer = QuestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass QuestionDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return QuestionModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return 
Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedQuestionView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmitquestionModel.objects.filter(course__course=\n course, client__client=client)\n serializer = Client_submittedquestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass AddonlineTest(generics.CreateAPIView):\n permission_classes = IsSuperUser,\n\n def post(self, request, format=None):\n serializer = testSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass ViewOnlinetest(generics.ListAPIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = Client_testModel.objects.filter(course_id=course)\n serializer = testSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\nclass onlinetestDetail(APIView):\n permission_classes = IsSuperUser,\n\n def get_object(self, pk):\n try:\n return Client_testModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data)\n return Response(serializer.data)\n\n def put(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data, data=request.data)\n if 
serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n def delete(self, request, pk, format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass SubmittedonlineTestView(APIView):\n permission_classes = IsSuperUser,\n\n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmittestModel.objects.filter(course__course=course,\n client__client=client)\n serializer = Client_submittedtestSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n",
"step-5": "import imp\nfrom django.shortcuts import render\n\n# ***************** API ****************\nfrom django.views.decorators.csrf import csrf_exempt\nfrom rest_framework.parsers import JSONParser,FileUploadParser,MultiPartParser,FormParser\nfrom .models import *\nfrom django.http import Http404\nfrom .serializers import *\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom rest_framework import status,viewsets,permissions\nfrom rest_framework import generics\nfrom rest_framework.permissions import AllowAny, IsAuthenticated\nfrom django.contrib.auth import get_user_model\nfrom client.models import ClientModel\nfrom adminapp.models import SchoolModel\nfrom adminapp.serializers import SchoolSerializer\n\nfrom .custompermissions import *\nfrom client.permissions import *\nfrom rest_framework.authentication import SessionAuthentication\nfrom Student.permissions import IsStudent\n\nUser = get_user_model()\n\ndef get_user_from_token(request):\n\ttoken = request.user.auth_token #auth key(token) of current user 91391f4c12b94b753d08008150d2315d9d8d7e1e\n\tprint(\"token.user_id\",token.user_id) #gives id of user (pk) 2\n\tuser = User.objects.get(id=token.user_id) #gives user name\n\treturn user\n\n# Create your views here.\n\n# class UserListView(generics.ListAPIView):\n# parser_classes = (MultiPartParser,FormParser)\n# queryset = UserModel.objects.all()\n# serializer_class = UserSerializer\n\n# class UserDetailView(generics.RetrieveAPIView):\n# parser_classes = (MultiPartParser,FormParser)\n# queryset = UserModel.objects.all()\n# serializer_class = UserSerializer\n\nclass AddArticleView(generics.CreateAPIView):\n #All authenticated users can add articles\n permission_classes = (IsAuthenticated, )\n serializer_class = ArticleSerializer\n queryset = ArticleModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\n\nclass ListArticleView(generics.ListAPIView):\n #Anyone can see 
the published Articles\n permission_classes = (AllowAny, )\n serializer_class = ArticleSerializer\n queryset = ArticleModel.objects.filter(status__exact=\"P\")\n\n\nclass ArticleDetail(generics.RetrieveAPIView):\n #anyone can see detail of published article\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = ArticleSerializer\n queryset = ArticleModel.objects.filter(status__exact=\"P\")\n\n\nclass ArticleDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n '''\n Get: superadmin can see all articles (draft, published)\n PATCH : superadmin can mark article as published by changing status = P\n Delete: superadmin can delete article.\n '''\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = UpdateArticleSerializer\n queryset = ArticleModel.objects.all()\n\n\nclass AddQuestions(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\nclass ViewQuestion(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass QuestionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsClient, )\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\nclass QuestionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = QuestionSerializer\n queryset = QuestionModel.objects.all()\n\n\nclass AddSchools(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\nclass ViewSchool(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass SchoolDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = 
(IsClient, )\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\nclass SchoolDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = SchoolSerializer\n queryset = SchoolModel.objects.all()\n\n\nclass AddBlogs(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\nclass ViewBlog(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass BlogDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsClient, )\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\nclass BlogDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = BlogSerializer\n queryset = BlogModel.objects.all()\n\n\nclass AddEventView(generics.CreateAPIView):\n #only super user can add events\n permission_classes = (IsSuperUser, )\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass ListEventView(generics.ListAPIView):\n #Anyone can see the events\n permission_classes = (AllowAny, )\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\n\nclass EventDetailView(generics.RetrieveAPIView):\n #Anyone can see the detail of events\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\nclass EventDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n #only superadmin can delete and update events\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = EventSerializer\n queryset = EventModel.objects.all()\n\nclass AddBusinessPartners(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = 
BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\nclass ViewBusinessPartner(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\n\nclass BusinessPartnerDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\nclass BusinessPartnerDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = BusinessPartnersSerializer\n queryset = BusinessPartnersModel.objects.all()\n\nclass AddKidStory(generics.CreateAPIView):\n #Students can add kidstory\n permission_classes = (IsStudent, )\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\nclass ViewKidStory(generics.ListAPIView):\n # anyone can see published kids story\n permission_classes = (AllowAny, )\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact=\"P\")\n\n\nclass KidStoryDetailView(generics.RetrieveAPIView):\n #anyone can see detail of published kids story\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = KidStorySerializer\n queryset = KidStoryModel.objects.filter(status__exact=\"P\")\n\nclass KidStoryDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n '''\n Get: superadmin can see all stories (draft, published)\n PATCH : superadmin can mark stories as published by changing status = P\n Delete: superadmin can delete stories.\n '''\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = UpdateKidsStorySerializer\n queryset = KidStoryModel.objects.all()\n\n\nclass AddKidTalent(generics.CreateAPIView):\n #Students or client can add KidsTalent\n 
permission_classes = (IsStudentORClient, )\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n def perform_create(self, serializer):\n serializer.save(user=self.request.user)\n\nclass ViewKidTalent(generics.ListAPIView):\n # anyone can see published kids talent\n permission_classes = (AllowAny, )\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact=\"P\")\n\n\nclass KidTalentDetailView(generics.RetrieveAPIView):\n #anyone can see detail of published kids talent\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = KidTalentSerializer\n queryset = KidTalentModel.objects.filter(status__exact=\"P\")\n\nclass KidTalentDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n '''\n Get: superadmin can see all kids talent (draft, published)\n PATCH : superadmin can mark kids talent as published by changing status = P\n Delete: superadmin can delete kids talent.\n '''\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = UpdateKidsTalentSerializer\n queryset = KidTalentModel.objects.all()\n\n\nclass AddCourses(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\nclass ViewCourse(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass CourseDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\nclass CourseDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = CourseSerializer\n queryset = CourseModel.objects.all()\n\n\nclass AddQuizContext(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = QuizContextSerializer\n 
queryset = QuizContextModel.objects.all()\n\nclass ViewQuizContext(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass QuizContextDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsClient, )\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\nclass QuizContextDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = QuizContextSerializer\n queryset = QuizContextModel.objects.all()\n\n\nclass AddFeedback(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\nclass ViewFeedback(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass FeedbackDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsClient, )\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\nclass FeedbackDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = ClientFeedbackSerializer\n queryset = ClientFeedBackModel.objects.all()\n\n\nclass AddWebsiteAd(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\nclass ViewWebsiteAd(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\nclass WebsiteAdDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\nclass 
WebsiteAdDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = WebsiteAdSerializer\n queryset = WebsiteAdModel.objects.all()\n\n\n\n\n\n# class AddApproval(generics.CreateAPIView):\n# permission_classes = (IsSuperUser, )\n# serializer_class = ApprovalSerializer\n# queryset = ApprovalModel.objects.all()\n\n# class ViewApproval(generics.ListAPIView):\n# permission_classes = (IsClient, )\n# serializer_class = ApprovalSerializer\n# queryset = ApprovalModel.objects.all()\n\n\n# class ApprovalDetailView(generics.RetrieveAPIView):\n# lookup_field = 'slug'\n# permission_classes = (IsClient, )\n# serializer_class = ApprovalSerializer\n# queryset = ApprovalModel.objects.all()\n\n# class ApprovalDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n# lookup_field = 'slug'\n# permission_classes = (IsSuperUser, )\n# serializer_class = ApprovalSerializer\n# queryset = ApprovalModel.objects.all()\n\n\nclass AddBusinessPromotion(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\nclass ViewBusinessPromotion(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass BusinessPromotionDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\nclass BusinessPromotionDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = BusinessPromotionSerializer\n queryset = BusinessPromotionModel.objects.all()\n\n\nclass AddTeam(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = TeamSerializer\n queryset = 
TeamModel.objects.all()\n\nclass ViewTeam(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass TeamDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\nclass TeamDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = TeamSerializer\n queryset = TeamModel.objects.all()\n\n\nclass AddAdvisoryBoard(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\nclass ViewAdvisoryBoard(generics.ListAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\nclass AdvisoryBoardDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\nclass AdvisoryBoardDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = AdvisoryBoardSerializer\n queryset = AdvisoryBoardModel.objects.all()\n\n\n\nclass AddAnnouncement(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass ListAnnouncement(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass AnnouncementDetail(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\nclass 
AnnouncementDeleteUpdate(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = AnnouncementSerializer\n queryset = AnnouncementModel.objects.all()\n\n\nclass SuperadminProfileView(APIView):\n permission_classes = (IsSuperUser, )\n\n def get(self, request, *args, **kwargs):\n user = get_user_from_token(request)\n data = {\n 'name': user.username,\n 'email': user.email\n }\n return Response(data)\n\n\n\nclass AddJobClassified(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\nclass ViewJobClassified(generics.ListAPIView):\n permission_classes = (AllowAny, )\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\nclass JobClassifiedDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (AllowAny, )\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\nclass JobClassifiedDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = (IsSuperUser, )\n serializer_class = JobClassifiedSerializer\n queryset = JobClassifiedModel.objects.all()\n\n\n\nclass AddCustomerReviews(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\nclass ViewCustomerReview(generics.ListAPIView):\n permission_classes = (IsClient, )\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\nclass CustomerReviewDetailView(generics.RetrieveAPIView):\n lookup_field = 'slug'\n permission_classes = (IsClient, )\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\nclass CustomerReviewDeleteUpdateView(generics.RetrieveUpdateDestroyAPIView):\n lookup_field = 'slug'\n permission_classes = 
(IsSuperUser, )\n serializer_class = CustomerReviewSerializer\n queryset = CustomerReviewModel.objects.all()\n\n\n\nclass ClientComplain(APIView):\n\n permission_classes = (IsSuperUser, )\n serializer = ViewComplainSerializer(many=True)\n\n\nclass clientfeedback(APIView):\n\n permission_classes = (IsSuperUser, )\n\n def get(self, request, format=None):\n feeds = ClientFeedBackModel.objects.filter(\n Class__admin = self.request.user\n )\n serializer = ClientFeedbackSerializer(feeds, many=True)\n return Response(serializer.data)\n\nclass Enroll_Course(APIView):\n permission_classes = (IsSuperUser, )\n def post(self, request, format=None):\n serializer = EnrollCourseSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data,status =status.HTTP_201_CREATED)\n \n else:\n return Response(serializer.errors,status =status.HTTP_400_BAD_REQUEST)\nclass ViewEnroll_Course(APIView):\n permission_classes = (IsSuperUser, )\n \n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Enroll_CourseModel.objects.filter(\n course = course, client = client\n )\n serializer = ViewEnrollCourseSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n\nclass DetailEnroll_CourseView(APIView):\n permission_classes = (IsSuperUser, )\n\n def get_object(self,pk):\n try:\n return Enroll_CourseModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self, request, pk, format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data)\n return Response(serializer.data)\n\n def put(self,request,pk,format=None):\n data = self.get_object(pk)\n serializer = ViewEnrollCourseSerializer(data,data = request.data)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n 
else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n \n\n def delete(self,request,pk,format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status = status.HTTP_204_NO_CONTENT)\n \n \nclass CourseDetail(APIView):\n permission_classes = (IsSuperUser, )\n\n def get_object(self, slug):\n try:\n return CourseModel.objects.get(slug=slug)\n except CourseModel.DoesNotExist:\n raise Http404\n\n def get(self, request, slug, format=None):\n data = self.get_object(slug)\n if data.classes.school.admin == self.request.user:\n serializer = ViewCourseSerializer(data)\n return Response(serializer.data)\n else:\n return Response(\n {'message':'This course does not belong to your school'}, \n status=status.HTTP_400_BAD_REQUEST\n )\n\n def put(self,request,slug,format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n serializer = CourseSerializer(data,data = request.data)\n if serializer.is_valid(raise_exception=True):\n course = serializer.validated_data.get('course', '')\n if course.client.admin == self.request.user:\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n return Response(\n {'message':'This Class does not belong to you'}, \n status=status.HTTP_400_BAD_REQUEST\n )\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n else:\n return Response(\n {'message':'This course does not belong to you'}, \n status=status.HTTP_400_BAD_REQUEST\n )\n\n def delete(self,request,slug,format=None):\n data = self.get_object(slug)\n if data.course.client.admin == self.request.user:\n data.delete()\n return Response(status = status.HTTP_204_NO_CONTENT)\n else:\n return Response(\n {'message':'This course does not belong to you'}, \n status=status.HTTP_400_BAD_REQUEST\n )\n\nclass SchoolRegistrationView(RegisterView):\n serializer_class = RegisterSchoolSerializer\n permission_classes = (IsSuperUser,)\n \nclass 
Add_question(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n def post(self,request,format=None):\n serializer = QuestionSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data,status =status.HTTP_201_CREATED)\n \n else:\n return Response(serializer.errors,status =status.HTTP_400_BAD_REQUEST)\n\nclass Viewquestion(generics.ListAPIView):\n permission_classes = (IsSuperUser, )\n \n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = QuestionModel.objects.filter(\n course_id = course)\n serializer = QuestionSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n \n\n\nclass QuestionDetail(APIView):\n permission_classes = (IsSuperUser, )\n\n def get_object(self,pk):\n try:\n return QuestionModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self,request,pk,format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data)\n return Response(serializer.data)\n \n\n def put(self,request,pk,format=None):\n data = self.get_object(pk)\n serializer = QuestionSerializer(data,data = request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n \n \n \n def delete(self,request,pk,format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status = status.HTTP_204_NO_CONTENT)\nclass SubmittedQuestionView(APIView):\n permission_classes = (IsSuperUser, )\n \n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmitquestionModel.objects.filter(\n course__course = course,\n client__client = client\n )\n serializer = Client_submittedquestionSerializer(data, many=True)\n return 
Response(serializer.data, status=status.HTTP_200_OK) \n\nclass AddonlineTest(generics.CreateAPIView):\n permission_classes = (IsSuperUser, )\n def post(self, request, format=None):\n serializer = testSerializer(data=request.data)\n print(serializer)\n if serializer.is_valid():\n course = serializer.validated_data.get('course', '')\n serializer.save()\n return Response(serializer.data,status =status.HTTP_201_CREATED)\n \n else:\n return Response(serializer.errors,status =status.HTTP_400_BAD_REQUEST)\n\nclass ViewOnlinetest(generics.ListAPIView):\n permission_classes = (IsSuperUser, )\n \n def get(self, request, *args, **kwargs):\n course = self.kwargs['course_id']\n data = Client_testModel.objects.filter(\n course_id = course)\n serializer = testSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)\n \n\n\nclass onlinetestDetail(APIView):\n permission_classes = (IsSuperUser, )\n\n def get_object(self,pk):\n try:\n return Client_testModel.objects.get(id=pk)\n except:\n raise Http404\n\n def get(self,request,pk,format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data)\n return Response(serializer.data)\n \n\n def put(self,request,pk,format=None):\n data = self.get_object(pk)\n serializer = testSerializer(data,data = request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data,status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n \n \n \n def delete(self,request,pk,format=None):\n data = self.get_object(pk)\n data.delete()\n return Response(status = status.HTTP_204_NO_CONTENT) \n\nclass SubmittedonlineTestView(APIView):\n permission_classes = (IsSuperUser, )\n \n def get(self, request, *args, **kwargs):\n admin = self.request.user\n course = self.kwargs['course_id']\n client = self.kwargs['client_id']\n data = Client_SubmittestModel.objects.filter(\n course__course = course,\n client__client = client\n )\n serializer = 
Client_submittedtestSerializer(data, many=True)\n return Response(serializer.data, status=status.HTTP_200_OK)",
"step-ids": [
182,
186,
190,
206,
212
]
}
|
[
182,
186,
190,
206,
212
] |
numbers = [3,4,6,7]
# 0 1 2 3
print(numbers)
print(numbers[1])
print(numbers[-1])
numbers[1] = 3
print(numbers)
del numbers[1]
print(numbers)
numbers.append(17)
print(numbers)
numbers.insert(2,5)
print(numbers)
numbers.sort()
print(numbers)
|
normal
|
{
"blob_id": "34d3eebf6ccb19f891ccbb16db47cd6412f1cb0f",
"index": 1155,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(numbers)\nprint(numbers[1])\nprint(numbers[-1])\n<mask token>\nprint(numbers)\ndel numbers[1]\nprint(numbers)\nnumbers.append(17)\nprint(numbers)\nnumbers.insert(2, 5)\nprint(numbers)\nnumbers.sort()\nprint(numbers)\n",
"step-3": "numbers = [3, 4, 6, 7]\nprint(numbers)\nprint(numbers[1])\nprint(numbers[-1])\nnumbers[1] = 3\nprint(numbers)\ndel numbers[1]\nprint(numbers)\nnumbers.append(17)\nprint(numbers)\nnumbers.insert(2, 5)\nprint(numbers)\nnumbers.sort()\nprint(numbers)\n",
"step-4": "numbers = [3,4,6,7]\n# 0 1 2 3\nprint(numbers)\nprint(numbers[1])\nprint(numbers[-1])\nnumbers[1] = 3\nprint(numbers)\ndel numbers[1]\nprint(numbers)\nnumbers.append(17)\nprint(numbers)\nnumbers.insert(2,5)\nprint(numbers)\nnumbers.sort()\nprint(numbers)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class Solution(object):
def shortestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
left= 0
#for right in range(len(s)-1, -1, -1):
for right in reversed(range(len(s))):
if s[right] == s[left]:
left += 1
if left == len(s):
return s
"""s[left:] will get the right part of string, [::-1] will reverse that """
return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]
sol = Solution()
print(sol.shortestPalindrome("abb"))
|
normal
|
{
"blob_id": "4d18c056845403adc9c4b5848fafa06d0fe4ff4c",
"index": 3165,
"step-1": "class Solution(object):\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution(object):\n\n def shortestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n left = 0\n for right in reversed(range(len(s))):\n if s[right] == s[left]:\n left += 1\n if left == len(s):\n return s\n \"\"\"s[left:] will get the right part of string, [::-1] will reverse that \"\"\"\n return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]\n\n\n<mask token>\n",
"step-3": "class Solution(object):\n\n def shortestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n left = 0\n for right in reversed(range(len(s))):\n if s[right] == s[left]:\n left += 1\n if left == len(s):\n return s\n \"\"\"s[left:] will get the right part of string, [::-1] will reverse that \"\"\"\n return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]\n\n\n<mask token>\nprint(sol.shortestPalindrome('abb'))\n",
"step-4": "class Solution(object):\n\n def shortestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n left = 0\n for right in reversed(range(len(s))):\n if s[right] == s[left]:\n left += 1\n if left == len(s):\n return s\n \"\"\"s[left:] will get the right part of string, [::-1] will reverse that \"\"\"\n return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]\n\n\nsol = Solution()\nprint(sol.shortestPalindrome('abb'))\n",
"step-5": "class Solution(object):\n def shortestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n left= 0\n \n #for right in range(len(s)-1, -1, -1):\n for right in reversed(range(len(s))):\n if s[right] == s[left]:\n left += 1\n \n if left == len(s):\n return s\n \"\"\"s[left:] will get the right part of string, [::-1] will reverse that \"\"\"\n return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]\n \nsol = Solution()\nprint(sol.shortestPalindrome(\"abb\"))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List
from sqlalchemy.sql.functions import current_date, current_user
from db.session import get_db
from db.models.jobs import Job
from schemas.jobs import JobCreate, ShowJob
from db.repository.jobs import create_new_job, delete_job_by_id, retrieve_job, list_jobs, update_job_by_id
from apis.version1.route_login import get_current_user_from_token
from db.models.users import User
router = APIRouter()
@router.post("/create-job", response_model=ShowJob)
def create_job(job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
owner_id = current_user.id
job = create_new_job(job=job, db=db, owner_id=owner_id)
return job
@router.put("/update/{id}")
def update_job(id: int, job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
owner_id = current_user.id
message = update_job_by_id(id, job, db, owner_id)
if message == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Job with id {id} does not exist"
)
return {"detail": "Successfully updated"}
@router.get("/get/{id}", response_model=ShowJob)
def retrieve_job_by_id(id: int, db: Session = Depends(get_db)):
job = retrieve_job(id=id, db=db)
if not job:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Job with id {id} does not exist"
)
return job
@router.get("/all", response_model=List[ShowJob])
def retrieve_all_jobs(db: Session = Depends(get_db)):
jobs = list_jobs(db=db)
return jobs
@router.delete("/delete/{id}")
def delete_job(id: int, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
owner_id = current_user.id
message = delete_job_by_id(id, db, owner_id=owner_id)
if message == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Job with id {id} does not exist"
)
return {"details": "Successfully deleted"}
|
normal
|
{
"blob_id": "e8092faed22607f9c8f18a79709022037ff647bf",
"index": 9625,
"step-1": "<mask token>\n\n\n@router.post('/create-job', response_model=ShowJob)\ndef create_job(job: JobCreate, db: Session=Depends(get_db), current_user:\n User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n job = create_new_job(job=job, db=db, owner_id=owner_id)\n return job\n\n\n@router.put('/update/{id}')\ndef update_job(id: int, job: JobCreate, db: Session=Depends(get_db),\n current_user: User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = update_job_by_id(id, job, db, owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'detail': 'Successfully updated'}\n\n\n@router.get('/get/{id}', response_model=ShowJob)\ndef retrieve_job_by_id(id: int, db: Session=Depends(get_db)):\n job = retrieve_job(id=id, db=db)\n if not job:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return job\n\n\n@router.get('/all', response_model=List[ShowJob])\ndef retrieve_all_jobs(db: Session=Depends(get_db)):\n jobs = list_jobs(db=db)\n return jobs\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@router.post('/create-job', response_model=ShowJob)\ndef create_job(job: JobCreate, db: Session=Depends(get_db), current_user:\n User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n job = create_new_job(job=job, db=db, owner_id=owner_id)\n return job\n\n\n@router.put('/update/{id}')\ndef update_job(id: int, job: JobCreate, db: Session=Depends(get_db),\n current_user: User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = update_job_by_id(id, job, db, owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'detail': 'Successfully updated'}\n\n\n@router.get('/get/{id}', response_model=ShowJob)\ndef retrieve_job_by_id(id: int, db: Session=Depends(get_db)):\n job = retrieve_job(id=id, db=db)\n if not job:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return job\n\n\n@router.get('/all', response_model=List[ShowJob])\ndef retrieve_all_jobs(db: Session=Depends(get_db)):\n jobs = list_jobs(db=db)\n return jobs\n\n\n@router.delete('/delete/{id}')\ndef delete_job(id: int, db: Session=Depends(get_db), current_user: User=\n Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = delete_job_by_id(id, db, owner_id=owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'details': 'Successfully deleted'}\n",
"step-3": "<mask token>\nrouter = APIRouter()\n\n\n@router.post('/create-job', response_model=ShowJob)\ndef create_job(job: JobCreate, db: Session=Depends(get_db), current_user:\n User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n job = create_new_job(job=job, db=db, owner_id=owner_id)\n return job\n\n\n@router.put('/update/{id}')\ndef update_job(id: int, job: JobCreate, db: Session=Depends(get_db),\n current_user: User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = update_job_by_id(id, job, db, owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'detail': 'Successfully updated'}\n\n\n@router.get('/get/{id}', response_model=ShowJob)\ndef retrieve_job_by_id(id: int, db: Session=Depends(get_db)):\n job = retrieve_job(id=id, db=db)\n if not job:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return job\n\n\n@router.get('/all', response_model=List[ShowJob])\ndef retrieve_all_jobs(db: Session=Depends(get_db)):\n jobs = list_jobs(db=db)\n return jobs\n\n\n@router.delete('/delete/{id}')\ndef delete_job(id: int, db: Session=Depends(get_db), current_user: User=\n Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = delete_job_by_id(id, db, owner_id=owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'details': 'Successfully deleted'}\n",
"step-4": "from fastapi import APIRouter, Depends, HTTPException, status\nfrom sqlalchemy.orm import Session\nfrom typing import List\nfrom sqlalchemy.sql.functions import current_date, current_user\nfrom db.session import get_db\nfrom db.models.jobs import Job\nfrom schemas.jobs import JobCreate, ShowJob\nfrom db.repository.jobs import create_new_job, delete_job_by_id, retrieve_job, list_jobs, update_job_by_id\nfrom apis.version1.route_login import get_current_user_from_token\nfrom db.models.users import User\nrouter = APIRouter()\n\n\n@router.post('/create-job', response_model=ShowJob)\ndef create_job(job: JobCreate, db: Session=Depends(get_db), current_user:\n User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n job = create_new_job(job=job, db=db, owner_id=owner_id)\n return job\n\n\n@router.put('/update/{id}')\ndef update_job(id: int, job: JobCreate, db: Session=Depends(get_db),\n current_user: User=Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = update_job_by_id(id, job, db, owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return {'detail': 'Successfully updated'}\n\n\n@router.get('/get/{id}', response_model=ShowJob)\ndef retrieve_job_by_id(id: int, db: Session=Depends(get_db)):\n job = retrieve_job(id=id, db=db)\n if not job:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job with id {id} does not exist')\n return job\n\n\n@router.get('/all', response_model=List[ShowJob])\ndef retrieve_all_jobs(db: Session=Depends(get_db)):\n jobs = list_jobs(db=db)\n return jobs\n\n\n@router.delete('/delete/{id}')\ndef delete_job(id: int, db: Session=Depends(get_db), current_user: User=\n Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = delete_job_by_id(id, db, owner_id=owner_id)\n if message == 0:\n raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'Job 
with id {id} does not exist')\n return {'details': 'Successfully deleted'}\n",
"step-5": "from fastapi import APIRouter, Depends, HTTPException, status\nfrom sqlalchemy.orm import Session\nfrom typing import List\n\nfrom sqlalchemy.sql.functions import current_date, current_user\n\nfrom db.session import get_db\nfrom db.models.jobs import Job\nfrom schemas.jobs import JobCreate, ShowJob\nfrom db.repository.jobs import create_new_job, delete_job_by_id, retrieve_job, list_jobs, update_job_by_id\nfrom apis.version1.route_login import get_current_user_from_token\nfrom db.models.users import User\n\n\nrouter = APIRouter()\n\n\n@router.post(\"/create-job\", response_model=ShowJob)\ndef create_job(job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):\n owner_id = current_user.id\n job = create_new_job(job=job, db=db, owner_id=owner_id)\n return job\n\n\n@router.put(\"/update/{id}\")\ndef update_job(id: int, job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = update_job_by_id(id, job, db, owner_id)\n\n if message == 0:\n raise HTTPException(\n status_code=status.HTTP_404_NOT_FOUND,\n detail=f\"Job with id {id} does not exist\"\n )\n \n return {\"detail\": \"Successfully updated\"}\n\n\n@router.get(\"/get/{id}\", response_model=ShowJob)\ndef retrieve_job_by_id(id: int, db: Session = Depends(get_db)):\n job = retrieve_job(id=id, db=db)\n if not job:\n raise HTTPException(\n status_code=status.HTTP_404_NOT_FOUND,\n detail=f\"Job with id {id} does not exist\"\n )\n return job\n\n\n@router.get(\"/all\", response_model=List[ShowJob])\ndef retrieve_all_jobs(db: Session = Depends(get_db)):\n jobs = list_jobs(db=db)\n return jobs\n\n\n@router.delete(\"/delete/{id}\")\ndef delete_job(id: int, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):\n owner_id = current_user.id\n message = delete_job_by_id(id, db, owner_id=owner_id)\n if message == 0:\n raise HTTPException(\n 
status_code=status.HTTP_404_NOT_FOUND,\n detail=f\"Job with id {id} does not exist\"\n )\n return {\"details\": \"Successfully deleted\"}\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import backref
db = SQLAlchemy()
def connect_db(app):
"""Connect to database."""
db.app = app
db.init_app(app)
"""Models for Blogly."""
class User(db.Model):
__tablename__= "users"
id = db.Column(db.Integer, primary_key=True, autoincrement = True)
first_name = db.Column(db.String(50), nullable = False)
last_name = db.Column(db.String(50), nullable = False)
image_url = db.Column(db.String)
class Post(db.Model):
__tablename__ = "posts"
id = db.Column(db.Integer, primary_key = True, autoincrement = True)
title = db.Column(db.String(50), nullable = False)
content = db.Column(db.String(250), nullable = False)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
db.relationship(User, backref="posts")
|
normal
|
{
"blob_id": "9ae92d6ee4b82f7ed335c47d53567b817140a51c",
"index": 8922,
"step-1": "<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n first_name = db.Column(db.String(50), nullable=False)\n last_name = db.Column(db.String(50), nullable=False)\n image_url = db.Column(db.String)\n\n\nclass Post(db.Model):\n __tablename__ = 'posts'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n title = db.Column(db.String(50), nullable=False)\n content = db.Column(db.String(250), nullable=False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'))\n db.relationship(User, backref='posts')\n",
"step-2": "<mask token>\n\n\ndef connect_db(app):\n \"\"\"Connect to database.\"\"\"\n db.app = app\n db.init_app(app)\n\n\n<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n first_name = db.Column(db.String(50), nullable=False)\n last_name = db.Column(db.String(50), nullable=False)\n image_url = db.Column(db.String)\n\n\nclass Post(db.Model):\n __tablename__ = 'posts'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n title = db.Column(db.String(50), nullable=False)\n content = db.Column(db.String(250), nullable=False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'))\n db.relationship(User, backref='posts')\n",
"step-3": "<mask token>\ndb = SQLAlchemy()\n\n\ndef connect_db(app):\n \"\"\"Connect to database.\"\"\"\n db.app = app\n db.init_app(app)\n\n\n<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n first_name = db.Column(db.String(50), nullable=False)\n last_name = db.Column(db.String(50), nullable=False)\n image_url = db.Column(db.String)\n\n\nclass Post(db.Model):\n __tablename__ = 'posts'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n title = db.Column(db.String(50), nullable=False)\n content = db.Column(db.String(250), nullable=False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'))\n db.relationship(User, backref='posts')\n",
"step-4": "from flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy.orm import backref\ndb = SQLAlchemy()\n\n\ndef connect_db(app):\n \"\"\"Connect to database.\"\"\"\n db.app = app\n db.init_app(app)\n\n\n<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n first_name = db.Column(db.String(50), nullable=False)\n last_name = db.Column(db.String(50), nullable=False)\n image_url = db.Column(db.String)\n\n\nclass Post(db.Model):\n __tablename__ = 'posts'\n id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n title = db.Column(db.String(50), nullable=False)\n content = db.Column(db.String(250), nullable=False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'))\n db.relationship(User, backref='posts')\n",
"step-5": "from flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy.orm import backref\n\ndb = SQLAlchemy()\n\n\ndef connect_db(app):\n \"\"\"Connect to database.\"\"\"\n db.app = app\n db.init_app(app)\n\n\"\"\"Models for Blogly.\"\"\"\nclass User(db.Model):\n __tablename__= \"users\"\n\n id = db.Column(db.Integer, primary_key=True, autoincrement = True)\n first_name = db.Column(db.String(50), nullable = False)\n last_name = db.Column(db.String(50), nullable = False)\n image_url = db.Column(db.String)\n\nclass Post(db.Model):\n __tablename__ = \"posts\"\n\n id = db.Column(db.Integer, primary_key = True, autoincrement = True)\n title = db.Column(db.String(50), nullable = False)\n content = db.Column(db.String(250), nullable = False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'))\n\n db.relationship(User, backref=\"posts\")\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import requests
from bs4 import BeautifulSoup
import urllib.request
url = 'http://www.dytt8.net/'
user = {'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
}
html = urllib.request.urlopen(url)
html.encoding = 'utf-8'
soup = BeautifulSoup(html.read())
for i in soup.find_all('a'):
if 'href' in i.attrs:
print(i.attrs['href'])
|
normal
|
{
"blob_id": "2e571e3412bf9f3a42bf87976ea9a5ec68d5815c",
"index": 9056,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n",
"step-3": "<mask token>\nurl = 'http://www.dytt8.net/'\nuser = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'\n }\nhtml = urllib.request.urlopen(url)\nhtml.encoding = 'utf-8'\nsoup = BeautifulSoup(html.read())\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n",
"step-4": "import requests\nfrom bs4 import BeautifulSoup\nimport urllib.request\nurl = 'http://www.dytt8.net/'\nuser = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'\n }\nhtml = urllib.request.urlopen(url)\nhtml.encoding = 'utf-8'\nsoup = BeautifulSoup(html.read())\nfor i in soup.find_all('a'):\n if 'href' in i.attrs:\n print(i.attrs['href'])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AggregationTypes(Enum):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AggregationTypes(Enum):
NO_AGG = 'NO-AGG'
STATIC = 'STATIC'
SUB_HOUR = 'SUB-HOUR'
DYNAMIC = 'DYNAMIC'
<|reserved_special_token_1|>
from enum import Enum
class AggregationTypes(Enum):
NO_AGG = 'NO-AGG'
STATIC = 'STATIC'
SUB_HOUR = 'SUB-HOUR'
DYNAMIC = 'DYNAMIC'
|
flexible
|
{
"blob_id": "436b89b91aed14525f847e6488b452b7ca0e1b70",
"index": 5322,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AggregationTypes(Enum):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AggregationTypes(Enum):\n NO_AGG = 'NO-AGG'\n STATIC = 'STATIC'\n SUB_HOUR = 'SUB-HOUR'\n DYNAMIC = 'DYNAMIC'\n",
"step-4": "from enum import Enum\n\n\nclass AggregationTypes(Enum):\n NO_AGG = 'NO-AGG'\n STATIC = 'STATIC'\n SUB_HOUR = 'SUB-HOUR'\n DYNAMIC = 'DYNAMIC'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
N, D = map(int, input().split())
ans = 0
D2 = D*D
for i in range(N):
x, y = map(int, input().split())
if (x*x+y*y) <= D2:
ans += 1
print(ans)
|
normal
|
{
"blob_id": "947055d1d6acc50e1722d79ea30e327414cd9c41",
"index": 8523,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n x, y = map(int, input().split())\n if x * x + y * y <= D2:\n ans += 1\nprint(ans)\n",
"step-3": "N, D = map(int, input().split())\nans = 0\nD2 = D * D\nfor i in range(N):\n x, y = map(int, input().split())\n if x * x + y * y <= D2:\n ans += 1\nprint(ans)\n",
"step-4": "N, D = map(int, input().split())\nans = 0\nD2 = D*D\nfor i in range(N):\n x, y = map(int, input().split())\n if (x*x+y*y) <= D2:\n ans += 1\n\nprint(ans)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
img = cv2.imread('test_image.png', 0)
res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)
thresh = cv2.threshold(img, 50, 255, 0)[1]
_, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.
CHAIN_APPROX_SIMPLE)
for cnt in contours:
cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.
randint(0, 255), random.randint(0, 255)), -1)
cv2.imshow('res', res)
cv2.waitKey(0)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
img = cv2.imread('test_image.png', 0)
res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)
thresh = cv2.threshold(img, 50, 255, 0)[1]
_, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.
CHAIN_APPROX_SIMPLE)
for cnt in contours:
cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.
randint(0, 255), random.randint(0, 255)), -1)
cv2.imshow('res', res)
cv2.waitKey(0)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import cv2
import numpy as np
import random
def main():
img = cv2.imread('test_image.png', 0)
res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)
thresh = cv2.threshold(img, 50, 255, 0)[1]
_, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.
CHAIN_APPROX_SIMPLE)
for cnt in contours:
cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.
randint(0, 255), random.randint(0, 255)), -1)
cv2.imshow('res', res)
cv2.waitKey(0)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import cv2
import numpy as np
import random
def main():
img = cv2.imread('test_image.png',0)
res = np.zeros((img.shape[0],img.shape[1],3),np.uint8)
thresh = cv2.threshold(img, 50, 255, 0)[1]
_, contours,_ = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
cv2.drawContours(res, [cnt],0,(random.randint(0,255),random.randint(0,255) ,random.randint(0,255)),-1)
cv2.imshow('res',res)
cv2.waitKey(0)
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "1babf9f27e6792d2a1c2545a1e3bcd08fefa0975",
"index": 5639,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import cv2\nimport numpy as np\nimport random\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import cv2\nimport numpy as np \nimport random\n\n\ndef main():\n img = cv2.imread('test_image.png',0)\n res = np.zeros((img.shape[0],img.shape[1],3),np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours,_ = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt],0,(random.randint(0,255),random.randint(0,255) ,random.randint(0,255)),-1)\n cv2.imshow('res',res)\n cv2.waitKey(0)\n \nif __name__ == \"__main__\":\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
print(4 / 2, 4 / 3, 4 / 4)
print(5 / 2, 5 / 3, 5 / 4)
print(4 // 2, 4 // 3, 4 // 4)
print(5 // 2, 5 // 3, 5 // 4)
print(4.0 / 2, 4 / 3.0, 4.0 / float(4))
print(5.0 / 2, 5 / 3.0, 5.0 / float(4))
print(4.0 // 2, 4 // 3.0, 4.0 // float(4))
print(5.0 // 2, 5 // 3.0, 5.0 // float(4))
|
normal
|
{
"blob_id": "988e1f0631c434cbbb6d6e973792a65ebbd9405e",
"index": 9474,
"step-1": "<mask token>\n",
"step-2": "print(4 / 2, 4 / 3, 4 / 4)\nprint(5 / 2, 5 / 3, 5 / 4)\nprint(4 // 2, 4 // 3, 4 // 4)\nprint(5 // 2, 5 // 3, 5 // 4)\nprint(4.0 / 2, 4 / 3.0, 4.0 / float(4))\nprint(5.0 / 2, 5 / 3.0, 5.0 / float(4))\nprint(4.0 // 2, 4 // 3.0, 4.0 // float(4))\nprint(5.0 // 2, 5 // 3.0, 5.0 // float(4))\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from . import utility
from . import regular_gram_schmidt as RGSC
from . import custom_gram_schmidt as CGSC
def RegularGramSchmidt():
while True:
vectors = utility.get_matrix_from_user(5)
if len(vectors) > 0:
calc = RGSC.RegularGramSchmidt()
result_matrix = calc.calc(vectors)
if result_matrix is not None:
print(result_matrix)
utility.print_if_matrix_is_basis(result_matrix)
answer = input("Start over? (Y/n)")
if answer.lower() == 'n':
break
else:
continue
def CustomGramSchmidt():
while True:
print("Enter the inner product matrix 3x3")
inner_product_matrix = utility.get_matrix_from_user(3, True)
calc = CGSC.CustomGramSchmidt(inner_product_matrix)
print("Enter vectors from R(3)")
vectors = utility.get_matrix_from_user(3)
if len(vectors) > 0:
result_matrix = calc.calc(vectors)
if result_matrix is not None:
print(result_matrix)
utility.print_if_matrix_is_basis(result_matrix)
answer = input("Start over? (Y/n)")
if answer.lower() == 'n':
break
else:
continue
|
normal
|
{
"blob_id": "b6b3d94db62b47aac9bf78e8224a38ccff9335e3",
"index": 7591,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef RegularGramSchmidt():\n while True:\n vectors = utility.get_matrix_from_user(5)\n if len(vectors) > 0:\n calc = RGSC.RegularGramSchmidt()\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input('Start over? (Y/n)')\n if answer.lower() == 'n':\n break\n else:\n continue\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef RegularGramSchmidt():\n while True:\n vectors = utility.get_matrix_from_user(5)\n if len(vectors) > 0:\n calc = RGSC.RegularGramSchmidt()\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input('Start over? (Y/n)')\n if answer.lower() == 'n':\n break\n else:\n continue\n\n\ndef CustomGramSchmidt():\n while True:\n print('Enter the inner product matrix 3x3')\n inner_product_matrix = utility.get_matrix_from_user(3, True)\n calc = CGSC.CustomGramSchmidt(inner_product_matrix)\n print('Enter vectors from R(3)')\n vectors = utility.get_matrix_from_user(3)\n if len(vectors) > 0:\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input('Start over? (Y/n)')\n if answer.lower() == 'n':\n break\n else:\n continue\n",
"step-4": "from . import utility\nfrom . import regular_gram_schmidt as RGSC\nfrom . import custom_gram_schmidt as CGSC\n\n\ndef RegularGramSchmidt():\n while True:\n vectors = utility.get_matrix_from_user(5)\n if len(vectors) > 0:\n calc = RGSC.RegularGramSchmidt()\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input('Start over? (Y/n)')\n if answer.lower() == 'n':\n break\n else:\n continue\n\n\ndef CustomGramSchmidt():\n while True:\n print('Enter the inner product matrix 3x3')\n inner_product_matrix = utility.get_matrix_from_user(3, True)\n calc = CGSC.CustomGramSchmidt(inner_product_matrix)\n print('Enter vectors from R(3)')\n vectors = utility.get_matrix_from_user(3)\n if len(vectors) > 0:\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input('Start over? (Y/n)')\n if answer.lower() == 'n':\n break\n else:\n continue\n",
"step-5": "from . import utility\nfrom . import regular_gram_schmidt as RGSC\nfrom . import custom_gram_schmidt as CGSC\n\n\ndef RegularGramSchmidt():\n while True:\n vectors = utility.get_matrix_from_user(5)\n if len(vectors) > 0:\n calc = RGSC.RegularGramSchmidt()\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input(\"Start over? (Y/n)\")\n if answer.lower() == 'n':\n break\n else:\n continue\n\n\ndef CustomGramSchmidt():\n while True:\n print(\"Enter the inner product matrix 3x3\")\n inner_product_matrix = utility.get_matrix_from_user(3, True)\n calc = CGSC.CustomGramSchmidt(inner_product_matrix)\n print(\"Enter vectors from R(3)\")\n vectors = utility.get_matrix_from_user(3)\n if len(vectors) > 0:\n result_matrix = calc.calc(vectors)\n if result_matrix is not None:\n print(result_matrix)\n utility.print_if_matrix_is_basis(result_matrix)\n answer = input(\"Start over? (Y/n)\")\n if answer.lower() == 'n':\n break\n else:\n continue\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python2.7
from __future__ import print_function, division
import numpy as np
import matplotlib
import os
#checks if there is a display to use.
if os.environ.get('DISPLAY') is None:
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.colors as clr
import dtk
import sys
import time
import numpy.random
from matplotlib.colors import LogNorm
from scipy.optimize import minimize
from calc_ngal import *
from generate_parameter_dist import *
from zmr import ZMR
from matplotlib import rc
rc('text', usetex=True)
rc('font', **{'family':'serif', 'serif':['Computer Modern Roman'], })
rc('font', size=18)
def load_clusters(file_name):
if file_name not in load_clusters._cache:
cluster_data = ClusterData()
cluster_data.load_file(file_name)
else:
cluster_data = load_clusters._cache[file_name]
return cluster_data
load_clusters._cache = {}
def get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=False, manual_calc=False):
param = dtk.Param(param_fname)
cluster_loc = param.get_string('cluster_loc')
if cluster_num is None:
cluster_num = param.get_int('cluster_load_num')
zmrh5_loc = param.get_string('zmrh5_loc')
zmr_sdss = ZMR(zmrh5_loc)
zmr_fit = ZMR("output/"+param_fname+"/zmr_lkhd_cores.param")
m_bins = zmr_fit.m_bins
r_bins = zmr_fit.r_bins
zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal() # only one z-bin, so we don't select it out
zmr_core_ngal = zmr_core_ngal[0]
zmr_core_ngal_err = zmr_core_ngal_err[0]
zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()
zmr_sdss_ngal = zmr_sdss_ngal[0]
zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]
if manual_calc:
model_fit_fname = "figs/"+param_fname+"/calc_likelihood_bounds.py/grid_fit_param.txt"
model_fit = load_fit_limits(model_fit_fname)
m_infall = 10**model_fit['mi']
if 'rd' in model_fit:
# print(model_fit['rd'])
r_disrupt = model_fit['rd']/1000.0 #convert to mpc/h from kpc/h
else:
r_disrupt = np.inf
# print("\ncalculating ngal for ", param_fname)
# print("\tmodel_fit_fname:", model_fit_fname)
# print("\tmodel params: {:.2e} {:.3f}".format(m_infall, r_disrupt))
print(cluster_loc)
cluster_data = load_clusters(cluster_loc)
if cluster_num == -1:
cluster_num = cluster_data.num
cluster_ngal = np.zeros(cluster_num)
cluster_m_i = np.zeros(cluster_num)
for i in range(0, cluster_num):
mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)
cluster_m_i[i] = mass_index
cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]
ngal_mean = np.zeros(len(m_bins)-1)
ngal_err = np.zeros(len(m_bins)-1)
ngal_std = np.zeros(len(m_bins)-1)
for i in range(0, len(m_bins)-1):
slct = cluster_m_i == i
ngal_mean[i] = np.mean(cluster_ngal[slct])
ngal_std[i] = np.std(cluster_ngal[slct])
ngal_err[i] = ngal_std[i]/np.sqrt(np.sum(slct))
# print("{:.2e}->{:.2e}: {}".format(m_bins[i], m_bins[i+1], np.sum(slct)))
plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label='Ngal recalc')
if plot_fit:
plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)
plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal-zmr_core_ngal_err, zmr_core_ngal+zmr_core_ngal_err, color=color, alpha=0.3)
offset_amount = 1.025
if spider:
markerfacecolor='None'
markeredgecolor=color
xaxis_offset=offset_amount
lw = 1
else:
markerfacecolor=color
markeredgecolor='None'
xaxis_offset=1./offset_amount
lw = 2
# remove problematic 2.5 L* low mass cluster in the spider sample
if "mstar-1" in param_fname and "spider" in param_fname:
print("SPIDERSS!: ", zmr_sdss_ngal)
zmr_sdss_ngal[zmr_sdss_ngal < 0.1 ] = np.nan
plt.errorbar(dtk.bins_avg(m_bins)*xaxis_offset, zmr_sdss_ngal,
yerr=zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,
markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)
# plt.fill_between(dtk.bins_avg(m_bins), ngal_mean-ngal_err, ngal_mean+ngal_err, color=color, alpha=0.3)
plt.yscale('log')
plt.xscale('log')
# plt.legend(loc='best')
def format_plot():
p4 = plt.plot([],[], 'tab:purple', lw=5, label=r'{:1.2f}~L$_*$'.format(0.4))
p3 = plt.plot([],[], 'tab:red', lw=5, label=r'{:1.2f}~L$_*$'.format(0.63))
p2 = plt.plot([],[], 'tab:green', lw=5, label=r'{:1.2f}~L$_*$'.format(1.0))
p12 = plt.plot([],[], 'tab:orange',lw=5, label=r'{:1.2f}~L$_*$'.format(1.58))
p1 = plt.plot([],[], 'tab:blue',lw=5, label=r'{:1.2f}~L$_*$'.format(2.5))
plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', label="redMaPPer", capsize=0)
plt.plot([], [], color='k', label="Core Model")
# plt.errorbar([], [], yerr=[], fmt='o', lw=1, color='k', markerfacecolor='none', label='SPIDERS clusters', capsize=0)
plt.legend(ncol=2, loc='best', framealpha=0.0)
plt.xlabel(r'M$_{200c}$ [h$^{-1}$ M$_\odot$]')
plt.ylabel(r'Projected N$_{\rm{gal}}$')
plt.ylim([1e-1, 3e3])
plt.xlim([1e14, 5e15])
plt.tight_layout()
def plot_ngal_fits():
get_ngal_fit("params/cfn/simet/mstar1/mean/a3_rd.param", None, 'c')
get_ngal_fit("params/cfn/simet/mstar0.5/mean/a3_rd.param", None, 'g')
get_ngal_fit("params/cfn/simet/mstar0/mean/a3_rd.param", None, 'b')
get_ngal_fit("params/cfn/simet/mstar-1/mean/a3_rd.param", None, 'r')
#just spider points
get_ngal_fit("params/cfn/spider/mstar1/mean/spider_rd.param", None, 'c', plot_fit=False, spider=True)
get_ngal_fit("params/cfn/spider/mstar0.5/mean/spider_rd.param", None, 'g', plot_fit=False, spider=True)
get_ngal_fit("params/cfn/spider/mstar0/mean/spider_rd.param", None, 'b', plot_fit=False, spider=True)
get_ngal_fit("params/cfn/spider/mstar-1/mean/spider_rd.param", None, 'r', plot_fit=False, spider=True)
# get_ngal_fit("params/cfn/spider/mstar0/mean/spider_rd.param", None, 'm', plot_fit=False, spider=True)
# get_ngal_fit("params/cfn/spider/mstar0/mean/bcg_rd.param", None, 'c', plot_fit=False, spider=True)
format_plot()
def plot_ngal_fits2(pattern, mstars):
color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan']
for mstar, color in zip(mstars, color_cycle):
get_ngal_fit(pattern.replace("${mstarval}", mstar), None, color)
format_plot()
if __name__ == "__main__":
if len(sys.argv) > 2:
plot_name = sys.argv[1]
else:
plot_name = "OR_McClintock2019"
mstars = ['-1', '-0.5', '0', '0.5', '1']
if plot_name == "OR_Simet2017":
pattern = 'params/rmba/auto/make_all_OR.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'
plot_ngal_fits2(pattern, mstars)
elif plot_name == "OR_McClintock2019":
pattern = 'params/rmba/auto/make_all_OR.McClintock.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'
plot_ngal_fits2(pattern, mstars)
# plot_ngal_fits()
dtk.save_figs("figs/"+__file__+"/"+plot_name+"/", extension='.pdf')
plt.show()
|
normal
|
{
"blob_id": "3acbb37809462ee69ff8792b4ad86b31dba5d630",
"index": 3821,
"step-1": "<mask token>\n\n\ndef load_clusters(file_name):\n if file_name not in load_clusters._cache:\n cluster_data = ClusterData()\n cluster_data.load_file(file_name)\n else:\n cluster_data = load_clusters._cache[file_name]\n return cluster_data\n\n\n<mask token>\n\n\ndef get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=\n False, manual_calc=False):\n param = dtk.Param(param_fname)\n cluster_loc = param.get_string('cluster_loc')\n if cluster_num is None:\n cluster_num = param.get_int('cluster_load_num')\n zmrh5_loc = param.get_string('zmrh5_loc')\n zmr_sdss = ZMR(zmrh5_loc)\n zmr_fit = ZMR('output/' + param_fname + '/zmr_lkhd_cores.param')\n m_bins = zmr_fit.m_bins\n r_bins = zmr_fit.r_bins\n zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal()\n zmr_core_ngal = zmr_core_ngal[0]\n zmr_core_ngal_err = zmr_core_ngal_err[0]\n zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()\n zmr_sdss_ngal = zmr_sdss_ngal[0]\n zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]\n if manual_calc:\n model_fit_fname = ('figs/' + param_fname +\n '/calc_likelihood_bounds.py/grid_fit_param.txt')\n model_fit = load_fit_limits(model_fit_fname)\n m_infall = 10 ** model_fit['mi']\n if 'rd' in model_fit:\n r_disrupt = model_fit['rd'] / 1000.0\n else:\n r_disrupt = np.inf\n print(cluster_loc)\n cluster_data = load_clusters(cluster_loc)\n if cluster_num == -1:\n cluster_num = cluster_data.num\n cluster_ngal = np.zeros(cluster_num)\n cluster_m_i = np.zeros(cluster_num)\n for i in range(0, cluster_num):\n mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)\n cluster_m_i[i] = mass_index\n cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]\n ngal_mean = np.zeros(len(m_bins) - 1)\n ngal_err = np.zeros(len(m_bins) - 1)\n ngal_std = np.zeros(len(m_bins) - 1)\n for i in range(0, len(m_bins) - 1):\n slct = cluster_m_i == i\n ngal_mean[i] = np.mean(cluster_ngal[slct])\n ngal_std[i] = np.std(cluster_ngal[slct])\n ngal_err[i] = ngal_std[i] / np.sqrt(np.sum(slct))\n 
plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label=\n 'Ngal recalc')\n if plot_fit:\n plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)\n plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal -\n zmr_core_ngal_err, zmr_core_ngal + zmr_core_ngal_err, color=\n color, alpha=0.3)\n offset_amount = 1.025\n if spider:\n markerfacecolor = 'None'\n markeredgecolor = color\n xaxis_offset = offset_amount\n lw = 1\n else:\n markerfacecolor = color\n markeredgecolor = 'None'\n xaxis_offset = 1.0 / offset_amount\n lw = 2\n if 'mstar-1' in param_fname and 'spider' in param_fname:\n print('SPIDERSS!: ', zmr_sdss_ngal)\n zmr_sdss_ngal[zmr_sdss_ngal < 0.1] = np.nan\n plt.errorbar(dtk.bins_avg(m_bins) * xaxis_offset, zmr_sdss_ngal, yerr=\n zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,\n markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)\n plt.yscale('log')\n plt.xscale('log')\n\n\ndef format_plot():\n p4 = plt.plot([], [], 'tab:purple', lw=5, label='{:1.2f}~L$_*$'.format(0.4)\n )\n p3 = plt.plot([], [], 'tab:red', lw=5, label='{:1.2f}~L$_*$'.format(0.63))\n p2 = plt.plot([], [], 'tab:green', lw=5, label='{:1.2f}~L$_*$'.format(1.0))\n p12 = plt.plot([], [], 'tab:orange', lw=5, label='{:1.2f}~L$_*$'.format\n (1.58))\n p1 = plt.plot([], [], 'tab:blue', lw=5, label='{:1.2f}~L$_*$'.format(2.5))\n plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', label=\n 'redMaPPer', capsize=0)\n plt.plot([], [], color='k', label='Core Model')\n plt.legend(ncol=2, loc='best', framealpha=0.0)\n plt.xlabel('M$_{200c}$ [h$^{-1}$ M$_\\\\odot$]')\n plt.ylabel('Projected N$_{\\\\rm{gal}}$')\n plt.ylim([0.1, 3000.0])\n plt.xlim([100000000000000.0, 5000000000000000.0])\n plt.tight_layout()\n\n\ndef plot_ngal_fits():\n get_ngal_fit('params/cfn/simet/mstar1/mean/a3_rd.param', None, 'c')\n get_ngal_fit('params/cfn/simet/mstar0.5/mean/a3_rd.param', None, 'g')\n get_ngal_fit('params/cfn/simet/mstar0/mean/a3_rd.param', None, 'b')\n 
get_ngal_fit('params/cfn/simet/mstar-1/mean/a3_rd.param', None, 'r')\n get_ngal_fit('params/cfn/spider/mstar1/mean/spider_rd.param', None, 'c',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0.5/mean/spider_rd.param', None,\n 'g', plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0/mean/spider_rd.param', None, 'b',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar-1/mean/spider_rd.param', None,\n 'r', plot_fit=False, spider=True)\n format_plot()\n\n\ndef plot_ngal_fits2(pattern, mstars):\n color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red',\n 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive',\n 'tab:cyan']\n for mstar, color in zip(mstars, color_cycle):\n get_ngal_fit(pattern.replace('${mstarval}', mstar), None, color)\n format_plot()\n\n\n<mask token>\n",
"step-2": "<mask token>\nif os.environ.get('DISPLAY') is None:\n matplotlib.use('Agg')\n<mask token>\nrc('text', usetex=True)\nrc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})\nrc('font', size=18)\n\n\ndef load_clusters(file_name):\n if file_name not in load_clusters._cache:\n cluster_data = ClusterData()\n cluster_data.load_file(file_name)\n else:\n cluster_data = load_clusters._cache[file_name]\n return cluster_data\n\n\n<mask token>\n\n\ndef get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=\n False, manual_calc=False):\n param = dtk.Param(param_fname)\n cluster_loc = param.get_string('cluster_loc')\n if cluster_num is None:\n cluster_num = param.get_int('cluster_load_num')\n zmrh5_loc = param.get_string('zmrh5_loc')\n zmr_sdss = ZMR(zmrh5_loc)\n zmr_fit = ZMR('output/' + param_fname + '/zmr_lkhd_cores.param')\n m_bins = zmr_fit.m_bins\n r_bins = zmr_fit.r_bins\n zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal()\n zmr_core_ngal = zmr_core_ngal[0]\n zmr_core_ngal_err = zmr_core_ngal_err[0]\n zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()\n zmr_sdss_ngal = zmr_sdss_ngal[0]\n zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]\n if manual_calc:\n model_fit_fname = ('figs/' + param_fname +\n '/calc_likelihood_bounds.py/grid_fit_param.txt')\n model_fit = load_fit_limits(model_fit_fname)\n m_infall = 10 ** model_fit['mi']\n if 'rd' in model_fit:\n r_disrupt = model_fit['rd'] / 1000.0\n else:\n r_disrupt = np.inf\n print(cluster_loc)\n cluster_data = load_clusters(cluster_loc)\n if cluster_num == -1:\n cluster_num = cluster_data.num\n cluster_ngal = np.zeros(cluster_num)\n cluster_m_i = np.zeros(cluster_num)\n for i in range(0, cluster_num):\n mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)\n cluster_m_i[i] = mass_index\n cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]\n ngal_mean = np.zeros(len(m_bins) - 1)\n ngal_err = np.zeros(len(m_bins) - 1)\n ngal_std = np.zeros(len(m_bins) - 1)\n for i in 
range(0, len(m_bins) - 1):\n slct = cluster_m_i == i\n ngal_mean[i] = np.mean(cluster_ngal[slct])\n ngal_std[i] = np.std(cluster_ngal[slct])\n ngal_err[i] = ngal_std[i] / np.sqrt(np.sum(slct))\n plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label=\n 'Ngal recalc')\n if plot_fit:\n plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)\n plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal -\n zmr_core_ngal_err, zmr_core_ngal + zmr_core_ngal_err, color=\n color, alpha=0.3)\n offset_amount = 1.025\n if spider:\n markerfacecolor = 'None'\n markeredgecolor = color\n xaxis_offset = offset_amount\n lw = 1\n else:\n markerfacecolor = color\n markeredgecolor = 'None'\n xaxis_offset = 1.0 / offset_amount\n lw = 2\n if 'mstar-1' in param_fname and 'spider' in param_fname:\n print('SPIDERSS!: ', zmr_sdss_ngal)\n zmr_sdss_ngal[zmr_sdss_ngal < 0.1] = np.nan\n plt.errorbar(dtk.bins_avg(m_bins) * xaxis_offset, zmr_sdss_ngal, yerr=\n zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,\n markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)\n plt.yscale('log')\n plt.xscale('log')\n\n\ndef format_plot():\n p4 = plt.plot([], [], 'tab:purple', lw=5, label='{:1.2f}~L$_*$'.format(0.4)\n )\n p3 = plt.plot([], [], 'tab:red', lw=5, label='{:1.2f}~L$_*$'.format(0.63))\n p2 = plt.plot([], [], 'tab:green', lw=5, label='{:1.2f}~L$_*$'.format(1.0))\n p12 = plt.plot([], [], 'tab:orange', lw=5, label='{:1.2f}~L$_*$'.format\n (1.58))\n p1 = plt.plot([], [], 'tab:blue', lw=5, label='{:1.2f}~L$_*$'.format(2.5))\n plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', label=\n 'redMaPPer', capsize=0)\n plt.plot([], [], color='k', label='Core Model')\n plt.legend(ncol=2, loc='best', framealpha=0.0)\n plt.xlabel('M$_{200c}$ [h$^{-1}$ M$_\\\\odot$]')\n plt.ylabel('Projected N$_{\\\\rm{gal}}$')\n plt.ylim([0.1, 3000.0])\n plt.xlim([100000000000000.0, 5000000000000000.0])\n plt.tight_layout()\n\n\ndef plot_ngal_fits():\n 
get_ngal_fit('params/cfn/simet/mstar1/mean/a3_rd.param', None, 'c')\n get_ngal_fit('params/cfn/simet/mstar0.5/mean/a3_rd.param', None, 'g')\n get_ngal_fit('params/cfn/simet/mstar0/mean/a3_rd.param', None, 'b')\n get_ngal_fit('params/cfn/simet/mstar-1/mean/a3_rd.param', None, 'r')\n get_ngal_fit('params/cfn/spider/mstar1/mean/spider_rd.param', None, 'c',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0.5/mean/spider_rd.param', None,\n 'g', plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0/mean/spider_rd.param', None, 'b',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar-1/mean/spider_rd.param', None,\n 'r', plot_fit=False, spider=True)\n format_plot()\n\n\ndef plot_ngal_fits2(pattern, mstars):\n color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red',\n 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive',\n 'tab:cyan']\n for mstar, color in zip(mstars, color_cycle):\n get_ngal_fit(pattern.replace('${mstarval}', mstar), None, color)\n format_plot()\n\n\nif __name__ == '__main__':\n if len(sys.argv) > 2:\n plot_name = sys.argv[1]\n else:\n plot_name = 'OR_McClintock2019'\n mstars = ['-1', '-0.5', '0', '0.5', '1']\n if plot_name == 'OR_Simet2017':\n pattern = (\n 'params/rmba/auto/make_all_OR.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n elif plot_name == 'OR_McClintock2019':\n pattern = (\n 'params/rmba/auto/make_all_OR.McClintock.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n dtk.save_figs('figs/' + __file__ + '/' + plot_name + '/', extension='.pdf')\n plt.show()\n",
"step-3": "<mask token>\nif os.environ.get('DISPLAY') is None:\n matplotlib.use('Agg')\n<mask token>\nrc('text', usetex=True)\nrc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})\nrc('font', size=18)\n\n\ndef load_clusters(file_name):\n if file_name not in load_clusters._cache:\n cluster_data = ClusterData()\n cluster_data.load_file(file_name)\n else:\n cluster_data = load_clusters._cache[file_name]\n return cluster_data\n\n\nload_clusters._cache = {}\n\n\ndef get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=\n False, manual_calc=False):\n param = dtk.Param(param_fname)\n cluster_loc = param.get_string('cluster_loc')\n if cluster_num is None:\n cluster_num = param.get_int('cluster_load_num')\n zmrh5_loc = param.get_string('zmrh5_loc')\n zmr_sdss = ZMR(zmrh5_loc)\n zmr_fit = ZMR('output/' + param_fname + '/zmr_lkhd_cores.param')\n m_bins = zmr_fit.m_bins\n r_bins = zmr_fit.r_bins\n zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal()\n zmr_core_ngal = zmr_core_ngal[0]\n zmr_core_ngal_err = zmr_core_ngal_err[0]\n zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()\n zmr_sdss_ngal = zmr_sdss_ngal[0]\n zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]\n if manual_calc:\n model_fit_fname = ('figs/' + param_fname +\n '/calc_likelihood_bounds.py/grid_fit_param.txt')\n model_fit = load_fit_limits(model_fit_fname)\n m_infall = 10 ** model_fit['mi']\n if 'rd' in model_fit:\n r_disrupt = model_fit['rd'] / 1000.0\n else:\n r_disrupt = np.inf\n print(cluster_loc)\n cluster_data = load_clusters(cluster_loc)\n if cluster_num == -1:\n cluster_num = cluster_data.num\n cluster_ngal = np.zeros(cluster_num)\n cluster_m_i = np.zeros(cluster_num)\n for i in range(0, cluster_num):\n mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)\n cluster_m_i[i] = mass_index\n cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]\n ngal_mean = np.zeros(len(m_bins) - 1)\n ngal_err = np.zeros(len(m_bins) - 1)\n ngal_std = np.zeros(len(m_bins) - 1)\n 
for i in range(0, len(m_bins) - 1):\n slct = cluster_m_i == i\n ngal_mean[i] = np.mean(cluster_ngal[slct])\n ngal_std[i] = np.std(cluster_ngal[slct])\n ngal_err[i] = ngal_std[i] / np.sqrt(np.sum(slct))\n plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label=\n 'Ngal recalc')\n if plot_fit:\n plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)\n plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal -\n zmr_core_ngal_err, zmr_core_ngal + zmr_core_ngal_err, color=\n color, alpha=0.3)\n offset_amount = 1.025\n if spider:\n markerfacecolor = 'None'\n markeredgecolor = color\n xaxis_offset = offset_amount\n lw = 1\n else:\n markerfacecolor = color\n markeredgecolor = 'None'\n xaxis_offset = 1.0 / offset_amount\n lw = 2\n if 'mstar-1' in param_fname and 'spider' in param_fname:\n print('SPIDERSS!: ', zmr_sdss_ngal)\n zmr_sdss_ngal[zmr_sdss_ngal < 0.1] = np.nan\n plt.errorbar(dtk.bins_avg(m_bins) * xaxis_offset, zmr_sdss_ngal, yerr=\n zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,\n markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)\n plt.yscale('log')\n plt.xscale('log')\n\n\ndef format_plot():\n p4 = plt.plot([], [], 'tab:purple', lw=5, label='{:1.2f}~L$_*$'.format(0.4)\n )\n p3 = plt.plot([], [], 'tab:red', lw=5, label='{:1.2f}~L$_*$'.format(0.63))\n p2 = plt.plot([], [], 'tab:green', lw=5, label='{:1.2f}~L$_*$'.format(1.0))\n p12 = plt.plot([], [], 'tab:orange', lw=5, label='{:1.2f}~L$_*$'.format\n (1.58))\n p1 = plt.plot([], [], 'tab:blue', lw=5, label='{:1.2f}~L$_*$'.format(2.5))\n plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', label=\n 'redMaPPer', capsize=0)\n plt.plot([], [], color='k', label='Core Model')\n plt.legend(ncol=2, loc='best', framealpha=0.0)\n plt.xlabel('M$_{200c}$ [h$^{-1}$ M$_\\\\odot$]')\n plt.ylabel('Projected N$_{\\\\rm{gal}}$')\n plt.ylim([0.1, 3000.0])\n plt.xlim([100000000000000.0, 5000000000000000.0])\n plt.tight_layout()\n\n\ndef plot_ngal_fits():\n 
get_ngal_fit('params/cfn/simet/mstar1/mean/a3_rd.param', None, 'c')\n get_ngal_fit('params/cfn/simet/mstar0.5/mean/a3_rd.param', None, 'g')\n get_ngal_fit('params/cfn/simet/mstar0/mean/a3_rd.param', None, 'b')\n get_ngal_fit('params/cfn/simet/mstar-1/mean/a3_rd.param', None, 'r')\n get_ngal_fit('params/cfn/spider/mstar1/mean/spider_rd.param', None, 'c',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0.5/mean/spider_rd.param', None,\n 'g', plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0/mean/spider_rd.param', None, 'b',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar-1/mean/spider_rd.param', None,\n 'r', plot_fit=False, spider=True)\n format_plot()\n\n\ndef plot_ngal_fits2(pattern, mstars):\n color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red',\n 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive',\n 'tab:cyan']\n for mstar, color in zip(mstars, color_cycle):\n get_ngal_fit(pattern.replace('${mstarval}', mstar), None, color)\n format_plot()\n\n\nif __name__ == '__main__':\n if len(sys.argv) > 2:\n plot_name = sys.argv[1]\n else:\n plot_name = 'OR_McClintock2019'\n mstars = ['-1', '-0.5', '0', '0.5', '1']\n if plot_name == 'OR_Simet2017':\n pattern = (\n 'params/rmba/auto/make_all_OR.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n elif plot_name == 'OR_McClintock2019':\n pattern = (\n 'params/rmba/auto/make_all_OR.McClintock.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n dtk.save_figs('figs/' + __file__ + '/' + plot_name + '/', extension='.pdf')\n plt.show()\n",
"step-4": "from __future__ import print_function, division\nimport numpy as np\nimport matplotlib\nimport os\nif os.environ.get('DISPLAY') is None:\n matplotlib.use('Agg')\nimport matplotlib.pyplot as plt\nimport matplotlib.colors as clr\nimport dtk\nimport sys\nimport time\nimport numpy.random\nfrom matplotlib.colors import LogNorm\nfrom scipy.optimize import minimize\nfrom calc_ngal import *\nfrom generate_parameter_dist import *\nfrom zmr import ZMR\nfrom matplotlib import rc\nrc('text', usetex=True)\nrc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})\nrc('font', size=18)\n\n\ndef load_clusters(file_name):\n if file_name not in load_clusters._cache:\n cluster_data = ClusterData()\n cluster_data.load_file(file_name)\n else:\n cluster_data = load_clusters._cache[file_name]\n return cluster_data\n\n\nload_clusters._cache = {}\n\n\ndef get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=\n False, manual_calc=False):\n param = dtk.Param(param_fname)\n cluster_loc = param.get_string('cluster_loc')\n if cluster_num is None:\n cluster_num = param.get_int('cluster_load_num')\n zmrh5_loc = param.get_string('zmrh5_loc')\n zmr_sdss = ZMR(zmrh5_loc)\n zmr_fit = ZMR('output/' + param_fname + '/zmr_lkhd_cores.param')\n m_bins = zmr_fit.m_bins\n r_bins = zmr_fit.r_bins\n zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal()\n zmr_core_ngal = zmr_core_ngal[0]\n zmr_core_ngal_err = zmr_core_ngal_err[0]\n zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()\n zmr_sdss_ngal = zmr_sdss_ngal[0]\n zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]\n if manual_calc:\n model_fit_fname = ('figs/' + param_fname +\n '/calc_likelihood_bounds.py/grid_fit_param.txt')\n model_fit = load_fit_limits(model_fit_fname)\n m_infall = 10 ** model_fit['mi']\n if 'rd' in model_fit:\n r_disrupt = model_fit['rd'] / 1000.0\n else:\n r_disrupt = np.inf\n print(cluster_loc)\n cluster_data = load_clusters(cluster_loc)\n if cluster_num == -1:\n cluster_num = cluster_data.num\n 
cluster_ngal = np.zeros(cluster_num)\n cluster_m_i = np.zeros(cluster_num)\n for i in range(0, cluster_num):\n mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)\n cluster_m_i[i] = mass_index\n cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]\n ngal_mean = np.zeros(len(m_bins) - 1)\n ngal_err = np.zeros(len(m_bins) - 1)\n ngal_std = np.zeros(len(m_bins) - 1)\n for i in range(0, len(m_bins) - 1):\n slct = cluster_m_i == i\n ngal_mean[i] = np.mean(cluster_ngal[slct])\n ngal_std[i] = np.std(cluster_ngal[slct])\n ngal_err[i] = ngal_std[i] / np.sqrt(np.sum(slct))\n plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label=\n 'Ngal recalc')\n if plot_fit:\n plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)\n plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal -\n zmr_core_ngal_err, zmr_core_ngal + zmr_core_ngal_err, color=\n color, alpha=0.3)\n offset_amount = 1.025\n if spider:\n markerfacecolor = 'None'\n markeredgecolor = color\n xaxis_offset = offset_amount\n lw = 1\n else:\n markerfacecolor = color\n markeredgecolor = 'None'\n xaxis_offset = 1.0 / offset_amount\n lw = 2\n if 'mstar-1' in param_fname and 'spider' in param_fname:\n print('SPIDERSS!: ', zmr_sdss_ngal)\n zmr_sdss_ngal[zmr_sdss_ngal < 0.1] = np.nan\n plt.errorbar(dtk.bins_avg(m_bins) * xaxis_offset, zmr_sdss_ngal, yerr=\n zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,\n markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)\n plt.yscale('log')\n plt.xscale('log')\n\n\ndef format_plot():\n p4 = plt.plot([], [], 'tab:purple', lw=5, label='{:1.2f}~L$_*$'.format(0.4)\n )\n p3 = plt.plot([], [], 'tab:red', lw=5, label='{:1.2f}~L$_*$'.format(0.63))\n p2 = plt.plot([], [], 'tab:green', lw=5, label='{:1.2f}~L$_*$'.format(1.0))\n p12 = plt.plot([], [], 'tab:orange', lw=5, label='{:1.2f}~L$_*$'.format\n (1.58))\n p1 = plt.plot([], [], 'tab:blue', lw=5, label='{:1.2f}~L$_*$'.format(2.5))\n plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', 
label=\n 'redMaPPer', capsize=0)\n plt.plot([], [], color='k', label='Core Model')\n plt.legend(ncol=2, loc='best', framealpha=0.0)\n plt.xlabel('M$_{200c}$ [h$^{-1}$ M$_\\\\odot$]')\n plt.ylabel('Projected N$_{\\\\rm{gal}}$')\n plt.ylim([0.1, 3000.0])\n plt.xlim([100000000000000.0, 5000000000000000.0])\n plt.tight_layout()\n\n\ndef plot_ngal_fits():\n get_ngal_fit('params/cfn/simet/mstar1/mean/a3_rd.param', None, 'c')\n get_ngal_fit('params/cfn/simet/mstar0.5/mean/a3_rd.param', None, 'g')\n get_ngal_fit('params/cfn/simet/mstar0/mean/a3_rd.param', None, 'b')\n get_ngal_fit('params/cfn/simet/mstar-1/mean/a3_rd.param', None, 'r')\n get_ngal_fit('params/cfn/spider/mstar1/mean/spider_rd.param', None, 'c',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0.5/mean/spider_rd.param', None,\n 'g', plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar0/mean/spider_rd.param', None, 'b',\n plot_fit=False, spider=True)\n get_ngal_fit('params/cfn/spider/mstar-1/mean/spider_rd.param', None,\n 'r', plot_fit=False, spider=True)\n format_plot()\n\n\ndef plot_ngal_fits2(pattern, mstars):\n color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red',\n 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive',\n 'tab:cyan']\n for mstar, color in zip(mstars, color_cycle):\n get_ngal_fit(pattern.replace('${mstarval}', mstar), None, color)\n format_plot()\n\n\nif __name__ == '__main__':\n if len(sys.argv) > 2:\n plot_name = sys.argv[1]\n else:\n plot_name = 'OR_McClintock2019'\n mstars = ['-1', '-0.5', '0', '0.5', '1']\n if plot_name == 'OR_Simet2017':\n pattern = (\n 'params/rmba/auto/make_all_OR.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n elif plot_name == 'OR_McClintock2019':\n pattern = (\n 'params/rmba/auto/make_all_OR.McClintock.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n )\n plot_ngal_fits2(pattern, mstars)\n dtk.save_figs('figs/' + __file__ 
+ '/' + plot_name + '/', extension='.pdf')\n plt.show()\n",
"step-5": "#!/usr/bin/env python2.7\n\nfrom __future__ import print_function, division \nimport numpy as np\nimport matplotlib\nimport os\n#checks if there is a display to use.\nif os.environ.get('DISPLAY') is None:\n matplotlib.use('Agg')\n\nimport matplotlib.pyplot as plt\nimport matplotlib.colors as clr\nimport dtk\nimport sys\nimport time\nimport numpy.random\nfrom matplotlib.colors import LogNorm\nfrom scipy.optimize import minimize\n\nfrom calc_ngal import *\nfrom generate_parameter_dist import *\nfrom zmr import ZMR\nfrom matplotlib import rc\nrc('text', usetex=True)\nrc('font', **{'family':'serif', 'serif':['Computer Modern Roman'], })\nrc('font', size=18)\n\ndef load_clusters(file_name):\n if file_name not in load_clusters._cache:\n cluster_data = ClusterData()\n cluster_data.load_file(file_name)\n else:\n cluster_data = load_clusters._cache[file_name]\n return cluster_data\n\nload_clusters._cache = {}\n\ndef get_ngal_fit(param_fname, cluster_num, color, plot_fit=True, spider=False, manual_calc=False):\n param = dtk.Param(param_fname)\n cluster_loc = param.get_string('cluster_loc')\n if cluster_num is None:\n cluster_num = param.get_int('cluster_load_num')\n zmrh5_loc = param.get_string('zmrh5_loc')\n zmr_sdss = ZMR(zmrh5_loc)\n zmr_fit = ZMR(\"output/\"+param_fname+\"/zmr_lkhd_cores.param\")\n m_bins = zmr_fit.m_bins\n r_bins = zmr_fit.r_bins\n zmr_core_ngal, zmr_core_ngal_err = zmr_fit.get_ngal() # only one z-bin, so we don't select it out\n zmr_core_ngal = zmr_core_ngal[0]\n zmr_core_ngal_err = zmr_core_ngal_err[0]\n zmr_sdss_ngal, zmr_sdss_ngal_err = zmr_sdss.get_ngal()\n zmr_sdss_ngal = zmr_sdss_ngal[0]\n zmr_sdss_ngal_err = zmr_sdss_ngal_err[0]\n\n if manual_calc:\n model_fit_fname = \"figs/\"+param_fname+\"/calc_likelihood_bounds.py/grid_fit_param.txt\"\n model_fit = load_fit_limits(model_fit_fname)\n m_infall = 10**model_fit['mi']\n if 'rd' in model_fit:\n # print(model_fit['rd'])\n r_disrupt = model_fit['rd']/1000.0 #convert to mpc/h from kpc/h\n 
else:\n r_disrupt = np.inf\n # print(\"\\ncalculating ngal for \", param_fname)\n # print(\"\\tmodel_fit_fname:\", model_fit_fname)\n # print(\"\\tmodel params: {:.2e} {:.3f}\".format(m_infall, r_disrupt))\n print(cluster_loc)\n cluster_data = load_clusters(cluster_loc)\n if cluster_num == -1:\n cluster_num = cluster_data.num\n cluster_ngal = np.zeros(cluster_num)\n cluster_m_i = np.zeros(cluster_num)\n for i in range(0, cluster_num):\n mass_index = cluster_data.get_cluster_mass_bin(i, m_bins)\n cluster_m_i[i] = mass_index\n cluster_ngal[i] = cluster_data.get_ngal(i, m_infall, r_disrupt)[1]\n ngal_mean = np.zeros(len(m_bins)-1)\n ngal_err = np.zeros(len(m_bins)-1)\n ngal_std = np.zeros(len(m_bins)-1)\n for i in range(0, len(m_bins)-1):\n slct = cluster_m_i == i\n ngal_mean[i] = np.mean(cluster_ngal[slct])\n ngal_std[i] = np.std(cluster_ngal[slct])\n ngal_err[i] = ngal_std[i]/np.sqrt(np.sum(slct))\n # print(\"{:.2e}->{:.2e}: {}\".format(m_bins[i], m_bins[i+1], np.sum(slct)))\n plt.plot(dtk.bins_avg(m_bins), ngal_mean, '-x', color=color, label='Ngal recalc')\n if plot_fit:\n plt.plot(dtk.bins_avg(m_bins), zmr_core_ngal, '-', color=color)\n plt.fill_between(dtk.bins_avg(m_bins), zmr_core_ngal-zmr_core_ngal_err, zmr_core_ngal+zmr_core_ngal_err, color=color, alpha=0.3)\n offset_amount = 1.025\n if spider:\n markerfacecolor='None'\n markeredgecolor=color\n xaxis_offset=offset_amount\n lw = 1\n else:\n markerfacecolor=color\n markeredgecolor='None'\n xaxis_offset=1./offset_amount\n lw = 2\n \n # remove problematic 2.5 L* low mass cluster in the spider sample\n if \"mstar-1\" in param_fname and \"spider\" in param_fname:\n print(\"SPIDERSS!: \", zmr_sdss_ngal)\n zmr_sdss_ngal[zmr_sdss_ngal < 0.1 ] = np.nan\n plt.errorbar(dtk.bins_avg(m_bins)*xaxis_offset, zmr_sdss_ngal,\n yerr=zmr_sdss_ngal_err, fmt='o', capsize=0, lw=lw, color=color,\n markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor)\n # plt.fill_between(dtk.bins_avg(m_bins), ngal_mean-ngal_err, 
ngal_mean+ngal_err, color=color, alpha=0.3)\n plt.yscale('log')\n plt.xscale('log')\n # plt.legend(loc='best')\ndef format_plot():\n\n p4 = plt.plot([],[], 'tab:purple', lw=5, label=r'{:1.2f}~L$_*$'.format(0.4))\n p3 = plt.plot([],[], 'tab:red', lw=5, label=r'{:1.2f}~L$_*$'.format(0.63))\n p2 = plt.plot([],[], 'tab:green', lw=5, label=r'{:1.2f}~L$_*$'.format(1.0))\n p12 = plt.plot([],[], 'tab:orange',lw=5, label=r'{:1.2f}~L$_*$'.format(1.58))\n p1 = plt.plot([],[], 'tab:blue',lw=5, label=r'{:1.2f}~L$_*$'.format(2.5))\n plt.errorbar([], [], yerr=[], fmt='o', lw=2, color='k', label=\"redMaPPer\", capsize=0)\n plt.plot([], [], color='k', label=\"Core Model\")\n # plt.errorbar([], [], yerr=[], fmt='o', lw=1, color='k', markerfacecolor='none', label='SPIDERS clusters', capsize=0)\n plt.legend(ncol=2, loc='best', framealpha=0.0)\n\n plt.xlabel(r'M$_{200c}$ [h$^{-1}$ M$_\\odot$]')\n plt.ylabel(r'Projected N$_{\\rm{gal}}$')\n plt.ylim([1e-1, 3e3])\n plt.xlim([1e14, 5e15])\n plt.tight_layout()\n\ndef plot_ngal_fits():\n get_ngal_fit(\"params/cfn/simet/mstar1/mean/a3_rd.param\", None, 'c')\n get_ngal_fit(\"params/cfn/simet/mstar0.5/mean/a3_rd.param\", None, 'g')\n get_ngal_fit(\"params/cfn/simet/mstar0/mean/a3_rd.param\", None, 'b')\n get_ngal_fit(\"params/cfn/simet/mstar-1/mean/a3_rd.param\", None, 'r')\n\n #just spider points\n get_ngal_fit(\"params/cfn/spider/mstar1/mean/spider_rd.param\", None, 'c', plot_fit=False, spider=True)\n get_ngal_fit(\"params/cfn/spider/mstar0.5/mean/spider_rd.param\", None, 'g', plot_fit=False, spider=True)\n get_ngal_fit(\"params/cfn/spider/mstar0/mean/spider_rd.param\", None, 'b', plot_fit=False, spider=True)\n get_ngal_fit(\"params/cfn/spider/mstar-1/mean/spider_rd.param\", None, 'r', plot_fit=False, spider=True)\n\n # get_ngal_fit(\"params/cfn/spider/mstar0/mean/spider_rd.param\", None, 'm', plot_fit=False, spider=True)\n # get_ngal_fit(\"params/cfn/spider/mstar0/mean/bcg_rd.param\", None, 'c', plot_fit=False, spider=True)\n 
format_plot()\ndef plot_ngal_fits2(pattern, mstars):\n color_cycle = ['tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan']\n for mstar, color in zip(mstars, color_cycle):\n get_ngal_fit(pattern.replace(\"${mstarval}\", mstar), None, color)\n format_plot()\n\nif __name__ == \"__main__\":\n if len(sys.argv) > 2:\n plot_name = sys.argv[1]\n else:\n plot_name = \"OR_McClintock2019\"\n mstars = ['-1', '-0.5', '0', '0.5', '1']\n if plot_name == \"OR_Simet2017\":\n pattern = 'params/rmba/auto/make_all_OR.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n plot_ngal_fits2(pattern, mstars)\n elif plot_name == \"OR_McClintock2019\":\n pattern = 'params/rmba/auto/make_all_OR.McClintock.high_richness.low_rez.min20.sh/crit/mstar${mstarval}/OR_rd_zoom.param'\n plot_ngal_fits2(pattern, mstars)\n # plot_ngal_fits()\n dtk.save_figs(\"figs/\"+__file__+\"/\"+plot_name+\"/\", extension='.pdf')\n plt.show()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
@contextlib.contextmanager
def dummy_context(*args, **kwargs):
yield
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@contextlib.contextmanager
def dummy_context(*args, **kwargs):
yield
if six.PY2:
from collections import Mapping
else:
from collections.abc import Mapping
if DASK_VERSION < packaging.version.parse('1.1.0'):
blockwise = da.atop
else:
blockwise = da.blockwise
<|reserved_special_token_1|>
<|reserved_special_token_0|>
SK_VERSION = packaging.version.parse(sklearn.__version__)
DASK_VERSION = packaging.version.parse(dask.__version__)
PANDAS_VERSION = packaging.version.parse(pandas.__version__)
@contextlib.contextmanager
def dummy_context(*args, **kwargs):
yield
if six.PY2:
from collections import Mapping
else:
from collections.abc import Mapping
if DASK_VERSION < packaging.version.parse('1.1.0'):
blockwise = da.atop
else:
blockwise = da.blockwise
<|reserved_special_token_1|>
import contextlib
import dask
import dask.array as da
import packaging.version
import pandas
import six
import sklearn
SK_VERSION = packaging.version.parse(sklearn.__version__)
DASK_VERSION = packaging.version.parse(dask.__version__)
PANDAS_VERSION = packaging.version.parse(pandas.__version__)
@contextlib.contextmanager
def dummy_context(*args, **kwargs):
yield
if six.PY2:
from collections import Mapping
else:
from collections.abc import Mapping
if DASK_VERSION < packaging.version.parse('1.1.0'):
blockwise = da.atop
else:
blockwise = da.blockwise
<|reserved_special_token_1|>
import contextlib
import dask
import dask.array as da
import packaging.version
import pandas
import six
import sklearn
SK_VERSION = packaging.version.parse(sklearn.__version__)
DASK_VERSION = packaging.version.parse(dask.__version__)
PANDAS_VERSION = packaging.version.parse(pandas.__version__)
@contextlib.contextmanager
def dummy_context(*args, **kwargs):
yield
if six.PY2:
from collections import Mapping
else:
from collections.abc import Mapping # noqa
if DASK_VERSION < packaging.version.parse("1.1.0"):
blockwise = da.atop
else:
blockwise = da.blockwise
|
flexible
|
{
"blob_id": "1bdb19373960e4f63d80d6ab73ec3c0939e40b7f",
"index": 364,
"step-1": "<mask token>\n\n\n@contextlib.contextmanager\ndef dummy_context(*args, **kwargs):\n yield\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@contextlib.contextmanager\ndef dummy_context(*args, **kwargs):\n yield\n\n\nif six.PY2:\n from collections import Mapping\nelse:\n from collections.abc import Mapping\nif DASK_VERSION < packaging.version.parse('1.1.0'):\n blockwise = da.atop\nelse:\n blockwise = da.blockwise\n",
"step-3": "<mask token>\nSK_VERSION = packaging.version.parse(sklearn.__version__)\nDASK_VERSION = packaging.version.parse(dask.__version__)\nPANDAS_VERSION = packaging.version.parse(pandas.__version__)\n\n\n@contextlib.contextmanager\ndef dummy_context(*args, **kwargs):\n yield\n\n\nif six.PY2:\n from collections import Mapping\nelse:\n from collections.abc import Mapping\nif DASK_VERSION < packaging.version.parse('1.1.0'):\n blockwise = da.atop\nelse:\n blockwise = da.blockwise\n",
"step-4": "import contextlib\nimport dask\nimport dask.array as da\nimport packaging.version\nimport pandas\nimport six\nimport sklearn\nSK_VERSION = packaging.version.parse(sklearn.__version__)\nDASK_VERSION = packaging.version.parse(dask.__version__)\nPANDAS_VERSION = packaging.version.parse(pandas.__version__)\n\n\n@contextlib.contextmanager\ndef dummy_context(*args, **kwargs):\n yield\n\n\nif six.PY2:\n from collections import Mapping\nelse:\n from collections.abc import Mapping\nif DASK_VERSION < packaging.version.parse('1.1.0'):\n blockwise = da.atop\nelse:\n blockwise = da.blockwise\n",
"step-5": "import contextlib\n\nimport dask\nimport dask.array as da\nimport packaging.version\nimport pandas\nimport six\nimport sklearn\n\nSK_VERSION = packaging.version.parse(sklearn.__version__)\nDASK_VERSION = packaging.version.parse(dask.__version__)\nPANDAS_VERSION = packaging.version.parse(pandas.__version__)\n\n\n@contextlib.contextmanager\ndef dummy_context(*args, **kwargs):\n yield\n\n\nif six.PY2:\n from collections import Mapping\nelse:\n from collections.abc import Mapping # noqa\n\nif DASK_VERSION < packaging.version.parse(\"1.1.0\"):\n blockwise = da.atop\nelse:\n blockwise = da.blockwise\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
def solution(n):
answer = []
for i in range(1,n+1):
if n % i == 0:
answer.append(i)
return sum(answer)
def solution2(n):
return sum([i for i in range(1,n+1) if n % i == 0])
print(solution(12))
print(solution(5))
print(solution2(12))
print(solution2(5))
# n return
# 12 28
# 5 6
|
normal
|
{
"blob_id": "7cfbc36cc6cd6ff7c30f02d979667448f2003546",
"index": 9267,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n",
"step-3": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n",
"step-4": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n",
"step-5": "def solution(n):\n answer = []\n for i in range(1,n+1):\n if n % i == 0:\n answer.append(i)\n\n return sum(answer)\n\ndef solution2(n):\n return sum([i for i in range(1,n+1) if n % i == 0])\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n# n\treturn\n# 12\t28\n# 5\t6",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
def postorder_traversal(self, root: TreeNode):
if not root:
return []
else:
return self.postorder_traversal(root.left
) + self.postorder_traversal(root.right) + [root.val]
<|reserved_special_token_1|>
from thor.tree import TreeNode
class Solution(object):
def postorder_traversal(self, root: TreeNode):
if not root:
return []
else:
return self.postorder_traversal(root.left
) + self.postorder_traversal(root.right) + [root.val]
<|reserved_special_token_1|>
#! /usr/bin/env python
from thor.tree import TreeNode
class Solution(object):
def postorder_traversal(self, root: TreeNode):
if not root:
return []
else:
return self.postorder_traversal(root.left) + self.postorder_traversal(root.right) + [root.val]
|
flexible
|
{
"blob_id": "1d314a04625cfadf574f122b95577c1e677a8b35",
"index": 3247,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution(object):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution(object):\n\n def postorder_traversal(self, root: TreeNode):\n if not root:\n return []\n else:\n return self.postorder_traversal(root.left\n ) + self.postorder_traversal(root.right) + [root.val]\n",
"step-4": "from thor.tree import TreeNode\n\n\nclass Solution(object):\n\n def postorder_traversal(self, root: TreeNode):\n if not root:\n return []\n else:\n return self.postorder_traversal(root.left\n ) + self.postorder_traversal(root.right) + [root.val]\n",
"step-5": "#! /usr/bin/env python\nfrom thor.tree import TreeNode\n\n\nclass Solution(object):\n\tdef postorder_traversal(self, root: TreeNode):\n\t\tif not root:\n\t\t\treturn []\n\t\telse:\n\t\t\treturn self.postorder_traversal(root.left) + self.postorder_traversal(root.right) + [root.val]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
INPUT_MINBIAS = '/build/RAWReference/MinBias_RAW_320_STARTUP.root'
INPUT_TTBAR = '/build/RAWReference/TTbar_RAW_320_STARTUP.root'
puSTARTUP_TTBAR = '/build/RAWReference/TTbar_Tauola_PileUp_RAW_320_STARTUP.root'
relval = {
'step1': { 'step': 'GEN-HLT',
'timesize': (100, ['MinBias','TTbar']),
'igprof': (50, ['TTbar']),
'memcheck': (5, ['TTbar']),
'pileup': ['TTbar'],
#??? 'pileupInput': '',
'cmsdriver': '--eventcontent RAWSIM --conditions auto:mc' },
'step2': { 'step': 'RAW2DIGI-RECO',
'timesize': (8000, ['MinBias','TTbar']),
'igprof': (200, ['TTbar']),
'memcheck': (5, ['TTbar']),
'pileup': ['TTbar'],
'pileupInput': puSTARTUP_TTBAR,
'fileInput': [INPUT_MINBIAS,INPUT_TTBAR],
'cmsdriver': '--eventcontent RECOSIM --conditions auto:startup' },
'GENSIMDIGI': { 'step': 'GEN-SIM,DIGI',
'timesize': (100, ['MinBias','SingleElectronE1000','SingleMuMinusPt10','SinglePiMinusE1000','TTbar']),
'igprof': (5, ['TTbar']),
'memcheck': (5, ['TTbar']),
'pileup': ['TTbar'],
#??? 'pileupInput': '',
'fileInput': '',
'cmsdriver': '--eventcontent FEVTDEBUG --conditions auto:mc' },
'HLT': { 'step': 'HLT',
'timesize': (8000, ['MinBias','TTbar']),
'igprof': (500, ['TTbar']),
'memcheck': (5, ['TTbar']),
'pileup': ['TTbar'],
'pileupInput': puSTARTUP_TTBAR,
'fileInput': [INPUT_MINBIAS,INPUT_TTBAR],
'cmsdriver': '--eventcontent RAWSIM --conditions auto:startup --processName HLTFROMRAW' },
'FASTSIM': { 'step': 'GEN-FASTSIM',
'timesize': (8000, ['MinBias','TTbar']),
'igprof': (500, ['TTbar']),
'memcheck': (5, ['TTbar']),
'pileup': ['TTbar'],
'cmsdriver': '--eventcontent RECOSIM --conditions auto:mc' }
}
|
normal
|
{
"blob_id": "78c9f92349ba834bc64dc84f884638c4316a9ea4",
"index": 352,
"step-1": "<mask token>\n",
"step-2": "INPUT_MINBIAS = '/build/RAWReference/MinBias_RAW_320_STARTUP.root'\nINPUT_TTBAR = '/build/RAWReference/TTbar_RAW_320_STARTUP.root'\npuSTARTUP_TTBAR = (\n '/build/RAWReference/TTbar_Tauola_PileUp_RAW_320_STARTUP.root')\nrelval = {'step1': {'step': 'GEN-HLT', 'timesize': (100, ['MinBias',\n 'TTbar']), 'igprof': (50, ['TTbar']), 'memcheck': (5, ['TTbar']),\n 'pileup': ['TTbar'], 'cmsdriver':\n '--eventcontent RAWSIM --conditions auto:mc'}, 'step2': {'step':\n 'RAW2DIGI-RECO', 'timesize': (8000, ['MinBias', 'TTbar']), 'igprof': (\n 200, ['TTbar']), 'memcheck': (5, ['TTbar']), 'pileup': ['TTbar'],\n 'pileupInput': puSTARTUP_TTBAR, 'fileInput': [INPUT_MINBIAS,\n INPUT_TTBAR], 'cmsdriver':\n '--eventcontent RECOSIM --conditions auto:startup'}, 'GENSIMDIGI': {\n 'step': 'GEN-SIM,DIGI', 'timesize': (100, ['MinBias',\n 'SingleElectronE1000', 'SingleMuMinusPt10', 'SinglePiMinusE1000',\n 'TTbar']), 'igprof': (5, ['TTbar']), 'memcheck': (5, ['TTbar']),\n 'pileup': ['TTbar'], 'fileInput': '', 'cmsdriver':\n '--eventcontent FEVTDEBUG --conditions auto:mc'}, 'HLT': {'step': 'HLT',\n 'timesize': (8000, ['MinBias', 'TTbar']), 'igprof': (500, ['TTbar']),\n 'memcheck': (5, ['TTbar']), 'pileup': ['TTbar'], 'pileupInput':\n puSTARTUP_TTBAR, 'fileInput': [INPUT_MINBIAS, INPUT_TTBAR], 'cmsdriver':\n '--eventcontent RAWSIM --conditions auto:startup --processName HLTFROMRAW'\n }, 'FASTSIM': {'step': 'GEN-FASTSIM', 'timesize': (8000, ['MinBias',\n 'TTbar']), 'igprof': (500, ['TTbar']), 'memcheck': (5, ['TTbar']),\n 'pileup': ['TTbar'], 'cmsdriver':\n '--eventcontent RECOSIM --conditions auto:mc'}}\n",
"step-3": "INPUT_MINBIAS = '/build/RAWReference/MinBias_RAW_320_STARTUP.root'\nINPUT_TTBAR = '/build/RAWReference/TTbar_RAW_320_STARTUP.root'\n\npuSTARTUP_TTBAR = '/build/RAWReference/TTbar_Tauola_PileUp_RAW_320_STARTUP.root'\n\nrelval = {\n 'step1': {\t'step': 'GEN-HLT',\n\t\t\t'timesize': (100, ['MinBias','TTbar']),\n\t\t\t'igprof': (50, ['TTbar']),\n\t\t\t'memcheck': (5, ['TTbar']),\n\t\t\t'pileup': ['TTbar'],\n#???\t\t\t'pileupInput': '',\n\t\t\t'cmsdriver': '--eventcontent RAWSIM --conditions auto:mc' },\n\n\t'step2': {\t'step': 'RAW2DIGI-RECO',\n\t\t\t'timesize': (8000, ['MinBias','TTbar']),\n\t \t\t'igprof': (200, ['TTbar']),\n\t\t\t'memcheck': (5, ['TTbar']),\n\t\t\t'pileup': ['TTbar'],\n\t\t\t'pileupInput': puSTARTUP_TTBAR,\n\t\t\t'fileInput': [INPUT_MINBIAS,INPUT_TTBAR],\n\t\t\t'cmsdriver': '--eventcontent RECOSIM --conditions auto:startup' },\n\n\t'GENSIMDIGI': {\t'step': 'GEN-SIM,DIGI',\n\t\t\t'timesize': (100, ['MinBias','SingleElectronE1000','SingleMuMinusPt10','SinglePiMinusE1000','TTbar']),\n\t\t\t'igprof': (5, ['TTbar']),\n\t\t\t'memcheck': (5, ['TTbar']),\n\t\t\t'pileup': ['TTbar'],\n#???\t\t\t'pileupInput': '',\n\t\t\t'fileInput': '',\n\t\t\t'cmsdriver': '--eventcontent FEVTDEBUG --conditions auto:mc' },\n\n\t'HLT': { 'step': 'HLT',\n\t\t\t'timesize': (8000, ['MinBias','TTbar']),\n\t\t\t'igprof': (500, ['TTbar']),\n\t\t\t'memcheck': (5, ['TTbar']),\n\t\t\t'pileup': ['TTbar'],\n\t\t\t'pileupInput': puSTARTUP_TTBAR,\n\t\t\t'fileInput': [INPUT_MINBIAS,INPUT_TTBAR],\n\t\t\t'cmsdriver': '--eventcontent RAWSIM --conditions auto:startup --processName HLTFROMRAW' },\n\n\t'FASTSIM': {\t'step': 'GEN-FASTSIM',\n\t\t\t'timesize': (8000, ['MinBias','TTbar']),\n\t\t\t'igprof': (500, ['TTbar']),\n\t\t\t'memcheck': (5, ['TTbar']),\n\t\t\t'pileup': ['TTbar'],\n\t\t\t'cmsdriver': '--eventcontent RECOSIM --conditions auto:mc' }\n}\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
Type data Dictionary hanya sekedar menghubungkan KEY dan VALUE
KVP = KEY VALUE PAIR
"""
kamus = {}
kamus['anak'] = 'son'
kamus['istri'] = 'wife'
kamus['ayah'] = 'father'
print(kamus)
print(kamus['ayah'])
print('\nData ini dikirimkan server gojek, memberikan info driver di sekitar pemakai aplikasi')
data_server_gojek = {
'tanggal': '2020-10-27',
'driver_list': [ # diver_list merupakan array yang bertipe dictionary krna memiliki beberapa atribut
{'nama': 'Eko', 'jarak': 10},
{'nama': 'Dwi', 'jarak': 100},
{'nama': 'Tri', 'jarak': 1000}
]
}
print(data_server_gojek)
print(f"Driver di sekitar sini {data_server_gojek['driver_list']}")
print(f"Driver #1 {data_server_gojek['driver_list'][0]}")
print(f"Driver #3 {data_server_gojek['driver_list'][2]}")
print('\nCara mengambil data jarak terdekat')
print(f"jarak driver terdekat {data_server_gojek['driver_list'][0]['jarak']} meters")
|
normal
|
{
"blob_id": "67b101df690bbe9629db2cabf0060c0f2aad9722",
"index": 2389,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(kamus)\nprint(kamus['ayah'])\nprint(\n \"\"\"\nData ini dikirimkan server gojek, memberikan info driver di sekitar pemakai aplikasi\"\"\"\n )\n<mask token>\nprint(data_server_gojek)\nprint(f\"Driver di sekitar sini {data_server_gojek['driver_list']}\")\nprint(f\"Driver #1 {data_server_gojek['driver_list'][0]}\")\nprint(f\"Driver #3 {data_server_gojek['driver_list'][2]}\")\nprint(\"\"\"\nCara mengambil data jarak terdekat\"\"\")\nprint(\n f\"jarak driver terdekat {data_server_gojek['driver_list'][0]['jarak']} meters\"\n )\n",
"step-3": "<mask token>\nkamus = {}\nkamus['anak'] = 'son'\nkamus['istri'] = 'wife'\nkamus['ayah'] = 'father'\nprint(kamus)\nprint(kamus['ayah'])\nprint(\n \"\"\"\nData ini dikirimkan server gojek, memberikan info driver di sekitar pemakai aplikasi\"\"\"\n )\ndata_server_gojek = {'tanggal': '2020-10-27', 'driver_list': [{'nama':\n 'Eko', 'jarak': 10}, {'nama': 'Dwi', 'jarak': 100}, {'nama': 'Tri',\n 'jarak': 1000}]}\nprint(data_server_gojek)\nprint(f\"Driver di sekitar sini {data_server_gojek['driver_list']}\")\nprint(f\"Driver #1 {data_server_gojek['driver_list'][0]}\")\nprint(f\"Driver #3 {data_server_gojek['driver_list'][2]}\")\nprint(\"\"\"\nCara mengambil data jarak terdekat\"\"\")\nprint(\n f\"jarak driver terdekat {data_server_gojek['driver_list'][0]['jarak']} meters\"\n )\n",
"step-4": "\"\"\"\nType data Dictionary hanya sekedar menghubungkan KEY dan VALUE\nKVP = KEY VALUE PAIR\n\"\"\"\n\nkamus = {}\nkamus['anak'] = 'son'\nkamus['istri'] = 'wife'\nkamus['ayah'] = 'father'\n\nprint(kamus)\nprint(kamus['ayah'])\n\nprint('\\nData ini dikirimkan server gojek, memberikan info driver di sekitar pemakai aplikasi')\ndata_server_gojek = {\n 'tanggal': '2020-10-27',\n 'driver_list': [ # diver_list merupakan array yang bertipe dictionary krna memiliki beberapa atribut\n {'nama': 'Eko', 'jarak': 10},\n {'nama': 'Dwi', 'jarak': 100},\n {'nama': 'Tri', 'jarak': 1000}\n ]\n}\nprint(data_server_gojek)\nprint(f\"Driver di sekitar sini {data_server_gojek['driver_list']}\")\nprint(f\"Driver #1 {data_server_gojek['driver_list'][0]}\")\nprint(f\"Driver #3 {data_server_gojek['driver_list'][2]}\")\n\nprint('\\nCara mengambil data jarak terdekat')\nprint(f\"jarak driver terdekat {data_server_gojek['driver_list'][0]['jarak']} meters\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class OitYitikuscrapyDataPipeline(object):
def open_spider(self, spider):
path = 'D:\\xiti10001\\data\\{}\\'.format(time.strftime('%Y%m%d',
time.localtime()))
isExists = os.path.exists(path)
if isExists:
pass
else:
os.makedirs(path)
self.file = codecs.open(path + spider.name + '.json', 'a', encoding
='utf-8')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OitYitikuscrapyDataPipeline(object):
def open_spider(self, spider):
path = 'D:\\xiti10001\\data\\{}\\'.format(time.strftime('%Y%m%d',
time.localtime()))
isExists = os.path.exists(path)
if isExists:
pass
else:
os.makedirs(path)
self.file = codecs.open(path + spider.name + '.json', 'a', encoding
='utf-8')
def process_item(self, item, spider):
print('进程打印信息:', spider.name)
lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
self.file.write(lines)
return item
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OitYitikuscrapyDataPipeline(object):
def open_spider(self, spider):
path = 'D:\\xiti10001\\data\\{}\\'.format(time.strftime('%Y%m%d',
time.localtime()))
isExists = os.path.exists(path)
if isExists:
pass
else:
os.makedirs(path)
self.file = codecs.open(path + spider.name + '.json', 'a', encoding
='utf-8')
def process_item(self, item, spider):
print('进程打印信息:', spider.name)
lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
self.file.write(lines)
return item
def close_spider(self, spider):
self.file.close()
<|reserved_special_token_1|>
import codecs
import time
import json
import os
class OitYitikuscrapyDataPipeline(object):
def open_spider(self, spider):
path = 'D:\\xiti10001\\data\\{}\\'.format(time.strftime('%Y%m%d',
time.localtime()))
isExists = os.path.exists(path)
if isExists:
pass
else:
os.makedirs(path)
self.file = codecs.open(path + spider.name + '.json', 'a', encoding
='utf-8')
def process_item(self, item, spider):
print('进程打印信息:', spider.name)
lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
self.file.write(lines)
return item
def close_spider(self, spider):
self.file.close()
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs
import time
import json
import os
class OitYitikuscrapyDataPipeline(object):
def open_spider(self, spider):
path ='D:\\xiti10001\\data\\{}\\'.format(time.strftime("%Y%m%d",time.localtime()))
# path = 'd:\\OITData\\zujuan\\{0}\\{1}\\'.format(time.strftime("%Y%m%d", time.localtime()), spider.name)
isExists = os.path.exists(path)
if isExists:
pass
else:
os.makedirs(path)
self.file = codecs.open(path + spider.name+'.json', 'a', encoding='utf-8')
def process_item(self, item, spider):
print('进程打印信息:',spider.name)
lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
self.file.write(lines)
return item
def close_spider(self, spider):
self.file.close()
|
flexible
|
{
"blob_id": "315996a783d7b95fd87374a8fe2602a572de071e",
"index": 3495,
"step-1": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-4": "import codecs\nimport time\nimport json\nimport os\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\nimport codecs\nimport time\nimport json\nimport os\n\nclass OitYitikuscrapyDataPipeline(object):\n def open_spider(self, spider):\n path ='D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime(\"%Y%m%d\",time.localtime()))\n # path = 'd:\\\\OITData\\\\zujuan\\\\{0}\\\\{1}\\\\'.format(time.strftime(\"%Y%m%d\", time.localtime()), spider.name)\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name+'.json', 'a', encoding='utf-8')\n def process_item(self, item, spider):\n print('进程打印信息:',spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed
from booli import booliwood
from models import add_bosta, get_all_bostas, Bosta
from django import forms
import time
class BostaForm(forms.Form):
maxPrice = forms.IntegerField()
livingArea = forms.IntegerField()
room = forms.IntegerField()
class BostaIdForm(forms.Form):
bostaId = forms.IntegerField()
class SearchBosta(forms.Form):
search_query = forms.CharField()
def show(request):
if request.method == 'POST':
form = BostaForm(request.POST)
if form.is_valid():
maxPrice = form.cleaned_data['maxPrice']
livingArea = form.cleaned_data['livingArea']
room = form.cleaned_data['room']
bostas = Bosta.objects \
.filter(listPrice__lte=maxPrice) \
.filter(livingArea__gte=livingArea) \
.filter(rooms__gte=room) \
.exclude(listPrice=0) \
.order_by('soldDate')
else:
form = BostaForm()
bostas = get_all_bostas()
for bosta in bostas:
if bosta.livingArea == 0:
bosta.sek_m2 = 0
elif bosta.soldPrice == 0:
bosta.sek_m2 = bosta.listPrice / bosta.livingArea
else:
bosta.sek_m2 = bosta.soldPrice / bosta.livingArea
data = {
'bostas': bostas,
'form': form,
}
return render(request, 'main.html', data)
def update(request):
totalListing = 0
totalSold = 0
form = SearchBosta()
data = {
'totalListing': totalListing,
'totalSold': totalSold,
'countListing': 0,
'countSold': 0,
'form': form
}
if request.method == 'POST':
form = SearchBosta(request.POST)
if form.is_valid():
q = form.cleaned_data['search_query'].encode('utf8')
d1 = search("listings", q)
if d1:
data['totalListing'] = d1['total']
data['countListing'] = d1['count']
d1 = search("sold", q)
if d1:
data['totalSold'] = d1['total']
data['countSold'] = d1['count']
return render(request, 'update.html', data)
def search(type_search, q):
total = 0
while True:
result = booliwood(q, total, 50, type_search)
for listing in result[type_search]:
add_bosta(listing)
total = total + result['count']
if total >= result['totalCount']:
break
time.sleep(1)
data = {
'total': total,
'count': result['totalCount'],
}
return data
|
normal
|
{
"blob_id": "53573a21364e9dfef9ed1164185ab441dbc29601",
"index": 123,
"step-1": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\ndef search(type_search, q):\n total = 0\n while True:\n result = booliwood(q, total, 50, type_search)\n for listing in result[type_search]:\n add_bosta(listing)\n total = total + result['count']\n if total >= result['totalCount']:\n break\n time.sleep(1)\n data = {'total': total, 'count': result['totalCount']}\n return data\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed\nfrom booli import booliwood\nfrom models import add_bosta, get_all_bostas, Bosta\nfrom django import forms\nimport time\n\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\n\ndef show(request):\n if request.method == 'POST':\n form = BostaForm(request.POST)\n if form.is_valid():\n maxPrice = form.cleaned_data['maxPrice']\n livingArea = form.cleaned_data['livingArea']\n room = form.cleaned_data['room']\n bostas = Bosta.objects.filter(listPrice__lte=maxPrice).filter(\n livingArea__gte=livingArea).filter(rooms__gte=room).exclude(\n listPrice=0).order_by('soldDate')\n else:\n form = BostaForm()\n bostas = get_all_bostas()\n for bosta in bostas:\n if bosta.livingArea == 0:\n bosta.sek_m2 = 0\n elif bosta.soldPrice == 0:\n bosta.sek_m2 = bosta.listPrice / bosta.livingArea\n else:\n bosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n data = {'bostas': bostas, 'form': form}\n return render(request, 'main.html', data)\n\n\ndef update(request):\n totalListing = 0\n totalSold = 0\n form = SearchBosta()\n data = {'totalListing': totalListing, 'totalSold': totalSold,\n 'countListing': 0, 'countSold': 0, 'form': form}\n if request.method == 'POST':\n form = SearchBosta(request.POST)\n if form.is_valid():\n q = form.cleaned_data['search_query'].encode('utf8')\n d1 = search('listings', q)\n if d1:\n data['totalListing'] = d1['total']\n data['countListing'] = d1['count']\n d1 = search('sold', q)\n if d1:\n data['totalSold'] = d1['total']\n data['countSold'] = d1['count']\n return render(request, 'update.html', data)\n\n\ndef search(type_search, q):\n total = 0\n while True:\n result = booliwood(q, total, 50, type_search)\n 
for listing in result[type_search]:\n add_bosta(listing)\n total = total + result['count']\n if total >= result['totalCount']:\n break\n time.sleep(1)\n data = {'total': total, 'count': result['totalCount']}\n return data\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseNotAllowed\nfrom booli import booliwood\nfrom models import add_bosta, get_all_bostas, Bosta\nfrom django import forms\nimport time\n\nclass BostaForm(forms.Form):\n maxPrice = forms.IntegerField()\n livingArea = forms.IntegerField()\n room = forms.IntegerField()\n\nclass BostaIdForm(forms.Form):\n bostaId = forms.IntegerField()\n\nclass SearchBosta(forms.Form):\n search_query = forms.CharField()\n\ndef show(request):\n\tif request.method == 'POST':\n\t\tform = BostaForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tmaxPrice = form.cleaned_data['maxPrice']\n\t\t\tlivingArea = form.cleaned_data['livingArea']\n\t\t\troom = form.cleaned_data['room']\n\t\t\tbostas = Bosta.objects \\\n\t\t\t.filter(listPrice__lte=maxPrice) \\\n\t\t\t.filter(livingArea__gte=livingArea) \\\n\t\t\t.filter(rooms__gte=room) \\\n\t\t\t.exclude(listPrice=0) \\\n\t\t\t.order_by('soldDate') \n\telse:\n\t\tform = BostaForm()\n\t\tbostas = get_all_bostas()\n\tfor bosta in bostas:\n\t\tif bosta.livingArea == 0:\n\t\t\tbosta.sek_m2 = 0\n\t\telif bosta.soldPrice == 0:\n\t\t\tbosta.sek_m2 = bosta.listPrice / bosta.livingArea\n\t\telse:\n\t\t\tbosta.sek_m2 = bosta.soldPrice / bosta.livingArea\n\n\tdata = { \n\t'bostas': bostas,\n\t'form': form,\n\t}\n\treturn render(request, 'main.html', data)\n\ndef update(request):\n\ttotalListing = 0\n\ttotalSold = 0\n\tform = SearchBosta()\n\tdata = {\n\t'totalListing': totalListing,\n\t'totalSold': totalSold,\n\t'countListing': 0,\n\t'countSold': 0,\n\t'form': form\n\t}\n\tif request.method == 'POST':\n\t\tform = SearchBosta(request.POST)\n\t\tif form.is_valid():\n\t\t\tq = form.cleaned_data['search_query'].encode('utf8')\n\t\t\td1 = search(\"listings\", q)\n\t\t\tif d1:\n\t\t\t\tdata['totalListing'] = d1['total']\n\t\t\t\tdata['countListing'] = d1['count']\n\t\t\td1 = search(\"sold\", q)\n\t\t\tif d1:\n\t\t\t\tdata['totalSold'] = 
d1['total']\n\t\t\t\tdata['countSold'] = d1['count']\n\n\treturn render(request, 'update.html', data)\n\ndef search(type_search, q):\n\ttotal = 0\n\twhile True:\n\t\tresult = booliwood(q, total, 50, type_search)\n\t\tfor listing in result[type_search]:\n\t\t\tadd_bosta(listing)\n\t\ttotal = total + result['count']\n\t\tif total >= result['totalCount']:\n\t\t\tbreak\n\t\ttime.sleep(1)\n\tdata = {\n\t'total': total,\n\t'count': result['totalCount'],\n\t}\n\treturn data\n\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
# -*- coding: utf-8 -*-
"""
# @Time : 2018/6/11 下午6:45
# @Author : zhanzecheng
# @File : 542.01矩阵1.py
# @Software: PyCharm
"""
# 一个简单的循环方式来解决这个问题
# 这一题的思路不错,用多次循环来计数
# TODO: check 1
class Solution:
def updateMatrix(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: List[List[int]]
"""
cur = 0
col = len(matrix[0])
row = len(matrix)
while True:
cur += 1
flag = False
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j] == cur:
if i - 1 < 0 or matrix[i - 1][j] >= cur:
pass
else:
continue
if j - 1 < 0 or matrix[i][j - 1] >= cur:
pass
else:
continue
if i + 1 >= row or matrix[i + 1][j] >= cur:
pass
else:
continue
if j + 1 >= col or matrix[i][j + 1] >= cur:
pass
else:
continue
flag = True
matrix[i][j] += 1
if not flag:
break
return matrix
if __name__ == '__main__':
solution = Solution()
data = [
[0, 0, 0, 0],
[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]
]
print(solution.updateMatrix(data))
data =[
[1, 0, 1, 1, 0, 0, 1, 0, 0, 1],
[0, 1, 1, 0, 1, 0, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 0, 0, 1, 0, 0],
[1, 0, 1, 0, 1, 1, 1, 1, 1, 1],
[0, 1, 0, 1, 1, 0, 0, 0, 0, 1],
[0, 0, 1, 0, 1, 1, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 0, 1, 1],
[1, 0, 0, 0, 1, 1, 1, 1, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 0, 1, 0],
[1, 1, 1, 1, 0, 1, 0, 0, 1, 1]
]
result = [
[1,0,1,1,0,0,1,0,0,1],
[0,1,1,0,1,0,1,0,1,1],
[0,0,1,0,1,0,0,1,0,0],
[1,0,1,0,1,1,1,1,1,1],
[0,1,0,1,1,0,0,0,0,1],
[0,0,1,0,1,1,1,0,1,0],
[0,1,0,1,0,1,0,0,1,1],
[1,0,0,0,1,2,1,1,0,1],
[2,1,1,1,1,1,1,0,1,0],
[1,2,1,1,0,1,0,0,1,1]
]
true_result = [
[1,0,1,1,0,0,1,0,0,1],
[0,1,1,0,1,0,1,0,1,1],
[0,0,1,0,1,0,0,1,0,0],
[1,0,1,0,1,1,1,1,1,1],
[0,1,0,1,1,0,0,0,0,1],
[0,0,1,0,1,1,1,0,1,0],
[0,1,0,1,0,1,0,0,1,1],
[1,0,0,0,1,2,1,1,0,1],
[2,1,1,1,1,2,1,0,1,0],
[3,2,2,1,0,1,0,0,1,1]
]
|
normal
|
{
"blob_id": "1145050d82e614d5c248fc7e6a71720e6ff72414",
"index": 6055,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\n\nif __name__ == '__main__':\n solution = Solution()\n data = [[0, 0, 0, 0], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]\n print(solution.updateMatrix(data))\n data = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],\n [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, 1], [0,\n 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0], [0, 1, \n 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 1, 1, 1, 0, 1], [1, 1, 1, \n 1, 1, 1, 1, 0, 1, 0], [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]]\n result = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0, 1, 1\n ], [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],\n [0, 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0], [0,\n 1, 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 2, 1, 1, 0, 1], [2, 1, \n 1, 1, 1, 1, 1, 0, 1, 0], [1, 2, 1, 1, 0, 1, 0, 0, 1, 1]]\n true_result = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0,\n 1, 1], [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, \n 1], [0, 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],\n [0, 1, 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 2, 1, 1, 0, 1], [2,\n 1, 1, 1, 1, 2, 1, 0, 1, 0], [3, 2, 2, 1, 0, 1, 0, 0, 1, 1]]\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\n# @Time : 2018/6/11 下午6:45\n# @Author : zhanzecheng\n# @File : 542.01矩阵1.py\n# @Software: PyCharm\n\"\"\"\n\n# 一个简单的循环方式来解决这个问题\n# 这一题的思路不错,用多次循环来计数\n# TODO: check 1\nclass Solution:\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\nif __name__ == '__main__':\n solution = Solution()\n data = [\n [0, 0, 0, 0],\n [1, 1, 1, 1],\n [1, 1, 1, 1],\n [1, 1, 1, 1]\n ]\n print(solution.updateMatrix(data))\n data =[\n [1, 0, 1, 1, 0, 0, 1, 0, 0, 1],\n [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],\n [0, 0, 1, 0, 1, 0, 0, 1, 0, 0],\n [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],\n [0, 1, 0, 1, 1, 0, 0, 0, 0, 1],\n [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],\n [0, 1, 0, 1, 0, 1, 0, 0, 1, 1],\n [1, 0, 0, 0, 1, 1, 1, 1, 0, 1],\n [1, 1, 1, 1, 1, 1, 1, 0, 1, 0],\n [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]\n ]\n\n result = [\n [1,0,1,1,0,0,1,0,0,1],\n [0,1,1,0,1,0,1,0,1,1],\n [0,0,1,0,1,0,0,1,0,0],\n [1,0,1,0,1,1,1,1,1,1],\n [0,1,0,1,1,0,0,0,0,1],\n [0,0,1,0,1,1,1,0,1,0],\n [0,1,0,1,0,1,0,0,1,1],\n [1,0,0,0,1,2,1,1,0,1],\n [2,1,1,1,1,1,1,0,1,0],\n [1,2,1,1,0,1,0,0,1,1]\n ]\n true_result = [\n [1,0,1,1,0,0,1,0,0,1],\n [0,1,1,0,1,0,1,0,1,1],\n [0,0,1,0,1,0,0,1,0,0],\n [1,0,1,0,1,1,1,1,1,1],\n [0,1,0,1,1,0,0,0,0,1],\n [0,0,1,0,1,1,1,0,1,0],\n [0,1,0,1,0,1,0,0,1,1],\n [1,0,0,0,1,2,1,1,0,1],\n [2,1,1,1,1,2,1,0,1,0],\n [3,2,2,1,0,1,0,0,1,1]\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy
import cv2
from keras.models import model_from_json
from keras.layers import Dense
from keras.utils import np_utils
import os
from keras.optimizers import SGD, Adam
numpy.random.seed(42)
file_json = open('model.json', "r")
model_json = file_json.read()
file_json.close()
model = model_from_json(model_json)
model.load_weights('weights.h5')
print('Model loaded')
sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy'])
# for i in range(10):
# img = cv2.imread(str(i) + '.png', 0)
# img = cv2.resize(img, (28, 28))
# for i in range(28):
# for j in range(28):
# img[i][j] = abs(img[i][j] - 255)
# print('%4.f' % img[i][j], end='')
# print()
# print()
# print()
# print()
for i in range(10):
img = cv2.imread(str(i) + '.png', 0)
img = cv2.resize(img, (28, 28))
for x in range(28):
for y in range(28):
img[x][y] = abs(img[x][y] - 255)
img = img.astype('float32')
img /= numpy.max(img)
img = numpy.array([img[numpy.newaxis, :, :]])
a = model.predict(img, batch_size=64)
print(i, numpy.argmax(a, axis=None, out=None))
|
normal
|
{
"blob_id": "05021c3b39a0df07ca3d7d1c3ff9d47be6723131",
"index": 4084,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnumpy.random.seed(42)\n<mask token>\nfile_json.close()\n<mask token>\nmodel.load_weights('weights.h5')\nprint('Model loaded')\n<mask token>\nmodel.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=[\n 'accuracy'])\nfor i in range(10):\n img = cv2.imread(str(i) + '.png', 0)\n img = cv2.resize(img, (28, 28))\n for x in range(28):\n for y in range(28):\n img[x][y] = abs(img[x][y] - 255)\n img = img.astype('float32')\n img /= numpy.max(img)\n img = numpy.array([img[numpy.newaxis, :, :]])\n a = model.predict(img, batch_size=64)\n print(i, numpy.argmax(a, axis=None, out=None))\n",
"step-3": "<mask token>\nnumpy.random.seed(42)\nfile_json = open('model.json', 'r')\nmodel_json = file_json.read()\nfile_json.close()\nmodel = model_from_json(model_json)\nmodel.load_weights('weights.h5')\nprint('Model loaded')\nsgd = SGD(lr=0.01, momentum=0.9, nesterov=True)\nmodel.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=[\n 'accuracy'])\nfor i in range(10):\n img = cv2.imread(str(i) + '.png', 0)\n img = cv2.resize(img, (28, 28))\n for x in range(28):\n for y in range(28):\n img[x][y] = abs(img[x][y] - 255)\n img = img.astype('float32')\n img /= numpy.max(img)\n img = numpy.array([img[numpy.newaxis, :, :]])\n a = model.predict(img, batch_size=64)\n print(i, numpy.argmax(a, axis=None, out=None))\n",
"step-4": "import numpy\nimport cv2\nfrom keras.models import model_from_json\nfrom keras.layers import Dense\nfrom keras.utils import np_utils\nimport os\nfrom keras.optimizers import SGD, Adam\nnumpy.random.seed(42)\nfile_json = open('model.json', 'r')\nmodel_json = file_json.read()\nfile_json.close()\nmodel = model_from_json(model_json)\nmodel.load_weights('weights.h5')\nprint('Model loaded')\nsgd = SGD(lr=0.01, momentum=0.9, nesterov=True)\nmodel.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=[\n 'accuracy'])\nfor i in range(10):\n img = cv2.imread(str(i) + '.png', 0)\n img = cv2.resize(img, (28, 28))\n for x in range(28):\n for y in range(28):\n img[x][y] = abs(img[x][y] - 255)\n img = img.astype('float32')\n img /= numpy.max(img)\n img = numpy.array([img[numpy.newaxis, :, :]])\n a = model.predict(img, batch_size=64)\n print(i, numpy.argmax(a, axis=None, out=None))\n",
"step-5": "import numpy\nimport cv2\nfrom keras.models import model_from_json\nfrom keras.layers import Dense\nfrom keras.utils import np_utils\nimport os\nfrom keras.optimizers import SGD, Adam\n\nnumpy.random.seed(42)\nfile_json = open('model.json', \"r\")\nmodel_json = file_json.read()\nfile_json.close()\nmodel = model_from_json(model_json)\nmodel.load_weights('weights.h5')\nprint('Model loaded')\nsgd = SGD(lr=0.01, momentum=0.9, nesterov=True)\nmodel.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy'])\n\n# for i in range(10):\n# img = cv2.imread(str(i) + '.png', 0)\n# img = cv2.resize(img, (28, 28))\n# for i in range(28):\n# for j in range(28):\n# img[i][j] = abs(img[i][j] - 255)\n# print('%4.f' % img[i][j], end='')\n# print()\n# print()\n# print()\n# print()\n\nfor i in range(10):\n\timg = cv2.imread(str(i) + '.png', 0)\n\timg = cv2.resize(img, (28, 28))\n\tfor x in range(28):\n\t\tfor y in range(28):\n\t\t\timg[x][y] = abs(img[x][y] - 255)\n\timg = img.astype('float32')\n\timg /= numpy.max(img)\n\timg = numpy.array([img[numpy.newaxis, :, :]])\n\ta = model.predict(img, batch_size=64)\n\tprint(i, numpy.argmax(a, axis=None, out=None))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pytest
from freezegun import freeze_time
from datetime import datetime
from khayyam import JalaliDatetime, TehranTimezone
from dilami_calendar import DilamiDatetime, dilami_to_jalali
def test_dilami_date():
gdate = datetime(2018, 2, 1)
ddate = DilamiDatetime(gdate, tzinfo=TehranTimezone)
assert ddate.year == 1591
assert ddate.month == 6
assert ddate.day == 28
ddate = DilamiDatetime(1591, 6, 28, tzinfo=TehranTimezone)
assert ddate
ddate = DilamiDatetime(1592, 5, 1, tzinfo=TehranTimezone)
dilami_date = DilamiDatetime(ddate)
assert dilami_date
# Check Dilami date return today
ddate = DilamiDatetime().now()
jy, jm, jd = dilami_to_jalali(ddate.year, ddate.month, ddate.day)
today = JalaliDatetime.now(TehranTimezone())
assert today.year == jy
assert today.month == jm
assert today.day == jd
with freeze_time(datetime.now()):
dilami_now = DilamiDatetime(datetime.now()).to_datetime()
assert dilami_now.time() == datetime.now().time()
now = datetime.now()
dilami_date = DilamiDatetime(now)
assert dilami_date.to_date() == now.date()
def test_limits():
# Test MinYear and MaxYear
with pytest.raises(ValueError):
DilamiDatetime(194, 1, 1)
with pytest.raises(ValueError):
DilamiDatetime(3373, 1, 1)
# Test months
with pytest.raises(ValueError):
DilamiDatetime(1592, -1, 3)
with pytest.raises(ValueError):
DilamiDatetime(1592, 13, 1)
# Test days
with pytest.raises(ValueError):
DilamiDatetime(1592, 1, 32)
with pytest.raises(ValueError):
DilamiDatetime(1592, 1, -1)
# Test days of leap year
with pytest.raises(ValueError):
DilamiDatetime(1595, 0, 0)
with pytest.raises(ValueError):
DilamiDatetime(1593, 0, 6)
|
normal
|
{
"blob_id": "7997efb00f24ecc5c4fbf3ca049eca6b5b178d53",
"index": 4088,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_dilami_date():\n gdate = datetime(2018, 2, 1)\n ddate = DilamiDatetime(gdate, tzinfo=TehranTimezone)\n assert ddate.year == 1591\n assert ddate.month == 6\n assert ddate.day == 28\n ddate = DilamiDatetime(1591, 6, 28, tzinfo=TehranTimezone)\n assert ddate\n ddate = DilamiDatetime(1592, 5, 1, tzinfo=TehranTimezone)\n dilami_date = DilamiDatetime(ddate)\n assert dilami_date\n ddate = DilamiDatetime().now()\n jy, jm, jd = dilami_to_jalali(ddate.year, ddate.month, ddate.day)\n today = JalaliDatetime.now(TehranTimezone())\n assert today.year == jy\n assert today.month == jm\n assert today.day == jd\n with freeze_time(datetime.now()):\n dilami_now = DilamiDatetime(datetime.now()).to_datetime()\n assert dilami_now.time() == datetime.now().time()\n now = datetime.now()\n dilami_date = DilamiDatetime(now)\n assert dilami_date.to_date() == now.date()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_dilami_date():\n gdate = datetime(2018, 2, 1)\n ddate = DilamiDatetime(gdate, tzinfo=TehranTimezone)\n assert ddate.year == 1591\n assert ddate.month == 6\n assert ddate.day == 28\n ddate = DilamiDatetime(1591, 6, 28, tzinfo=TehranTimezone)\n assert ddate\n ddate = DilamiDatetime(1592, 5, 1, tzinfo=TehranTimezone)\n dilami_date = DilamiDatetime(ddate)\n assert dilami_date\n ddate = DilamiDatetime().now()\n jy, jm, jd = dilami_to_jalali(ddate.year, ddate.month, ddate.day)\n today = JalaliDatetime.now(TehranTimezone())\n assert today.year == jy\n assert today.month == jm\n assert today.day == jd\n with freeze_time(datetime.now()):\n dilami_now = DilamiDatetime(datetime.now()).to_datetime()\n assert dilami_now.time() == datetime.now().time()\n now = datetime.now()\n dilami_date = DilamiDatetime(now)\n assert dilami_date.to_date() == now.date()\n\n\ndef test_limits():\n with pytest.raises(ValueError):\n DilamiDatetime(194, 1, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(3373, 1, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, -1, 3)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 13, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, 32)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, -1)\n with pytest.raises(ValueError):\n DilamiDatetime(1595, 0, 0)\n with pytest.raises(ValueError):\n DilamiDatetime(1593, 0, 6)\n",
"step-4": "import pytest\nfrom freezegun import freeze_time\nfrom datetime import datetime\nfrom khayyam import JalaliDatetime, TehranTimezone\nfrom dilami_calendar import DilamiDatetime, dilami_to_jalali\n\n\ndef test_dilami_date():\n gdate = datetime(2018, 2, 1)\n ddate = DilamiDatetime(gdate, tzinfo=TehranTimezone)\n assert ddate.year == 1591\n assert ddate.month == 6\n assert ddate.day == 28\n ddate = DilamiDatetime(1591, 6, 28, tzinfo=TehranTimezone)\n assert ddate\n ddate = DilamiDatetime(1592, 5, 1, tzinfo=TehranTimezone)\n dilami_date = DilamiDatetime(ddate)\n assert dilami_date\n ddate = DilamiDatetime().now()\n jy, jm, jd = dilami_to_jalali(ddate.year, ddate.month, ddate.day)\n today = JalaliDatetime.now(TehranTimezone())\n assert today.year == jy\n assert today.month == jm\n assert today.day == jd\n with freeze_time(datetime.now()):\n dilami_now = DilamiDatetime(datetime.now()).to_datetime()\n assert dilami_now.time() == datetime.now().time()\n now = datetime.now()\n dilami_date = DilamiDatetime(now)\n assert dilami_date.to_date() == now.date()\n\n\ndef test_limits():\n with pytest.raises(ValueError):\n DilamiDatetime(194, 1, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(3373, 1, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, -1, 3)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 13, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, 32)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, -1)\n with pytest.raises(ValueError):\n DilamiDatetime(1595, 0, 0)\n with pytest.raises(ValueError):\n DilamiDatetime(1593, 0, 6)\n",
"step-5": "import pytest\n\nfrom freezegun import freeze_time\nfrom datetime import datetime\nfrom khayyam import JalaliDatetime, TehranTimezone\n\nfrom dilami_calendar import DilamiDatetime, dilami_to_jalali\n\n\ndef test_dilami_date():\n gdate = datetime(2018, 2, 1)\n ddate = DilamiDatetime(gdate, tzinfo=TehranTimezone)\n\n assert ddate.year == 1591\n assert ddate.month == 6\n assert ddate.day == 28\n\n ddate = DilamiDatetime(1591, 6, 28, tzinfo=TehranTimezone)\n assert ddate\n\n ddate = DilamiDatetime(1592, 5, 1, tzinfo=TehranTimezone)\n dilami_date = DilamiDatetime(ddate)\n assert dilami_date\n\n # Check Dilami date return today\n ddate = DilamiDatetime().now()\n jy, jm, jd = dilami_to_jalali(ddate.year, ddate.month, ddate.day)\n\n today = JalaliDatetime.now(TehranTimezone())\n assert today.year == jy\n assert today.month == jm\n assert today.day == jd\n\n with freeze_time(datetime.now()):\n dilami_now = DilamiDatetime(datetime.now()).to_datetime()\n assert dilami_now.time() == datetime.now().time()\n\n now = datetime.now()\n dilami_date = DilamiDatetime(now)\n assert dilami_date.to_date() == now.date()\n\n\ndef test_limits():\n # Test MinYear and MaxYear\n with pytest.raises(ValueError):\n DilamiDatetime(194, 1, 1)\n with pytest.raises(ValueError):\n DilamiDatetime(3373, 1, 1)\n\n # Test months\n with pytest.raises(ValueError):\n DilamiDatetime(1592, -1, 3)\n\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 13, 1)\n\n # Test days\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, 32)\n with pytest.raises(ValueError):\n DilamiDatetime(1592, 1, -1)\n\n # Test days of leap year\n with pytest.raises(ValueError):\n DilamiDatetime(1595, 0, 0)\n\n with pytest.raises(ValueError):\n DilamiDatetime(1593, 0, 6)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.2 on 2021-06-28 04:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rrhh', '0014_alter_detallepermiso_fecha_permiso'),
]
operations = [
migrations.AlterField(
model_name='permiso',
name='mes',
field=models.CharField(choices=[('01', 'ENERO'), ('02', 'FEBRERO'), ('03', 'MARZO'), ('04', 'ABRIL'), ('05', 'MAYO'), ('06', 'JUNIO'), ('07', 'JULIO'), ('08', 'AGOSTO'), ('09', 'SEPTIEMBRE'), ('10', 'OCTUBRE'), ('11', 'NOVIEMBRE'), ('12', 'DICIEMBRE')], max_length=2),
),
]
|
normal
|
{
"blob_id": "5db450424dc143443839e24801ece444d0d7e162",
"index": 3611,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rrhh', '0014_alter_detallepermiso_fecha_permiso')]\n operations = [migrations.AlterField(model_name='permiso', name='mes',\n field=models.CharField(choices=[('01', 'ENERO'), ('02', 'FEBRERO'),\n ('03', 'MARZO'), ('04', 'ABRIL'), ('05', 'MAYO'), ('06', 'JUNIO'),\n ('07', 'JULIO'), ('08', 'AGOSTO'), ('09', 'SEPTIEMBRE'), ('10',\n 'OCTUBRE'), ('11', 'NOVIEMBRE'), ('12', 'DICIEMBRE')], max_length=2))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rrhh', '0014_alter_detallepermiso_fecha_permiso')]\n operations = [migrations.AlterField(model_name='permiso', name='mes',\n field=models.CharField(choices=[('01', 'ENERO'), ('02', 'FEBRERO'),\n ('03', 'MARZO'), ('04', 'ABRIL'), ('05', 'MAYO'), ('06', 'JUNIO'),\n ('07', 'JULIO'), ('08', 'AGOSTO'), ('09', 'SEPTIEMBRE'), ('10',\n 'OCTUBRE'), ('11', 'NOVIEMBRE'), ('12', 'DICIEMBRE')], max_length=2))]\n",
"step-5": "# Generated by Django 3.2 on 2021-06-28 04:32\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rrhh', '0014_alter_detallepermiso_fecha_permiso'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='permiso',\n name='mes',\n field=models.CharField(choices=[('01', 'ENERO'), ('02', 'FEBRERO'), ('03', 'MARZO'), ('04', 'ABRIL'), ('05', 'MAYO'), ('06', 'JUNIO'), ('07', 'JULIO'), ('08', 'AGOSTO'), ('09', 'SEPTIEMBRE'), ('10', 'OCTUBRE'), ('11', 'NOVIEMBRE'), ('12', 'DICIEMBRE')], max_length=2),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
def GetNext(self, pNode):
def left_most(p):
if p == None:
return None
while p.left:
p = p.left
return p
if pNode == None:
return None
if pNode.right:
return left_most(pNode.right)
else:
temp = pNode
while temp.next:
if temp.next.left == temp:
return temp.next
temp = temp.next
return None
<|reserved_special_token_1|>
class Solution:
"""
给定一个二叉树和其中的一个结点,请找出中序遍历顺序的下一个结点并且返回。
注意,树中的结点不仅包含左右子结点,同时包含指向父结点的指针。
"""
def GetNext(self, pNode):
def left_most(p):
if p == None:
return None
while p.left:
p = p.left
return p
if pNode == None:
return None
if pNode.right:
return left_most(pNode.right)
else:
temp = pNode
while temp.next:
if temp.next.left == temp:
return temp.next
temp = temp.next
return None
<|reserved_special_token_1|>
# -*- coding:utf-8 -*-
# class TreeLinkNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution:
"""
给定一个二叉树和其中的一个结点,请找出中序遍历顺序的下一个结点并且返回。
注意,树中的结点不仅包含左右子结点,同时包含指向父结点的指针。
"""
def GetNext(self, pNode):
# write code here
def left_most(p):
if p == None:
return None
while p.left:
p = p.left
return p
if pNode == None:
return None
if pNode.right:
return left_most(pNode.right)
else:
temp = pNode
while temp.next:
if temp.next.left == temp:
# pNode在某节点的左子树中
return temp.next
temp = temp.next
# 退到根节点
return None
|
flexible
|
{
"blob_id": "57f8584a8d058e5f9d4e0b7b75c7ec8dbbfef24a",
"index": 9681,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n <mask token>\n",
"step-3": "class Solution:\n <mask token>\n\n def GetNext(self, pNode):\n\n def left_most(p):\n if p == None:\n return None\n while p.left:\n p = p.left\n return p\n if pNode == None:\n return None\n if pNode.right:\n return left_most(pNode.right)\n else:\n temp = pNode\n while temp.next:\n if temp.next.left == temp:\n return temp.next\n temp = temp.next\n return None\n",
"step-4": "class Solution:\n \"\"\"\n 给定一个二叉树和其中的一个结点,请找出中序遍历顺序的下一个结点并且返回。\n 注意,树中的结点不仅包含左右子结点,同时包含指向父结点的指针。\n \"\"\"\n\n def GetNext(self, pNode):\n\n def left_most(p):\n if p == None:\n return None\n while p.left:\n p = p.left\n return p\n if pNode == None:\n return None\n if pNode.right:\n return left_most(pNode.right)\n else:\n temp = pNode\n while temp.next:\n if temp.next.left == temp:\n return temp.next\n temp = temp.next\n return None\n",
"step-5": "# -*- coding:utf-8 -*-\n# class TreeLinkNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n# self.next = None\nclass Solution:\n \"\"\"\n 给定一个二叉树和其中的一个结点,请找出中序遍历顺序的下一个结点并且返回。\n 注意,树中的结点不仅包含左右子结点,同时包含指向父结点的指针。\n \"\"\"\n def GetNext(self, pNode):\n # write code here\n def left_most(p):\n if p == None:\n return None\n while p.left:\n p = p.left\n return p\n\n if pNode == None:\n return None\n if pNode.right:\n return left_most(pNode.right)\n else:\n temp = pNode\n while temp.next:\n if temp.next.left == temp:\n # pNode在某节点的左子树中\n return temp.next\n temp = temp.next\n # 退到根节点\n return None\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import logging
from .const import (
DOMAIN,
CONF_SCREENS
)
from typing import Any, Callable, Dict, Optional
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import (
ConfigType,
DiscoveryInfoType,
HomeAssistantType,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_state_change
from .dgus_protocol import create_protocol
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass: HomeAssistantType,
config: ConfigType,
async_add_entities: Callable,
discovery_info: Optional[DiscoveryInfoType] = None,
) -> None:
sensors = [DGUSScreen(hass, screen) for screen in config[CONF_SCREENS]]
async_add_entities(sensors, update_before_add=True)
class StateConverters:
@staticmethod
def extract_attr(state, attr):
if attr:
return state.attributes[attr]
else:
return state.as_dict()['state']
@staticmethod
def send_int(state, settings, protocol):
vp = settings['vp']
attr = settings.get('attribute', None)
try:
value = int(float(StateConverters.extract_attr(state, attr)))
protocol.write_vp(vp, value)
except Exception as er:
_LOGGER.error("Can't send value: %s", str(er))
@staticmethod
def send_map(state, settings, protocol):
vp = settings['vp']
map_state = settings['map']
attr = settings.get('attribute', None)
key = str(StateConverters.extract_attr(state, attr))
value = int(map_state[key])
protocol.write_vp(vp, value)
class DGUSScreen(Entity):
def __init__(self, hass, screen):
self._state = None
self._hass = hass
self._name = screen['name']
self._state_track_settings = {
entry['entity_id']: entry for entry in screen.get('show_states', [])}
try:
self._protocol = create_protocol(
screen['port_name'], screen['bound_rate'], self.on_data)
except Exception as er:
_LOGGER.error("Can't open serial port %s, : %s",
screen['port_name'], str(er))
entiti_ids = [entry['entity_id'] for entry in screen['show_states']]
async_track_state_change(hass, entiti_ids, self.state_listener)
def state_listener(self, entity, old_state, new_state):
settings = self._state_track_settings[entity]
if settings['type'] == 'int':
StateConverters.send_int(
new_state, settings, self._protocol.protocol)
elif settings['type'] == 'map':
StateConverters.send_map(
new_state, settings, self._protocol.protocol)
@property
def name(self):
return self._name
@property
def state(self):
return self._state
def on_data(self, vp, value):
"""fire event for data, received from screen"""
eventName = self.name + "_set_vp"
self._hass.bus.fire(eventName, {"vp": vp, "value": value})
|
normal
|
{
"blob_id": "6f1b08a5ae1a07a30d89f3997461f4f97658f364",
"index": 4920,
"step-1": "<mask token>\n\n\nclass StateConverters:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass DGUSScreen(Entity):\n\n def __init__(self, hass, screen):\n self._state = None\n self._hass = hass\n self._name = screen['name']\n self._state_track_settings = {entry['entity_id']: entry for entry in\n screen.get('show_states', [])}\n try:\n self._protocol = create_protocol(screen['port_name'], screen[\n 'bound_rate'], self.on_data)\n except Exception as er:\n _LOGGER.error(\"Can't open serial port %s, : %s\", screen[\n 'port_name'], str(er))\n entiti_ids = [entry['entity_id'] for entry in screen['show_states']]\n async_track_state_change(hass, entiti_ids, self.state_listener)\n\n def state_listener(self, entity, old_state, new_state):\n settings = self._state_track_settings[entity]\n if settings['type'] == 'int':\n StateConverters.send_int(new_state, settings, self._protocol.\n protocol)\n elif settings['type'] == 'map':\n StateConverters.send_map(new_state, settings, self._protocol.\n protocol)\n\n @property\n def name(self):\n return self._name\n\n @property\n def state(self):\n return self._state\n\n def on_data(self, vp, value):\n \"\"\"fire event for data, received from screen\"\"\"\n eventName = self.name + '_set_vp'\n self._hass.bus.fire(eventName, {'vp': vp, 'value': value})\n",
"step-2": "<mask token>\n\n\nclass StateConverters:\n\n @staticmethod\n def extract_attr(state, attr):\n if attr:\n return state.attributes[attr]\n else:\n return state.as_dict()['state']\n\n @staticmethod\n def send_int(state, settings, protocol):\n vp = settings['vp']\n attr = settings.get('attribute', None)\n try:\n value = int(float(StateConverters.extract_attr(state, attr)))\n protocol.write_vp(vp, value)\n except Exception as er:\n _LOGGER.error(\"Can't send value: %s\", str(er))\n\n @staticmethod\n def send_map(state, settings, protocol):\n vp = settings['vp']\n map_state = settings['map']\n attr = settings.get('attribute', None)\n key = str(StateConverters.extract_attr(state, attr))\n value = int(map_state[key])\n protocol.write_vp(vp, value)\n\n\nclass DGUSScreen(Entity):\n\n def __init__(self, hass, screen):\n self._state = None\n self._hass = hass\n self._name = screen['name']\n self._state_track_settings = {entry['entity_id']: entry for entry in\n screen.get('show_states', [])}\n try:\n self._protocol = create_protocol(screen['port_name'], screen[\n 'bound_rate'], self.on_data)\n except Exception as er:\n _LOGGER.error(\"Can't open serial port %s, : %s\", screen[\n 'port_name'], str(er))\n entiti_ids = [entry['entity_id'] for entry in screen['show_states']]\n async_track_state_change(hass, entiti_ids, self.state_listener)\n\n def state_listener(self, entity, old_state, new_state):\n settings = self._state_track_settings[entity]\n if settings['type'] == 'int':\n StateConverters.send_int(new_state, settings, self._protocol.\n protocol)\n elif settings['type'] == 'map':\n StateConverters.send_map(new_state, settings, self._protocol.\n protocol)\n\n @property\n def name(self):\n return self._name\n\n @property\n def state(self):\n return self._state\n\n def on_data(self, vp, value):\n \"\"\"fire event for data, received from screen\"\"\"\n eventName = self.name + '_set_vp'\n self._hass.bus.fire(eventName, {'vp': vp, 'value': value})\n",
"step-3": "<mask token>\n\n\nasync def async_setup_platform(hass: HomeAssistantType, config: ConfigType,\n async_add_entities: Callable, discovery_info: Optional[\n DiscoveryInfoType]=None) ->None:\n sensors = [DGUSScreen(hass, screen) for screen in config[CONF_SCREENS]]\n async_add_entities(sensors, update_before_add=True)\n\n\nclass StateConverters:\n\n @staticmethod\n def extract_attr(state, attr):\n if attr:\n return state.attributes[attr]\n else:\n return state.as_dict()['state']\n\n @staticmethod\n def send_int(state, settings, protocol):\n vp = settings['vp']\n attr = settings.get('attribute', None)\n try:\n value = int(float(StateConverters.extract_attr(state, attr)))\n protocol.write_vp(vp, value)\n except Exception as er:\n _LOGGER.error(\"Can't send value: %s\", str(er))\n\n @staticmethod\n def send_map(state, settings, protocol):\n vp = settings['vp']\n map_state = settings['map']\n attr = settings.get('attribute', None)\n key = str(StateConverters.extract_attr(state, attr))\n value = int(map_state[key])\n protocol.write_vp(vp, value)\n\n\nclass DGUSScreen(Entity):\n\n def __init__(self, hass, screen):\n self._state = None\n self._hass = hass\n self._name = screen['name']\n self._state_track_settings = {entry['entity_id']: entry for entry in\n screen.get('show_states', [])}\n try:\n self._protocol = create_protocol(screen['port_name'], screen[\n 'bound_rate'], self.on_data)\n except Exception as er:\n _LOGGER.error(\"Can't open serial port %s, : %s\", screen[\n 'port_name'], str(er))\n entiti_ids = [entry['entity_id'] for entry in screen['show_states']]\n async_track_state_change(hass, entiti_ids, self.state_listener)\n\n def state_listener(self, entity, old_state, new_state):\n settings = self._state_track_settings[entity]\n if settings['type'] == 'int':\n StateConverters.send_int(new_state, settings, self._protocol.\n protocol)\n elif settings['type'] == 'map':\n StateConverters.send_map(new_state, settings, self._protocol.\n protocol)\n\n @property\n 
def name(self):\n return self._name\n\n @property\n def state(self):\n return self._state\n\n def on_data(self, vp, value):\n \"\"\"fire event for data, received from screen\"\"\"\n eventName = self.name + '_set_vp'\n self._hass.bus.fire(eventName, {'vp': vp, 'value': value})\n",
"step-4": "import logging\nfrom .const import DOMAIN, CONF_SCREENS\nfrom typing import Any, Callable, Dict, Optional\nfrom homeassistant.helpers.entity import Entity\nfrom homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, HomeAssistantType\nfrom homeassistant.core import callback\nfrom homeassistant.helpers.event import async_track_state_change\nfrom .dgus_protocol import create_protocol\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def async_setup_platform(hass: HomeAssistantType, config: ConfigType,\n async_add_entities: Callable, discovery_info: Optional[\n DiscoveryInfoType]=None) ->None:\n sensors = [DGUSScreen(hass, screen) for screen in config[CONF_SCREENS]]\n async_add_entities(sensors, update_before_add=True)\n\n\nclass StateConverters:\n\n @staticmethod\n def extract_attr(state, attr):\n if attr:\n return state.attributes[attr]\n else:\n return state.as_dict()['state']\n\n @staticmethod\n def send_int(state, settings, protocol):\n vp = settings['vp']\n attr = settings.get('attribute', None)\n try:\n value = int(float(StateConverters.extract_attr(state, attr)))\n protocol.write_vp(vp, value)\n except Exception as er:\n _LOGGER.error(\"Can't send value: %s\", str(er))\n\n @staticmethod\n def send_map(state, settings, protocol):\n vp = settings['vp']\n map_state = settings['map']\n attr = settings.get('attribute', None)\n key = str(StateConverters.extract_attr(state, attr))\n value = int(map_state[key])\n protocol.write_vp(vp, value)\n\n\nclass DGUSScreen(Entity):\n\n def __init__(self, hass, screen):\n self._state = None\n self._hass = hass\n self._name = screen['name']\n self._state_track_settings = {entry['entity_id']: entry for entry in\n screen.get('show_states', [])}\n try:\n self._protocol = create_protocol(screen['port_name'], screen[\n 'bound_rate'], self.on_data)\n except Exception as er:\n _LOGGER.error(\"Can't open serial port %s, : %s\", screen[\n 'port_name'], str(er))\n entiti_ids = [entry['entity_id'] for entry in 
screen['show_states']]\n async_track_state_change(hass, entiti_ids, self.state_listener)\n\n def state_listener(self, entity, old_state, new_state):\n settings = self._state_track_settings[entity]\n if settings['type'] == 'int':\n StateConverters.send_int(new_state, settings, self._protocol.\n protocol)\n elif settings['type'] == 'map':\n StateConverters.send_map(new_state, settings, self._protocol.\n protocol)\n\n @property\n def name(self):\n return self._name\n\n @property\n def state(self):\n return self._state\n\n def on_data(self, vp, value):\n \"\"\"fire event for data, received from screen\"\"\"\n eventName = self.name + '_set_vp'\n self._hass.bus.fire(eventName, {'vp': vp, 'value': value})\n",
"step-5": "import logging\nfrom .const import (\n DOMAIN,\n CONF_SCREENS\n)\nfrom typing import Any, Callable, Dict, Optional\nfrom homeassistant.helpers.entity import Entity\nfrom homeassistant.helpers.typing import (\n ConfigType,\n DiscoveryInfoType,\n HomeAssistantType,\n)\nfrom homeassistant.core import callback\nfrom homeassistant.helpers.event import async_track_state_change\nfrom .dgus_protocol import create_protocol\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def async_setup_platform(\n hass: HomeAssistantType,\n config: ConfigType,\n async_add_entities: Callable,\n discovery_info: Optional[DiscoveryInfoType] = None,\n) -> None:\n sensors = [DGUSScreen(hass, screen) for screen in config[CONF_SCREENS]]\n async_add_entities(sensors, update_before_add=True)\n\n\nclass StateConverters:\n @staticmethod\n def extract_attr(state, attr):\n if attr:\n return state.attributes[attr]\n else:\n return state.as_dict()['state']\n\n @staticmethod\n def send_int(state, settings, protocol):\n vp = settings['vp']\n attr = settings.get('attribute', None)\n try:\n value = int(float(StateConverters.extract_attr(state, attr)))\n protocol.write_vp(vp, value)\n except Exception as er:\n _LOGGER.error(\"Can't send value: %s\", str(er))\n\n @staticmethod\n def send_map(state, settings, protocol):\n vp = settings['vp']\n map_state = settings['map']\n attr = settings.get('attribute', None)\n key = str(StateConverters.extract_attr(state, attr))\n value = int(map_state[key])\n protocol.write_vp(vp, value)\n\n\nclass DGUSScreen(Entity):\n def __init__(self, hass, screen):\n self._state = None\n self._hass = hass\n self._name = screen['name']\n self._state_track_settings = {\n entry['entity_id']: entry for entry in screen.get('show_states', [])}\n try:\n self._protocol = create_protocol(\n screen['port_name'], screen['bound_rate'], self.on_data)\n except Exception as er:\n _LOGGER.error(\"Can't open serial port %s, : %s\",\n screen['port_name'], str(er))\n \n entiti_ids = 
[entry['entity_id'] for entry in screen['show_states']]\n async_track_state_change(hass, entiti_ids, self.state_listener)\n\n def state_listener(self, entity, old_state, new_state):\n settings = self._state_track_settings[entity]\n if settings['type'] == 'int':\n StateConverters.send_int(\n new_state, settings, self._protocol.protocol)\n elif settings['type'] == 'map':\n StateConverters.send_map(\n new_state, settings, self._protocol.protocol)\n\n @property\n def name(self):\n return self._name\n\n @property\n def state(self):\n return self._state\n\n def on_data(self, vp, value):\n \"\"\"fire event for data, received from screen\"\"\"\n eventName = self.name + \"_set_vp\"\n self._hass.bus.fire(eventName, {\"vp\": vp, \"value\": value})\n",
"step-ids": [
7,
10,
11,
13,
14
]
}
|
[
7,
10,
11,
13,
14
] |
<|reserved_special_token_0|>
class CSV_Normalize:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_input(self):
return list(zip(self.close_prices, self.high_prices, self.
prev_prices, self.sentiments))
def get_nomralized_input(self):
return list(zip(self.normalized_close, self.normalized_high, self.
normalized_prev, self.sentiments))
def set_output(self):
with open(self.stock + '.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
self.open_prices.append(row[2])
self.open_prices = self.open_prices[2:]
for m in range(len(self.open_prices)):
self.open_prices[m] = float(self.open_prices[m])
for i in range(len(self.open_prices)):
if self.open_prices[i] > self.max_open:
self.max_open = self.open_prices[i]
if self.open_prices[i] < self.min_open:
self.min_open = self.open_prices[i]
def set_normalized_output(self):
self.set_output()
for i1 in range(len(self.open_prices)):
self.normalized_open.append((self.open_prices[i1] - self.
min_open) / (self.max_open - self.min_open))
<|reserved_special_token_0|>
def get_normalized_output(self):
return self.normalized_open
<|reserved_special_token_0|>
def get_training_input(self):
self.set_training_input()
return self.training_inputs
def set_training_input(self):
for i in range(len(self.normalized_close)):
temp_list = [self.normalized_close[i], self.normalized_high[i],
self.normalized_prev[i], self.normalized_sent[i]]
self.inputs.append(temp_list)
train_end = int(0.7 * len(self.inputs))
self.training_inputs = self.inputs[0:train_end]
def get_testing_input(self):
self.set_testing_input()
return self.testing_inputs
<|reserved_special_token_0|>
def set_testing_input(self):
train_end = int(0.7 * len(self.inputs))
self.testing_inputs = self.inputs[train_end:]
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_testing_output(self):
train_end = int(0.7 * len(self.normalized_open))
self.testing_outputs = self.normalized_open[train_end:]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CSV_Normalize:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_normalized_input(self):
if self.max_prev == 0:
self.set_input()
for i1 in range(len(self.close_prices)):
self.normalized_close.append((self.close_prices[i1] - self.
min_close) / (self.max_close - self.min_close))
for i2 in range(len(self.high_prices)):
self.normalized_high.append((self.high_prices[i2] - self.
min_high) / (self.max_high - self.min_high))
for i4 in range(len(self.prev_prices)):
self.normalized_prev.append((self.prev_prices[i4] - self.
min_prev) / (self.max_prev - self.min_prev))
for i5 in range(len(self.sentiments)):
diff = self.max_sent - self.min_sent
if diff == 0:
self.normalized_sent.append(0)
else:
self.normalized_sent.append((self.sentiments[i5] - self.
min_sent) / (self.max_sent - self.min_sent))
def get_input(self):
return list(zip(self.close_prices, self.high_prices, self.
prev_prices, self.sentiments))
def get_nomralized_input(self):
return list(zip(self.normalized_close, self.normalized_high, self.
normalized_prev, self.sentiments))
def set_output(self):
with open(self.stock + '.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
self.open_prices.append(row[2])
self.open_prices = self.open_prices[2:]
for m in range(len(self.open_prices)):
self.open_prices[m] = float(self.open_prices[m])
for i in range(len(self.open_prices)):
if self.open_prices[i] > self.max_open:
self.max_open = self.open_prices[i]
if self.open_prices[i] < self.min_open:
self.min_open = self.open_prices[i]
def set_normalized_output(self):
self.set_output()
for i1 in range(len(self.open_prices)):
self.normalized_open.append((self.open_prices[i1] - self.
min_open) / (self.max_open - self.min_open))
<|reserved_special_token_0|>
def get_normalized_output(self):
return self.normalized_open
<|reserved_special_token_0|>
def get_training_input(self):
self.set_training_input()
return self.training_inputs
def set_training_input(self):
for i in range(len(self.normalized_close)):
temp_list = [self.normalized_close[i], self.normalized_high[i],
self.normalized_prev[i], self.normalized_sent[i]]
self.inputs.append(temp_list)
train_end = int(0.7 * len(self.inputs))
self.training_inputs = self.inputs[0:train_end]
def get_testing_input(self):
self.set_testing_input()
return self.testing_inputs
<|reserved_special_token_0|>
def set_testing_input(self):
train_end = int(0.7 * len(self.inputs))
self.testing_inputs = self.inputs[train_end:]
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_testing_output(self):
train_end = int(0.7 * len(self.normalized_open))
self.testing_outputs = self.normalized_open[train_end:]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CSV_Normalize:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_stock(self, stock):
self.stock = stock
def set_input(self):
with open(self.stock + '.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
self.close_prices.append(row[5])
self.high_prices.append(row[3])
self.prev_prices.append(row[2])
self.sentiments.append(row[7])
self.close_prices = self.close_prices[1:-1]
self.high_prices = self.high_prices[1:-1]
self.prev_prices = self.prev_prices[1:-1]
self.sentiments = self.sentiments[1:-1]
for m in range(len(self.close_prices)):
if self.close_prices[m] != 'Close':
self.close_prices[m] = float(self.close_prices[m])
for n in range(len(self.high_prices)):
if self.high_prices[n] != 'High':
self.high_prices[n] = float(self.high_prices[n])
for pp in range(len(self.prev_prices)):
if self.prev_prices[pp] != 'Open':
self.prev_prices[pp] = float(self.prev_prices[pp])
for p in range(len(self.close_prices)):
if self.close_prices[m] != 'Close':
if self.close_prices[p] > self.max_close:
self.max_close = self.close_prices[p]
if self.close_prices[p] < self.min_close:
self.min_close = self.close_prices[p]
for q in range(len(self.high_prices)):
if self.high_prices[q] > self.max_high:
self.max_high = self.high_prices[q]
if self.high_prices[q] < self.min_high:
self.min_high = self.high_prices[q]
for s in range(len(self.prev_prices)):
if self.prev_prices[s] > self.max_prev:
self.max_prev = self.prev_prices[s]
if self.prev_prices[s] < self.min_prev:
self.min_prev = self.prev_prices[s]
for s in range(len(self.sentiments)):
self.sentiments[s] = float(self.sentiments[s])
if self.max_sent > self.max_sent:
self.max_sent = self.sentiments[s]
if self.sentiments[s] < self.min_sent:
self.min_sent = self.sentiments[s]
def set_normalized_input(self):
if self.max_prev == 0:
self.set_input()
for i1 in range(len(self.close_prices)):
self.normalized_close.append((self.close_prices[i1] - self.
min_close) / (self.max_close - self.min_close))
for i2 in range(len(self.high_prices)):
self.normalized_high.append((self.high_prices[i2] - self.
min_high) / (self.max_high - self.min_high))
for i4 in range(len(self.prev_prices)):
self.normalized_prev.append((self.prev_prices[i4] - self.
min_prev) / (self.max_prev - self.min_prev))
for i5 in range(len(self.sentiments)):
diff = self.max_sent - self.min_sent
if diff == 0:
self.normalized_sent.append(0)
else:
self.normalized_sent.append((self.sentiments[i5] - self.
min_sent) / (self.max_sent - self.min_sent))
def get_input(self):
return list(zip(self.close_prices, self.high_prices, self.
prev_prices, self.sentiments))
def get_nomralized_input(self):
return list(zip(self.normalized_close, self.normalized_high, self.
normalized_prev, self.sentiments))
def set_output(self):
with open(self.stock + '.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
self.open_prices.append(row[2])
self.open_prices = self.open_prices[2:]
for m in range(len(self.open_prices)):
self.open_prices[m] = float(self.open_prices[m])
for i in range(len(self.open_prices)):
if self.open_prices[i] > self.max_open:
self.max_open = self.open_prices[i]
if self.open_prices[i] < self.min_open:
self.min_open = self.open_prices[i]
def set_normalized_output(self):
self.set_output()
for i1 in range(len(self.open_prices)):
self.normalized_open.append((self.open_prices[i1] - self.
min_open) / (self.max_open - self.min_open))
<|reserved_special_token_0|>
def get_normalized_output(self):
return self.normalized_open
def inverse(self, normalized):
return normalized * (self.max_open - self.min_open) + self.min_open
def get_training_input(self):
self.set_training_input()
return self.training_inputs
def set_training_input(self):
for i in range(len(self.normalized_close)):
temp_list = [self.normalized_close[i], self.normalized_high[i],
self.normalized_prev[i], self.normalized_sent[i]]
self.inputs.append(temp_list)
train_end = int(0.7 * len(self.inputs))
self.training_inputs = self.inputs[0:train_end]
def get_testing_input(self):
self.set_testing_input()
return self.testing_inputs
def get_training_output(self):
self.set_training_output()
return self.training_outputs
def set_testing_input(self):
train_end = int(0.7 * len(self.inputs))
self.testing_inputs = self.inputs[train_end:]
def set_training_output(self):
train_end = int(0.7 * len(self.normalized_open))
self.training_outputs = self.normalized_open[0:train_end]
def get_testing_output(self):
self.set_testing_output()
return self.testing_outputs
def set_testing_output(self):
train_end = int(0.7 * len(self.normalized_open))
self.testing_outputs = self.normalized_open[train_end:]
def clear_lists(self):
self.close_prices.clear()
self.high_prices.clear()
self.prev_prices.clear()
self.normalized_close.clear()
self.normalized_high.clear()
self.normalized_prev.clear()
self.open_prices.clear()
self.normalized_open.clear()
self.inputs.clear()
self.training_inputs.clear()
self.testing_inputs.clear()
self.training_outputs.clear()
self.testing_outputs.clear()
self.sentiments.clear()
self.normalized_sent = []
self.max_sent = 0.0
self.min_sent = 0.0
self.min_close = 1000
self.max_close = 0
self.min_high = 1000
self.max_high = 0
self.min_prev = 1000
self.max_prev = 0
self.min_open = 1000
self.max_open = 0
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CSV_Normalize:
    """Load a per-stock CSV (as produced by the CSV creator) and min-max
    normalize its columns for the neural network.

    Input features per trading day: close price, high price, previous open
    price and news sentiment; the target output is the next day's open
    price.  Every column is scaled to [0, 1] via (x - min) / (max - min).

    Fixes over the previous revision:
      * ``max_sent`` was never updated (the loop compared ``max_sent`` to
        itself), so sentiment normalization was wrong; now tracked properly.
      * min/max trackers used sentinel starts (1000 / 0) that broke when
        every price exceeded 1000; now computed with ``min()``/``max()``.
      * state lists were shared class attributes, leaking data between
        instances; they are now created per instance in ``__init__``.
      * a constant column now normalizes to zeros instead of raising
        ``ZeroDivisionError`` (previously only the sentiment column was
        guarded).
    """

    # Fraction of the samples used for training; the remainder is testing.
    TRAIN_FRACTION = 0.7

    def __init__(self):
        # Ticker symbol; "<stock>.csv" is the file that gets read.
        self.stock = ''
        # Raw input columns read from the CSV.
        self.close_prices = []
        self.high_prices = []
        self.prev_prices = []
        self.sentiments = []
        # Min/max trackers for the normalization calculation.
        self.max_sent = 0.0
        self.min_sent = 0.0
        self.min_close = 1000
        self.max_close = 0
        self.min_high = 1000
        self.max_high = 0
        self.min_prev = 1000
        self.max_prev = 0
        # Normalized input columns.
        self.normalized_close = []
        self.normalized_high = []
        self.normalized_prev = []
        self.normalized_sent = []
        # Raw / normalized output column (next-day open price).
        self.open_prices = []
        self.min_open = 1000
        self.max_open = 0
        self.normalized_open = []
        # Assembled feature rows and their train/test splits.
        self.inputs = []
        self.training_inputs = []
        self.testing_inputs = []
        self.training_outputs = []
        self.testing_outputs = []

    def set_stock(self, stock):
        """Set the ticker symbol used to locate '<stock>.csv'."""
        self.stock = stock

    def set_input(self):
        """Read the raw feature columns and record their min/max values."""
        with open(self.stock + '.csv') as csvfile:
            for row in csv.reader(csvfile, delimiter=','):
                self.close_prices.append(row[5])
                self.high_prices.append(row[3])
                self.prev_prices.append(row[2])
                self.sentiments.append(row[7])
        # Drop the header row and the trailing (incomplete) row, then
        # convert every remaining cell to float.
        self.close_prices = [float(v) for v in self.close_prices[1:-1]]
        self.high_prices = [float(v) for v in self.high_prices[1:-1]]
        self.prev_prices = [float(v) for v in self.prev_prices[1:-1]]
        self.sentiments = [float(v) for v in self.sentiments[1:-1]]
        # Track min/max for normalization.  Prices use the true column
        # extremes; sentiment stays anchored at 0.0 as before, but the
        # maximum is now actually updated (the old code compared
        # ``max_sent > max_sent``, which is always False).
        if self.close_prices:
            self.min_close = min(self.close_prices)
            self.max_close = max(self.close_prices)
        if self.high_prices:
            self.min_high = min(self.high_prices)
            self.max_high = max(self.high_prices)
        if self.prev_prices:
            self.min_prev = min(self.prev_prices)
            self.max_prev = max(self.prev_prices)
        if self.sentiments:
            self.min_sent = min(self.min_sent, min(self.sentiments))
            self.max_sent = max(self.max_sent, max(self.sentiments))

    def _normalize(self, values, lo, hi):
        """Min-max scale *values* into [0, 1]; a constant column maps to 0s."""
        span = hi - lo
        if span == 0:
            return [0 for _ in values]
        return [(v - lo) / span for v in values]

    def set_normalized_input(self):
        """Normalize all input columns (loading the raw data if needed)."""
        if self.max_prev == 0:
            self.set_input()
        self.normalized_close = self._normalize(
            self.close_prices, self.min_close, self.max_close)
        self.normalized_high = self._normalize(
            self.high_prices, self.min_high, self.max_high)
        self.normalized_prev = self._normalize(
            self.prev_prices, self.min_prev, self.max_prev)
        self.normalized_sent = self._normalize(
            self.sentiments, self.min_sent, self.max_sent)

    def get_input(self):
        """Return raw feature rows as (close, high, prev, sentiment) tuples."""
        return list(zip(self.close_prices, self.high_prices,
                        self.prev_prices, self.sentiments))

    def get_nomralized_input(self):
        """Return normalized feature rows.  (Name kept as-is -- sic -- for
        caller compatibility; sentiment column stays raw, as before.)"""
        return list(zip(self.normalized_close, self.normalized_high,
                        self.normalized_prev, self.sentiments))

    def set_output(self):
        """Read the target column (open price) and record its min/max."""
        with open(self.stock + '.csv') as csvfile:
            for row in csv.reader(csvfile, delimiter=','):
                self.open_prices.append(row[2])
        # Drop the header AND the first data point so each output is the
        # *next* day's open relative to its input row.
        self.open_prices = [float(v) for v in self.open_prices[2:]]
        if self.open_prices:
            self.min_open = min(self.open_prices)
            self.max_open = max(self.open_prices)

    def set_normalized_output(self):
        """Load and min-max normalize the output column."""
        self.set_output()
        self.normalized_open = self._normalize(
            self.open_prices, self.min_open, self.max_open)

    def get_output(self):
        """Return the raw output column."""
        return self.open_prices

    def get_normalized_output(self):
        """Return the normalized output column."""
        return self.normalized_open

    def inverse(self, normalized):
        """Map a normalized prediction back to an actual open price."""
        return normalized * (self.max_open - self.min_open) + self.min_open

    def get_training_input(self):
        """Assemble the splits and return the training inputs."""
        self.set_training_input()
        return self.training_inputs

    def set_training_input(self):
        """Zip the normalized columns into rows; keep the first 70%."""
        for i in range(len(self.normalized_close)):
            self.inputs.append([self.normalized_close[i],
                                self.normalized_high[i],
                                self.normalized_prev[i],
                                self.normalized_sent[i]])
        train_end = int(self.TRAIN_FRACTION * len(self.inputs))
        self.training_inputs = self.inputs[:train_end]

    def get_testing_input(self):
        """Refresh and return the testing-input split."""
        self.set_testing_input()
        return self.testing_inputs

    def get_training_output(self):
        """Refresh and return the training-output split."""
        self.set_training_output()
        return self.training_outputs

    def set_testing_input(self):
        """Keep the last 30% of the feature rows for testing."""
        train_end = int(self.TRAIN_FRACTION * len(self.inputs))
        self.testing_inputs = self.inputs[train_end:]

    def set_training_output(self):
        """Keep the first 70% of the normalized outputs for training."""
        train_end = int(self.TRAIN_FRACTION * len(self.normalized_open))
        self.training_outputs = self.normalized_open[:train_end]

    def get_testing_output(self):
        """Refresh and return the testing-output split."""
        self.set_testing_output()
        return self.testing_outputs

    def set_testing_output(self):
        """Keep the last 30% of the normalized outputs for testing."""
        train_end = int(self.TRAIN_FRACTION * len(self.normalized_open))
        self.testing_outputs = self.normalized_open[train_end:]

    def clear_lists(self):
        """Reset all accumulated state (keeping the stock symbol) so the
        instance can process another data set."""
        stock = self.stock
        self.__init__()
        self.stock = stock
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Wed May 8 15:05:51 2019
@author: Brian Heckman and Kyle Oprisko
"""
import csv
"""this file opens a csv file created in the csv creator class. The main purpose of this class is to
normalize the data in the csv file, so that it can be read by the neural network.
"""
class CSV_Normalize:
    """Load a per-stock CSV (as produced by the CSV creator) and min-max
    normalize its columns for the neural network.

    Input features per trading day: close price, high price, previous open
    price and news sentiment; the target output is the next day's open
    price.  Every column is scaled to [0, 1] via (x - min) / (max - min).

    Fixes over the previous revision:
      * ``max_sent`` was never updated (the loop compared ``max_sent`` to
        itself), so sentiment normalization was wrong; now tracked properly.
      * min/max trackers used sentinel starts (1000 / 0) that broke when
        every price exceeded 1000; now computed with ``min()``/``max()``.
      * state lists were shared class attributes, leaking data between
        instances; they are now created per instance in ``__init__``.
      * a constant column now normalizes to zeros instead of raising
        ``ZeroDivisionError`` (previously only the sentiment column was
        guarded).
    """

    # Fraction of the samples used for training; the remainder is testing.
    TRAIN_FRACTION = 0.7

    def __init__(self):
        # Ticker symbol; "<stock>.csv" is the file that gets read.
        self.stock = ''
        # Raw input columns read from the CSV.
        self.close_prices = []
        self.high_prices = []
        self.prev_prices = []
        self.sentiments = []
        # Min/max trackers for the normalization calculation.
        self.max_sent = 0.0
        self.min_sent = 0.0
        self.min_close = 1000
        self.max_close = 0
        self.min_high = 1000
        self.max_high = 0
        self.min_prev = 1000
        self.max_prev = 0
        # Normalized input columns.
        self.normalized_close = []
        self.normalized_high = []
        self.normalized_prev = []
        self.normalized_sent = []
        # Raw / normalized output column (next-day open price).
        self.open_prices = []
        self.min_open = 1000
        self.max_open = 0
        self.normalized_open = []
        # Assembled feature rows and their train/test splits.
        self.inputs = []
        self.training_inputs = []
        self.testing_inputs = []
        self.training_outputs = []
        self.testing_outputs = []

    def set_stock(self, stock):
        """Set the ticker symbol used to locate '<stock>.csv'."""
        self.stock = stock

    def set_input(self):
        """Read the raw feature columns and record their min/max values."""
        with open(self.stock + '.csv') as csvfile:
            for row in csv.reader(csvfile, delimiter=','):
                self.close_prices.append(row[5])
                self.high_prices.append(row[3])
                self.prev_prices.append(row[2])
                self.sentiments.append(row[7])
        # Drop the header row and the trailing (incomplete) row, then
        # convert every remaining cell to float.
        self.close_prices = [float(v) for v in self.close_prices[1:-1]]
        self.high_prices = [float(v) for v in self.high_prices[1:-1]]
        self.prev_prices = [float(v) for v in self.prev_prices[1:-1]]
        self.sentiments = [float(v) for v in self.sentiments[1:-1]]
        # Track min/max for normalization.  Prices use the true column
        # extremes; sentiment stays anchored at 0.0 as before, but the
        # maximum is now actually updated (the old code compared
        # ``max_sent > max_sent``, which is always False).
        if self.close_prices:
            self.min_close = min(self.close_prices)
            self.max_close = max(self.close_prices)
        if self.high_prices:
            self.min_high = min(self.high_prices)
            self.max_high = max(self.high_prices)
        if self.prev_prices:
            self.min_prev = min(self.prev_prices)
            self.max_prev = max(self.prev_prices)
        if self.sentiments:
            self.min_sent = min(self.min_sent, min(self.sentiments))
            self.max_sent = max(self.max_sent, max(self.sentiments))

    def _normalize(self, values, lo, hi):
        """Min-max scale *values* into [0, 1]; a constant column maps to 0s."""
        span = hi - lo
        if span == 0:
            return [0 for _ in values]
        return [(v - lo) / span for v in values]

    def set_normalized_input(self):
        """Normalize all input columns (loading the raw data if needed)."""
        if self.max_prev == 0:
            self.set_input()
        self.normalized_close = self._normalize(
            self.close_prices, self.min_close, self.max_close)
        self.normalized_high = self._normalize(
            self.high_prices, self.min_high, self.max_high)
        self.normalized_prev = self._normalize(
            self.prev_prices, self.min_prev, self.max_prev)
        self.normalized_sent = self._normalize(
            self.sentiments, self.min_sent, self.max_sent)

    def get_input(self):
        """Return raw feature rows as (close, high, prev, sentiment) tuples."""
        return list(zip(self.close_prices, self.high_prices,
                        self.prev_prices, self.sentiments))

    def get_nomralized_input(self):
        """Return normalized feature rows.  (Name kept as-is -- sic -- for
        caller compatibility; sentiment column stays raw, as before.)"""
        return list(zip(self.normalized_close, self.normalized_high,
                        self.normalized_prev, self.sentiments))

    def set_output(self):
        """Read the target column (open price) and record its min/max."""
        with open(self.stock + '.csv') as csvfile:
            for row in csv.reader(csvfile, delimiter=','):
                self.open_prices.append(row[2])
        # Drop the header AND the first data point so each output is the
        # *next* day's open relative to its input row.
        self.open_prices = [float(v) for v in self.open_prices[2:]]
        if self.open_prices:
            self.min_open = min(self.open_prices)
            self.max_open = max(self.open_prices)

    def set_normalized_output(self):
        """Load and min-max normalize the output column."""
        self.set_output()
        self.normalized_open = self._normalize(
            self.open_prices, self.min_open, self.max_open)

    def get_output(self):
        """Return the raw output column."""
        return self.open_prices

    def get_normalized_output(self):
        """Return the normalized output column."""
        return self.normalized_open

    def inverse(self, normalized):
        """Map a normalized prediction back to an actual open price."""
        return normalized * (self.max_open - self.min_open) + self.min_open

    def get_training_input(self):
        """Assemble the splits and return the training inputs."""
        self.set_training_input()
        return self.training_inputs

    def set_training_input(self):
        """Zip the normalized columns into rows; keep the first 70%."""
        for i in range(len(self.normalized_close)):
            self.inputs.append([self.normalized_close[i],
                                self.normalized_high[i],
                                self.normalized_prev[i],
                                self.normalized_sent[i]])
        train_end = int(self.TRAIN_FRACTION * len(self.inputs))
        self.training_inputs = self.inputs[:train_end]

    def get_testing_input(self):
        """Refresh and return the testing-input split."""
        self.set_testing_input()
        return self.testing_inputs

    def get_training_output(self):
        """Refresh and return the training-output split."""
        self.set_training_output()
        return self.training_outputs

    def set_testing_input(self):
        """Keep the last 30% of the feature rows for testing."""
        train_end = int(self.TRAIN_FRACTION * len(self.inputs))
        self.testing_inputs = self.inputs[train_end:]

    def set_training_output(self):
        """Keep the first 70% of the normalized outputs for training."""
        train_end = int(self.TRAIN_FRACTION * len(self.normalized_open))
        self.training_outputs = self.normalized_open[:train_end]

    def get_testing_output(self):
        """Refresh and return the testing-output split."""
        self.set_testing_output()
        return self.testing_outputs

    def set_testing_output(self):
        """Keep the last 30% of the normalized outputs for testing."""
        train_end = int(self.TRAIN_FRACTION * len(self.normalized_open))
        self.testing_outputs = self.normalized_open[train_end:]

    def clear_lists(self):
        """Reset all accumulated state (keeping the stock symbol) so the
        instance can process another data set."""
        stock = self.stock
        self.__init__()
        self.stock = stock
|
flexible
|
{
"blob_id": "ecbca04a58c19469e63ee2310e2b2f6b86c41199",
"index": 1011,
"step-1": "<mask token>\n\n\nclass CSV_Normalize:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_input(self):\n return list(zip(self.close_prices, self.high_prices, self.\n prev_prices, self.sentiments))\n\n def get_nomralized_input(self):\n return list(zip(self.normalized_close, self.normalized_high, self.\n normalized_prev, self.sentiments))\n\n def set_output(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.open_prices.append(row[2])\n self.open_prices = self.open_prices[2:]\n for m in range(len(self.open_prices)):\n self.open_prices[m] = float(self.open_prices[m])\n for i in range(len(self.open_prices)):\n if self.open_prices[i] > self.max_open:\n self.max_open = self.open_prices[i]\n if self.open_prices[i] < self.min_open:\n self.min_open = self.open_prices[i]\n\n def set_normalized_output(self):\n self.set_output()\n for i1 in range(len(self.open_prices)):\n self.normalized_open.append((self.open_prices[i1] - self.\n min_open) / (self.max_open - self.min_open))\n <mask token>\n\n def get_normalized_output(self):\n return self.normalized_open\n <mask token>\n\n def get_training_input(self):\n self.set_training_input()\n return self.training_inputs\n\n def set_training_input(self):\n for i in range(len(self.normalized_close)):\n temp_list = [self.normalized_close[i], self.normalized_high[i],\n self.normalized_prev[i], self.normalized_sent[i]]\n self.inputs.append(temp_list)\n train_end = int(0.7 * len(self.inputs))\n self.training_inputs = self.inputs[0:train_end]\n\n def 
get_testing_input(self):\n self.set_testing_input()\n return self.testing_inputs\n <mask token>\n\n def set_testing_input(self):\n train_end = int(0.7 * len(self.inputs))\n self.testing_inputs = self.inputs[train_end:]\n <mask token>\n <mask token>\n\n def set_testing_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.testing_outputs = self.normalized_open[train_end:]\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass CSV_Normalize:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_normalized_input(self):\n if self.max_prev == 0:\n self.set_input()\n for i1 in range(len(self.close_prices)):\n self.normalized_close.append((self.close_prices[i1] - self.\n min_close) / (self.max_close - self.min_close))\n for i2 in range(len(self.high_prices)):\n self.normalized_high.append((self.high_prices[i2] - self.\n min_high) / (self.max_high - self.min_high))\n for i4 in range(len(self.prev_prices)):\n self.normalized_prev.append((self.prev_prices[i4] - self.\n min_prev) / (self.max_prev - self.min_prev))\n for i5 in range(len(self.sentiments)):\n diff = self.max_sent - self.min_sent\n if diff == 0:\n self.normalized_sent.append(0)\n else:\n self.normalized_sent.append((self.sentiments[i5] - self.\n min_sent) / (self.max_sent - self.min_sent))\n\n def get_input(self):\n return list(zip(self.close_prices, self.high_prices, self.\n prev_prices, self.sentiments))\n\n def get_nomralized_input(self):\n return list(zip(self.normalized_close, self.normalized_high, self.\n normalized_prev, self.sentiments))\n\n def set_output(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.open_prices.append(row[2])\n self.open_prices = self.open_prices[2:]\n for m in range(len(self.open_prices)):\n self.open_prices[m] = float(self.open_prices[m])\n for i in range(len(self.open_prices)):\n if self.open_prices[i] > self.max_open:\n self.max_open = self.open_prices[i]\n if self.open_prices[i] < self.min_open:\n self.min_open = 
self.open_prices[i]\n\n def set_normalized_output(self):\n self.set_output()\n for i1 in range(len(self.open_prices)):\n self.normalized_open.append((self.open_prices[i1] - self.\n min_open) / (self.max_open - self.min_open))\n <mask token>\n\n def get_normalized_output(self):\n return self.normalized_open\n <mask token>\n\n def get_training_input(self):\n self.set_training_input()\n return self.training_inputs\n\n def set_training_input(self):\n for i in range(len(self.normalized_close)):\n temp_list = [self.normalized_close[i], self.normalized_high[i],\n self.normalized_prev[i], self.normalized_sent[i]]\n self.inputs.append(temp_list)\n train_end = int(0.7 * len(self.inputs))\n self.training_inputs = self.inputs[0:train_end]\n\n def get_testing_input(self):\n self.set_testing_input()\n return self.testing_inputs\n <mask token>\n\n def set_testing_input(self):\n train_end = int(0.7 * len(self.inputs))\n self.testing_inputs = self.inputs[train_end:]\n <mask token>\n <mask token>\n\n def set_testing_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.testing_outputs = self.normalized_open[train_end:]\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass CSV_Normalize:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_stock(self, stock):\n self.stock = stock\n\n def set_input(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.close_prices.append(row[5])\n self.high_prices.append(row[3])\n self.prev_prices.append(row[2])\n self.sentiments.append(row[7])\n self.close_prices = self.close_prices[1:-1]\n self.high_prices = self.high_prices[1:-1]\n self.prev_prices = self.prev_prices[1:-1]\n self.sentiments = self.sentiments[1:-1]\n for m in range(len(self.close_prices)):\n if self.close_prices[m] != 'Close':\n self.close_prices[m] = float(self.close_prices[m])\n for n in range(len(self.high_prices)):\n if self.high_prices[n] != 'High':\n self.high_prices[n] = float(self.high_prices[n])\n for pp in range(len(self.prev_prices)):\n if self.prev_prices[pp] != 'Open':\n self.prev_prices[pp] = float(self.prev_prices[pp])\n for p in range(len(self.close_prices)):\n if self.close_prices[m] != 'Close':\n if self.close_prices[p] > self.max_close:\n self.max_close = self.close_prices[p]\n if self.close_prices[p] < self.min_close:\n self.min_close = self.close_prices[p]\n for q in range(len(self.high_prices)):\n if self.high_prices[q] > self.max_high:\n self.max_high = self.high_prices[q]\n if self.high_prices[q] < self.min_high:\n self.min_high = self.high_prices[q]\n for s in range(len(self.prev_prices)):\n if self.prev_prices[s] > self.max_prev:\n self.max_prev = self.prev_prices[s]\n if self.prev_prices[s] < self.min_prev:\n self.min_prev = self.prev_prices[s]\n for s in 
range(len(self.sentiments)):\n self.sentiments[s] = float(self.sentiments[s])\n if self.max_sent > self.max_sent:\n self.max_sent = self.sentiments[s]\n if self.sentiments[s] < self.min_sent:\n self.min_sent = self.sentiments[s]\n\n def set_normalized_input(self):\n if self.max_prev == 0:\n self.set_input()\n for i1 in range(len(self.close_prices)):\n self.normalized_close.append((self.close_prices[i1] - self.\n min_close) / (self.max_close - self.min_close))\n for i2 in range(len(self.high_prices)):\n self.normalized_high.append((self.high_prices[i2] - self.\n min_high) / (self.max_high - self.min_high))\n for i4 in range(len(self.prev_prices)):\n self.normalized_prev.append((self.prev_prices[i4] - self.\n min_prev) / (self.max_prev - self.min_prev))\n for i5 in range(len(self.sentiments)):\n diff = self.max_sent - self.min_sent\n if diff == 0:\n self.normalized_sent.append(0)\n else:\n self.normalized_sent.append((self.sentiments[i5] - self.\n min_sent) / (self.max_sent - self.min_sent))\n\n def get_input(self):\n return list(zip(self.close_prices, self.high_prices, self.\n prev_prices, self.sentiments))\n\n def get_nomralized_input(self):\n return list(zip(self.normalized_close, self.normalized_high, self.\n normalized_prev, self.sentiments))\n\n def set_output(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.open_prices.append(row[2])\n self.open_prices = self.open_prices[2:]\n for m in range(len(self.open_prices)):\n self.open_prices[m] = float(self.open_prices[m])\n for i in range(len(self.open_prices)):\n if self.open_prices[i] > self.max_open:\n self.max_open = self.open_prices[i]\n if self.open_prices[i] < self.min_open:\n self.min_open = self.open_prices[i]\n\n def set_normalized_output(self):\n self.set_output()\n for i1 in range(len(self.open_prices)):\n self.normalized_open.append((self.open_prices[i1] - self.\n min_open) / (self.max_open - self.min_open))\n <mask token>\n\n 
def get_normalized_output(self):\n return self.normalized_open\n\n def inverse(self, normalized):\n return normalized * (self.max_open - self.min_open) + self.min_open\n\n def get_training_input(self):\n self.set_training_input()\n return self.training_inputs\n\n def set_training_input(self):\n for i in range(len(self.normalized_close)):\n temp_list = [self.normalized_close[i], self.normalized_high[i],\n self.normalized_prev[i], self.normalized_sent[i]]\n self.inputs.append(temp_list)\n train_end = int(0.7 * len(self.inputs))\n self.training_inputs = self.inputs[0:train_end]\n\n def get_testing_input(self):\n self.set_testing_input()\n return self.testing_inputs\n\n def get_training_output(self):\n self.set_training_output()\n return self.training_outputs\n\n def set_testing_input(self):\n train_end = int(0.7 * len(self.inputs))\n self.testing_inputs = self.inputs[train_end:]\n\n def set_training_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.training_outputs = self.normalized_open[0:train_end]\n\n def get_testing_output(self):\n self.set_testing_output()\n return self.testing_outputs\n\n def set_testing_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.testing_outputs = self.normalized_open[train_end:]\n\n def clear_lists(self):\n self.close_prices.clear()\n self.high_prices.clear()\n self.prev_prices.clear()\n self.normalized_close.clear()\n self.normalized_high.clear()\n self.normalized_prev.clear()\n self.open_prices.clear()\n self.normalized_open.clear()\n self.inputs.clear()\n self.training_inputs.clear()\n self.testing_inputs.clear()\n self.training_outputs.clear()\n self.testing_outputs.clear()\n self.sentiments.clear()\n self.normalized_sent = []\n self.max_sent = 0.0\n self.min_sent = 0.0\n self.min_close = 1000\n self.max_close = 0\n self.min_high = 1000\n self.max_high = 0\n self.min_prev = 1000\n self.max_prev = 0\n self.min_open = 1000\n self.max_open = 0\n",
"step-4": "<mask token>\n\n\nclass CSV_Normalize:\n stock = ''\n close_prices = []\n high_prices = []\n prev_prices = []\n sentiments = []\n max_sent = 0.0\n min_sent = 0.0\n min_close = 1000\n max_close = 0\n min_high = 1000\n max_high = 0\n min_prev = 1000\n max_prev = 0\n normalized_close = []\n normalized_high = []\n normalized_prev = []\n normalized_sent = []\n open_prices = []\n min_open = 1000\n max_open = 0\n normalized_open = []\n inputs = []\n training_inputs = []\n testing_inputs = []\n training_outputs = []\n testing_outputs = []\n\n def set_stock(self, stock):\n self.stock = stock\n\n def set_input(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.close_prices.append(row[5])\n self.high_prices.append(row[3])\n self.prev_prices.append(row[2])\n self.sentiments.append(row[7])\n self.close_prices = self.close_prices[1:-1]\n self.high_prices = self.high_prices[1:-1]\n self.prev_prices = self.prev_prices[1:-1]\n self.sentiments = self.sentiments[1:-1]\n for m in range(len(self.close_prices)):\n if self.close_prices[m] != 'Close':\n self.close_prices[m] = float(self.close_prices[m])\n for n in range(len(self.high_prices)):\n if self.high_prices[n] != 'High':\n self.high_prices[n] = float(self.high_prices[n])\n for pp in range(len(self.prev_prices)):\n if self.prev_prices[pp] != 'Open':\n self.prev_prices[pp] = float(self.prev_prices[pp])\n for p in range(len(self.close_prices)):\n if self.close_prices[m] != 'Close':\n if self.close_prices[p] > self.max_close:\n self.max_close = self.close_prices[p]\n if self.close_prices[p] < self.min_close:\n self.min_close = self.close_prices[p]\n for q in range(len(self.high_prices)):\n if self.high_prices[q] > self.max_high:\n self.max_high = self.high_prices[q]\n if self.high_prices[q] < self.min_high:\n self.min_high = self.high_prices[q]\n for s in range(len(self.prev_prices)):\n if self.prev_prices[s] > self.max_prev:\n self.max_prev = 
self.prev_prices[s]\n if self.prev_prices[s] < self.min_prev:\n self.min_prev = self.prev_prices[s]\n for s in range(len(self.sentiments)):\n self.sentiments[s] = float(self.sentiments[s])\n if self.max_sent > self.max_sent:\n self.max_sent = self.sentiments[s]\n if self.sentiments[s] < self.min_sent:\n self.min_sent = self.sentiments[s]\n\n def set_normalized_input(self):\n if self.max_prev == 0:\n self.set_input()\n for i1 in range(len(self.close_prices)):\n self.normalized_close.append((self.close_prices[i1] - self.\n min_close) / (self.max_close - self.min_close))\n for i2 in range(len(self.high_prices)):\n self.normalized_high.append((self.high_prices[i2] - self.\n min_high) / (self.max_high - self.min_high))\n for i4 in range(len(self.prev_prices)):\n self.normalized_prev.append((self.prev_prices[i4] - self.\n min_prev) / (self.max_prev - self.min_prev))\n for i5 in range(len(self.sentiments)):\n diff = self.max_sent - self.min_sent\n if diff == 0:\n self.normalized_sent.append(0)\n else:\n self.normalized_sent.append((self.sentiments[i5] - self.\n min_sent) / (self.max_sent - self.min_sent))\n\n def get_input(self):\n return list(zip(self.close_prices, self.high_prices, self.\n prev_prices, self.sentiments))\n\n def get_nomralized_input(self):\n return list(zip(self.normalized_close, self.normalized_high, self.\n normalized_prev, self.sentiments))\n\n def set_output(self):\n with open(self.stock + '.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter=',')\n for row in readCSV:\n self.open_prices.append(row[2])\n self.open_prices = self.open_prices[2:]\n for m in range(len(self.open_prices)):\n self.open_prices[m] = float(self.open_prices[m])\n for i in range(len(self.open_prices)):\n if self.open_prices[i] > self.max_open:\n self.max_open = self.open_prices[i]\n if self.open_prices[i] < self.min_open:\n self.min_open = self.open_prices[i]\n\n def set_normalized_output(self):\n self.set_output()\n for i1 in range(len(self.open_prices)):\n 
self.normalized_open.append((self.open_prices[i1] - self.\n min_open) / (self.max_open - self.min_open))\n\n def get_output(self):\n return self.open_prices\n\n def get_normalized_output(self):\n return self.normalized_open\n\n def inverse(self, normalized):\n return normalized * (self.max_open - self.min_open) + self.min_open\n\n def get_training_input(self):\n self.set_training_input()\n return self.training_inputs\n\n def set_training_input(self):\n for i in range(len(self.normalized_close)):\n temp_list = [self.normalized_close[i], self.normalized_high[i],\n self.normalized_prev[i], self.normalized_sent[i]]\n self.inputs.append(temp_list)\n train_end = int(0.7 * len(self.inputs))\n self.training_inputs = self.inputs[0:train_end]\n\n def get_testing_input(self):\n self.set_testing_input()\n return self.testing_inputs\n\n def get_training_output(self):\n self.set_training_output()\n return self.training_outputs\n\n def set_testing_input(self):\n train_end = int(0.7 * len(self.inputs))\n self.testing_inputs = self.inputs[train_end:]\n\n def set_training_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.training_outputs = self.normalized_open[0:train_end]\n\n def get_testing_output(self):\n self.set_testing_output()\n return self.testing_outputs\n\n def set_testing_output(self):\n train_end = int(0.7 * len(self.normalized_open))\n self.testing_outputs = self.normalized_open[train_end:]\n\n def clear_lists(self):\n self.close_prices.clear()\n self.high_prices.clear()\n self.prev_prices.clear()\n self.normalized_close.clear()\n self.normalized_high.clear()\n self.normalized_prev.clear()\n self.open_prices.clear()\n self.normalized_open.clear()\n self.inputs.clear()\n self.training_inputs.clear()\n self.testing_inputs.clear()\n self.training_outputs.clear()\n self.testing_outputs.clear()\n self.sentiments.clear()\n self.normalized_sent = []\n self.max_sent = 0.0\n self.min_sent = 0.0\n self.min_close = 1000\n self.max_close = 0\n self.min_high = 
1000\n self.max_high = 0\n self.min_prev = 1000\n self.max_prev = 0\n self.min_open = 1000\n self.max_open = 0\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed May 8 15:05:51 2019\r\n\r\n@author: Brian Heckman and Kyle Oprisko\r\n\"\"\"\r\nimport csv\r\n\r\n\"\"\"this file opens a csv file created in the csv creator class. The main purpose of this class is to \r\nnormalize the data in the csv file, so that it can be read by the neural network. \r\n\"\"\"\r\n\r\nclass CSV_Normalize:\r\n stock = \"\"\r\n\r\n # Initialize the lists for the 4 parameters\r\n \r\n close_prices = []\r\n high_prices = []\r\n prev_prices = []\r\n sentiments = []\r\n \r\n # Initialize max and min values for normalization calc\r\n \r\n max_sent = 0.0\r\n min_sent = 0.0\r\n min_close = 1000\r\n max_close = 0 \r\n min_high = 1000\r\n max_high = 0\r\n min_prev = 1000\r\n max_prev = 0\r\n\r\n # Initialize lists for normalized values of parameters\r\n \r\n normalized_close = []\r\n normalized_high = []\r\n normalized_prev = []\r\n normalized_sent = []\r\n \r\n # Initialize output parameters\r\n \r\n open_prices = []\r\n\r\n # Initialize max and min for normalization calc\r\n \r\n min_open= 1000\r\n max_open = 0\r\n\r\n # Initialize the normalized output list\r\n \r\n normalized_open = []\r\n\r\n # Create arrays to separate into training and testing lists\r\n \r\n inputs = []\r\n training_inputs = []\r\n testing_inputs = []\r\n\r\n \r\n training_outputs = []\r\n testing_outputs = []\r\n\r\n # Set name of stock\r\n \r\n def set_stock(self,stock):\r\n self.stock = stock\r\n \r\n # Set input values\r\n \r\n def set_input(self):\r\n \r\n # Open CSV and read each row and append to specific list\r\n \r\n with open(self.stock + '.csv') as csvfile:\r\n readCSV = csv.reader(csvfile, delimiter = ',')\r\n for row in readCSV:\r\n self.close_prices.append(row[5])\r\n self.high_prices.append(row[3])\r\n self.prev_prices.append(row[2])\r\n self.sentiments.append(row[7])\r\n\r\n # Remove the headers and the last row because the data is trailing\r\n \r\n self.close_prices = self.close_prices[1:-1]\r\n 
self.high_prices = self.high_prices[1:-1]\r\n self.prev_prices = self.prev_prices[1:-1]\r\n self.sentiments = self.sentiments[1:-1]\r\n\r\n # Turn data values into floats\r\n \r\n for m in range(len(self.close_prices)):\r\n if self.close_prices[m] != \"Close\":\r\n self.close_prices[m] = float(self.close_prices[m])\r\n for n in range(len(self.high_prices)):\r\n if self.high_prices[n] != \"High\":\r\n self.high_prices[n] = float(self.high_prices[n])\r\n for pp in range(len(self.prev_prices)):\r\n if self.prev_prices[pp] != \"Open\":\r\n self.prev_prices[pp] = float(self.prev_prices[pp])\r\n\r\n\r\n #Set Min and Max values for normalization\r\n\r\n for p in range(len(self.close_prices)):\r\n if self.close_prices[m] != \"Close\":\r\n if (self.close_prices[p] > self.max_close):\r\n self.max_close = self.close_prices[p]\r\n if (self.close_prices[p] < self.min_close):\r\n self.min_close = self.close_prices[p]\r\n for q in range(len(self.high_prices)):\r\n if (self.high_prices[q] > self.max_high):\r\n self.max_high = self.high_prices[q]\r\n if (self.high_prices[q] < self.min_high):\r\n self.min_high = self.high_prices[q] \r\n\r\n for s in range(len(self.prev_prices)):\r\n if (self.prev_prices[s] > self.max_prev):\r\n self.max_prev = self.prev_prices[s]\r\n if (self.prev_prices[s] < self.min_prev):\r\n self.min_prev = self.prev_prices[s]\r\n \r\n for s in range(len(self.sentiments)):\r\n self.sentiments[s] = float(self.sentiments[s])\r\n if (self.max_sent > self.max_sent):\r\n self.max_sent = self.sentiments[s]\r\n if (self.sentiments[s] < self.min_sent):\r\n self.min_sent = self.sentiments[s]\r\n\r\n # Perform normalization calculation and set normalized inputs \r\n def set_normalized_input(self):\r\n # Call set_input function in case it was not called already\r\n if (self.max_prev == 0):\r\n self.set_input()\r\n \r\n # Perform normalization calculation under the normalized_x = (x - min)/(max - min) model\r\n \r\n for i1 in range(len(self.close_prices)):\r\n 
self.normalized_close.append((self.close_prices[i1] - self.min_close)/(self.max_close - self.min_close))\r\n\r\n for i2 in range(len(self.high_prices)):\r\n self.normalized_high.append((self.high_prices[i2] - self.min_high)/(self.max_high - self.min_high))\r\n\r\n\r\n for i4 in range(len(self.prev_prices)):\r\n self.normalized_prev.append((self.prev_prices[i4] - self.min_prev)/(self.max_prev - self.min_prev))\r\n \r\n \r\n \r\n for i5 in range(len(self.sentiments)):\r\n diff = self.max_sent - self.min_sent\r\n if diff == 0:\r\n self.normalized_sent.append(0)\r\n else:\r\n self.normalized_sent.append((self.sentiments[i5] - self.min_sent)/(self.max_sent - self.min_sent))\r\n \r\n # Organize the input into a zipped list\r\n def get_input(self):\r\n return (list(zip(self.close_prices,self.high_prices,self.prev_prices,self.sentiments)))\r\n # Organize the normalized input into a zipped list\r\n def get_nomralized_input(self):\r\n return (list(zip(self.normalized_close,self.normalized_high,self.normalized_prev,self.sentiments)))\r\n\r\n # Set the output data\r\n def set_output(self):\r\n \r\n # Open and read the output file and append the list\r\n \r\n with open(self.stock + '.csv') as csvfile:\r\n readCSV = csv.reader(csvfile, delimiter = ',')\r\n for row in readCSV:\r\n self.open_prices.append(row[2])\r\n \r\n # Remove the first two rows (header and first data point)\r\n self.open_prices = self.open_prices[2:]\r\n\r\n #\r\n for m in range(len(self.open_prices)):\r\n self.open_prices[m] = float(self.open_prices[m])\r\n\r\n for i in range(len(self.open_prices)):\r\n if (self.open_prices[i] > self.max_open):\r\n self.max_open = self.open_prices[i]\r\n if (self.open_prices[i] < self.min_open):\r\n self.min_open = self.open_prices[i]\r\n\r\n\r\n #uses min max function\r\n def set_normalized_output(self):\r\n self.set_output()\r\n for i1 in range(len(self.open_prices)):\r\n self.normalized_open.append((self.open_prices[i1] - self.min_open)/(self.max_open - 
self.min_open))\r\n #returns open_prices\r\n def get_output(self):\r\n return (self.open_prices)\r\n #gets the normalized output\r\n def get_normalized_output(self):\r\n return (self.normalized_open)\r\n #inverse function to get predicted values into actual values\r\n def inverse(self,normalized):\r\n return ((normalized * (self.max_open - self.min_open)) + self.min_open)\r\n #retuns what the user input\r\n def get_training_input(self):\r\n self.set_training_input()\r\n return self.training_inputs\r\n \r\n #sets puts all of the data into a list as a tuple\r\n def set_training_input(self):\r\n for i in range(len(self.normalized_close)): \r\n temp_list = [self.normalized_close[i],self.normalized_high[i],self.normalized_prev[i],self.normalized_sent[i]]\r\n self.inputs.append(temp_list)\r\n train_end = int(.7*len(self.inputs))\r\n self.training_inputs = self.inputs[0:train_end]\r\n\r\n def get_testing_input(self):\r\n self.set_testing_input()\r\n return self.testing_inputs\r\n\r\n def get_training_output(self):\r\n self.set_training_output()\r\n return self.training_outputs\r\n \r\n def set_testing_input(self):\r\n train_end = int(.7*len(self.inputs))\r\n self.testing_inputs = self.inputs[train_end:]\r\n \r\n def set_training_output(self):\r\n train_end = int(.7*len(self.normalized_open))\r\n self.training_outputs = self.normalized_open[0:train_end]\r\n \r\n def get_testing_output(self):\r\n self.set_testing_output()\r\n return self.testing_outputs\r\n def set_testing_output(self):\r\n train_end = int(.7*len(self.normalized_open))\r\n self.testing_outputs = self.normalized_open[train_end:]\r\n \r\n def clear_lists(self):\r\n #everything is reinitialized \r\n self.close_prices.clear()\r\n self.high_prices.clear()\r\n self.prev_prices.clear()\r\n self.normalized_close.clear()\r\n self.normalized_high.clear()\r\n self.normalized_prev.clear()\r\n self.open_prices.clear()\r\n self.normalized_open.clear()\r\n self.inputs.clear()\r\n self.training_inputs.clear()\r\n 
self.testing_inputs.clear()\r\n self.training_outputs.clear()\r\n self.testing_outputs.clear()\r\n self.sentiments.clear()\r\n self.normalized_sent = []\r\n self.max_sent = 0.0\r\n self.min_sent = 0.0\r\n self.min_close = 1000\r\n self.max_close = 0 \r\n self.min_high = 1000\r\n self.max_high = 0\r\n self.min_prev = 1000\r\n self.max_prev = 0\r\n self.min_open= 1000\r\n self.max_open = 0",
"step-ids": [
11,
12,
19,
21,
23
]
}
|
[
11,
12,
19,
21,
23
] |
#!/bin/python3
def solveMeFirst(a, b):
    """Return the sum of *a* and *b*."""
    total = a + b
    return total
print(solveMeFirst(int(input()),int(input())))
|
normal
|
{
"blob_id": "5d55c586c57de8f287d9f51f0cb1f188c8046c29",
"index": 2977,
"step-1": "<mask token>\n",
"step-2": "def solveMeFirst(a, b):\n return a + b\n\n\n<mask token>\n",
"step-3": "def solveMeFirst(a, b):\n return a + b\n\n\nprint(solveMeFirst(int(input()), int(input())))\n",
"step-4": "#!/bin/python3\n\ndef solveMeFirst(a,b):\n return a + b\n\nprint(solveMeFirst(int(input()),int(input())))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from nbt import nbt
from matplotlib import pyplot
class Banana(object):
    """Placeholder element type handed to ``nbt.TAG_List``.

    Only the class-level ``id`` attribute matters; it is what the NBT
    library consults when an (initially empty) list tag is created.
    """

    id = 10
def srange(x1, xDoors, spaces):
    """Yield the coordinates of two parallel runs of doors.

    Yields ``xDoors`` consecutive integers starting at ``x1``, skips
    ``spaces`` positions, then yields another ``xDoors`` consecutive
    integers — i.e. the two half-rows of doors in a single village.

    Fix: the original used Python 2's ``xrange``, which is a NameError
    on Python 3; ``range`` iterates identically on both versions.

    :param x1: first coordinate of the first run
    :param xDoors: number of doors in each half-row
    :param spaces: gap between the two half-rows
    """
    for a in range(x1, x1 + xDoors):
        yield a
    for a in range(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):
        yield a
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis):
    """Build door coordinate lists for a row of villages at one Y level.

    ``x1`` and ``z1`` are the lowest values on the X / Z axis.
    ``halfDoorsInVillage`` is 1/2 of the total doors in a village.

    :param axis: The axis along which a single village is created; the
        villages themselves are laid out along the other axis (one
        village per step of the loop below).
    :return: a list with one entry per village, each entry being a list
        of ``[x, y, z]`` door coordinates.

    Fix: the original used Python 2's ``xrange`` (NameError on
    Python 3); ``range`` iterates identically on both versions.
    """
    k = []
    assert axis in ('X', 'Z')
    if axis == "Z":
        # One village per X step; its doors run along Z.
        for x in range(x1, x1 + villages):
            j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)]
            k.append(j)
    elif axis == "X":
        # One village per Z step; its doors run along X.
        for z in range(z1, z1 + villages):
            j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)]
            k.append(j)
    return k
# Number of villages to lay out in a row when generating.
number_of_villages_to_generate = 32
# Total doors per village (split into two half-rows by village_gen).
number_of_doors_to_generate = 22
# Default world tick for newly created data; per the village_gen notes it
# can be "basically anything but 0". NOTE(review): appears unused by
# main(), which reads the tick from the existing file instead — confirm.
tick = 77
def template_village_file(tick):
    """Build a fresh, empty villages.dat NBT structure.

    The result carries an empty ``Villages`` list and the given tick
    under a top-level ``data`` compound, ready to be populated later.
    """
    root = nbt.NBTFile()
    data = nbt.TAG_Compound()
    root['data'] = data
    data["Villages"] = nbt.TAG_List(Banana)
    data['Tick'] = nbt.TAG_Int(tick)
    return root
def existing_village_file(kovetz):
    """Open an existing villages.dat file for editing.

    Returns the parsed NBT file together with its current tick value, so
    newly generated records can be stamped consistently with it.
    """
    try:
        parsed = nbt.NBTFile(kovetz)
    except IOError:
        raise Exception("Hmm. Unfortunately, the file requested does not exist :(")
    current_tick = parsed['data']['Tick'].value
    return parsed, current_tick
class Village(object):
    """
    Wrapper around one village's TAG_Compound in villages.dat.

    Keeps the aggregate door-coordinate sums (ACX/ACY/ACZ) and the
    derived centre (CX/CY/CZ) consistent as doors are added or removed.

    :type village: nbt.TAG_Compound
    """
    def __init__(self, village):
        # The wrapped NBT compound; all methods mutate it in place.
        self._village = village
    def add_door(self, door):
        """
        Append *door* to this village and update the aggregate sums and
        the centre to include its coordinates.
        """
        doors_list = self._village['Doors']
        doors_list.append(door)
        x = door['X'].value
        y = door['Y'].value
        z = door['Z'].value
        # Positive deltas: the door was just appended, so the running
        # totals and the centre must grow by its coordinates.
        self._update_doormath(x, y, z)
    def del_doorz(self, new_doors):
        # Remove every door whose (x, y, z) tuple appears in
        # *new_doors*, undoing each one's contribution to the sums.
        kapoow = self.get_vil()['Doors']
        # Iterate over a shallow copy so removal is safe mid-loop.
        kapooww = list(kapoow)
        for door in kapooww:
            x, y, z = door['X'].value, door['Y'].value, door['Z'].value
            if (x, y, z) in new_doors:
                kapoow.remove(door)
                # Negative deltas reverse this door's contribution.
                self._update_doormath(-x, -y, -z)
    def _update_doormath(self, x, y, z):
        # Maintain ACX/ACY/ACZ (coordinate sums) and CX/CY/CZ
        # (sum / door count). Must be called AFTER the Doors list has
        # already been mutated, since len(doors_list) is read here.
        doors_list = self._village['Doors']
        self._village['ACX'].value += x
        self._village['ACY'].value += y
        self._village['ACZ'].value += z
        if len(doors_list) == 0:
            # No doors left: reset the centre with fresh zero tags.
            self._village['CX'] = nbt.TAG_Int(0)
            self._village['CY'] = nbt.TAG_Int(0)
            self._village['CZ'] = nbt.TAG_Int(0)
        else:
            # NOTE(review): `/` is floor division for ints on Python 2
            # but true (float) division on Python 3 — confirm which
            # interpreter this file targets before relying on int CX/CY/CZ.
            self._village['CX'].value = self._village['ACX'].value / len(doors_list)
            self._village['CY'].value = self._village['ACY'].value / len(doors_list)
            self._village['CZ'].value = self._village['ACZ'].value / len(doors_list)
    @property
    def is_empty(self):
        # True once every door has been removed.
        return len(self._village["Doors"]) == 0
    def get_vil(self):
        # Expose the underlying TAG_Compound (e.g. for list insertion).
        return self._village
    @staticmethod
    def create_village(tick):
        """
        Create a new empty village compound, stamped with *tick*, and
        return it wrapped in a Village.
        """
        village_template = nbt.TAG_Compound()
        village_template['Doors'] = nbt.TAG_List(Banana)
        village_template['Players'] = nbt.TAG_List(Banana)
        village_template['ACX'] = nbt.TAG_Int(0)
        village_template['ACY'] = nbt.TAG_Int(0)
        village_template['ACZ'] = nbt.TAG_Int(0)
        village_template['CX'] = nbt.TAG_Int(0)
        village_template['CY'] = nbt.TAG_Int(0)
        village_template['CZ'] = nbt.TAG_Int(0)
        village_template['Golems'] = nbt.TAG_Int(0)
        village_template['MTick'] = nbt.TAG_Int(0)
        village_template['PopSize'] = nbt.TAG_Int(1)
        village_template['Radius'] = nbt.TAG_Int(32)
        village_template['Stable'] = nbt.TAG_Int(tick)
        village_template['Tick'] = nbt.TAG_Int(tick)
        return Village(village_template)
def create_door(tick, x, y, z):
    """Build a door TAG_Compound from a timestamp and coordinates."""
    tag = nbt.TAG_Compound()
    for key, value in (('TS', tick), ('X', x), ('Y', y), ('Z', z)):
        tag[key] = nbt.TAG_Int(value)
    return tag
def del_door(vil_list, doors_set):
    """Strip the doors in *doors_set* from every village in *vil_list*.

    Villages left with no doors at all are removed from the list.
    """
    for compound in list(vil_list):
        wrapped = Village(compound)
        wrapped.del_doorz(doors_set)
        if wrapped.is_empty:
            vil_list.remove(compound)
def village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces, axis, tick, cat):
    """Generate villages with their doors into an NBT villages file.

    ``x1`` / ``z1`` are the lowest blocks on the X / Z axis, ``y_list``
    the Y levels of the lower door blocks, ``villages`` the number of
    villages per layer, ``halfDoorsInVillage`` half the doors in a
    village and ``emptySpaces`` the gap between the two half-rows.
    ``axis`` selects the axis a single village's doors run along;
    ``tick`` must be nonzero for a new file and must match the file's
    main tick for an existing one. ``cat`` is the NBT file to modify.
    """
    data = cat["data"]
    # One coordinate list per village, collected across every Y level.
    coords_per_village = []
    for y in y_list:
        coords_per_village.extend(
            village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis))
    # Flatten into a set of (x, y, z) tuples for membership tests.
    all_door_coords = {tuple(coord) for vill in coords_per_village for coord in vill}
    # Drop any pre-existing doors at those positions first, so no door
    # ends up owned by two villages.
    del_door(data['Villages'], all_door_coords)
    # Build a fresh village per coordinate list and register it.
    for coord_list in coords_per_village:
        vil = Village.create_village(tick)
        for x, y, z in coord_list:
            vil.add_door(create_door(tick, x, y, z))
        data['Villages'].append(vil.get_vil())
def main():
    """Regenerate the configured village layer in villagesCopy2.dat in place."""
    villages_file, tick = existing_village_file("./villagesCopy2.dat")
    village_gen(-107, number_of_villages_to_generate, [132], 169,
                number_of_doors_to_generate / 2, 19, 'X', tick, villages_file)
    villages_file.write_file("./villagesCopy2.dat")


if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "4e9674ea46bdf930d1e99bcda56eaa300c84deef",
"index": 7196,
"step-1": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\n<mask token>\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n 
village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\n<mask token>\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\n<mask token>\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, 
z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n 
vil_list.remove(vil_TAGCompound)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\n 'Hmm. 
Unfortunately, the file requested does not exist :(')\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n 
village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,\n axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat['data']\n 
doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,\n halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file('./villagesCopy2.dat')\n village_gen(-107, number_of_villages_to_generate, [132], 169, \n number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file('./villagesCopy2.dat')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\nnumber_of_villages_to_generate = 32\nnumber_of_doors_to_generate = 22\ntick = 77\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\n 'Hmm. 
Unfortunately, the file requested does not exist :(')\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n 
village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,\n axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat['data']\n 
doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,\n halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file('./villagesCopy2.dat')\n village_gen(-107, number_of_villages_to_generate, [132], 169, \n number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file('./villagesCopy2.dat')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from nbt import nbt\nfrom matplotlib import pyplot\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n\n k = []\n assert axis in ('X', 'Z')\n\n if axis == \"Z\":\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)]\n k.append(j)\n elif axis == \"X\":\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)]\n k.append(j)\n return k\n\n\nnumber_of_villages_to_generate = 32\nnumber_of_doors_to_generate = 22\ntick = 77\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2[\"Villages\"] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\"Hmm. 
Unfortunately, the file requested does not exist :(\")\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(doors_list)\n\n @property\n def is_empty(self):\n return len(self._village[\"Doors\"]) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n\n village_template['CX'] = nbt.TAG_Int(0)\n 
village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces, axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat[\"data\"]\n 
doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file(\"./villagesCopy2.dat\")\n village_gen(-107, number_of_villages_to_generate, [132], 169, number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file(\"./villagesCopy2.dat\")\n\nif __name__ == '__main__':\n main()",
"step-ids": [
14,
15,
19,
21,
23
]
}
|
[
14,
15,
19,
21,
23
] |
#finding optimal betting strategy for the blackjack game using Monte Carlo ES method
import random
class Player():
    """State holder for one blackjack hand in a Monte Carlo ES experiment.

    Attributes:
        q: action-value table Q(s, a); not yet populated by this stub.
        policy: current policy pi(s); not yet populated by this stub.
        returns: per-(s, a) return lists; not yet populated by this stub.
        cards: current total of the player's cards.
        dealer: the dealer's single visible card value.
    """

    def __init__(self) -> None:
        # BUG FIX: the original bound all of these as *local* variables, so
        # the instance never had q/policy/returns attributes and calling
        # hit() before deal() raised AttributeError on self.cards.
        self.q = None
        self.policy = None
        self.returns = None
        self.cards = 0
        self.dealer = 0

    def hit(self):
        """Draw one card (value 1..11 inclusive) and add it to the total."""
        self.cards += random.randint(1, 11)

    def deal(self):
        """Start a hand: two cards for the player, one visible dealer card."""
        self.cards = random.randint(1, 11) + random.randint(1, 11)
        self.dealer = random.randint(1, 11)

    def stick(self):
        """Stand — take no further cards. Placeholder; no payout logic yet."""
        pass

    def reset(self):
        """Clear the table back to the pre-deal state."""
        self.dealer = 0
        self.cards = 0

    def episode(self):
        """Run one episode: reset, deal a fresh hand, then (TODO) act on pi."""
        self.reset()
        self.deal()
        # take action based on policy
#Initialize, for all s ∈ S, a ∈ A(s):
#Q(s, a) ← arbitrary
#π(s) ← arbitrary
#Returns(s, a) ← empty list
#Repeat forever:
#Choose S0 ∈ S and A0 ∈ A(S0) s.t. all pairs have probability > 0
#Generate an episode starting from S0, A0, following π
#For each pair s, a appearing in the episode:
#G ← return following the first occurrence of s, a
#Append G to Returns(s, a)
#Q(s, a) ← average(Returns(s, a))
#For each s in the episode:
#π(s) ← argmax_a Q(s, a)
# Script entry point — intentionally a no-op; the Monte Carlo ES driver
# sketched in the comments above has not been implemented yet.
if __name__=="__main__":
    pass
|
normal
|
{
"blob_id": "db159cfb198311b0369f65eb9e10947c4d28c695",
"index": 2919,
"step-1": "<mask token>\n\n\nclass Player:\n <mask token>\n\n def hit(self):\n self.cards += random.randint(1, 11)\n\n def deal(self):\n self.cards = random.randint(1, 11) + random.randint(1, 11)\n self.dealer = random.randint(1, 11)\n <mask token>\n\n def reset(self):\n self.dealer = 0\n self.cards = 0\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Player:\n <mask token>\n\n def hit(self):\n self.cards += random.randint(1, 11)\n\n def deal(self):\n self.cards = random.randint(1, 11) + random.randint(1, 11)\n self.dealer = random.randint(1, 11)\n\n def stick(self):\n pass\n\n def reset(self):\n self.dealer = 0\n self.cards = 0\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Player:\n\n def __init__(self) ->None:\n q = None\n policy = None\n returns = None\n cards = 0\n dealer = 0\n\n def hit(self):\n self.cards += random.randint(1, 11)\n\n def deal(self):\n self.cards = random.randint(1, 11) + random.randint(1, 11)\n self.dealer = random.randint(1, 11)\n\n def stick(self):\n pass\n\n def reset(self):\n self.dealer = 0\n self.cards = 0\n\n def episode(self):\n self.reset()\n self.deal()\n\n\nif __name__ == '__main__':\n pass\n",
"step-4": "import random\n\n\nclass Player:\n\n def __init__(self) ->None:\n q = None\n policy = None\n returns = None\n cards = 0\n dealer = 0\n\n def hit(self):\n self.cards += random.randint(1, 11)\n\n def deal(self):\n self.cards = random.randint(1, 11) + random.randint(1, 11)\n self.dealer = random.randint(1, 11)\n\n def stick(self):\n pass\n\n def reset(self):\n self.dealer = 0\n self.cards = 0\n\n def episode(self):\n self.reset()\n self.deal()\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "#finding optimal betting strategy for the blackjack game using Monte Carlo ES method\nimport random\n\nclass Player():\n def __init__(self) -> None:\n q = None\n policy = None\n returns = None\n cards = 0\n dealer = 0\n\n def hit(self):\n self.cards += random.randint(1,11)\n\n def deal(self):\n self.cards = random.randint(1,11) + random.randint(1,11)\n self.dealer = random.randint(1,11)\n\n def stick(self):\n pass\n\n def reset(self):\n self.dealer = 0\n self.cards = 0\n\n def episode(self):\n self.reset()\n self.deal()\n #take action based on policy\n\n#Initialize, for all s ∈ S, a ∈ A(s):\n#Q(s, a) ← arbitrary\n#π(s) ← arbitrary\n#Returns(s, a) ← empty list\n#Repeat forever:\n#Choose S0 ∈ S and A0 ∈ A(S0) s.t. all pairs have probability > 0\n#Generate an episode starting from S0, A0, following π\n#For each pair s, a appearing in the episode:\n#G ← return following the first occurrence of s, a\n#Append G to Returns(s, a)\n#Q(s, a) ← average(Returns(s, a))\n#For each s in the episode:\n#π(s) ← argmaxa Q(s, a)\n\n\nif __name__==\"__main__\":\n pass\n ",
"step-ids": [
4,
5,
8,
9,
10
]
}
|
[
4,
5,
8,
9,
10
] |
<|reserved_special_token_0|>
@register.resources()
class Roles(object):
<|reserved_special_token_0|>
def role(self, req, resp, id):
return obj(req, infinitystone_role, sql_id=id)
def roles(self, req, resp):
return sql_list(req, 'infinitystone_role', search={'id': str,
'name': str})
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register.resources()
class Roles(object):
<|reserved_special_token_0|>
def role(self, req, resp, id):
return obj(req, infinitystone_role, sql_id=id)
def roles(self, req, resp):
return sql_list(req, 'infinitystone_role', search={'id': str,
'name': str})
<|reserved_special_token_0|>
def update(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
def delete(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register.resources()
class Roles(object):
def __init__(self):
router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')
router.add('GET', '/v1/roles', self.roles, tag='roles:view')
router.add('POST', '/v1/role', self.create, tag='roles:admin')
router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=
'roles:admin')
router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')
def role(self, req, resp, id):
return obj(req, infinitystone_role, sql_id=id)
def roles(self, req, resp):
return sql_list(req, 'infinitystone_role', search={'id': str,
'name': str})
<|reserved_special_token_0|>
def update(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
def delete(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
<|reserved_special_token_1|>
from luxon import register
from luxon import router
from luxon.helpers.api import sql_list, obj
from infinitystone.models.roles import infinitystone_role
@register.resources()
class Roles(object):
def __init__(self):
router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')
router.add('GET', '/v1/roles', self.roles, tag='roles:view')
router.add('POST', '/v1/role', self.create, tag='roles:admin')
router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=
'roles:admin')
router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')
def role(self, req, resp, id):
return obj(req, infinitystone_role, sql_id=id)
def roles(self, req, resp):
return sql_list(req, 'infinitystone_role', search={'id': str,
'name': str})
def create(self, req, resp):
role = obj(req, infinitystone_role)
role.commit()
return role
def update(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
def delete(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Christiaan Frans Rademan <chris@fwiw.co.za>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
from luxon import register
from luxon import router
from luxon.helpers.api import sql_list, obj
from infinitystone.models.roles import infinitystone_role
@register.resources()
class Roles(object):
def __init__(self):
router.add('GET', '/v1/role/{id}', self.role,
tag='roles:view')
router.add('GET', '/v1/roles', self.roles,
tag='roles:view')
router.add('POST', '/v1/role', self.create,
tag='roles:admin')
router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update,
tag='roles:admin')
router.add('DELETE', '/v1/role/{id}', self.delete,
tag='roles:admin')
def role(self, req, resp, id):
return obj(req, infinitystone_role, sql_id=id)
def roles(self, req, resp):
return sql_list(req, 'infinitystone_role',
search={'id': str,
'name': str})
def create(self, req, resp):
role = obj(req, infinitystone_role)
role.commit()
return role
def update(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
def delete(self, req, resp, id):
role = obj(req, infinitystone_role, sql_id=id)
role.commit()
return role
|
flexible
|
{
"blob_id": "13e27c29839286988b37d2d3685f54d42fd57973",
"index": 9773,
"step-1": "<mask token>\n\n\n@register.resources()\nclass Roles(object):\n <mask token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\n@register.resources()\nclass Roles(object):\n <mask token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <mask token>\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"step-3": "<mask token>\n\n\n@register.resources()\nclass Roles(object):\n\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')\n router.add('GET', '/v1/roles', self.roles, tag='roles:view')\n router.add('POST', '/v1/role', self.create, tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=\n 'roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <mask token>\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"step-4": "from luxon import register\nfrom luxon import router\nfrom luxon.helpers.api import sql_list, obj\nfrom infinitystone.models.roles import infinitystone_role\n\n\n@register.resources()\nclass Roles(object):\n\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')\n router.add('GET', '/v1/roles', self.roles, tag='roles:view')\n router.add('POST', '/v1/role', self.create, tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=\n 'roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n\n def create(self, req, resp):\n role = obj(req, infinitystone_role)\n role.commit()\n return role\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"step-5": "# -*- coding: utf-8 -*-\n# Copyright (c) 2018-2020 Christiaan Frans Rademan <chris@fwiw.co.za>.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holders nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF\n# THE POSSIBILITY OF SUCH DAMAGE.\nfrom luxon import register\nfrom luxon import router\nfrom luxon.helpers.api import sql_list, obj\n\nfrom infinitystone.models.roles import infinitystone_role\n\n\n@register.resources()\nclass Roles(object):\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role,\n tag='roles:view')\n router.add('GET', '/v1/roles', self.roles,\n tag='roles:view')\n router.add('POST', '/v1/role', self.create,\n tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update,\n tag='roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete,\n tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role',\n search={'id': str,\n 'name': str})\n\n def create(self, req, resp):\n role = obj(req, infinitystone_role)\n role.commit()\n return role\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"step-ids": [
3,
5,
6,
8,
9
]
}
|
[
3,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
def generate_grid():
""" Code that generates the grid """
y_offset = -10
for a in range(20):
x_offset = 10
for b in range(1):
y_offset += 20
for c in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for d in range(1):
x_offset += 40
x_offset = 30
for e in range(1):
y_offset += 20
for f in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for g in range(1):
x_offset += 40
def apple():
""" Draws an apple """
arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
""" Code that sets up the snake part to be drawn """
arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,
snake_scale_y, snake_color)
def on_draw(delta_time):
""" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is
drawn)). """
arcade.start_render()
generate_grid()
apple()
snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)
snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)
snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)
snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)
snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)
snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)
if on_draw.snake_part_x <= 230:
snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,
snake_color)
""" If statements that will make snake part one move """
if on_draw.snake_part_x >= 550:
on_draw.snake_part_x -= 20
elif on_draw.snake_part_x <= 550:
on_draw.snake_part_y += 20
if on_draw.snake_part_y >= 500:
on_draw.snake_part_y -= 20
on_draw.snake_part_x -= 20
if on_draw.snake_part_x <= 180:
on_draw.snake_part_x += 20
""" If statements that will make snake part two move """
if on_draw.snake_part2_x >= 550:
on_draw.snake_part2_x -= 20
elif on_draw.snake_part2_x <= 550:
on_draw.snake_part2_y += 20
if on_draw.snake_part2_y >= 500:
on_draw.snake_part2_y -= 20
on_draw.snake_part2_x -= 20
if on_draw.snake_part2_x <= 200:
on_draw.snake_part2_x += 20
""" If statements that will make snake part three move """
if on_draw.snake_part3_x >= 550:
on_draw.snake_part3_x -= 20
elif on_draw.snake_part3_x <= 550:
on_draw.snake_part3_y += 20
if on_draw.snake_part3_y >= 500:
on_draw.snake_part3_y -= 20
on_draw.snake_part3_x -= 20
if on_draw.snake_part3_x <= 220:
on_draw.snake_part3_x += 20
""" If statements that will make snake part four move """
if on_draw.snake_part4_x >= 550:
on_draw.snake_part4_x -= 20
elif on_draw.snake_part4_x <= 550:
on_draw.snake_part4_y += 20
if on_draw.snake_part4_y >= 500:
on_draw.snake_part4_y -= 20
on_draw.snake_part4_x -= 20
if on_draw.snake_part4_x <= 240:
on_draw.snake_part4_x += 20
""" If statements that will make snake part five move """
if on_draw.snake_part5_x >= 550:
on_draw.snake_part5_x -= 20
elif on_draw.snake_part5_x <= 550:
on_draw.snake_part5_y += 20
if on_draw.snake_part5_y >= 500:
on_draw.snake_part5_y -= 20
on_draw.snake_part5_x -= 20
if on_draw.snake_part5_x <= 260:
on_draw.snake_part5_x += 20
""" If statements that will make snake part six move """
if on_draw.snake_part6_x >= 550:
on_draw.snake_part6_x -= 20
elif on_draw.snake_part6_x <= 550:
on_draw.snake_part6_y += 20
if on_draw.snake_part6_y >= 500:
on_draw.snake_part6_y -= 20
on_draw.snake_part6_x -= 20
if on_draw.snake_part6_x <= 280:
on_draw.snake_part6_x += 20
<|reserved_special_token_0|>
def main():
""" Main code the calls all the rest of the code """
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')
arcade.set_background_color(light_green)
arcade.schedule(on_draw, 1 / 3)
arcade.run()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def square(square_x, square_y, square_width, square_height, square_color):
""" Code that sets up the squares for generation """
arcade.draw_rectangle_filled(square_x, square_y, square_width,
square_height, square_color)
def generate_grid():
""" Code that generates the grid """
y_offset = -10
for a in range(20):
x_offset = 10
for b in range(1):
y_offset += 20
for c in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for d in range(1):
x_offset += 40
x_offset = 30
for e in range(1):
y_offset += 20
for f in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for g in range(1):
x_offset += 40
def apple():
""" Draws an apple """
arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
""" Code that sets up the snake part to be drawn """
arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,
snake_scale_y, snake_color)
def on_draw(delta_time):
""" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is
drawn)). """
arcade.start_render()
generate_grid()
apple()
snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)
snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)
snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)
snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)
snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)
snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)
if on_draw.snake_part_x <= 230:
snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,
snake_color)
""" If statements that will make snake part one move """
if on_draw.snake_part_x >= 550:
on_draw.snake_part_x -= 20
elif on_draw.snake_part_x <= 550:
on_draw.snake_part_y += 20
if on_draw.snake_part_y >= 500:
on_draw.snake_part_y -= 20
on_draw.snake_part_x -= 20
if on_draw.snake_part_x <= 180:
on_draw.snake_part_x += 20
""" If statements that will make snake part two move """
if on_draw.snake_part2_x >= 550:
on_draw.snake_part2_x -= 20
elif on_draw.snake_part2_x <= 550:
on_draw.snake_part2_y += 20
if on_draw.snake_part2_y >= 500:
on_draw.snake_part2_y -= 20
on_draw.snake_part2_x -= 20
if on_draw.snake_part2_x <= 200:
on_draw.snake_part2_x += 20
""" If statements that will make snake part three move """
if on_draw.snake_part3_x >= 550:
on_draw.snake_part3_x -= 20
elif on_draw.snake_part3_x <= 550:
on_draw.snake_part3_y += 20
if on_draw.snake_part3_y >= 500:
on_draw.snake_part3_y -= 20
on_draw.snake_part3_x -= 20
if on_draw.snake_part3_x <= 220:
on_draw.snake_part3_x += 20
""" If statements that will make snake part four move """
if on_draw.snake_part4_x >= 550:
on_draw.snake_part4_x -= 20
elif on_draw.snake_part4_x <= 550:
on_draw.snake_part4_y += 20
if on_draw.snake_part4_y >= 500:
on_draw.snake_part4_y -= 20
on_draw.snake_part4_x -= 20
if on_draw.snake_part4_x <= 240:
on_draw.snake_part4_x += 20
""" If statements that will make snake part five move """
if on_draw.snake_part5_x >= 550:
on_draw.snake_part5_x -= 20
elif on_draw.snake_part5_x <= 550:
on_draw.snake_part5_y += 20
if on_draw.snake_part5_y >= 500:
on_draw.snake_part5_y -= 20
on_draw.snake_part5_x -= 20
if on_draw.snake_part5_x <= 260:
on_draw.snake_part5_x += 20
""" If statements that will make snake part six move """
if on_draw.snake_part6_x >= 550:
on_draw.snake_part6_x -= 20
elif on_draw.snake_part6_x <= 550:
on_draw.snake_part6_y += 20
if on_draw.snake_part6_y >= 500:
on_draw.snake_part6_y -= 20
on_draw.snake_part6_x -= 20
if on_draw.snake_part6_x <= 280:
on_draw.snake_part6_x += 20
<|reserved_special_token_0|>
def main():
""" Main code the calls all the rest of the code """
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')
arcade.set_background_color(light_green)
arcade.schedule(on_draw, 1 / 3)
arcade.run()
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
WINDOW_WIDTH = 740
WINDOW_HEIGHT = 740
dark_green = 170, 216, 81
light_green = 162, 210, 73
snake_color = 72, 118, 235
def square(square_x, square_y, square_width, square_height, square_color):
""" Code that sets up the squares for generation """
arcade.draw_rectangle_filled(square_x, square_y, square_width,
square_height, square_color)
def generate_grid():
""" Code that generates the grid """
y_offset = -10
for a in range(20):
x_offset = 10
for b in range(1):
y_offset += 20
for c in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for d in range(1):
x_offset += 40
x_offset = 30
for e in range(1):
y_offset += 20
for f in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for g in range(1):
x_offset += 40
def apple():
""" Draws an apple """
arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
""" Code that sets up the snake part to be drawn """
arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,
snake_scale_y, snake_color)
def on_draw(delta_time):
""" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is
drawn)). """
arcade.start_render()
generate_grid()
apple()
snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)
snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)
snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)
snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)
snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)
snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)
if on_draw.snake_part_x <= 230:
snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,
snake_color)
""" If statements that will make snake part one move """
if on_draw.snake_part_x >= 550:
on_draw.snake_part_x -= 20
elif on_draw.snake_part_x <= 550:
on_draw.snake_part_y += 20
if on_draw.snake_part_y >= 500:
on_draw.snake_part_y -= 20
on_draw.snake_part_x -= 20
if on_draw.snake_part_x <= 180:
on_draw.snake_part_x += 20
""" If statements that will make snake part two move """
if on_draw.snake_part2_x >= 550:
on_draw.snake_part2_x -= 20
elif on_draw.snake_part2_x <= 550:
on_draw.snake_part2_y += 20
if on_draw.snake_part2_y >= 500:
on_draw.snake_part2_y -= 20
on_draw.snake_part2_x -= 20
if on_draw.snake_part2_x <= 200:
on_draw.snake_part2_x += 20
""" If statements that will make snake part three move """
if on_draw.snake_part3_x >= 550:
on_draw.snake_part3_x -= 20
elif on_draw.snake_part3_x <= 550:
on_draw.snake_part3_y += 20
if on_draw.snake_part3_y >= 500:
on_draw.snake_part3_y -= 20
on_draw.snake_part3_x -= 20
if on_draw.snake_part3_x <= 220:
on_draw.snake_part3_x += 20
""" If statements that will make snake part four move """
if on_draw.snake_part4_x >= 550:
on_draw.snake_part4_x -= 20
elif on_draw.snake_part4_x <= 550:
on_draw.snake_part4_y += 20
if on_draw.snake_part4_y >= 500:
on_draw.snake_part4_y -= 20
on_draw.snake_part4_x -= 20
if on_draw.snake_part4_x <= 240:
on_draw.snake_part4_x += 20
""" If statements that will make snake part five move """
if on_draw.snake_part5_x >= 550:
on_draw.snake_part5_x -= 20
elif on_draw.snake_part5_x <= 550:
on_draw.snake_part5_y += 20
if on_draw.snake_part5_y >= 500:
on_draw.snake_part5_y -= 20
on_draw.snake_part5_x -= 20
if on_draw.snake_part5_x <= 260:
on_draw.snake_part5_x += 20
""" If statements that will make snake part six move """
if on_draw.snake_part6_x >= 550:
on_draw.snake_part6_x -= 20
elif on_draw.snake_part6_x <= 550:
on_draw.snake_part6_y += 20
if on_draw.snake_part6_y >= 500:
on_draw.snake_part6_y -= 20
on_draw.snake_part6_x -= 20
if on_draw.snake_part6_x <= 280:
on_draw.snake_part6_x += 20
on_draw.snake_part_x = 570
on_draw.snake_part_y = 130
on_draw.snake_part2_x = 590
on_draw.snake_part2_y = 130
on_draw.snake_part3_x = 610
on_draw.snake_part3_y = 130
on_draw.snake_part4_x = 630
on_draw.snake_part4_y = 130
on_draw.snake_part5_x = 650
on_draw.snake_part5_y = 130
on_draw.snake_part6_x = 670
on_draw.snake_part6_y = 130
def main():
""" Main code the calls all the rest of the code """
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')
arcade.set_background_color(light_green)
arcade.schedule(on_draw, 1 / 3)
arcade.run()
main()
<|reserved_special_token_1|>
import arcade
WINDOW_WIDTH = 740
WINDOW_HEIGHT = 740
dark_green = 170, 216, 81
light_green = 162, 210, 73
snake_color = 72, 118, 235
def square(square_x, square_y, square_width, square_height, square_color):
""" Code that sets up the squares for generation """
arcade.draw_rectangle_filled(square_x, square_y, square_width,
square_height, square_color)
def generate_grid():
""" Code that generates the grid """
y_offset = -10
for a in range(20):
x_offset = 10
for b in range(1):
y_offset += 20
for c in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for d in range(1):
x_offset += 40
x_offset = 30
for e in range(1):
y_offset += 20
for f in range(20):
square(x_offset, y_offset, 20, 20, dark_green)
for g in range(1):
x_offset += 40
def apple():
""" Draws an apple """
arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
""" Code that sets up the snake part to be drawn """
arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,
snake_scale_y, snake_color)
def on_draw(delta_time):
""" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is
drawn)). """
arcade.start_render()
generate_grid()
apple()
snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)
snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)
snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)
snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)
snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)
snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)
if on_draw.snake_part_x <= 230:
snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,
snake_color)
""" If statements that will make snake part one move """
if on_draw.snake_part_x >= 550:
on_draw.snake_part_x -= 20
elif on_draw.snake_part_x <= 550:
on_draw.snake_part_y += 20
if on_draw.snake_part_y >= 500:
on_draw.snake_part_y -= 20
on_draw.snake_part_x -= 20
if on_draw.snake_part_x <= 180:
on_draw.snake_part_x += 20
""" If statements that will make snake part two move """
if on_draw.snake_part2_x >= 550:
on_draw.snake_part2_x -= 20
elif on_draw.snake_part2_x <= 550:
on_draw.snake_part2_y += 20
if on_draw.snake_part2_y >= 500:
on_draw.snake_part2_y -= 20
on_draw.snake_part2_x -= 20
if on_draw.snake_part2_x <= 200:
on_draw.snake_part2_x += 20
""" If statements that will make snake part three move """
if on_draw.snake_part3_x >= 550:
on_draw.snake_part3_x -= 20
elif on_draw.snake_part3_x <= 550:
on_draw.snake_part3_y += 20
if on_draw.snake_part3_y >= 500:
on_draw.snake_part3_y -= 20
on_draw.snake_part3_x -= 20
if on_draw.snake_part3_x <= 220:
on_draw.snake_part3_x += 20
""" If statements that will make snake part four move """
if on_draw.snake_part4_x >= 550:
on_draw.snake_part4_x -= 20
elif on_draw.snake_part4_x <= 550:
on_draw.snake_part4_y += 20
if on_draw.snake_part4_y >= 500:
on_draw.snake_part4_y -= 20
on_draw.snake_part4_x -= 20
if on_draw.snake_part4_x <= 240:
on_draw.snake_part4_x += 20
""" If statements that will make snake part five move """
if on_draw.snake_part5_x >= 550:
on_draw.snake_part5_x -= 20
elif on_draw.snake_part5_x <= 550:
on_draw.snake_part5_y += 20
if on_draw.snake_part5_y >= 500:
on_draw.snake_part5_y -= 20
on_draw.snake_part5_x -= 20
if on_draw.snake_part5_x <= 260:
on_draw.snake_part5_x += 20
""" If statements that will make snake part six move """
if on_draw.snake_part6_x >= 550:
on_draw.snake_part6_x -= 20
elif on_draw.snake_part6_x <= 550:
on_draw.snake_part6_y += 20
if on_draw.snake_part6_y >= 500:
on_draw.snake_part6_y -= 20
on_draw.snake_part6_x -= 20
if on_draw.snake_part6_x <= 280:
on_draw.snake_part6_x += 20
on_draw.snake_part_x = 570
on_draw.snake_part_y = 130
on_draw.snake_part2_x = 590
on_draw.snake_part2_y = 130
on_draw.snake_part3_x = 610
on_draw.snake_part3_y = 130
on_draw.snake_part4_x = 630
on_draw.snake_part4_y = 130
on_draw.snake_part5_x = 650
on_draw.snake_part5_y = 130
on_draw.snake_part6_x = 670
on_draw.snake_part6_y = 130
def main():
""" Main code the calls all the rest of the code """
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')
arcade.set_background_color(light_green)
arcade.schedule(on_draw, 1 / 3)
arcade.run()
main()
<|reserved_special_token_1|>
import arcade
WINDOW_WIDTH = 740
WINDOW_HEIGHT = 740
dark_green = (170, 216, 81)
light_green = (162, 210, 73)
snake_color = (72, 118, 235)
def square(square_x, square_y, square_width, square_height, square_color):
""" Code that sets up the squares for generation """
arcade.draw_rectangle_filled(square_x, square_y, square_width, square_height, square_color)
def generate_grid():
""" Code that generates the grid """
y_offset = -10
for a in range(20):
# Line 1
# Adds offset to the x position of the squares
x_offset = 10
for b in range(1):
# Adds offset to the y position of the squares
y_offset += 20
for c in range(20):
# Prints a row of squares(5 squares along the x)
square(x_offset, y_offset, 20, 20, dark_green)
for d in range(1):
# Adds x offset for the next line of squares on the y axis
x_offset += 40
# Line 2 (needs 2 lines because the offset of each line)
# Adds offset to the x position of the squares
x_offset = 30
for e in range(1):
# Adds offset to the y position of the squares
y_offset += 20
for f in range(20):
# Prints a row of squares(5 squares along the x)
square(x_offset, y_offset, 20, 20, dark_green)
for g in range(1):
# Adds x offset for the next line of squares on the y axis
x_offset += 40
def apple():
""" Draws an apple """
arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
""" Code that sets up the snake part to be drawn """
arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color)
def on_draw(delta_time):
""" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is
drawn)). """
# draws all our objects
arcade.start_render()
generate_grid()
apple()
snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)
snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)
snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)
snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)
snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)
snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)
if on_draw.snake_part_x <= 230:
snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20, snake_color)
""" If statements that will make snake part one move """
if on_draw.snake_part_x >= 550:
on_draw.snake_part_x -= 20
elif on_draw.snake_part_x <= 550:
on_draw.snake_part_y += 20
if on_draw.snake_part_y >= 500:
on_draw.snake_part_y -= 20
on_draw.snake_part_x -= 20
if on_draw.snake_part_x <= 180:
on_draw.snake_part_x += 20
""" If statements that will make snake part two move """
if on_draw.snake_part2_x >= 550:
on_draw.snake_part2_x -= 20
elif on_draw.snake_part2_x <= 550:
on_draw.snake_part2_y += 20
if on_draw.snake_part2_y >= 500:
on_draw.snake_part2_y -= 20
on_draw.snake_part2_x -= 20
if on_draw.snake_part2_x <= 200:
on_draw.snake_part2_x += 20
""" If statements that will make snake part three move """
if on_draw.snake_part3_x >= 550:
on_draw.snake_part3_x -= 20
elif on_draw.snake_part3_x <= 550:
on_draw.snake_part3_y += 20
if on_draw.snake_part3_y >= 500:
on_draw.snake_part3_y -= 20
on_draw.snake_part3_x -= 20
if on_draw.snake_part3_x <= 220:
on_draw.snake_part3_x += 20
""" If statements that will make snake part four move """
if on_draw.snake_part4_x >= 550:
on_draw.snake_part4_x -= 20
elif on_draw.snake_part4_x <= 550:
on_draw.snake_part4_y += 20
if on_draw.snake_part4_y >= 500:
on_draw.snake_part4_y -= 20
on_draw.snake_part4_x -= 20
if on_draw.snake_part4_x <= 240:
on_draw.snake_part4_x += 20
""" If statements that will make snake part five move """
if on_draw.snake_part5_x >= 550:
on_draw.snake_part5_x -= 20
elif on_draw.snake_part5_x <= 550:
on_draw.snake_part5_y += 20
if on_draw.snake_part5_y >= 500:
on_draw.snake_part5_y -= 20
on_draw.snake_part5_x -= 20
if on_draw.snake_part5_x <= 260:
on_draw.snake_part5_x += 20
""" If statements that will make snake part six move """
if on_draw.snake_part6_x >= 550:
on_draw.snake_part6_x -= 20
elif on_draw.snake_part6_x <= 550:
on_draw.snake_part6_y += 20
if on_draw.snake_part6_y >= 500:
on_draw.snake_part6_y -= 20
on_draw.snake_part6_x -= 20
if on_draw.snake_part6_x <= 280:
on_draw.snake_part6_x += 20
# Sets a initial value to on_draw.snake_part_x(this is the starting position of the snake)
on_draw.snake_part_x = 570
on_draw.snake_part_y = 130
on_draw.snake_part2_x = 590
on_draw.snake_part2_y = 130
on_draw.snake_part3_x = 610
on_draw.snake_part3_y = 130
on_draw.snake_part4_x = 630
on_draw.snake_part4_y = 130
on_draw.snake_part5_x = 650
on_draw.snake_part5_y = 130
on_draw.snake_part6_x = 670
on_draw.snake_part6_y = 130
def main():
""" Main code the calls all the rest of the code """
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, "Snake.exe")
# Set the window background colour
arcade.set_background_color(light_green)
# Calls the on_draw method every 1/3(20 seconds) of a second
arcade.schedule(on_draw, 1/3)
# Keeps the window open until closed by the user
arcade.run()
main()
|
flexible
|
{
"blob_id": "fbe091b1cf3ecc2f69d34e3b1c399314b38ebc4a",
"index": 5656,
"step-1": "<mask token>\n\n\ndef generate_grid():\n \"\"\" Code that generates the grid \"\"\"\n y_offset = -10\n for a in range(20):\n x_offset = 10\n for b in range(1):\n y_offset += 20\n for c in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n x_offset += 40\n x_offset = 30\n for e in range(1):\n y_offset += 20\n for f in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n x_offset += 40\n\n\ndef apple():\n \"\"\" Draws an apple \"\"\"\n arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))\n\n\ndef snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n \"\"\" Code that sets up the snake part to be drawn \"\"\"\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,\n snake_scale_y, snake_color)\n\n\ndef on_draw(delta_time):\n \"\"\" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is\n drawn)). \"\"\"\n arcade.start_render()\n generate_grid()\n apple()\n snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)\n snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)\n snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)\n snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)\n snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)\n snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)\n if on_draw.snake_part_x <= 230:\n snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,\n snake_color)\n \"\"\" If statements that will make snake part one move \"\"\"\n if on_draw.snake_part_x >= 550:\n on_draw.snake_part_x -= 20\n elif on_draw.snake_part_x <= 550:\n on_draw.snake_part_y += 20\n if on_draw.snake_part_y >= 500:\n on_draw.snake_part_y -= 20\n on_draw.snake_part_x -= 20\n if on_draw.snake_part_x <= 180:\n on_draw.snake_part_x += 20\n \"\"\" If statements that will make snake part two move \"\"\"\n if 
on_draw.snake_part2_x >= 550:\n on_draw.snake_part2_x -= 20\n elif on_draw.snake_part2_x <= 550:\n on_draw.snake_part2_y += 20\n if on_draw.snake_part2_y >= 500:\n on_draw.snake_part2_y -= 20\n on_draw.snake_part2_x -= 20\n if on_draw.snake_part2_x <= 200:\n on_draw.snake_part2_x += 20\n \"\"\" If statements that will make snake part three move \"\"\"\n if on_draw.snake_part3_x >= 550:\n on_draw.snake_part3_x -= 20\n elif on_draw.snake_part3_x <= 550:\n on_draw.snake_part3_y += 20\n if on_draw.snake_part3_y >= 500:\n on_draw.snake_part3_y -= 20\n on_draw.snake_part3_x -= 20\n if on_draw.snake_part3_x <= 220:\n on_draw.snake_part3_x += 20\n \"\"\" If statements that will make snake part four move \"\"\"\n if on_draw.snake_part4_x >= 550:\n on_draw.snake_part4_x -= 20\n elif on_draw.snake_part4_x <= 550:\n on_draw.snake_part4_y += 20\n if on_draw.snake_part4_y >= 500:\n on_draw.snake_part4_y -= 20\n on_draw.snake_part4_x -= 20\n if on_draw.snake_part4_x <= 240:\n on_draw.snake_part4_x += 20\n \"\"\" If statements that will make snake part five move \"\"\"\n if on_draw.snake_part5_x >= 550:\n on_draw.snake_part5_x -= 20\n elif on_draw.snake_part5_x <= 550:\n on_draw.snake_part5_y += 20\n if on_draw.snake_part5_y >= 500:\n on_draw.snake_part5_y -= 20\n on_draw.snake_part5_x -= 20\n if on_draw.snake_part5_x <= 260:\n on_draw.snake_part5_x += 20\n \"\"\" If statements that will make snake part six move \"\"\"\n if on_draw.snake_part6_x >= 550:\n on_draw.snake_part6_x -= 20\n elif on_draw.snake_part6_x <= 550:\n on_draw.snake_part6_y += 20\n if on_draw.snake_part6_y >= 500:\n on_draw.snake_part6_y -= 20\n on_draw.snake_part6_x -= 20\n if on_draw.snake_part6_x <= 280:\n on_draw.snake_part6_x += 20\n\n\n<mask token>\n\n\ndef main():\n \"\"\" Main code the calls all the rest of the code \"\"\"\n arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')\n arcade.set_background_color(light_green)\n arcade.schedule(on_draw, 1 / 3)\n arcade.run()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef square(square_x, square_y, square_width, square_height, square_color):\n \"\"\" Code that sets up the squares for generation \"\"\"\n arcade.draw_rectangle_filled(square_x, square_y, square_width,\n square_height, square_color)\n\n\ndef generate_grid():\n \"\"\" Code that generates the grid \"\"\"\n y_offset = -10\n for a in range(20):\n x_offset = 10\n for b in range(1):\n y_offset += 20\n for c in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n x_offset += 40\n x_offset = 30\n for e in range(1):\n y_offset += 20\n for f in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n x_offset += 40\n\n\ndef apple():\n \"\"\" Draws an apple \"\"\"\n arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))\n\n\ndef snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n \"\"\" Code that sets up the snake part to be drawn \"\"\"\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,\n snake_scale_y, snake_color)\n\n\ndef on_draw(delta_time):\n \"\"\" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is\n drawn)). 
\"\"\"\n arcade.start_render()\n generate_grid()\n apple()\n snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)\n snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)\n snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)\n snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)\n snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)\n snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)\n if on_draw.snake_part_x <= 230:\n snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,\n snake_color)\n \"\"\" If statements that will make snake part one move \"\"\"\n if on_draw.snake_part_x >= 550:\n on_draw.snake_part_x -= 20\n elif on_draw.snake_part_x <= 550:\n on_draw.snake_part_y += 20\n if on_draw.snake_part_y >= 500:\n on_draw.snake_part_y -= 20\n on_draw.snake_part_x -= 20\n if on_draw.snake_part_x <= 180:\n on_draw.snake_part_x += 20\n \"\"\" If statements that will make snake part two move \"\"\"\n if on_draw.snake_part2_x >= 550:\n on_draw.snake_part2_x -= 20\n elif on_draw.snake_part2_x <= 550:\n on_draw.snake_part2_y += 20\n if on_draw.snake_part2_y >= 500:\n on_draw.snake_part2_y -= 20\n on_draw.snake_part2_x -= 20\n if on_draw.snake_part2_x <= 200:\n on_draw.snake_part2_x += 20\n \"\"\" If statements that will make snake part three move \"\"\"\n if on_draw.snake_part3_x >= 550:\n on_draw.snake_part3_x -= 20\n elif on_draw.snake_part3_x <= 550:\n on_draw.snake_part3_y += 20\n if on_draw.snake_part3_y >= 500:\n on_draw.snake_part3_y -= 20\n on_draw.snake_part3_x -= 20\n if on_draw.snake_part3_x <= 220:\n on_draw.snake_part3_x += 20\n \"\"\" If statements that will make snake part four move \"\"\"\n if on_draw.snake_part4_x >= 550:\n on_draw.snake_part4_x -= 20\n elif on_draw.snake_part4_x <= 550:\n on_draw.snake_part4_y += 20\n if on_draw.snake_part4_y >= 500:\n on_draw.snake_part4_y -= 20\n on_draw.snake_part4_x -= 20\n if 
on_draw.snake_part4_x <= 240:\n on_draw.snake_part4_x += 20\n \"\"\" If statements that will make snake part five move \"\"\"\n if on_draw.snake_part5_x >= 550:\n on_draw.snake_part5_x -= 20\n elif on_draw.snake_part5_x <= 550:\n on_draw.snake_part5_y += 20\n if on_draw.snake_part5_y >= 500:\n on_draw.snake_part5_y -= 20\n on_draw.snake_part5_x -= 20\n if on_draw.snake_part5_x <= 260:\n on_draw.snake_part5_x += 20\n \"\"\" If statements that will make snake part six move \"\"\"\n if on_draw.snake_part6_x >= 550:\n on_draw.snake_part6_x -= 20\n elif on_draw.snake_part6_x <= 550:\n on_draw.snake_part6_y += 20\n if on_draw.snake_part6_y >= 500:\n on_draw.snake_part6_y -= 20\n on_draw.snake_part6_x -= 20\n if on_draw.snake_part6_x <= 280:\n on_draw.snake_part6_x += 20\n\n\n<mask token>\n\n\ndef main():\n \"\"\" Main code the calls all the rest of the code \"\"\"\n arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')\n arcade.set_background_color(light_green)\n arcade.schedule(on_draw, 1 / 3)\n arcade.run()\n\n\nmain()\n",
"step-3": "<mask token>\nWINDOW_WIDTH = 740\nWINDOW_HEIGHT = 740\ndark_green = 170, 216, 81\nlight_green = 162, 210, 73\nsnake_color = 72, 118, 235\n\n\ndef square(square_x, square_y, square_width, square_height, square_color):\n \"\"\" Code that sets up the squares for generation \"\"\"\n arcade.draw_rectangle_filled(square_x, square_y, square_width,\n square_height, square_color)\n\n\ndef generate_grid():\n \"\"\" Code that generates the grid \"\"\"\n y_offset = -10\n for a in range(20):\n x_offset = 10\n for b in range(1):\n y_offset += 20\n for c in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n x_offset += 40\n x_offset = 30\n for e in range(1):\n y_offset += 20\n for f in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n x_offset += 40\n\n\ndef apple():\n \"\"\" Draws an apple \"\"\"\n arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))\n\n\ndef snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n \"\"\" Code that sets up the snake part to be drawn \"\"\"\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,\n snake_scale_y, snake_color)\n\n\ndef on_draw(delta_time):\n \"\"\" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is\n drawn)). 
\"\"\"\n arcade.start_render()\n generate_grid()\n apple()\n snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)\n snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)\n snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)\n snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)\n snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)\n snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)\n if on_draw.snake_part_x <= 230:\n snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,\n snake_color)\n \"\"\" If statements that will make snake part one move \"\"\"\n if on_draw.snake_part_x >= 550:\n on_draw.snake_part_x -= 20\n elif on_draw.snake_part_x <= 550:\n on_draw.snake_part_y += 20\n if on_draw.snake_part_y >= 500:\n on_draw.snake_part_y -= 20\n on_draw.snake_part_x -= 20\n if on_draw.snake_part_x <= 180:\n on_draw.snake_part_x += 20\n \"\"\" If statements that will make snake part two move \"\"\"\n if on_draw.snake_part2_x >= 550:\n on_draw.snake_part2_x -= 20\n elif on_draw.snake_part2_x <= 550:\n on_draw.snake_part2_y += 20\n if on_draw.snake_part2_y >= 500:\n on_draw.snake_part2_y -= 20\n on_draw.snake_part2_x -= 20\n if on_draw.snake_part2_x <= 200:\n on_draw.snake_part2_x += 20\n \"\"\" If statements that will make snake part three move \"\"\"\n if on_draw.snake_part3_x >= 550:\n on_draw.snake_part3_x -= 20\n elif on_draw.snake_part3_x <= 550:\n on_draw.snake_part3_y += 20\n if on_draw.snake_part3_y >= 500:\n on_draw.snake_part3_y -= 20\n on_draw.snake_part3_x -= 20\n if on_draw.snake_part3_x <= 220:\n on_draw.snake_part3_x += 20\n \"\"\" If statements that will make snake part four move \"\"\"\n if on_draw.snake_part4_x >= 550:\n on_draw.snake_part4_x -= 20\n elif on_draw.snake_part4_x <= 550:\n on_draw.snake_part4_y += 20\n if on_draw.snake_part4_y >= 500:\n on_draw.snake_part4_y -= 20\n on_draw.snake_part4_x -= 20\n if 
on_draw.snake_part4_x <= 240:\n on_draw.snake_part4_x += 20\n \"\"\" If statements that will make snake part five move \"\"\"\n if on_draw.snake_part5_x >= 550:\n on_draw.snake_part5_x -= 20\n elif on_draw.snake_part5_x <= 550:\n on_draw.snake_part5_y += 20\n if on_draw.snake_part5_y >= 500:\n on_draw.snake_part5_y -= 20\n on_draw.snake_part5_x -= 20\n if on_draw.snake_part5_x <= 260:\n on_draw.snake_part5_x += 20\n \"\"\" If statements that will make snake part six move \"\"\"\n if on_draw.snake_part6_x >= 550:\n on_draw.snake_part6_x -= 20\n elif on_draw.snake_part6_x <= 550:\n on_draw.snake_part6_y += 20\n if on_draw.snake_part6_y >= 500:\n on_draw.snake_part6_y -= 20\n on_draw.snake_part6_x -= 20\n if on_draw.snake_part6_x <= 280:\n on_draw.snake_part6_x += 20\n\n\non_draw.snake_part_x = 570\non_draw.snake_part_y = 130\non_draw.snake_part2_x = 590\non_draw.snake_part2_y = 130\non_draw.snake_part3_x = 610\non_draw.snake_part3_y = 130\non_draw.snake_part4_x = 630\non_draw.snake_part4_y = 130\non_draw.snake_part5_x = 650\non_draw.snake_part5_y = 130\non_draw.snake_part6_x = 670\non_draw.snake_part6_y = 130\n\n\ndef main():\n \"\"\" Main code the calls all the rest of the code \"\"\"\n arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')\n arcade.set_background_color(light_green)\n arcade.schedule(on_draw, 1 / 3)\n arcade.run()\n\n\nmain()\n",
"step-4": "import arcade\nWINDOW_WIDTH = 740\nWINDOW_HEIGHT = 740\ndark_green = 170, 216, 81\nlight_green = 162, 210, 73\nsnake_color = 72, 118, 235\n\n\ndef square(square_x, square_y, square_width, square_height, square_color):\n \"\"\" Code that sets up the squares for generation \"\"\"\n arcade.draw_rectangle_filled(square_x, square_y, square_width,\n square_height, square_color)\n\n\ndef generate_grid():\n \"\"\" Code that generates the grid \"\"\"\n y_offset = -10\n for a in range(20):\n x_offset = 10\n for b in range(1):\n y_offset += 20\n for c in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n x_offset += 40\n x_offset = 30\n for e in range(1):\n y_offset += 20\n for f in range(20):\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n x_offset += 40\n\n\ndef apple():\n \"\"\" Draws an apple \"\"\"\n arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))\n\n\ndef snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n \"\"\" Code that sets up the snake part to be drawn \"\"\"\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x,\n snake_scale_y, snake_color)\n\n\ndef on_draw(delta_time):\n \"\"\" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is\n drawn)). 
\"\"\"\n arcade.start_render()\n generate_grid()\n apple()\n snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)\n snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)\n snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)\n snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)\n snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)\n snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)\n if on_draw.snake_part_x <= 230:\n snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20,\n snake_color)\n \"\"\" If statements that will make snake part one move \"\"\"\n if on_draw.snake_part_x >= 550:\n on_draw.snake_part_x -= 20\n elif on_draw.snake_part_x <= 550:\n on_draw.snake_part_y += 20\n if on_draw.snake_part_y >= 500:\n on_draw.snake_part_y -= 20\n on_draw.snake_part_x -= 20\n if on_draw.snake_part_x <= 180:\n on_draw.snake_part_x += 20\n \"\"\" If statements that will make snake part two move \"\"\"\n if on_draw.snake_part2_x >= 550:\n on_draw.snake_part2_x -= 20\n elif on_draw.snake_part2_x <= 550:\n on_draw.snake_part2_y += 20\n if on_draw.snake_part2_y >= 500:\n on_draw.snake_part2_y -= 20\n on_draw.snake_part2_x -= 20\n if on_draw.snake_part2_x <= 200:\n on_draw.snake_part2_x += 20\n \"\"\" If statements that will make snake part three move \"\"\"\n if on_draw.snake_part3_x >= 550:\n on_draw.snake_part3_x -= 20\n elif on_draw.snake_part3_x <= 550:\n on_draw.snake_part3_y += 20\n if on_draw.snake_part3_y >= 500:\n on_draw.snake_part3_y -= 20\n on_draw.snake_part3_x -= 20\n if on_draw.snake_part3_x <= 220:\n on_draw.snake_part3_x += 20\n \"\"\" If statements that will make snake part four move \"\"\"\n if on_draw.snake_part4_x >= 550:\n on_draw.snake_part4_x -= 20\n elif on_draw.snake_part4_x <= 550:\n on_draw.snake_part4_y += 20\n if on_draw.snake_part4_y >= 500:\n on_draw.snake_part4_y -= 20\n on_draw.snake_part4_x -= 20\n if 
on_draw.snake_part4_x <= 240:\n on_draw.snake_part4_x += 20\n \"\"\" If statements that will make snake part five move \"\"\"\n if on_draw.snake_part5_x >= 550:\n on_draw.snake_part5_x -= 20\n elif on_draw.snake_part5_x <= 550:\n on_draw.snake_part5_y += 20\n if on_draw.snake_part5_y >= 500:\n on_draw.snake_part5_y -= 20\n on_draw.snake_part5_x -= 20\n if on_draw.snake_part5_x <= 260:\n on_draw.snake_part5_x += 20\n \"\"\" If statements that will make snake part six move \"\"\"\n if on_draw.snake_part6_x >= 550:\n on_draw.snake_part6_x -= 20\n elif on_draw.snake_part6_x <= 550:\n on_draw.snake_part6_y += 20\n if on_draw.snake_part6_y >= 500:\n on_draw.snake_part6_y -= 20\n on_draw.snake_part6_x -= 20\n if on_draw.snake_part6_x <= 280:\n on_draw.snake_part6_x += 20\n\n\non_draw.snake_part_x = 570\non_draw.snake_part_y = 130\non_draw.snake_part2_x = 590\non_draw.snake_part2_y = 130\non_draw.snake_part3_x = 610\non_draw.snake_part3_y = 130\non_draw.snake_part4_x = 630\non_draw.snake_part4_y = 130\non_draw.snake_part5_x = 650\non_draw.snake_part5_y = 130\non_draw.snake_part6_x = 670\non_draw.snake_part6_y = 130\n\n\ndef main():\n \"\"\" Main code the calls all the rest of the code \"\"\"\n arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, 'Snake.exe')\n arcade.set_background_color(light_green)\n arcade.schedule(on_draw, 1 / 3)\n arcade.run()\n\n\nmain()\n",
"step-5": "import arcade\n\nWINDOW_WIDTH = 740\nWINDOW_HEIGHT = 740\ndark_green = (170, 216, 81)\nlight_green = (162, 210, 73)\nsnake_color = (72, 118, 235)\n\n\ndef square(square_x, square_y, square_width, square_height, square_color):\n \"\"\" Code that sets up the squares for generation \"\"\"\n arcade.draw_rectangle_filled(square_x, square_y, square_width, square_height, square_color)\n\n\ndef generate_grid():\n \"\"\" Code that generates the grid \"\"\"\n y_offset = -10\n for a in range(20):\n # Line 1\n # Adds offset to the x position of the squares\n x_offset = 10\n for b in range(1):\n # Adds offset to the y position of the squares\n y_offset += 20\n for c in range(20):\n # Prints a row of squares(5 squares along the x)\n square(x_offset, y_offset, 20, 20, dark_green)\n for d in range(1):\n # Adds x offset for the next line of squares on the y axis\n x_offset += 40\n # Line 2 (needs 2 lines because the offset of each line)\n # Adds offset to the x position of the squares\n x_offset = 30\n for e in range(1):\n # Adds offset to the y position of the squares\n y_offset += 20\n for f in range(20):\n # Prints a row of squares(5 squares along the x)\n square(x_offset, y_offset, 20, 20, dark_green)\n for g in range(1):\n # Adds x offset for the next line of squares on the y axis\n x_offset += 40\n\n\ndef apple():\n \"\"\" Draws an apple \"\"\"\n arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))\n\n\ndef snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):\n \"\"\" Code that sets up the snake part to be drawn \"\"\"\n arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color)\n\n\ndef on_draw(delta_time):\n \"\"\" Draw everything every frame(we chose in on_draw.schedule(e.g I chose 1/3 so every 1/3 of a second a frame is\n drawn)). 
\"\"\"\n # draws all our objects\n arcade.start_render()\n\n generate_grid()\n apple()\n snake(on_draw.snake_part_x, on_draw.snake_part_y, 20, 20, snake_color)\n snake(on_draw.snake_part2_x, on_draw.snake_part2_y, 20, 20, snake_color)\n snake(on_draw.snake_part3_x, on_draw.snake_part3_y, 20, 20, snake_color)\n snake(on_draw.snake_part4_x, on_draw.snake_part4_y, 20, 20, snake_color)\n snake(on_draw.snake_part5_x, on_draw.snake_part5_y, 20, 20, snake_color)\n snake(on_draw.snake_part6_x, on_draw.snake_part6_y, 20, 20, snake_color)\n\n\n if on_draw.snake_part_x <= 230:\n snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20, snake_color)\n\n\n \"\"\" If statements that will make snake part one move \"\"\"\n if on_draw.snake_part_x >= 550:\n on_draw.snake_part_x -= 20\n\n elif on_draw.snake_part_x <= 550:\n on_draw.snake_part_y += 20\n if on_draw.snake_part_y >= 500:\n on_draw.snake_part_y -= 20\n on_draw.snake_part_x -= 20\n if on_draw.snake_part_x <= 180:\n on_draw.snake_part_x += 20\n\n \"\"\" If statements that will make snake part two move \"\"\"\n if on_draw.snake_part2_x >= 550:\n on_draw.snake_part2_x -= 20\n\n elif on_draw.snake_part2_x <= 550:\n on_draw.snake_part2_y += 20\n if on_draw.snake_part2_y >= 500:\n on_draw.snake_part2_y -= 20\n on_draw.snake_part2_x -= 20\n if on_draw.snake_part2_x <= 200:\n on_draw.snake_part2_x += 20\n\n \"\"\" If statements that will make snake part three move \"\"\"\n if on_draw.snake_part3_x >= 550:\n on_draw.snake_part3_x -= 20\n\n elif on_draw.snake_part3_x <= 550:\n on_draw.snake_part3_y += 20\n if on_draw.snake_part3_y >= 500:\n on_draw.snake_part3_y -= 20\n on_draw.snake_part3_x -= 20\n if on_draw.snake_part3_x <= 220:\n on_draw.snake_part3_x += 20\n\n \"\"\" If statements that will make snake part four move \"\"\"\n if on_draw.snake_part4_x >= 550:\n on_draw.snake_part4_x -= 20\n\n elif on_draw.snake_part4_x <= 550:\n on_draw.snake_part4_y += 20\n if on_draw.snake_part4_y >= 500:\n on_draw.snake_part4_y -= 20\n 
on_draw.snake_part4_x -= 20\n if on_draw.snake_part4_x <= 240:\n on_draw.snake_part4_x += 20\n\n \"\"\" If statements that will make snake part five move \"\"\"\n if on_draw.snake_part5_x >= 550:\n on_draw.snake_part5_x -= 20\n\n elif on_draw.snake_part5_x <= 550:\n on_draw.snake_part5_y += 20\n if on_draw.snake_part5_y >= 500:\n on_draw.snake_part5_y -= 20\n on_draw.snake_part5_x -= 20\n if on_draw.snake_part5_x <= 260:\n on_draw.snake_part5_x += 20\n\n \"\"\" If statements that will make snake part six move \"\"\"\n if on_draw.snake_part6_x >= 550:\n on_draw.snake_part6_x -= 20\n\n elif on_draw.snake_part6_x <= 550:\n on_draw.snake_part6_y += 20\n if on_draw.snake_part6_y >= 500:\n on_draw.snake_part6_y -= 20\n on_draw.snake_part6_x -= 20\n if on_draw.snake_part6_x <= 280:\n on_draw.snake_part6_x += 20\n\n\n# Sets a initial value to on_draw.snake_part_x(this is the starting position of the snake)\non_draw.snake_part_x = 570\non_draw.snake_part_y = 130\non_draw.snake_part2_x = 590\non_draw.snake_part2_y = 130\non_draw.snake_part3_x = 610\non_draw.snake_part3_y = 130\non_draw.snake_part4_x = 630\non_draw.snake_part4_y = 130\non_draw.snake_part5_x = 650\non_draw.snake_part5_y = 130\non_draw.snake_part6_x = 670\non_draw.snake_part6_y = 130\n\n\ndef main():\n \"\"\" Main code the calls all the rest of the code \"\"\"\n arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, \"Snake.exe\")\n # Set the window background colour\n arcade.set_background_color(light_green)\n\n # Calls the on_draw method every 1/3(20 seconds) of a second\n arcade.schedule(on_draw, 1/3)\n # Keeps the window open until closed by the user\n arcade.run()\n\n\nmain()\n",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [re_path('^rest/', include(
'core_federated_search_app.rest.urls'))]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from django.conf.urls import include
from django.urls import re_path
urlpatterns = [re_path('^rest/', include(
'core_federated_search_app.rest.urls'))]
<|reserved_special_token_1|>
""" Url router for the federated search application
"""
from django.conf.urls import include
from django.urls import re_path
urlpatterns = [
re_path(r"^rest/", include("core_federated_search_app.rest.urls")),
]
|
flexible
|
{
"blob_id": "6903584b27c0720cebf42ed39968b18f0f67f796",
"index": 6167,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [re_path('^rest/', include(\n 'core_federated_search_app.rest.urls'))]\n",
"step-3": "<mask token>\nfrom django.conf.urls import include\nfrom django.urls import re_path\nurlpatterns = [re_path('^rest/', include(\n 'core_federated_search_app.rest.urls'))]\n",
"step-4": "\"\"\" Url router for the federated search application\n\"\"\"\nfrom django.conf.urls import include\nfrom django.urls import re_path\n\nurlpatterns = [\n re_path(r\"^rest/\", include(\"core_federated_search_app.rest.urls\")),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def task1():
print('Task 1 is starting...')
print('Task 1 is waiting to acquire Lock A')
with lock_a:
print('Task 1 has acquired Lock A')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is waiting to acquire Lock B')
with lock_b:
print('Task 1 has acquired Lock B')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is releasing both locks')
def task2():
print('Task 2 is starting...')
print('Task 2 is waiting to acquire Lock B')
with lock_b:
print('Task 2 has acquired Lock B')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is waiting to acquire Lock A')
with lock_a:
print('Task 2 has acquired Lock A')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is releasing both locks')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def task1():
print('Task 1 is starting...')
print('Task 1 is waiting to acquire Lock A')
with lock_a:
print('Task 1 has acquired Lock A')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is waiting to acquire Lock B')
with lock_b:
print('Task 1 has acquired Lock B')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is releasing both locks')
def task2():
print('Task 2 is starting...')
print('Task 2 is waiting to acquire Lock B')
with lock_b:
print('Task 2 has acquired Lock B')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is waiting to acquire Lock A')
with lock_a:
print('Task 2 has acquired Lock A')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is releasing both locks')
if __name__ == '__main__':
t1 = threading.Thread(target=task1)
t2 = threading.Thread(target=task2)
t1.start()
t2.start()
t1.join()
t2.join()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
lock_a = threading.Lock()
lock_b = threading.Lock()
def task1():
print('Task 1 is starting...')
print('Task 1 is waiting to acquire Lock A')
with lock_a:
print('Task 1 has acquired Lock A')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is waiting to acquire Lock B')
with lock_b:
print('Task 1 has acquired Lock B')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is releasing both locks')
def task2():
print('Task 2 is starting...')
print('Task 2 is waiting to acquire Lock B')
with lock_b:
print('Task 2 has acquired Lock B')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is waiting to acquire Lock A')
with lock_a:
print('Task 2 has acquired Lock A')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is releasing both locks')
if __name__ == '__main__':
t1 = threading.Thread(target=task1)
t2 = threading.Thread(target=task2)
t1.start()
t2.start()
t1.join()
t2.join()
<|reserved_special_token_1|>
import time
import threading
lock_a = threading.Lock()
lock_b = threading.Lock()
def task1():
print('Task 1 is starting...')
print('Task 1 is waiting to acquire Lock A')
with lock_a:
print('Task 1 has acquired Lock A')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is waiting to acquire Lock B')
with lock_b:
print('Task 1 has acquired Lock B')
print('Task 1 is doing some calculations')
time.sleep(2)
print('Task 1 is releasing both locks')
def task2():
print('Task 2 is starting...')
print('Task 2 is waiting to acquire Lock B')
with lock_b:
print('Task 2 has acquired Lock B')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is waiting to acquire Lock A')
with lock_a:
print('Task 2 has acquired Lock A')
print('Task 2 is doing some calculations')
time.sleep(5)
print('Task 2 is releasing both locks')
if __name__ == '__main__':
t1 = threading.Thread(target=task1)
t2 = threading.Thread(target=task2)
t1.start()
t2.start()
t1.join()
t2.join()
|
flexible
|
{
"blob_id": "c7d8a67587a6ca01c23ed922faabbaca8bbaf337",
"index": 6307,
"step-1": "<mask token>\n\n\ndef task1():\n print('Task 1 is starting...')\n print('Task 1 is waiting to acquire Lock A')\n with lock_a:\n print('Task 1 has acquired Lock A')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is waiting to acquire Lock B')\n with lock_b:\n print('Task 1 has acquired Lock B')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is releasing both locks')\n\n\ndef task2():\n print('Task 2 is starting...')\n print('Task 2 is waiting to acquire Lock B')\n with lock_b:\n print('Task 2 has acquired Lock B')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is waiting to acquire Lock A')\n with lock_a:\n print('Task 2 has acquired Lock A')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is releasing both locks')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef task1():\n print('Task 1 is starting...')\n print('Task 1 is waiting to acquire Lock A')\n with lock_a:\n print('Task 1 has acquired Lock A')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is waiting to acquire Lock B')\n with lock_b:\n print('Task 1 has acquired Lock B')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is releasing both locks')\n\n\ndef task2():\n print('Task 2 is starting...')\n print('Task 2 is waiting to acquire Lock B')\n with lock_b:\n print('Task 2 has acquired Lock B')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is waiting to acquire Lock A')\n with lock_a:\n print('Task 2 has acquired Lock A')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is releasing both locks')\n\n\nif __name__ == '__main__':\n t1 = threading.Thread(target=task1)\n t2 = threading.Thread(target=task2)\n t1.start()\n t2.start()\n t1.join()\n t2.join()\n",
"step-3": "<mask token>\nlock_a = threading.Lock()\nlock_b = threading.Lock()\n\n\ndef task1():\n print('Task 1 is starting...')\n print('Task 1 is waiting to acquire Lock A')\n with lock_a:\n print('Task 1 has acquired Lock A')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is waiting to acquire Lock B')\n with lock_b:\n print('Task 1 has acquired Lock B')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is releasing both locks')\n\n\ndef task2():\n print('Task 2 is starting...')\n print('Task 2 is waiting to acquire Lock B')\n with lock_b:\n print('Task 2 has acquired Lock B')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is waiting to acquire Lock A')\n with lock_a:\n print('Task 2 has acquired Lock A')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is releasing both locks')\n\n\nif __name__ == '__main__':\n t1 = threading.Thread(target=task1)\n t2 = threading.Thread(target=task2)\n t1.start()\n t2.start()\n t1.join()\n t2.join()\n",
"step-4": "import time\nimport threading\nlock_a = threading.Lock()\nlock_b = threading.Lock()\n\n\ndef task1():\n print('Task 1 is starting...')\n print('Task 1 is waiting to acquire Lock A')\n with lock_a:\n print('Task 1 has acquired Lock A')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is waiting to acquire Lock B')\n with lock_b:\n print('Task 1 has acquired Lock B')\n print('Task 1 is doing some calculations')\n time.sleep(2)\n print('Task 1 is releasing both locks')\n\n\ndef task2():\n print('Task 2 is starting...')\n print('Task 2 is waiting to acquire Lock B')\n with lock_b:\n print('Task 2 has acquired Lock B')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is waiting to acquire Lock A')\n with lock_a:\n print('Task 2 has acquired Lock A')\n print('Task 2 is doing some calculations')\n time.sleep(5)\n print('Task 2 is releasing both locks')\n\n\nif __name__ == '__main__':\n t1 = threading.Thread(target=task1)\n t2 = threading.Thread(target=task2)\n t1.start()\n t2.start()\n t1.join()\n t2.join()\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def decision(statement, player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
decision = 'play again' if statement == 1 else 'return to main menu'
stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)
if statement == 1:
stdscr.addstr(5, 10, 'Do you want to play again?')
stdscr.addstr(7, 10, 'Press y for yes and n for no')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 110:
return 0
elif choice == 121:
return 1
elif statement == 0:
stdscr.addstr(5, 5, 'You will be taken back to the main menu.')
return -1
def death(player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'You have died. What do you want to do?')
stdscr.addstr(5, 10, 'Play Again - Press p')
stdscr.addstr(7, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
elif choice == 112:
return 1
return -1
<|reserved_special_token_0|>
def start():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'Waiting for 2nd player')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
<|reserved_special_token_0|>
def menu(name):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, name + ', what do you want to do?')
stdscr.addstr(5, 10, 'Play new game - Press 1')
stdscr.addstr(7, 10, 'Exit - Press 4')
stdscr.refresh()
choice = stdscr.getch()
stdscr.clear()
stdscr.border(0)
if choice == 49:
return 1
elif choice == 52:
return 0
return 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def decision(statement, player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
decision = 'play again' if statement == 1 else 'return to main menu'
stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)
if statement == 1:
stdscr.addstr(5, 10, 'Do you want to play again?')
stdscr.addstr(7, 10, 'Press y for yes and n for no')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 110:
return 0
elif choice == 121:
return 1
elif statement == 0:
stdscr.addstr(5, 5, 'You will be taken back to the main menu.')
return -1
def death(player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'You have died. What do you want to do?')
stdscr.addstr(5, 10, 'Play Again - Press p')
stdscr.addstr(7, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
elif choice == 112:
return 1
return -1
def join():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, 'Pick a player to join')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
def start():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'Waiting for 2nd player')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
<|reserved_special_token_0|>
def menu(name):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, name + ', what do you want to do?')
stdscr.addstr(5, 10, 'Play new game - Press 1')
stdscr.addstr(7, 10, 'Exit - Press 4')
stdscr.refresh()
choice = stdscr.getch()
stdscr.clear()
stdscr.border(0)
if choice == 49:
return 1
elif choice == 52:
return 0
return 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
HEIGHT = 24
WIDTH = 80
TESTING = True
curses.initscr()
stdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)
curses.noecho()
stdscr.keypad(1)
def decision(statement, player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
decision = 'play again' if statement == 1 else 'return to main menu'
stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)
if statement == 1:
stdscr.addstr(5, 10, 'Do you want to play again?')
stdscr.addstr(7, 10, 'Press y for yes and n for no')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 110:
return 0
elif choice == 121:
return 1
elif statement == 0:
stdscr.addstr(5, 5, 'You will be taken back to the main menu.')
return -1
def death(player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'You have died. What do you want to do?')
stdscr.addstr(5, 10, 'Play Again - Press p')
stdscr.addstr(7, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
elif choice == 112:
return 1
return -1
def join():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, 'Pick a player to join')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
def start():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'Waiting for 2nd player')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
def pause():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, 'Paused. What do you want to do?')
stdscr.addstr(5, 10, 'Continue - Press c')
stdscr.addstr(7, 10, 'Swap Controls - Press s')
stdscr.addstr(9, 10, 'End Game - Press e')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 99:
return 1
elif choice == 115:
return 0
elif choice == 101:
return -1
return 1
def menu(name):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, name + ', what do you want to do?')
stdscr.addstr(5, 10, 'Play new game - Press 1')
stdscr.addstr(7, 10, 'Exit - Press 4')
stdscr.refresh()
choice = stdscr.getch()
stdscr.clear()
stdscr.border(0)
if choice == 49:
return 1
elif choice == 52:
return 0
return 1
play = menu('HOPPY')
c = 1
x = 25
y = 12
player = 0
while play:
if TESTING:
stdscr.clear()
stdscr.border(0)
stdscr.addstr(y, x, str(c))
stdscr.timeout(100)
button = stdscr.getch()
if button != -1:
if button == curses.KEY_RIGHT and player == 0:
x += 1
if x >= WIDTH - 1:
x -= 1
elif button == curses.KEY_LEFT and player == 0:
x -= 1
if x <= 0:
x += 1
elif button == curses.KEY_UP and player == 1:
y -= 1
if y <= 0:
y += 1
elif button == curses.KEY_DOWN and player == 1:
y += 1
if y >= HEIGHT - 1:
y -= 1
if button == 112:
cont = pause()
if cont == -1:
c = 1
player = 0
play = menu('HOPPY')
elif cont == 0:
player = (player + 1) % 2
if TESTING:
c += 1
stdscr.keypad(0)
curses.echo()
curses.endwin()
<|reserved_special_token_1|>
import curses
HEIGHT = 24
WIDTH = 80
TESTING = True
curses.initscr()
stdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)
curses.noecho()
stdscr.keypad(1)
def decision(statement, player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
decision = 'play again' if statement == 1 else 'return to main menu'
stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)
if statement == 1:
stdscr.addstr(5, 10, 'Do you want to play again?')
stdscr.addstr(7, 10, 'Press y for yes and n for no')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 110:
return 0
elif choice == 121:
return 1
elif statement == 0:
stdscr.addstr(5, 5, 'You will be taken back to the main menu.')
return -1
def death(player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'You have died. What do you want to do?')
stdscr.addstr(5, 10, 'Play Again - Press p')
stdscr.addstr(7, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
elif choice == 112:
return 1
return -1
def join():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, 'Pick a player to join')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
def start():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, 'Waiting for 2nd player')
stdscr.addstr(5, 10, 'Return to Main Menu - Press r')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114:
return 0
return -1
def pause():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, 'Paused. What do you want to do?')
stdscr.addstr(5, 10, 'Continue - Press c')
stdscr.addstr(7, 10, 'Swap Controls - Press s')
stdscr.addstr(9, 10, 'End Game - Press e')
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 99:
return 1
elif choice == 115:
return 0
elif choice == 101:
return -1
return 1
def menu(name):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, name + ', what do you want to do?')
stdscr.addstr(5, 10, 'Play new game - Press 1')
stdscr.addstr(7, 10, 'Exit - Press 4')
stdscr.refresh()
choice = stdscr.getch()
stdscr.clear()
stdscr.border(0)
if choice == 49:
return 1
elif choice == 52:
return 0
return 1
play = menu('HOPPY')
c = 1
x = 25
y = 12
player = 0
while play:
if TESTING:
stdscr.clear()
stdscr.border(0)
stdscr.addstr(y, x, str(c))
stdscr.timeout(100)
button = stdscr.getch()
if button != -1:
if button == curses.KEY_RIGHT and player == 0:
x += 1
if x >= WIDTH - 1:
x -= 1
elif button == curses.KEY_LEFT and player == 0:
x -= 1
if x <= 0:
x += 1
elif button == curses.KEY_UP and player == 1:
y -= 1
if y <= 0:
y += 1
elif button == curses.KEY_DOWN and player == 1:
y += 1
if y >= HEIGHT - 1:
y -= 1
if button == 112:
cont = pause()
if cont == -1:
c = 1
player = 0
play = menu('HOPPY')
elif cont == 0:
player = (player + 1) % 2
if TESTING:
c += 1
stdscr.keypad(0)
curses.echo()
curses.endwin()
<|reserved_special_token_1|>
#!/usr/bin/python3
#start up curses
import curses
HEIGHT = 24
WIDTH = 80
TESTING = True
curses.initscr()
stdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)
curses.noecho() #don't echo keys
stdscr.keypad(1)
#function for displaying other players decision
#statement is the number of the other player's death funciton returned
#player is the other player's name
#returns 0 if other player choose to play again and this player doesn't
#want to, if player does want to then returns 1
#returns -1 if other player choose to quit to main menu
def decision(statement, player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
decision = "play again" if statement == 1 else "return to main menu"
stdscr.addstr(3, 5, "Your Partner has decided to " + decision)
if statement == 1:
stdscr.addstr(5, 10, "Do you want to play again?")
stdscr.addstr(7, 10, "Press y for yes and n for no")
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 110: #choice is n
return 0
elif choice == 121: #choice is y
return 1
elif statement == 0:
stdscr.addstr(5, 5, "You will be taken back to the main menu.")
return -1
#funciton for waiting screen for starting a game as player 1
#takes other player's name
#returns 0 if player wants to return to main menu
#returns 1 if player wants to play again
#returns -1 if while loop is exited (which shouldn't happen)
def death(player):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, "You have died. What do you want to do?")
stdscr.addstr(5, 10, "Play Again - Press p")
stdscr.addstr(7, 10, "Return to Main Menu - Press r")
stdscr.refresh()
while True:
#if other player already made a decision
# statement = other player's decision
# choice = decision(statement, player)
# if statement == 1:
# send choice to other player
# if choice == 0 or choice == -1:
# return 0
# elif choice == 1:
# return 1
choice = stdscr.getch()
#send choice to other player
if choice == 114: #choice is r
return 0
elif choice == 112: #choice is p
#choice = get decision back from other player
#if choice == 1:
#print message saying other player agrees to play again
return 1
#elif choice == 0
#print message saying other player quit to main menu
#return 0
return -1
#funciton for waiting screen for starting a game as player 1
#returns 0 if player wants to return to main menu
#returns 1 if a 1st player is chosen
#returns -1 if while loop is exited (which shouldn't happen)
def join():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, "Pick a player to join")
#get list of available players from the server
#loop through them all and display them (maybe only the first 10)
#make a counter for the addstr y value and increment by 2 each loop
stdscr.addstr(5, 10, "Return to Main Menu - Press r")
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114: #choice is r
return 0
#elif check if a first player has been chosen
# send this player's name to first player
# get back first player's name
# return 1
return -1
#funciton for waiting screen for starting a game as player 1
#returns 0 if player wants to return to main menu
#returns 1 if a second player is chosen
#returns -1 if while loop is exited (which shouldn't happen)
def start():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(100)
stdscr.addstr(3, 5, "Waiting for 2nd player")
stdscr.addstr(5, 10, "Return to Main Menu - Press r")
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 114: #choice is r
return 0
#elif check if a second player has been chosen
# get second player's name
# send this player's name
# return 1
return -1
def pause():
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, "Paused. What do you want to do?")
stdscr.addstr(5, 10, "Continue - Press c")
stdscr.addstr(7, 10, "Swap Controls - Press s")
stdscr.addstr(9, 10, "End Game - Press e")
stdscr.refresh()
while True:
choice = stdscr.getch()
if choice == 99: #choice is c
return 1
elif choice == 115: #choice is s
return 0
elif choice == 101: #choice is e
return -1
return 1
def menu(name):
stdscr.clear()
stdscr.border(0)
stdscr.timeout(-1)
stdscr.addstr(3, 5, name + ", what do you want to do?")
stdscr.addstr(5, 10, "Play new game - Press 1")
stdscr.addstr(7, 10, "Exit - Press 4")
stdscr.refresh()
choice = stdscr.getch()
stdscr.clear()
stdscr.border(0)
if choice == 49: #choice is 1
return 1
elif choice == 52: #choice is 4
return 0
return 1
play = menu("HOPPY")
c = 1
x = 25
y = 12
player = 0
while play:
if TESTING:
stdscr.clear()
stdscr.border(0)
stdscr.addstr(y, x, str(c))
stdscr.timeout(100)
button = stdscr.getch()
if button != -1:
if button == curses.KEY_RIGHT and player == 0:
x += 1
if x >= WIDTH - 1:
x -= 1
elif button == curses.KEY_LEFT and player == 0:
x -= 1
if x <= 0:
x += 1
elif button == curses.KEY_UP and player == 1:
y -= 1
if y <= 0:
y += 1
elif button == curses.KEY_DOWN and player == 1:
y += 1
if y >= HEIGHT - 1:
y -= 1
if button == 112: #button is p
cont = pause()
if cont == -1:
c = 1
player = 0
play = menu("HOPPY")
elif cont == 0:
player = (player + 1) % 2
if TESTING:
c += 1
stdscr.keypad(0)
curses.echo()
curses.endwin()
#curse.wrapper([funciton]) sets up and exits curses for you, function is the
#code the runs in curses
#initialize curses
#curses.noecho() #don't echo keys
#curses.cbreak() or curses.raw() #react instantly to keys, raw doesn't ignore
#CTRL-Z(suspend) and CTRL-C(exit)
#stdscr.keypad(1) #read navigation key sequences for me
#deinitialize curses
#curses.nocbreak(); stdscr.keypad(0); curses.echo()
#exit curses
#curses.endwin()
|
flexible
|
{
"blob_id": "a6f03340c2f60c061977fed6807703cdaeb1b7fd",
"index": 7976,
"step-1": "<mask token>\n\n\ndef decision(statement, player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n decision = 'play again' if statement == 1 else 'return to main menu'\n stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)\n if statement == 1:\n stdscr.addstr(5, 10, 'Do you want to play again?')\n stdscr.addstr(7, 10, 'Press y for yes and n for no')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 110:\n return 0\n elif choice == 121:\n return 1\n elif statement == 0:\n stdscr.addstr(5, 5, 'You will be taken back to the main menu.')\n return -1\n\n\ndef death(player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'You have died. What do you want to do?')\n stdscr.addstr(5, 10, 'Play Again - Press p')\n stdscr.addstr(7, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n elif choice == 112:\n return 1\n return -1\n\n\n<mask token>\n\n\ndef start():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'Waiting for 2nd player')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\n<mask token>\n\n\ndef menu(name):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, name + ', what do you want to do?')\n stdscr.addstr(5, 10, 'Play new game - Press 1')\n stdscr.addstr(7, 10, 'Exit - Press 4')\n stdscr.refresh()\n choice = stdscr.getch()\n stdscr.clear()\n stdscr.border(0)\n if choice == 49:\n return 1\n elif choice == 52:\n return 0\n return 1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef decision(statement, player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n decision = 'play again' if statement == 1 else 'return to main menu'\n stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)\n if statement == 1:\n stdscr.addstr(5, 10, 'Do you want to play again?')\n stdscr.addstr(7, 10, 'Press y for yes and n for no')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 110:\n return 0\n elif choice == 121:\n return 1\n elif statement == 0:\n stdscr.addstr(5, 5, 'You will be taken back to the main menu.')\n return -1\n\n\ndef death(player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'You have died. What do you want to do?')\n stdscr.addstr(5, 10, 'Play Again - Press p')\n stdscr.addstr(7, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n elif choice == 112:\n return 1\n return -1\n\n\ndef join():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, 'Pick a player to join')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\ndef start():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'Waiting for 2nd player')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\n<mask token>\n\n\ndef menu(name):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, name + ', what do you want to do?')\n stdscr.addstr(5, 10, 'Play new game - Press 1')\n stdscr.addstr(7, 10, 'Exit - Press 4')\n stdscr.refresh()\n choice = stdscr.getch()\n stdscr.clear()\n stdscr.border(0)\n if choice == 49:\n return 1\n elif choice == 52:\n return 0\n return 1\n\n\n<mask token>\n",
"step-3": "<mask token>\nHEIGHT = 24\nWIDTH = 80\nTESTING = True\ncurses.initscr()\nstdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)\ncurses.noecho()\nstdscr.keypad(1)\n\n\ndef decision(statement, player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n decision = 'play again' if statement == 1 else 'return to main menu'\n stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)\n if statement == 1:\n stdscr.addstr(5, 10, 'Do you want to play again?')\n stdscr.addstr(7, 10, 'Press y for yes and n for no')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 110:\n return 0\n elif choice == 121:\n return 1\n elif statement == 0:\n stdscr.addstr(5, 5, 'You will be taken back to the main menu.')\n return -1\n\n\ndef death(player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'You have died. What do you want to do?')\n stdscr.addstr(5, 10, 'Play Again - Press p')\n stdscr.addstr(7, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n elif choice == 112:\n return 1\n return -1\n\n\ndef join():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, 'Pick a player to join')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\ndef start():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'Waiting for 2nd player')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\ndef pause():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, 'Paused. 
What do you want to do?')\n stdscr.addstr(5, 10, 'Continue - Press c')\n stdscr.addstr(7, 10, 'Swap Controls - Press s')\n stdscr.addstr(9, 10, 'End Game - Press e')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 99:\n return 1\n elif choice == 115:\n return 0\n elif choice == 101:\n return -1\n return 1\n\n\ndef menu(name):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, name + ', what do you want to do?')\n stdscr.addstr(5, 10, 'Play new game - Press 1')\n stdscr.addstr(7, 10, 'Exit - Press 4')\n stdscr.refresh()\n choice = stdscr.getch()\n stdscr.clear()\n stdscr.border(0)\n if choice == 49:\n return 1\n elif choice == 52:\n return 0\n return 1\n\n\nplay = menu('HOPPY')\nc = 1\nx = 25\ny = 12\nplayer = 0\nwhile play:\n if TESTING:\n stdscr.clear()\n stdscr.border(0)\n stdscr.addstr(y, x, str(c))\n stdscr.timeout(100)\n button = stdscr.getch()\n if button != -1:\n if button == curses.KEY_RIGHT and player == 0:\n x += 1\n if x >= WIDTH - 1:\n x -= 1\n elif button == curses.KEY_LEFT and player == 0:\n x -= 1\n if x <= 0:\n x += 1\n elif button == curses.KEY_UP and player == 1:\n y -= 1\n if y <= 0:\n y += 1\n elif button == curses.KEY_DOWN and player == 1:\n y += 1\n if y >= HEIGHT - 1:\n y -= 1\n if button == 112:\n cont = pause()\n if cont == -1:\n c = 1\n player = 0\n play = menu('HOPPY')\n elif cont == 0:\n player = (player + 1) % 2\n if TESTING:\n c += 1\nstdscr.keypad(0)\ncurses.echo()\ncurses.endwin()\n",
"step-4": "import curses\nHEIGHT = 24\nWIDTH = 80\nTESTING = True\ncurses.initscr()\nstdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)\ncurses.noecho()\nstdscr.keypad(1)\n\n\ndef decision(statement, player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n decision = 'play again' if statement == 1 else 'return to main menu'\n stdscr.addstr(3, 5, 'Your Partner has decided to ' + decision)\n if statement == 1:\n stdscr.addstr(5, 10, 'Do you want to play again?')\n stdscr.addstr(7, 10, 'Press y for yes and n for no')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 110:\n return 0\n elif choice == 121:\n return 1\n elif statement == 0:\n stdscr.addstr(5, 5, 'You will be taken back to the main menu.')\n return -1\n\n\ndef death(player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'You have died. What do you want to do?')\n stdscr.addstr(5, 10, 'Play Again - Press p')\n stdscr.addstr(7, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n elif choice == 112:\n return 1\n return -1\n\n\ndef join():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, 'Pick a player to join')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\ndef start():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, 'Waiting for 2nd player')\n stdscr.addstr(5, 10, 'Return to Main Menu - Press r')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 114:\n return 0\n return -1\n\n\ndef pause():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, 'Paused. 
What do you want to do?')\n stdscr.addstr(5, 10, 'Continue - Press c')\n stdscr.addstr(7, 10, 'Swap Controls - Press s')\n stdscr.addstr(9, 10, 'End Game - Press e')\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 99:\n return 1\n elif choice == 115:\n return 0\n elif choice == 101:\n return -1\n return 1\n\n\ndef menu(name):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, name + ', what do you want to do?')\n stdscr.addstr(5, 10, 'Play new game - Press 1')\n stdscr.addstr(7, 10, 'Exit - Press 4')\n stdscr.refresh()\n choice = stdscr.getch()\n stdscr.clear()\n stdscr.border(0)\n if choice == 49:\n return 1\n elif choice == 52:\n return 0\n return 1\n\n\nplay = menu('HOPPY')\nc = 1\nx = 25\ny = 12\nplayer = 0\nwhile play:\n if TESTING:\n stdscr.clear()\n stdscr.border(0)\n stdscr.addstr(y, x, str(c))\n stdscr.timeout(100)\n button = stdscr.getch()\n if button != -1:\n if button == curses.KEY_RIGHT and player == 0:\n x += 1\n if x >= WIDTH - 1:\n x -= 1\n elif button == curses.KEY_LEFT and player == 0:\n x -= 1\n if x <= 0:\n x += 1\n elif button == curses.KEY_UP and player == 1:\n y -= 1\n if y <= 0:\n y += 1\n elif button == curses.KEY_DOWN and player == 1:\n y += 1\n if y >= HEIGHT - 1:\n y -= 1\n if button == 112:\n cont = pause()\n if cont == -1:\n c = 1\n player = 0\n play = menu('HOPPY')\n elif cont == 0:\n player = (player + 1) % 2\n if TESTING:\n c += 1\nstdscr.keypad(0)\ncurses.echo()\ncurses.endwin()\n",
"step-5": "#!/usr/bin/python3\n\n#start up curses\nimport curses\n\nHEIGHT = 24\nWIDTH = 80\nTESTING = True\n\ncurses.initscr()\nstdscr = curses.newwin(HEIGHT, WIDTH, 0, 0)\ncurses.noecho() #don't echo keys\nstdscr.keypad(1)\n\n#function for displaying other players decision\n#statement is the number of the other player's death funciton returned\n#player is the other player's name\n#returns 0 if other player choose to play again and this player doesn't\n#want to, if player does want to then returns 1\n#returns -1 if other player choose to quit to main menu\ndef decision(statement, player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n decision = \"play again\" if statement == 1 else \"return to main menu\"\n stdscr.addstr(3, 5, \"Your Partner has decided to \" + decision)\n if statement == 1:\n stdscr.addstr(5, 10, \"Do you want to play again?\")\n stdscr.addstr(7, 10, \"Press y for yes and n for no\")\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 110: #choice is n\n return 0\n elif choice == 121: #choice is y\n return 1\n elif statement == 0:\n stdscr.addstr(5, 5, \"You will be taken back to the main menu.\")\n return -1\n\n#funciton for waiting screen for starting a game as player 1\n#takes other player's name\n#returns 0 if player wants to return to main menu\n#returns 1 if player wants to play again\n#returns -1 if while loop is exited (which shouldn't happen)\ndef death(player):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, \"You have died. 
What do you want to do?\")\n stdscr.addstr(5, 10, \"Play Again - Press p\")\n stdscr.addstr(7, 10, \"Return to Main Menu - Press r\")\n stdscr.refresh()\n while True:\n #if other player already made a decision\n # statement = other player's decision\n # choice = decision(statement, player)\n # if statement == 1:\n # send choice to other player\n # if choice == 0 or choice == -1:\n # return 0\n # elif choice == 1:\n # return 1\n choice = stdscr.getch()\n #send choice to other player\n if choice == 114: #choice is r\n return 0\n elif choice == 112: #choice is p\n #choice = get decision back from other player\n #if choice == 1:\n #print message saying other player agrees to play again\n return 1\n #elif choice == 0\n #print message saying other player quit to main menu\n #return 0\n\n return -1\n\n#funciton for waiting screen for starting a game as player 1\n#returns 0 if player wants to return to main menu\n#returns 1 if a 1st player is chosen\n#returns -1 if while loop is exited (which shouldn't happen)\ndef join():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, \"Pick a player to join\")\n #get list of available players from the server\n #loop through them all and display them (maybe only the first 10)\n #make a counter for the addstr y value and increment by 2 each loop\n stdscr.addstr(5, 10, \"Return to Main Menu - Press r\")\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n\n if choice == 114: #choice is r\n return 0\n #elif check if a first player has been chosen\n # send this player's name to first player\n # get back first player's name\n # return 1\n\n return -1\n\n#funciton for waiting screen for starting a game as player 1\n#returns 0 if player wants to return to main menu\n#returns 1 if a second player is chosen\n#returns -1 if while loop is exited (which shouldn't happen)\ndef start():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(100)\n stdscr.addstr(3, 5, \"Waiting for 2nd player\")\n stdscr.addstr(5, 
10, \"Return to Main Menu - Press r\")\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n\n if choice == 114: #choice is r\n return 0\n #elif check if a second player has been chosen\n # get second player's name\n # send this player's name\n # return 1\n\n return -1\n\ndef pause():\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, \"Paused. What do you want to do?\")\n stdscr.addstr(5, 10, \"Continue - Press c\")\n stdscr.addstr(7, 10, \"Swap Controls - Press s\")\n stdscr.addstr(9, 10, \"End Game - Press e\")\n stdscr.refresh()\n while True:\n choice = stdscr.getch()\n if choice == 99: #choice is c\n return 1\n elif choice == 115: #choice is s\n return 0\n elif choice == 101: #choice is e\n return -1\n \n return 1\n\ndef menu(name):\n stdscr.clear()\n stdscr.border(0)\n stdscr.timeout(-1)\n stdscr.addstr(3, 5, name + \", what do you want to do?\")\n stdscr.addstr(5, 10, \"Play new game - Press 1\")\n stdscr.addstr(7, 10, \"Exit - Press 4\")\n stdscr.refresh()\n choice = stdscr.getch()\n\n stdscr.clear()\n stdscr.border(0)\n if choice == 49: #choice is 1\n return 1\n elif choice == 52: #choice is 4\n return 0\n \n return 1\n\nplay = menu(\"HOPPY\")\nc = 1\nx = 25\ny = 12\nplayer = 0\nwhile play:\n if TESTING:\n stdscr.clear()\n stdscr.border(0)\n stdscr.addstr(y, x, str(c))\n\n stdscr.timeout(100)\n button = stdscr.getch()\n if button != -1:\n if button == curses.KEY_RIGHT and player == 0:\n x += 1\n if x >= WIDTH - 1:\n x -= 1\n elif button == curses.KEY_LEFT and player == 0:\n x -= 1\n if x <= 0:\n x += 1\n elif button == curses.KEY_UP and player == 1:\n y -= 1\n if y <= 0:\n y += 1\n elif button == curses.KEY_DOWN and player == 1:\n y += 1\n if y >= HEIGHT - 1:\n y -= 1\n if button == 112: #button is p\n cont = pause()\n if cont == -1:\n c = 1\n player = 0\n play = menu(\"HOPPY\")\n elif cont == 0:\n player = (player + 1) % 2\n if TESTING:\n c += 
1\nstdscr.keypad(0)\ncurses.echo()\ncurses.endwin()\n\n\n\n\n\n\n#curse.wrapper([funciton]) sets up and exits curses for you, function is the\n #code the runs in curses\n\n#initialize curses\n#curses.noecho() #don't echo keys\n#curses.cbreak() or curses.raw() #react instantly to keys, raw doesn't ignore\n #CTRL-Z(suspend) and CTRL-C(exit)\n#stdscr.keypad(1) #read navigation key sequences for me\n\n\n#deinitialize curses\n#curses.nocbreak(); stdscr.keypad(0); curses.echo()\n\n#exit curses\n#curses.endwin()\n",
"step-ids": [
4,
5,
8,
9,
10
]
}
|
[
4,
5,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [path('firstModel', FirstModelView.as_view())]
<|reserved_special_token_1|>
from django.urls import path
from .views import FirstModelView
urlpatterns = [path('firstModel', FirstModelView.as_view())]
|
flexible
|
{
"blob_id": "4efd22d132accd0f5945a0c911b73b67654b92e4",
"index": 9358,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('firstModel', FirstModelView.as_view())]\n",
"step-3": "from django.urls import path\nfrom .views import FirstModelView\nurlpatterns = [path('firstModel', FirstModelView.as_view())]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with tf.Session() as ss:
ss.run(init)
for step in range(201):
ss.run(train)
if step % 10 == 0:
print(step, ss.run([k, b]))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
x_data = np.random.rand(100)
y_data = x_data * 10 + 5
b = tf.Variable(0.0)
k = tf.Variable(0.0)
y = k * x_data + b
loss = tf.reduce_mean(tf.square(y_data - y))
optimizer = tf.train.GradientDescentOptimizer(0.2)
train = optimizer.minimize(loss)
init = tf.global_variables_initializer()
with tf.Session() as ss:
ss.run(init)
for step in range(201):
ss.run(train)
if step % 10 == 0:
print(step, ss.run([k, b]))
<|reserved_special_token_1|>
import numpy as np
import tensorflow as tf
x_data = np.random.rand(100)
y_data = x_data * 10 + 5
b = tf.Variable(0.0)
k = tf.Variable(0.0)
y = k * x_data + b
loss = tf.reduce_mean(tf.square(y_data - y))
optimizer = tf.train.GradientDescentOptimizer(0.2)
train = optimizer.minimize(loss)
init = tf.global_variables_initializer()
with tf.Session() as ss:
ss.run(init)
for step in range(201):
ss.run(train)
if step % 10 == 0:
print(step, ss.run([k, b]))
<|reserved_special_token_1|>
import numpy as np
import tensorflow as tf
x_data = np.random.rand(100)
y_data = x_data * 10 + 5
#构造线性模型
b = tf.Variable(0.)
k = tf.Variable(0.)
y=k*x_data+b
#二次代价函数 square求平方
loss= tf.reduce_mean(tf.square(y_data-y))
#定义一个梯度下降法来进行训练的优化器
optimizer=tf.train.GradientDescentOptimizer(.2)
train=optimizer.minimize(loss)
init=tf.global_variables_initializer()
with tf.Session() as ss:
ss.run(init)
for step in range(201):
ss.run(train)
if step %10==0:
print(step,ss.run([k,b]))
|
flexible
|
{
"blob_id": "ba7f66a0f9cf1028add778315033d596e10d6f16",
"index": 3197,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith tf.Session() as ss:\n ss.run(init)\n for step in range(201):\n ss.run(train)\n if step % 10 == 0:\n print(step, ss.run([k, b]))\n",
"step-3": "<mask token>\nx_data = np.random.rand(100)\ny_data = x_data * 10 + 5\nb = tf.Variable(0.0)\nk = tf.Variable(0.0)\ny = k * x_data + b\nloss = tf.reduce_mean(tf.square(y_data - y))\noptimizer = tf.train.GradientDescentOptimizer(0.2)\ntrain = optimizer.minimize(loss)\ninit = tf.global_variables_initializer()\nwith tf.Session() as ss:\n ss.run(init)\n for step in range(201):\n ss.run(train)\n if step % 10 == 0:\n print(step, ss.run([k, b]))\n",
"step-4": "import numpy as np\nimport tensorflow as tf\nx_data = np.random.rand(100)\ny_data = x_data * 10 + 5\nb = tf.Variable(0.0)\nk = tf.Variable(0.0)\ny = k * x_data + b\nloss = tf.reduce_mean(tf.square(y_data - y))\noptimizer = tf.train.GradientDescentOptimizer(0.2)\ntrain = optimizer.minimize(loss)\ninit = tf.global_variables_initializer()\nwith tf.Session() as ss:\n ss.run(init)\n for step in range(201):\n ss.run(train)\n if step % 10 == 0:\n print(step, ss.run([k, b]))\n",
"step-5": "import numpy as np\nimport tensorflow as tf\n\nx_data = np.random.rand(100)\ny_data = x_data * 10 + 5\n\n#构造线性模型\nb = tf.Variable(0.)\nk = tf.Variable(0.)\ny=k*x_data+b\n\n\n#二次代价函数 square求平方\nloss= tf.reduce_mean(tf.square(y_data-y))\n\n#定义一个梯度下降法来进行训练的优化器\n\noptimizer=tf.train.GradientDescentOptimizer(.2)\n\ntrain=optimizer.minimize(loss)\n\ninit=tf.global_variables_initializer()\n\nwith tf.Session() as ss:\n ss.run(init)\n for step in range(201):\n ss.run(train)\n if step %10==0:\n print(step,ss.run([k,b]))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Node:
def __init__(self, value, next=None):
self.value = value
self.next = next
def __str__(self):
values = []
iter = self
while iter != None:
values.append(iter.value)
iter = iter.next
return ' -> '.join(values)
@staticmethod
def makelist(values):
node = None
for i in range(len(values) - 1, -1, -1):
node = Node(values[i], node)
return node
def reverse(node, s, f):
dummy = Node(0, node)
iter = node
start = dummy
end = node
rstart = node
rend = node
i = 1
if s == f:
return node
while i < s:
start = iter
if iter != None:
iter = iter.next
else:
return node
i += 1
rstart = iter
prev = iter
if iter == None:
return node
next = iter.next
while i < f:
curr = next
if next != None:
next = next.next
else:
return node
curr.next = prev
prev = curr
i += 1
rend = prev
end = next
start.next = rend
rstart.next = end
return dummy.next
values = input('Enter a list: ').split(',')
s, f = map(lambda x: int(x), input('Enter start and finish: ').split(','))
node = Node.makelist(values)
print(node)
print(reverse(node, s, f))
|
normal
|
{
"blob_id": "599310cfd05be28445535bc72251128ed72a9069",
"index": 4372,
"step-1": "class Node:\n\n def __init__(self, value, next=None):\n self.value = value\n self.next = next\n <mask token>\n\n @staticmethod\n def makelist(values):\n node = None\n for i in range(len(values) - 1, -1, -1):\n node = Node(values[i], node)\n return node\n\n\n<mask token>\n",
"step-2": "class Node:\n\n def __init__(self, value, next=None):\n self.value = value\n self.next = next\n\n def __str__(self):\n values = []\n iter = self\n while iter != None:\n values.append(iter.value)\n iter = iter.next\n return ' -> '.join(values)\n\n @staticmethod\n def makelist(values):\n node = None\n for i in range(len(values) - 1, -1, -1):\n node = Node(values[i], node)\n return node\n\n\n<mask token>\n",
"step-3": "class Node:\n\n def __init__(self, value, next=None):\n self.value = value\n self.next = next\n\n def __str__(self):\n values = []\n iter = self\n while iter != None:\n values.append(iter.value)\n iter = iter.next\n return ' -> '.join(values)\n\n @staticmethod\n def makelist(values):\n node = None\n for i in range(len(values) - 1, -1, -1):\n node = Node(values[i], node)\n return node\n\n\ndef reverse(node, s, f):\n dummy = Node(0, node)\n iter = node\n start = dummy\n end = node\n rstart = node\n rend = node\n i = 1\n if s == f:\n return node\n while i < s:\n start = iter\n if iter != None:\n iter = iter.next\n else:\n return node\n i += 1\n rstart = iter\n prev = iter\n if iter == None:\n return node\n next = iter.next\n while i < f:\n curr = next\n if next != None:\n next = next.next\n else:\n return node\n curr.next = prev\n prev = curr\n i += 1\n rend = prev\n end = next\n start.next = rend\n rstart.next = end\n return dummy.next\n\n\n<mask token>\nprint(node)\nprint(reverse(node, s, f))\n",
"step-4": "class Node:\n\n def __init__(self, value, next=None):\n self.value = value\n self.next = next\n\n def __str__(self):\n values = []\n iter = self\n while iter != None:\n values.append(iter.value)\n iter = iter.next\n return ' -> '.join(values)\n\n @staticmethod\n def makelist(values):\n node = None\n for i in range(len(values) - 1, -1, -1):\n node = Node(values[i], node)\n return node\n\n\ndef reverse(node, s, f):\n dummy = Node(0, node)\n iter = node\n start = dummy\n end = node\n rstart = node\n rend = node\n i = 1\n if s == f:\n return node\n while i < s:\n start = iter\n if iter != None:\n iter = iter.next\n else:\n return node\n i += 1\n rstart = iter\n prev = iter\n if iter == None:\n return node\n next = iter.next\n while i < f:\n curr = next\n if next != None:\n next = next.next\n else:\n return node\n curr.next = prev\n prev = curr\n i += 1\n rend = prev\n end = next\n start.next = rend\n rstart.next = end\n return dummy.next\n\n\nvalues = input('Enter a list: ').split(',')\ns, f = map(lambda x: int(x), input('Enter start and finish: ').split(','))\nnode = Node.makelist(values)\nprint(node)\nprint(reverse(node, s, f))\n",
"step-5": null,
"step-ids": [
3,
4,
6,
7
]
}
|
[
3,
4,
6,
7
] |
<|reserved_special_token_0|>
def F(f):
"""Returns the test file on the "data" subdirectory"""
return pkg_resources.resource_filename(__name__, os.path.join('data', f))
<|reserved_special_token_0|>
def load_expected(filename):
"""Loads libsvm's svm-predict output file with probabilities"""
all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]
)
data = numpy.loadtxt(filename, dtype='float64', skiprows=1)
return all_labels, data[:, 0].astype('int64'), data[:, 1:]
<|reserved_special_token_0|>
def test_can_save():
machine = Machine(HEART_MACHINE)
tmp = tempname('.model')
machine.save(tmp)
del machine
machine = Machine(tmp)
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
os.unlink(tmp)
<|reserved_special_token_0|>
def test_can_save_arbitrary():
run_for_extension('.arbitrary')
def test_can_save_h5():
run_for_extension('.h5')
def test_can_save_hdf5():
run_for_extension('.hdf5')
<|reserved_special_token_0|>
def test_correctness_iris():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_iris_predictions)
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_iris_predictions, pred_labels2)
assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -
numpy.vstack(pred_scores2)) < 1e-20)
all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs
)) < 1e-06)
@nose.tools.raises(RuntimeError)
def test_correctness_inputsize_exceeds():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])
pred_label = machine.predict_class(data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def F(f):
"""Returns the test file on the "data" subdirectory"""
return pkg_resources.resource_filename(__name__, os.path.join('data', f))
<|reserved_special_token_0|>
def load_expected(filename):
"""Loads libsvm's svm-predict output file with probabilities"""
all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]
)
data = numpy.loadtxt(filename, dtype='float64', skiprows=1)
return all_labels, data[:, 0].astype('int64'), data[:, 1:]
<|reserved_special_token_0|>
def test_can_save():
machine = Machine(HEART_MACHINE)
tmp = tempname('.model')
machine.save(tmp)
del machine
machine = Machine(tmp)
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
os.unlink(tmp)
<|reserved_special_token_0|>
def test_can_save_arbitrary():
run_for_extension('.arbitrary')
def test_can_save_h5():
run_for_extension('.h5')
def test_can_save_hdf5():
run_for_extension('.hdf5')
def test_data_loading():
data = File(HEART_DATA)
nose.tools.eq_(data.shape, (13,))
nose.tools.eq_(data.good(), True)
nose.tools.eq_(data.fail(), False)
nose.tools.eq_(data.eof(), False)
all_data = []
all_labels = []
while data.good():
entry = data.read()
if entry is not None:
all_labels.append(entry[0])
all_data.append(entry[1])
nose.tools.eq_(len(all_data), len(all_labels))
nose.tools.eq_(len(all_data), 270)
counter = 0
data.reset()
entry = data.read()
while entry:
nose.tools.eq_(entry[0], all_labels[counter])
assert numpy.array_equal(entry[1], all_data[counter])
counter += 1
entry = data.read()
data.reset()
labels, data = data.read_all()
assert numpy.array_equal(labels, all_labels)
for k, l in zip(data, all_data):
assert numpy.array_equal(k, l)
ex = []
ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -
0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))
ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1,
0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))
ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,
-1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))
ls = [+1, -1, +1]
for k, (l, e) in enumerate(zip(ls, ex)):
nose.tools.eq_(l, labels[k])
assert numpy.array_equal(e, data[k])
<|reserved_special_token_0|>
def test_correctness_iris():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_iris_predictions)
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_iris_predictions, pred_labels2)
assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -
numpy.vstack(pred_scores2)) < 1e-20)
all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs
)) < 1e-06)
@nose.tools.raises(RuntimeError)
def test_correctness_inputsize_exceeds():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])
pred_label = machine.predict_class(data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def F(f):
"""Returns the test file on the "data" subdirectory"""
return pkg_resources.resource_filename(__name__, os.path.join('data', f))
def tempname(suffix, prefix='bobtest_machine_'):
fd, name = tempfile.mkstemp(suffix, prefix)
os.close(fd)
os.unlink(name)
return name
<|reserved_special_token_0|>
def load_expected(filename):
"""Loads libsvm's svm-predict output file with probabilities"""
all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]
)
data = numpy.loadtxt(filename, dtype='float64', skiprows=1)
return all_labels, data[:, 0].astype('int64'), data[:, 1:]
<|reserved_special_token_0|>
def test_can_save():
machine = Machine(HEART_MACHINE)
tmp = tempname('.model')
machine.save(tmp)
del machine
machine = Machine(tmp)
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
os.unlink(tmp)
def run_for_extension(ext):
machine = Machine(HEART_MACHINE)
tmp = tempname(ext)
machine.save(bob.io.base.HDF5File(tmp, 'w'))
del machine
machine = Machine(bob.io.base.HDF5File(tmp))
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)
assert numpy.all(abs(machine.input_divide - 1) < 1e-10)
os.unlink(tmp)
def test_can_save_arbitrary():
run_for_extension('.arbitrary')
def test_can_save_h5():
run_for_extension('.h5')
def test_can_save_hdf5():
run_for_extension('.hdf5')
def test_data_loading():
data = File(HEART_DATA)
nose.tools.eq_(data.shape, (13,))
nose.tools.eq_(data.good(), True)
nose.tools.eq_(data.fail(), False)
nose.tools.eq_(data.eof(), False)
all_data = []
all_labels = []
while data.good():
entry = data.read()
if entry is not None:
all_labels.append(entry[0])
all_data.append(entry[1])
nose.tools.eq_(len(all_data), len(all_labels))
nose.tools.eq_(len(all_data), 270)
counter = 0
data.reset()
entry = data.read()
while entry:
nose.tools.eq_(entry[0], all_labels[counter])
assert numpy.array_equal(entry[1], all_data[counter])
counter += 1
entry = data.read()
data.reset()
labels, data = data.read_all()
assert numpy.array_equal(labels, all_labels)
for k, l in zip(data, all_data):
assert numpy.array_equal(k, l)
ex = []
ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -
0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))
ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1,
0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))
ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,
-1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))
ls = [+1, -1, +1]
for k, (l, e) in enumerate(zip(ls, ex)):
nose.tools.eq_(l, labels[k])
assert numpy.array_equal(e, data[k])
@nose.tools.raises(RuntimeError)
def test_raises():
machine = Machine(TEST_MACHINE_NO_PROBS)
labels, data = File(HEART_DATA).read_all()
machine.predict_class_and_probabilities(data)
<|reserved_special_token_0|>
def test_correctness_iris():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_iris_predictions)
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_iris_predictions, pred_labels2)
assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -
numpy.vstack(pred_scores2)) < 1e-20)
all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs
)) < 1e-06)
@nose.tools.raises(RuntimeError)
def test_correctness_inputsize_exceeds():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])
pred_label = machine.predict_class(data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def F(f):
"""Returns the test file on the "data" subdirectory"""
return pkg_resources.resource_filename(__name__, os.path.join('data', f))
def tempname(suffix, prefix='bobtest_machine_'):
fd, name = tempfile.mkstemp(suffix, prefix)
os.close(fd)
os.unlink(name)
return name
<|reserved_special_token_0|>
def load_expected(filename):
"""Loads libsvm's svm-predict output file with probabilities"""
all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]
)
data = numpy.loadtxt(filename, dtype='float64', skiprows=1)
return all_labels, data[:, 0].astype('int64'), data[:, 1:]
<|reserved_special_token_0|>
def test_can_save():
machine = Machine(HEART_MACHINE)
tmp = tempname('.model')
machine.save(tmp)
del machine
machine = Machine(tmp)
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
os.unlink(tmp)
def run_for_extension(ext):
machine = Machine(HEART_MACHINE)
tmp = tempname(ext)
machine.save(bob.io.base.HDF5File(tmp, 'w'))
del machine
machine = Machine(bob.io.base.HDF5File(tmp))
nose.tools.eq_(machine.shape, (13, 1))
nose.tools.eq_(machine.n_support_vectors, [64, 68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-06
assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)
assert numpy.all(abs(machine.input_divide - 1) < 1e-10)
os.unlink(tmp)
def test_can_save_arbitrary():
run_for_extension('.arbitrary')
def test_can_save_h5():
run_for_extension('.h5')
def test_can_save_hdf5():
run_for_extension('.hdf5')
def test_data_loading():
data = File(HEART_DATA)
nose.tools.eq_(data.shape, (13,))
nose.tools.eq_(data.good(), True)
nose.tools.eq_(data.fail(), False)
nose.tools.eq_(data.eof(), False)
all_data = []
all_labels = []
while data.good():
entry = data.read()
if entry is not None:
all_labels.append(entry[0])
all_data.append(entry[1])
nose.tools.eq_(len(all_data), len(all_labels))
nose.tools.eq_(len(all_data), 270)
counter = 0
data.reset()
entry = data.read()
while entry:
nose.tools.eq_(entry[0], all_labels[counter])
assert numpy.array_equal(entry[1], all_data[counter])
counter += 1
entry = data.read()
data.reset()
labels, data = data.read_all()
assert numpy.array_equal(labels, all_labels)
for k, l in zip(data, all_data):
assert numpy.array_equal(k, l)
ex = []
ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -
0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))
ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1,
0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))
ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,
-1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))
ls = [+1, -1, +1]
for k, (l, e) in enumerate(zip(ls, ex)):
nose.tools.eq_(l, labels[k])
assert numpy.array_equal(e, data[k])
@nose.tools.raises(RuntimeError)
def test_raises():
machine = Machine(TEST_MACHINE_NO_PROBS)
labels, data = File(HEART_DATA).read_all()
machine.predict_class_and_probabilities(data)
def test_correctness_heart():
machine = Machine(HEART_MACHINE)
labels, data = File(HEART_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_heart_predictions)
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_heart_predictions, pred_labels2)
assert numpy.array_equal(tuple([k[1] for k in pred_lab_values]),
pred_scores2)
all_labels, real_labels, real_probs = load_expected(HEART_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(pred_probs - real_probs) < 0.01), abs(pred_probs -
real_probs)
def test_correctness_iris():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_iris_predictions)
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_iris_predictions, pred_labels2)
assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -
numpy.vstack(pred_scores2)) < 1e-20)
all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs
)) < 1e-06)
@nose.tools.raises(RuntimeError)
def test_correctness_inputsize_exceeds():
machine = Machine(IRIS_MACHINE)
labels, data = File(IRIS_DATA).read_all()
data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])
pred_label = machine.predict_class(data)
<|reserved_special_token_1|>
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Andre Anjos <andre.anjos@idiap.ch>
# Sat Dec 17 14:41:56 2011 +0100
#
# Copyright (C) 2011-2013 Idiap Research Institute, Martigny, Switzerland
"""Run tests on the libsvm machine infrastructure.
"""
import os
import numpy
import tempfile
import pkg_resources
import nose.tools
import bob.io.base
from . import File, Machine
def F(f):
"""Returns the test file on the "data" subdirectory"""
return pkg_resources.resource_filename(__name__, os.path.join('data', f))
def tempname(suffix, prefix='bobtest_machine_'):
(fd, name) = tempfile.mkstemp(suffix, prefix)
os.close(fd)
os.unlink(name)
return name
TEST_MACHINE_NO_PROBS = F('heart_no_probs.svmmodel')
HEART_DATA = F('heart.svmdata') #13 inputs
HEART_MACHINE = F('heart.svmmodel') #supports probabilities
HEART_EXPECTED = F('heart.out') #expected probabilities
IRIS_DATA = F('iris.svmdata')
IRIS_MACHINE = F('iris.svmmodel')
IRIS_EXPECTED = F('iris.out') #expected probabilities
def load_expected(filename):
"""Loads libsvm's svm-predict output file with probabilities"""
all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]])
data = numpy.loadtxt(filename, dtype='float64', skiprows=1)
return all_labels, data[:,0].astype('int64'), data[:,1:]
#extracted by running svm-predict.c on the heart_scale example data
expected_heart_predictions = (1, -1, -1, 1, -1, -1, 1, 1, 1, 1, 1, 1, -1, -1,
-1, -1, 1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, 1,
1, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1,
-1, 1, -1, -1, 1, -1, 1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1,
1, 1, -1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, 1, 1, 1,
-1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, -1, 1,
1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1,
1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, 1,
-1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1,
-1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1, -1, -1, 1, -1,
1, 1, 1, 1, -1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, 1, 1, -1, 1, 1,
-1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1,
1, -1, 1, 1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, -1,
-1, -1, -1, -1, -1, -1, 1)
expected_iris_predictions = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3)
def test_can_load():
machine = Machine(HEART_MACHINE)
nose.tools.eq_(machine.shape, (13,1))
nose.tools.eq_(machine.n_support_vectors, [64,68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-6
assert type(machine.__repr__()) is str
def test_can_save():
machine = Machine(HEART_MACHINE)
tmp = tempname('.model')
machine.save(tmp)
del machine
# make sure that the save machine is the same as before
machine = Machine(tmp)
nose.tools.eq_(machine.shape, (13,1))
nose.tools.eq_(machine.n_support_vectors, [64,68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-6
os.unlink(tmp)
def run_for_extension(ext):
machine = Machine(HEART_MACHINE)
tmp = tempname(ext)
machine.save(bob.io.base.HDF5File(tmp, 'w'))
del machine
# make sure that the save machine is the same as before
machine = Machine(bob.io.base.HDF5File(tmp))
nose.tools.eq_(machine.shape, (13,1))
nose.tools.eq_(machine.n_support_vectors, [64,68])
nose.tools.eq_(machine.kernel_type, 'RBF')
nose.tools.eq_(machine.machine_type, 'C_SVC')
nose.tools.eq_(len(machine.labels), 2)
assert -1 in machine.labels
assert +1 in machine.labels
assert abs(machine.gamma - 0.0769231) < 1e-6
assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)
assert numpy.all(abs(machine.input_divide - 1) < 1e-10)
os.unlink(tmp)
def test_can_save_arbitrary():
run_for_extension('.arbitrary')
def test_can_save_h5():
run_for_extension('.h5')
def test_can_save_hdf5():
run_for_extension('.hdf5')
def test_data_loading():
#tests if I can load data in libsvm format using SVMFile
data = File(HEART_DATA)
nose.tools.eq_(data.shape, (13,))
nose.tools.eq_(data.good(), True)
nose.tools.eq_(data.fail(), False)
nose.tools.eq_(data.eof(), False)
#tries loading the data, one by one
all_data = []
all_labels = []
while data.good():
entry = data.read()
if entry is not None:
all_labels.append(entry[0])
all_data.append(entry[1])
nose.tools.eq_(len(all_data), len(all_labels))
nose.tools.eq_(len(all_data), 270)
#tries loading the data with numpy arrays allocated internally
counter = 0
data.reset()
entry = data.read()
while entry:
nose.tools.eq_( entry[0], all_labels[counter] )
assert numpy.array_equal(entry[1], all_data[counter])
counter += 1
entry = data.read()
#tries loading the file all in a single shot
data.reset()
labels, data = data.read_all()
assert numpy.array_equal(labels, all_labels)
for k, l in zip(data, all_data):
assert numpy.array_equal(k, l)
#makes sure the first 3 examples are correctly read
ex = []
ex.append(numpy.array([0.708333 , 1, 1, -0.320755 , -0.105023 , -1, 1,
-0.419847 ,-1, -0.225806 ,0. ,1, -1], 'float64'))
ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1,
0.358779, -1, -0.483871, 0., -1, 1], 'float64'))
ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,
-1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))
ls = [+1, -1, +1]
for k, (l, e) in enumerate(zip(ls, ex)):
nose.tools.eq_( l, labels[k] )
assert numpy.array_equal(e, data[k])
@nose.tools.raises(RuntimeError)
def test_raises():
#tests that the normal machine raises because probabilities are not
#supported on that model
machine = Machine(TEST_MACHINE_NO_PROBS)
labels, data = File(HEART_DATA).read_all()
machine.predict_class_and_probabilities(data)
def test_correctness_heart():
#tests the correctness of the libSVM bindings
machine = Machine(HEART_MACHINE)
labels, data = File(HEART_DATA).read_all()
pred_label = machine.predict_class(data)
assert numpy.array_equal(pred_label, expected_heart_predictions)
#finally, we test if the values also work fine.
pred_lab_values = [machine.predict_class_and_scores(k) for k in data]
#tries the variant with multiple inputs
pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)
assert numpy.array_equal(expected_heart_predictions, pred_labels2)
assert numpy.array_equal(tuple([k[1] for k in pred_lab_values]), pred_scores2)
#tries to get the probabilities - note: for some reason, when getting
#probabilities, the labels change, but notice the note bellow:
# Note from the libSVM FAQ:
# Q: Why using the -b option does not give me better accuracy?
# There is absolutely no reason the probability outputs guarantee you
# better accuracy. The main purpose of this option is to provide you the
# probability estimates, but not to boost prediction accuracy. From our
# experience, after proper parameter selections, in general with and
# without -b have similar accuracy. Occasionally there are some
# differences. It is not recommended to compare the two under just a fixed
# parameter set as more differences will be observed.
all_labels, real_labels, real_probs = load_expected(HEART_EXPECTED)
pred_labels, pred_probs = machine.predict_class_and_probabilities(data)
assert numpy.array_equal(pred_labels, real_labels)
assert numpy.all(abs(pred_probs - real_probs) < 1e-2), abs(pred_probs - real_probs)
def test_correctness_iris():
    """Same correctness check as the heart case, but on a 3-class problem."""
    machine = Machine(IRIS_MACHINE)
    labels, data = File(IRIS_DATA).read_all()

    predicted = machine.predict_class(data)
    assert numpy.array_equal(predicted, expected_iris_predictions)

    # Per-sample predictions must match the batch variant.
    per_sample = [machine.predict_class_and_scores(sample) for sample in data]
    batch_labels, batch_scores = machine.predict_class_and_scores(data)
    assert numpy.array_equal(expected_iris_predictions, batch_labels)
    per_sample_scores = numpy.vstack([entry[1] for entry in per_sample])
    assert numpy.all(abs(per_sample_scores - numpy.vstack(batch_scores)) < 1e-20)

    # Probability estimates against the reference output (the labels may
    # change when probabilities are requested).
    all_labels, ref_labels, ref_probs = load_expected(IRIS_EXPECTED)
    prob_labels, prob_values = machine.predict_class_and_probabilities(data)
    assert numpy.array_equal(prob_labels, ref_labels)
    assert numpy.all(abs(numpy.vstack(prob_values) - numpy.vstack(ref_probs)) < 1e-6)
@nose.tools.raises(RuntimeError)
def test_correctness_inputsize_exceeds():
    """Feeding more input columns than the machine expects must raise."""
    machine = Machine(IRIS_MACHINE)
    labels, data = File(IRIS_DATA).read_all()

    # Pad the input with two extra columns of ones so its width exceeds
    # the machine's expected input size.
    padded = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])

    machine.predict_class(padded)
|
flexible
|
{
"blob_id": "c24be05700e5ee043d09d6f2e78cb3de1e7088f1",
"index": 6242,
"step-1": "<mask token>\n\n\ndef F(f):\n \"\"\"Returns the test file on the \"data\" subdirectory\"\"\"\n return pkg_resources.resource_filename(__name__, os.path.join('data', f))\n\n\n<mask token>\n\n\ndef load_expected(filename):\n \"\"\"Loads libsvm's svm-predict output file with probabilities\"\"\"\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]\n )\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:, 0].astype('int64'), data[:, 1:]\n\n\n<mask token>\n\n\ndef test_can_save():\n machine = Machine(HEART_MACHINE)\n tmp = tempname('.model')\n machine.save(tmp)\n del machine\n machine = Machine(tmp)\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n os.unlink(tmp)\n\n\n<mask token>\n\n\ndef test_can_save_arbitrary():\n run_for_extension('.arbitrary')\n\n\ndef test_can_save_h5():\n run_for_extension('.h5')\n\n\ndef test_can_save_hdf5():\n run_for_extension('.hdf5')\n\n\n<mask token>\n\n\ndef test_correctness_iris():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n pred_label = machine.predict_class(data)\n assert numpy.array_equal(pred_label, expected_iris_predictions)\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert numpy.array_equal(expected_iris_predictions, pred_labels2)\n assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -\n numpy.vstack(pred_scores2)) < 1e-20)\n all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert 
numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs\n )) < 1e-06)\n\n\n@nose.tools.raises(RuntimeError)\ndef test_correctness_inputsize_exceeds():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])\n pred_label = machine.predict_class(data)\n",
"step-2": "<mask token>\n\n\ndef F(f):\n \"\"\"Returns the test file on the \"data\" subdirectory\"\"\"\n return pkg_resources.resource_filename(__name__, os.path.join('data', f))\n\n\n<mask token>\n\n\ndef load_expected(filename):\n \"\"\"Loads libsvm's svm-predict output file with probabilities\"\"\"\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]\n )\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:, 0].astype('int64'), data[:, 1:]\n\n\n<mask token>\n\n\ndef test_can_save():\n machine = Machine(HEART_MACHINE)\n tmp = tempname('.model')\n machine.save(tmp)\n del machine\n machine = Machine(tmp)\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n os.unlink(tmp)\n\n\n<mask token>\n\n\ndef test_can_save_arbitrary():\n run_for_extension('.arbitrary')\n\n\ndef test_can_save_h5():\n run_for_extension('.h5')\n\n\ndef test_can_save_hdf5():\n run_for_extension('.hdf5')\n\n\ndef test_data_loading():\n data = File(HEART_DATA)\n nose.tools.eq_(data.shape, (13,))\n nose.tools.eq_(data.good(), True)\n nose.tools.eq_(data.fail(), False)\n nose.tools.eq_(data.eof(), False)\n all_data = []\n all_labels = []\n while data.good():\n entry = data.read()\n if entry is not None:\n all_labels.append(entry[0])\n all_data.append(entry[1])\n nose.tools.eq_(len(all_data), len(all_labels))\n nose.tools.eq_(len(all_data), 270)\n counter = 0\n data.reset()\n entry = data.read()\n while entry:\n nose.tools.eq_(entry[0], all_labels[counter])\n assert numpy.array_equal(entry[1], all_data[counter])\n counter += 1\n entry = data.read()\n data.reset()\n labels, data = data.read_all()\n assert numpy.array_equal(labels, all_labels)\n for k, l in 
zip(data, all_data):\n assert numpy.array_equal(k, l)\n ex = []\n ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -\n 0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))\n ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1, \n 0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))\n ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,\n -1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))\n ls = [+1, -1, +1]\n for k, (l, e) in enumerate(zip(ls, ex)):\n nose.tools.eq_(l, labels[k])\n assert numpy.array_equal(e, data[k])\n\n\n<mask token>\n\n\ndef test_correctness_iris():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n pred_label = machine.predict_class(data)\n assert numpy.array_equal(pred_label, expected_iris_predictions)\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert numpy.array_equal(expected_iris_predictions, pred_labels2)\n assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -\n numpy.vstack(pred_scores2)) < 1e-20)\n all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs\n )) < 1e-06)\n\n\n@nose.tools.raises(RuntimeError)\ndef test_correctness_inputsize_exceeds():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])\n pred_label = machine.predict_class(data)\n",
"step-3": "<mask token>\n\n\ndef F(f):\n \"\"\"Returns the test file on the \"data\" subdirectory\"\"\"\n return pkg_resources.resource_filename(__name__, os.path.join('data', f))\n\n\ndef tempname(suffix, prefix='bobtest_machine_'):\n fd, name = tempfile.mkstemp(suffix, prefix)\n os.close(fd)\n os.unlink(name)\n return name\n\n\n<mask token>\n\n\ndef load_expected(filename):\n \"\"\"Loads libsvm's svm-predict output file with probabilities\"\"\"\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]\n )\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:, 0].astype('int64'), data[:, 1:]\n\n\n<mask token>\n\n\ndef test_can_save():\n machine = Machine(HEART_MACHINE)\n tmp = tempname('.model')\n machine.save(tmp)\n del machine\n machine = Machine(tmp)\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n os.unlink(tmp)\n\n\ndef run_for_extension(ext):\n machine = Machine(HEART_MACHINE)\n tmp = tempname(ext)\n machine.save(bob.io.base.HDF5File(tmp, 'w'))\n del machine\n machine = Machine(bob.io.base.HDF5File(tmp))\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)\n assert numpy.all(abs(machine.input_divide - 1) < 1e-10)\n os.unlink(tmp)\n\n\ndef test_can_save_arbitrary():\n run_for_extension('.arbitrary')\n\n\ndef test_can_save_h5():\n run_for_extension('.h5')\n\n\ndef 
test_can_save_hdf5():\n run_for_extension('.hdf5')\n\n\ndef test_data_loading():\n data = File(HEART_DATA)\n nose.tools.eq_(data.shape, (13,))\n nose.tools.eq_(data.good(), True)\n nose.tools.eq_(data.fail(), False)\n nose.tools.eq_(data.eof(), False)\n all_data = []\n all_labels = []\n while data.good():\n entry = data.read()\n if entry is not None:\n all_labels.append(entry[0])\n all_data.append(entry[1])\n nose.tools.eq_(len(all_data), len(all_labels))\n nose.tools.eq_(len(all_data), 270)\n counter = 0\n data.reset()\n entry = data.read()\n while entry:\n nose.tools.eq_(entry[0], all_labels[counter])\n assert numpy.array_equal(entry[1], all_data[counter])\n counter += 1\n entry = data.read()\n data.reset()\n labels, data = data.read_all()\n assert numpy.array_equal(labels, all_labels)\n for k, l in zip(data, all_data):\n assert numpy.array_equal(k, l)\n ex = []\n ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -\n 0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))\n ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1, \n 0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))\n ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,\n -1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))\n ls = [+1, -1, +1]\n for k, (l, e) in enumerate(zip(ls, ex)):\n nose.tools.eq_(l, labels[k])\n assert numpy.array_equal(e, data[k])\n\n\n@nose.tools.raises(RuntimeError)\ndef test_raises():\n machine = Machine(TEST_MACHINE_NO_PROBS)\n labels, data = File(HEART_DATA).read_all()\n machine.predict_class_and_probabilities(data)\n\n\n<mask token>\n\n\ndef test_correctness_iris():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n pred_label = machine.predict_class(data)\n assert numpy.array_equal(pred_label, expected_iris_predictions)\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert 
numpy.array_equal(expected_iris_predictions, pred_labels2)\n assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -\n numpy.vstack(pred_scores2)) < 1e-20)\n all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs\n )) < 1e-06)\n\n\n@nose.tools.raises(RuntimeError)\ndef test_correctness_inputsize_exceeds():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])\n pred_label = machine.predict_class(data)\n",
"step-4": "<mask token>\n\n\ndef F(f):\n \"\"\"Returns the test file on the \"data\" subdirectory\"\"\"\n return pkg_resources.resource_filename(__name__, os.path.join('data', f))\n\n\ndef tempname(suffix, prefix='bobtest_machine_'):\n fd, name = tempfile.mkstemp(suffix, prefix)\n os.close(fd)\n os.unlink(name)\n return name\n\n\n<mask token>\n\n\ndef load_expected(filename):\n \"\"\"Loads libsvm's svm-predict output file with probabilities\"\"\"\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]]\n )\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:, 0].astype('int64'), data[:, 1:]\n\n\n<mask token>\n\n\ndef test_can_save():\n machine = Machine(HEART_MACHINE)\n tmp = tempname('.model')\n machine.save(tmp)\n del machine\n machine = Machine(tmp)\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n os.unlink(tmp)\n\n\ndef run_for_extension(ext):\n machine = Machine(HEART_MACHINE)\n tmp = tempname(ext)\n machine.save(bob.io.base.HDF5File(tmp, 'w'))\n del machine\n machine = Machine(bob.io.base.HDF5File(tmp))\n nose.tools.eq_(machine.shape, (13, 1))\n nose.tools.eq_(machine.n_support_vectors, [64, 68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-06\n assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)\n assert numpy.all(abs(machine.input_divide - 1) < 1e-10)\n os.unlink(tmp)\n\n\ndef test_can_save_arbitrary():\n run_for_extension('.arbitrary')\n\n\ndef test_can_save_h5():\n run_for_extension('.h5')\n\n\ndef 
test_can_save_hdf5():\n run_for_extension('.hdf5')\n\n\ndef test_data_loading():\n data = File(HEART_DATA)\n nose.tools.eq_(data.shape, (13,))\n nose.tools.eq_(data.good(), True)\n nose.tools.eq_(data.fail(), False)\n nose.tools.eq_(data.eof(), False)\n all_data = []\n all_labels = []\n while data.good():\n entry = data.read()\n if entry is not None:\n all_labels.append(entry[0])\n all_data.append(entry[1])\n nose.tools.eq_(len(all_data), len(all_labels))\n nose.tools.eq_(len(all_data), 270)\n counter = 0\n data.reset()\n entry = data.read()\n while entry:\n nose.tools.eq_(entry[0], all_labels[counter])\n assert numpy.array_equal(entry[1], all_data[counter])\n counter += 1\n entry = data.read()\n data.reset()\n labels, data = data.read_all()\n assert numpy.array_equal(labels, all_labels)\n for k, l in zip(data, all_data):\n assert numpy.array_equal(k, l)\n ex = []\n ex.append(numpy.array([0.708333, 1, 1, -0.320755, -0.105023, -1, 1, -\n 0.419847, -1, -0.225806, 0.0, 1, -1], 'float64'))\n ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1, \n 0.358779, -1, -0.483871, 0.0, -1, 1], 'float64'))\n ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,\n -1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))\n ls = [+1, -1, +1]\n for k, (l, e) in enumerate(zip(ls, ex)):\n nose.tools.eq_(l, labels[k])\n assert numpy.array_equal(e, data[k])\n\n\n@nose.tools.raises(RuntimeError)\ndef test_raises():\n machine = Machine(TEST_MACHINE_NO_PROBS)\n labels, data = File(HEART_DATA).read_all()\n machine.predict_class_and_probabilities(data)\n\n\ndef test_correctness_heart():\n machine = Machine(HEART_MACHINE)\n labels, data = File(HEART_DATA).read_all()\n pred_label = machine.predict_class(data)\n assert numpy.array_equal(pred_label, expected_heart_predictions)\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert 
numpy.array_equal(expected_heart_predictions, pred_labels2)\n assert numpy.array_equal(tuple([k[1] for k in pred_lab_values]),\n pred_scores2)\n all_labels, real_labels, real_probs = load_expected(HEART_EXPECTED)\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(pred_probs - real_probs) < 0.01), abs(pred_probs -\n real_probs)\n\n\ndef test_correctness_iris():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n pred_label = machine.predict_class(data)\n assert numpy.array_equal(pred_label, expected_iris_predictions)\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert numpy.array_equal(expected_iris_predictions, pred_labels2)\n assert numpy.all(abs(numpy.vstack([k[1] for k in pred_lab_values]) -\n numpy.vstack(pred_scores2)) < 1e-20)\n all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs\n )) < 1e-06)\n\n\n@nose.tools.raises(RuntimeError)\ndef test_correctness_inputsize_exceeds():\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n data = numpy.hstack([data, numpy.ones((data.shape[0], 2), dtype=float)])\n pred_label = machine.predict_class(data)\n",
"step-5": "#!/usr/bin/env python\n# vim: set fileencoding=utf-8 :\n# Andre Anjos <andre.anjos@idiap.ch>\n# Sat Dec 17 14:41:56 2011 +0100\n#\n# Copyright (C) 2011-2013 Idiap Research Institute, Martigny, Switzerland\n\n\"\"\"Run tests on the libsvm machine infrastructure.\n\"\"\"\n\nimport os\nimport numpy\nimport tempfile\nimport pkg_resources\nimport nose.tools\nimport bob.io.base\n\nfrom . import File, Machine\n\ndef F(f):\n \"\"\"Returns the test file on the \"data\" subdirectory\"\"\"\n return pkg_resources.resource_filename(__name__, os.path.join('data', f))\n\ndef tempname(suffix, prefix='bobtest_machine_'):\n (fd, name) = tempfile.mkstemp(suffix, prefix)\n os.close(fd)\n os.unlink(name)\n return name\n\nTEST_MACHINE_NO_PROBS = F('heart_no_probs.svmmodel')\n\nHEART_DATA = F('heart.svmdata') #13 inputs\nHEART_MACHINE = F('heart.svmmodel') #supports probabilities\nHEART_EXPECTED = F('heart.out') #expected probabilities\n\nIRIS_DATA = F('iris.svmdata')\nIRIS_MACHINE = F('iris.svmmodel')\nIRIS_EXPECTED = F('iris.out') #expected probabilities\n\ndef load_expected(filename):\n \"\"\"Loads libsvm's svm-predict output file with probabilities\"\"\"\n\n all_labels = sorted([int(k) for k in open(filename).readline().split()[1:]])\n data = numpy.loadtxt(filename, dtype='float64', skiprows=1)\n return all_labels, data[:,0].astype('int64'), data[:,1:]\n\n#extracted by running svm-predict.c on the heart_scale example data\nexpected_heart_predictions = (1, -1, -1, 1, -1, -1, 1, 1, 1, 1, 1, 1, -1, -1,\n -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, 1,\n 1, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1,\n -1, 1, -1, -1, 1, -1, 1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1,\n 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, 1, 1, 1,\n -1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, -1, 1,\n 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1,\n 1, 1, -1, 1, -1, 
1, 1, -1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, 1,\n -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1,\n -1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1, -1, -1, 1, -1,\n 1, 1, 1, 1, -1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, 1, 1, -1, 1, 1,\n -1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1,\n 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, -1,\n -1, -1, -1, -1, -1, -1, 1)\n\nexpected_iris_predictions = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2,\n 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3)\n\ndef test_can_load():\n\n machine = Machine(HEART_MACHINE)\n nose.tools.eq_(machine.shape, (13,1))\n nose.tools.eq_(machine.n_support_vectors, [64,68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-6\n assert type(machine.__repr__()) is str\n\ndef test_can_save():\n\n machine = Machine(HEART_MACHINE)\n tmp = tempname('.model')\n machine.save(tmp)\n del machine\n\n # make sure that the save machine is the same as before\n machine = Machine(tmp)\n nose.tools.eq_(machine.shape, (13,1))\n nose.tools.eq_(machine.n_support_vectors, [64,68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-6\n\n os.unlink(tmp)\n\ndef run_for_extension(ext):\n\n machine 
= Machine(HEART_MACHINE)\n tmp = tempname(ext)\n machine.save(bob.io.base.HDF5File(tmp, 'w'))\n del machine\n\n # make sure that the save machine is the same as before\n machine = Machine(bob.io.base.HDF5File(tmp))\n nose.tools.eq_(machine.shape, (13,1))\n nose.tools.eq_(machine.n_support_vectors, [64,68])\n nose.tools.eq_(machine.kernel_type, 'RBF')\n nose.tools.eq_(machine.machine_type, 'C_SVC')\n nose.tools.eq_(len(machine.labels), 2)\n assert -1 in machine.labels\n assert +1 in machine.labels\n assert abs(machine.gamma - 0.0769231) < 1e-6\n assert numpy.all(abs(machine.input_subtract - 0) < 1e-10)\n assert numpy.all(abs(machine.input_divide - 1) < 1e-10)\n\n os.unlink(tmp)\n\ndef test_can_save_arbitrary():\n run_for_extension('.arbitrary')\n\ndef test_can_save_h5():\n run_for_extension('.h5')\n\ndef test_can_save_hdf5():\n run_for_extension('.hdf5')\n\ndef test_data_loading():\n\n #tests if I can load data in libsvm format using SVMFile\n data = File(HEART_DATA)\n nose.tools.eq_(data.shape, (13,))\n nose.tools.eq_(data.good(), True)\n nose.tools.eq_(data.fail(), False)\n nose.tools.eq_(data.eof(), False)\n\n\n #tries loading the data, one by one\n all_data = []\n all_labels = []\n while data.good():\n entry = data.read()\n if entry is not None:\n all_labels.append(entry[0])\n all_data.append(entry[1])\n\n nose.tools.eq_(len(all_data), len(all_labels))\n nose.tools.eq_(len(all_data), 270)\n\n #tries loading the data with numpy arrays allocated internally\n counter = 0\n data.reset()\n entry = data.read()\n while entry:\n nose.tools.eq_( entry[0], all_labels[counter] )\n assert numpy.array_equal(entry[1], all_data[counter])\n counter += 1\n entry = data.read()\n\n\n #tries loading the file all in a single shot\n data.reset()\n labels, data = data.read_all()\n\n assert numpy.array_equal(labels, all_labels)\n for k, l in zip(data, all_data):\n assert numpy.array_equal(k, l)\n\n\n\n\n #makes sure the first 3 examples are correctly read\n ex = []\n 
ex.append(numpy.array([0.708333 , 1, 1, -0.320755 , -0.105023 , -1, 1,\n -0.419847 ,-1, -0.225806 ,0. ,1, -1], 'float64'))\n ex.append(numpy.array([0.583333, -1, 0.333333, -0.603774, 1, -1, 1,\n 0.358779, -1, -0.483871, 0., -1, 1], 'float64'))\n ex.append(numpy.array([0.166667, 1, -0.333333, -0.433962, -0.383562, -1,\n -1, 0.0687023, -1, -0.903226, -1, -1, 1], 'float64'))\n ls = [+1, -1, +1]\n\n for k, (l, e) in enumerate(zip(ls, ex)):\n nose.tools.eq_( l, labels[k] )\n assert numpy.array_equal(e, data[k])\n\n\n@nose.tools.raises(RuntimeError)\ndef test_raises():\n\n #tests that the normal machine raises because probabilities are not\n #supported on that model\n machine = Machine(TEST_MACHINE_NO_PROBS)\n labels, data = File(HEART_DATA).read_all()\n machine.predict_class_and_probabilities(data)\n\ndef test_correctness_heart():\n\n #tests the correctness of the libSVM bindings\n machine = Machine(HEART_MACHINE)\n labels, data = File(HEART_DATA).read_all()\n pred_label = machine.predict_class(data)\n\n assert numpy.array_equal(pred_label, expected_heart_predictions)\n\n #finally, we test if the values also work fine.\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n\n #tries the variant with multiple inputs\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert numpy.array_equal(expected_heart_predictions, pred_labels2)\n assert numpy.array_equal(tuple([k[1] for k in pred_lab_values]), pred_scores2)\n\n #tries to get the probabilities - note: for some reason, when getting\n #probabilities, the labels change, but notice the note bellow:\n\n # Note from the libSVM FAQ:\n # Q: Why using the -b option does not give me better accuracy?\n # There is absolutely no reason the probability outputs guarantee you\n # better accuracy. The main purpose of this option is to provide you the\n # probability estimates, but not to boost prediction accuracy. 
From our\n # experience, after proper parameter selections, in general with and\n # without -b have similar accuracy. Occasionally there are some\n # differences. It is not recommended to compare the two under just a fixed\n # parameter set as more differences will be observed.\n all_labels, real_labels, real_probs = load_expected(HEART_EXPECTED)\n\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(pred_probs - real_probs) < 1e-2), abs(pred_probs - real_probs)\n\ndef test_correctness_iris():\n\n #same test as above, but with a 3-class problem.\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n pred_label = machine.predict_class(data)\n\n assert numpy.array_equal(pred_label, expected_iris_predictions)\n\n #finally, we test if the values also work fine.\n pred_lab_values = [machine.predict_class_and_scores(k) for k in data]\n\n #tries the variant with multiple inputs\n pred_labels2, pred_scores2 = machine.predict_class_and_scores(data)\n assert numpy.array_equal(expected_iris_predictions, pred_labels2)\n assert numpy.all(abs(numpy.vstack([k[1] for k in\n pred_lab_values]) - numpy.vstack(pred_scores2)) < 1e-20 )\n\n #tries to get the probabilities - note: for some reason, when getting\n #probabilities, the labels change, but notice the note bellow:\n\n all_labels, real_labels, real_probs = load_expected(IRIS_EXPECTED)\n\n pred_labels, pred_probs = machine.predict_class_and_probabilities(data)\n assert numpy.array_equal(pred_labels, real_labels)\n assert numpy.all(abs(numpy.vstack(pred_probs) - numpy.vstack(real_probs)) < 1e-6)\n\n\n@nose.tools.raises(RuntimeError)\ndef test_correctness_inputsize_exceeds():\n\n #same test as above, but test for excess input\n machine = Machine(IRIS_MACHINE)\n labels, data = File(IRIS_DATA).read_all()\n\n # add extra columns to the input data\n data = numpy.hstack([data, numpy.ones((data.shape[0], 2), 
dtype=float)])\n\n pred_label = machine.predict_class(data)\n",
"step-ids": [
8,
9,
12,
13,
17
]
}
|
[
8,
9,
12,
13,
17
] |
from functools import reduce
import confuse
# Global confuse configuration handle, populated from config.yaml.
config = confuse.Configuration('SleepCycleWebhooks')
config.set_file('config.yaml')
def get(path):
    """Resolve a dot-separated *path* against the global config and return its value."""
    view = config
    for part in path.split('.'):
        view = view[part]
    return view.get()
|
normal
|
{
"blob_id": "16879598a8b1a0b23c5ea6de18f8fb0b0b77201c",
"index": 1360,
"step-1": "<mask token>\n\n\ndef get(path):\n return reduce(lambda view, part: view[part], path.split('.'), config).get()\n",
"step-2": "<mask token>\nconfig.set_file('config.yaml')\n\n\ndef get(path):\n return reduce(lambda view, part: view[part], path.split('.'), config).get()\n",
"step-3": "<mask token>\nconfig = confuse.Configuration('SleepCycleWebhooks')\nconfig.set_file('config.yaml')\n\n\ndef get(path):\n return reduce(lambda view, part: view[part], path.split('.'), config).get()\n",
"step-4": "from functools import reduce\nimport confuse\nconfig = confuse.Configuration('SleepCycleWebhooks')\nconfig.set_file('config.yaml')\n\n\ndef get(path):\n return reduce(lambda view, part: view[part], path.split('.'), config).get()\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
#######################
# Iterative solution
#######################
class Solution:
    def reverseList(self, head: ListNode) -> ListNode:
        """Reverse a singly-linked list by copying its values and rebuilding
        a fresh list in the opposite order (uses O(n) extra space)."""
        if head is None:
            return head
        # First pass: collect every value in original order.
        values = []
        node = head
        while node:
            values.append(node.val)
            node = node.next
        # Second pass: build a brand-new list from the values, reversed.
        rebuilt = ListNode(values[-1])
        tail = rebuilt
        for v in reversed(values[:-1]):
            tail.next = ListNode(v)
            tail = tail.next
        tail.next = None
        return rebuilt
#######################
# Recursive solution
#######################
class Solution:
    def reverseList(self, head: ListNode) -> ListNode:
        """Reverse a singly-linked list by recursively building a reversed
        copy; the original nodes are left untouched."""
        if head is None:
            return head
        rev_head, rev_tail = self.recursiveReverseList(head)
        rev_tail.next = None
        return rev_head

    def recursiveReverseList(self, node):
        """Return (head, tail) of a reversed copy of the sublist at *node*."""
        if node.next == None:
            # Base case: a single node reversed is itself (as a fresh copy).
            copy = ListNode(node.val)
            return copy, copy
        rev_head, rev_tail = self.recursiveReverseList(node.next)
        rev_tail.next = ListNode(node.val)
        return rev_head, rev_tail.next
#######################
# Other's iterative solution
#######################
class Solution:
    # @param {ListNode} head
    # @return {ListNode}
    def reverseList(self, head):
        """Reverse the list in place by redirecting each node's next
        pointer toward the already-reversed prefix."""
        reversed_head = None
        node = head
        while node:
            following = node.next
            node.next = reversed_head
            reversed_head = node
            node = following
        return reversed_head
#######################
# Other's recursive solution
#######################
class Solution:
    # @param {ListNode} head
    # @return {ListNode}
    def reverseList(self, head):
        """Reverse the list recursively, threading the reversed prefix
        through the recursion as an accumulator."""
        return self._reverse(head, None)

    def _reverse(self, node, prev=None):
        # Base case: walked past the end -- the accumulated prefix is done.
        if not node:
            return prev
        rest = node.next
        node.next = prev  # splice the current node onto the reversed prefix
        return self._reverse(rest, node)
|
normal
|
{
"blob_id": "682495fec200ddad5a68f06bb0ec24e59036e66b",
"index": 3286,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n\n\nclass Solution:\n\n def reverseList(self, head):\n prev = None\n while head:\n curr = head\n head = head.next\n curr.next = prev\n prev = curr\n return prev\n\n\nclass Solution:\n\n def reverseList(self, head):\n return self._reverse(head)\n\n def _reverse(self, node, prev=None):\n if not node:\n return prev\n n = node.next\n node.next = prev\n return self._reverse(n, node)\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def recursiveReverseList(self, node):\n if node.next == None:\n new_head = cur = ListNode(node.val)\n return new_head, cur\n new_head, cur_node = self.recursiveReverseList(node.next)\n cur_node.next = ListNode(node.val)\n return new_head, cur_node.next\n\n\nclass Solution:\n\n def reverseList(self, head):\n prev = None\n while head:\n curr = head\n head = head.next\n curr.next = prev\n prev = curr\n return prev\n\n\nclass Solution:\n\n def reverseList(self, head):\n return self._reverse(head)\n\n def _reverse(self, node, prev=None):\n if not node:\n return prev\n n = node.next\n node.next = prev\n return self._reverse(n, node)\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def reverseList(self, head: ListNode) ->ListNode:\n if head is None:\n return head\n new_h, cur_nd = self.recursiveReverseList(head)\n cur_nd.next = None\n return new_h\n\n def recursiveReverseList(self, node):\n if node.next == None:\n new_head = cur = ListNode(node.val)\n return new_head, cur\n new_head, cur_node = self.recursiveReverseList(node.next)\n cur_node.next = ListNode(node.val)\n return new_head, cur_node.next\n\n\nclass Solution:\n\n def reverseList(self, head):\n prev = None\n while head:\n curr = head\n head = head.next\n curr.next = prev\n prev = curr\n return prev\n\n\nclass Solution:\n\n def reverseList(self, head):\n return self._reverse(head)\n\n def _reverse(self, node, prev=None):\n if not node:\n return prev\n n = node.next\n node.next = prev\n return self._reverse(n, node)\n",
"step-4": "class Solution:\n <mask token>\n\n\nclass Solution:\n\n def reverseList(self, head: ListNode) ->ListNode:\n if head is None:\n return head\n new_h, cur_nd = self.recursiveReverseList(head)\n cur_nd.next = None\n return new_h\n\n def recursiveReverseList(self, node):\n if node.next == None:\n new_head = cur = ListNode(node.val)\n return new_head, cur\n new_head, cur_node = self.recursiveReverseList(node.next)\n cur_node.next = ListNode(node.val)\n return new_head, cur_node.next\n\n\nclass Solution:\n\n def reverseList(self, head):\n prev = None\n while head:\n curr = head\n head = head.next\n curr.next = prev\n prev = curr\n return prev\n\n\nclass Solution:\n\n def reverseList(self, head):\n return self._reverse(head)\n\n def _reverse(self, node, prev=None):\n if not node:\n return prev\n n = node.next\n node.next = prev\n return self._reverse(n, node)\n",
"step-5": "# Definition for singly-linked list.\n# class ListNode:\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\n#######################\n# Iterative solution\n#######################\nclass Solution:\n def reverseList(self, head: ListNode) -> ListNode:\n if head is None:\n return head\n val = []\n while (head):\n val.append(head.val)\n head = head.next\n new_head = ListNode(val[-1])\n pre = new_head\n for i in range(len(val)-2, -1, -1):\n pre.next = ListNode(val[i])\n pre = pre.next\n pre.next = None\n return new_head\n\n\n#######################\n# Recursive solution\n#######################\nclass Solution:\n def reverseList(self, head: ListNode) -> ListNode:\n if head is None:\n return head\n new_h, cur_nd = self.recursiveReverseList(head)\n cur_nd.next = None\n return new_h\n \n def recursiveReverseList(self, node):\n if node.next == None:\n new_head = cur = ListNode(node.val)\n return new_head, cur\n new_head, cur_node = self.recursiveReverseList(node.next)\n cur_node.next = ListNode(node.val)\n return new_head, cur_node.next\n\n\n#######################\n# Other's iterative solution\n#######################\nclass Solution:\n# @param {ListNode} head\n# @return {ListNode}\n def reverseList(self, head):\n prev = None\n while head:\n curr = head\n head = head.next\n curr.next = prev\n prev = curr\n return prev\n\n\n#######################\n# Other's recursive solution\n#######################\nclass Solution:\n# @param {ListNode} head\n# @return {ListNode}\n def reverseList(self, head):\n return self._reverse(head)\n\n def _reverse(self, node, prev=None):\n if not node:\n return prev\n n = node.next\n node.next = prev\n return self._reverse(n, node)\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
#!/usr/bin/env python
import remctl
import json
import datetime
import time,random
import argparse
if __name__ == '__main__':
	# Command-line interface: the single flag disables the random start delay.
	parser = argparse.ArgumentParser(
		description = "List all TCC Forge overlays"
	)
	parser.add_argument(
		'-n',
		'--now',
		action = "store_false",
		default = True,
		dest = 'now',
		help = 'skip the random wait'
	)
	cmdlineopts = parser.parse_args()
	if cmdlineopts.now:
		# Spread load on the remctl server: sleep a random 0-120 s
		# (in 0.1 s steps) unless -n/--now was given.
		timeout = random.randint(0, 2 * 60 * 10) * 0.1
		time.sleep(timeout)
	# Query the Forge service on update.nmt.edu for every overlay in
	# group "tcc" on the F19 distro; the reply on stdout is JSON.
	command = ('forge','overlay','list','{"group":"tcc","distro":"F19"}')
	c = remctl.remctl(host = 'update.nmt.edu', command=command)
	r = json.loads(c.stdout)
	for overlay in r:
		# NOTE(review): Python 2 print statement -- this script predates Python 3.
		print overlay["name"]
|
normal
|
{
"blob_id": "1ce34bfec6a9acfeaf0d5c5835ebebed4d7ee369",
"index": 2056,
"step-1": "#!/usr/bin/env python\n\nimport remctl\nimport json\nimport datetime\nimport time,random\nimport argparse\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(\n\t\tdescription = \"List all TCC Forge overlays\"\n\t)\n\tparser.add_argument(\n\t\t'-n',\n\t\t'--now',\n\t\taction = \"store_false\",\n\t\tdefault = True,\n\t\tdest = 'now',\n\t\thelp = 'skip the random wait'\n\t)\n\tcmdlineopts = parser.parse_args()\n\tif cmdlineopts.now:\n\t\ttimeout = random.randint(0, 2 * 60 * 10) * 0.1\n\t\ttime.sleep(timeout)\n\tcommand = ('forge','overlay','list','{\"group\":\"tcc\",\"distro\":\"F19\"}')\n\tc = remctl.remctl(host = 'update.nmt.edu', command=command)\n\tr = json.loads(c.stdout)\n\tfor overlay in r:\n\t\tprint overlay[\"name\"]\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class Writer:
<|reserved_special_token_0|>
@staticmethod
def rep(string, find, replace):
ex = find + '[^0-9]'
while re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end
() - 1:]
ex = find + '$'
if re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end():]
return string
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Writer:
<|reserved_special_token_0|>
@staticmethod
def rep(string, find, replace):
ex = find + '[^0-9]'
while re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end
() - 1:]
ex = find + '$'
if re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end():]
return string
def categorise_variables(self):
model = self.parser.parsedModel
rule_params = []
rule_values = []
constant_params = []
constant_values = []
for i in range(len(model.listOfParameter)):
is_constant = True
if not model.listOfParameter[i].getConstant():
for k in range(len(model.listOfRules)):
if model.listOfRules[k].isRate() and model.ruleVariable[k
] == model.parameterId[i]:
rule_params.append(model.parameterId[i])
rule_values.append(str(model.parameter[i]))
is_constant = False
if is_constant:
constant_params.append(model.parameterId[i])
constant_values.append(str(model.parameter[i]))
species_list = copy.copy(model.speciesId)
species_list.extend(rule_params)
species_values = map(lambda x: str(x), model.initValues)
species_values.extend(rule_values)
return species_list, constant_params, species_values, constant_values
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Writer:
def __init__(self):
pass
@staticmethod
def rep(string, find, replace):
ex = find + '[^0-9]'
while re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end
() - 1:]
ex = find + '$'
if re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end():]
return string
def categorise_variables(self):
model = self.parser.parsedModel
rule_params = []
rule_values = []
constant_params = []
constant_values = []
for i in range(len(model.listOfParameter)):
is_constant = True
if not model.listOfParameter[i].getConstant():
for k in range(len(model.listOfRules)):
if model.listOfRules[k].isRate() and model.ruleVariable[k
] == model.parameterId[i]:
rule_params.append(model.parameterId[i])
rule_values.append(str(model.parameter[i]))
is_constant = False
if is_constant:
constant_params.append(model.parameterId[i])
constant_values.append(str(model.parameter[i]))
species_list = copy.copy(model.speciesId)
species_list.extend(rule_params)
species_values = map(lambda x: str(x), model.initValues)
species_values.extend(rule_values)
return species_list, constant_params, species_values, constant_values
<|reserved_special_token_1|>
from cudasim.ParsedModel import ParsedModel
import re
import copy
class Writer:
def __init__(self):
pass
@staticmethod
def rep(string, find, replace):
ex = find + '[^0-9]'
while re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end
() - 1:]
ex = find + '$'
if re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + ' ' + string[res.end():]
return string
def categorise_variables(self):
model = self.parser.parsedModel
rule_params = []
rule_values = []
constant_params = []
constant_values = []
for i in range(len(model.listOfParameter)):
is_constant = True
if not model.listOfParameter[i].getConstant():
for k in range(len(model.listOfRules)):
if model.listOfRules[k].isRate() and model.ruleVariable[k
] == model.parameterId[i]:
rule_params.append(model.parameterId[i])
rule_values.append(str(model.parameter[i]))
is_constant = False
if is_constant:
constant_params.append(model.parameterId[i])
constant_values.append(str(model.parameter[i]))
species_list = copy.copy(model.speciesId)
species_list.extend(rule_params)
species_values = map(lambda x: str(x), model.initValues)
species_values.extend(rule_values)
return species_list, constant_params, species_values, constant_values
<|reserved_special_token_1|>
from cudasim.ParsedModel import ParsedModel
import re
import copy
class Writer:
    """Base class for model-code writers built on a parsed SBML model.

    Subclasses are expected to set ``self.parser`` (an object exposing
    ``parsedModel``) before calling :meth:`categorise_variables`.
    """

    def __init__(self):
        pass

    # replace the species and parameters recursively
    @staticmethod
    def rep(string, find, replace):
        """Replace whole-token occurrences of *find* in *string* with *replace*.

        A match is accepted only when *find* is not followed by a digit
        (so replacing ``p1`` leaves ``p10`` untouched); a space is inserted
        after each replacement, and a trailing occurrence at the very end
        of the string is handled separately.
        """
        # Occurrences followed by a non-digit character.
        ex = find + "[^0-9]"
        while re.search(ex, string) is not None:
            res = re.search(ex, string)
            # Keep the non-digit character that terminated the match
            # (res.end() - 1) so surrounding text is preserved.
            string = string[0:res.start()] + replace + " " + string[res.end() - 1:]

        # A final occurrence at the end of the string.
        ex = find + "$"
        if re.search(ex, string) is not None:
            res = re.search(ex, string)
            string = string[0:res.start()] + replace + " " + string[res.end():]

        return string

    def categorise_variables(self):
        """Split model quantities into rule-driven and constant groups.

        Returns a tuple ``(species_list, constant_params, species_values,
        constant_values)``: parameters set by rate rules are appended to the
        species (they evolve with time); all values are returned as strings.
        """
        # form a list of the species, and parameters which are set by rate rules
        model = self.parser.parsedModel

        rule_params = []
        rule_values = []
        constant_params = []
        constant_values = []

        for i in range(len(model.listOfParameter)):
            is_constant = True
            if not model.listOfParameter[i].getConstant():
                for k in range(len(model.listOfRules)):
                    if model.listOfRules[k].isRate() and model.ruleVariable[k] == model.parameterId[i]:
                        rule_params.append(model.parameterId[i])
                        rule_values.append(str(model.parameter[i]))
                        is_constant = False
            if is_constant:
                constant_params.append(model.parameterId[i])
                constant_values.append(str(model.parameter[i]))

        species_list = copy.copy(model.speciesId)
        species_list.extend(rule_params)

        # Build a real list (the original used map(), whose Python 3 iterator
        # has no .extend()); on Python 2 this is identical to the map() result.
        species_values = [str(x) for x in model.initValues]
        species_values.extend(rule_values)

        return species_list, constant_params, species_values, constant_values
|
flexible
|
{
"blob_id": "acd0b9019ef413699b47ecb2b66a0980cf3aa81f",
"index": 9792,
"step-1": "<mask token>\n\n\nclass Writer:\n <mask token>\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Writer:\n <mask token>\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"step-3": "<mask token>\n\n\nclass Writer:\n\n def __init__(self):\n pass\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"step-4": "from cudasim.ParsedModel import ParsedModel\nimport re\nimport copy\n\n\nclass Writer:\n\n def __init__(self):\n pass\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"step-5": "from cudasim.ParsedModel import ParsedModel\nimport re\nimport copy\n\nclass Writer:\n\n def __init__(self):\n pass\n\n # replace the species and parameters recursively\n @staticmethod\n def rep(string, find, replace):\n ex = find + \"[^0-9]\"\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + \" \" + string[res.end() - 1:]\n\n ex = find + \"$\"\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + \" \" + string[res.end():]\n\n return string\n\n def categorise_variables(self):\n # form a list of the species, and parameters which are set by rate rules\n model = self.parser.parsedModel\n\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n\n return species_list, constant_params, species_values, constant_values\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python
import sys
import ROOT
from ROOT import TTree
from ROOT import TChain
import numpy as np
import yaml
import xml.etree.ElementTree as ET
import datetime
#sys.path.append("/disk/gamma/cta/store/takhsm/FermiMVA/AllSky")
#sys.path.append("/home/takhsm/FermiMVA/python")
ROOT.gROOT.SetBatch()
from array import array
import math
from math import cos, sin, tan, acos, asin, atan, radians, degrees
from pColor import *
ROOT.gStyle.SetPadGridX(True)
ROOT.gStyle.SetPadGridY(True)
ROOT.gStyle.SetPadTickX(True)
ROOT.gStyle.SetPadTickY(True)
#from pCutBDT import cutBDT
from pAnalysisConfig import *
# ----- Event class setup -----
# Command line: par[1] = BDT variable name / output suffix, par[2:] = input ROOT files.
par = sys.argv
cfg = ClassConfig('Both', [10, 3, 1], 1)
aCutEGB = cfg.aCutEGB          # background-rate multiples defining the CalOnly classes
aaStrSelect = cfg.aaStrSelect  # per-category event-class labels
nStartBin = cfg.nStartBin      # first energy bin actually used
# ROC file holding the BDT cut values; the path is currently hard-coded.
nameFileRoc = "/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S18/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZCS000wwoTRKwoMCZDIR00woRW_15_S11D_catTwoZDIR050Log_roc.root" #par[2]
# Default BDT variable name; immediately overridden by the command line below.
nameVarBDT = "S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06"
nameVarBDT = par[1]
nameFileSuffix = par[1]
cutMVA = CutBDT(nameFileRoc, aCutEGB)
aaValCutBDT = cutMVA.aaValCutBDT[0:]
print aaValCutBDT
# Energy binning (log10 energy) derived from the ROC file, offset by nStartBin.
nEnergyBin = cutMVA.aaValCutBDT[0]['numBin'] - nStartBin
vEnergyBinWidth = cutMVA.aaValCutBDT[0]['widthBin']
vEnergyLow = cutMVA.aaValCutBDT[0]['edgeLow'] + nStartBin*vEnergyBinWidth
vEnergyUp = vEnergyLow + nEnergyBin*vEnergyBinWidth
# Per-category / per-class event counters, zero-initialised.
aaNumEventClass=[]
#aColor = []
for hS in range(len(aaStrSelect)):
    aaNumEventClass.append([])
    for iS in range(len(aaStrSelect[hS])):
        aaNumEventClass[hS].append(0)
#IRF
# Performance (PSF/effective-area) histogram files: [CalTkr classes, CalOnly classes].
listPathFilePerf = [['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_TRANSIENT100_P8R2_TRANSIENT100_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_SOURCE_P8R2_SOURCE_perf.root'],
                    ['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R100_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R30_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R10_perf.root']]
htgPerf = CutPerformanceHtg(listPathFilePerf)
# Data
# Public GRB catalogue (XML) used to look up trigger time and localisation.
pathList = "/nfs/farm/g/glast/u/mtakahas/data/catalogue/PublicTableGRBs.xml"
fileList = ET.parse(pathList)
rtXml = fileList.getroot()
# OFF regions
# Four background (OFF) regions offset 14 deg from the source position.
nOff = 4;
degOffOffset = 14.0;
print "===================="
# Making all sky map
listFileIn = par[2:]
print listFileIn
# NOTE(review): yaml.load without an explicit Loader executes arbitrary tags;
# safe here only because the config file is trusted -- prefer yaml.safe_load.
aliasSelections = yaml.load(open('/afs/slac.stanford.edu/u/gl/mtakahas/eventSelect/config/pass8_event_selections.yaml','r'))
for nameFileIn in listFileIn:
print ""
print "========================================================================"
fileIn = ROOT.TFile(nameFileIn, "READ")
print fileIn.GetName()
print "========================================================================"
chainData = fileIn.Get("MeritTuple")
nameFileFriend = nameFileIn.replace(".root", "_" + nameVarBDT + ".root")
chainData.AddFriend("friendTemp=MeritTuple", nameFileFriend)
for k,v in aliasSelections.iteritems():
chainData.SetAlias(k,v)
nameFileOut = nameFileIn[:-5] + "_PHOTON_" + nameVarBDT + nameFileSuffix + ".root"
fileOut = ROOT.TFile(nameFileOut, 'UPDATE')
#------ Source data -----
indexGrbName = nameFileIn.rindex('GRB') + 3
indexGrbNameEnd = indexGrbName + 9
nameGrb = nameFileIn[indexGrbName:indexGrbNameEnd]
for grb in rtXml: #for iGrb in range(trList.GetEntries())
if grb.findtext("./GRBNAME")==nameGrb:
trigger_time = float(grb.findtext("./MET"))
if grb.findtext("./ERROR") == "--" or grb.findtext("./ERROR") == "":
if grb.findtext("./LATERROR") == "--" or grb.findtext("./LATERROR") == "":
err_rad = 0.
else:
err_rad = float(grb.findtext("./LATERROR"))
else:
if float(grb.findtext("./ERROR")) > float(grb.findtext("./LATERROR")):
err_rad = float(grb.findtext("./LATERROR"))
raSrc = float(grb.findtext("./LATRA"))
decSrc = float(grb.findtext("./LATDEC"))
else:
err_rad = float(grb.findtext("./ERROR"))
raSrc = float(grb.findtext("./RA"))
decSrc = float(grb.findtext("./DEC"))
print ""
print "==============="
print "GRB", nameGrb
print "==============="
print "(", raSrc, ",", decSrc, "), Error radius:", err_rad, "Trigger MET:", trigger_time
nEvent = chainData.GetEntries()
print "Total number of events:", nEvent
#distCut = min(7.0, 5.0+err_rad)
# Plot
#mgr = ROOT.TMultiGraph("mgr", "Gamma-like events within {0} deg".format(distCut))
mgr = ROOT.TMultiGraph("mgr", "Gamma-like events around the GRB")
greOn = []
greOff=[]
for pC in range(len(aaStrSelect)):
greOn.append([])
greOff.append([])
for qC in range(len(aaStrSelect[pC])):
greOn[-1].append(ROOT.TGraphErrors())
greOn[-1][-1].SetName("greOn_{0}_{1}".format(pC, qC))
greOn[-1][-1].SetTitle("{0} ON".format(aaStrSelect[pC][qC]))
greOn[-1][-1].SetMarkerStyle(20)
if pC==0:
greOn[-1][-1].SetMarkerColor(13-12*qC)
elif pC==1:
greOn[-1][-1].SetMarkerColor(kRed+3*(qC-2))
greOn[-1][-1].SetMarkerStyle(20)
mgr.Add(greOn[-1][-1])
greOff[-1].append([])
for hRegio in range(nOff):
greOff[-1][-1].append(ROOT.TGraphErrors())
greOff[-1][-1][-1].SetName("greOff_{0}_{1}_{2}".format(pC, qC, hRegio+1))
greOff[-1][-1][-1].SetTitle("{0} Off{1} events".format(aaStrSelect[pC][qC], hRegio+1))
if pC==0:
greOff[-1][-1][-1].SetMarkerColor(13-12*qC)
elif pC==1:
greOff[-1][-1][-1].SetMarkerColor(kRed+3*(qC-2))
greOff[-1][-1][-1].SetMarkerStyle(25+hRegio)
mgr.Add(greOff[-1][-1][-1])
mgrZenith = ROOT.TMultiGraph("mgrZenith", "Zenith angle within ON/OFF regions")
grZenith = []
for gRegio in range(nOff+1):
grZenith.append(ROOT.TGraph())
grZenith[-1].SetName("grZenith{0}".format(gRegio))
if gRegio==0:
grZenith[0].SetTitle("ON")
else:
grZenith[gRegio].SetTitle("OFF{0}".format(gRegio))
grZenith[gRegio].SetMarkerStyle(7)
grZenith[gRegio].SetMarkerColor(akColor(gRegio))
mgrZenith.Add(grZenith[-1])
#------ TTree setting -----
trm = []
c = np.zeros(1, dtype=np.int32)
s = np.zeros(1, dtype=np.int32)
ty = np.zeros(1, dtype=np.int32)
ngrb = np.zeros(1, dtype=int)
evid = np.zeros(1, dtype=int)
run = np.zeros(1, dtype=int)
e = np.zeros(1, dtype=float)
t = np.zeros(1, dtype=float)
lt = np.zeros(1, dtype=float)
ra = np.zeros(1, dtype=float)
dec = np.zeros(1, dtype=float)
l = np.zeros(1, dtype=float)
b = np.zeros(1, dtype=float)
z = np.zeros(1, dtype=float)
az = np.zeros(1, dtype=float)
bep = np.zeros(1, dtype=float)
p = np.zeros(1, dtype=float)
ctp = np.zeros(1, dtype=float)
rawe = np.zeros(1, dtype=float)
cth = np.zeros(1, dtype=float)
th = np.zeros(1, dtype=float)
phi = np.zeros(1, dtype=float)
dist = np.zeros(1, dtype=float)
grbt = np.zeros(1, dtype=float)
flag = np.zeros(1, dtype=int)
# for iRegio in range(1+nOff):
#if iRegio==0:
#trm.append(ROOT.TTree("trGammas", "Gamma-like events"))
trm = ROOT.TTree("trGammas", "Gamma-like events")
#else:
# trm.append(ROOT.TTree("trGammasOFF{0}".format(iRegio), "Gamma-like events in the OFF region {0}".format(iRegio)))
trm.Branch('Category',c,'c/I') # 1:CalTkr or 2:CalOnly
trm.Branch('EVENT_CLASS',s,'s/I') # 4: TRANSIENT100, 128: SOURCE, 4096: CalOnly_10xEGB, 8192: CalOnly_3xEGB, 16384: CalOnly_1xEGB
trm.Branch('EVENT_TYPE',ty,'ty/I') # 1: FRONT, 2: BACK, 4, PSF0, ... , 32: PSF3, 64: EDISP0, ... , 512: EDISP3
trm.Branch('GRB_NAME',ngrb,'ngrb/I') #EvtEventId
trm.Branch('EVENT_ID',evid,'evid/I') #EvtEventId
trm.Branch('RUN_ID',run,'run/I') #EvtRun
trm.Branch('ENERGY',e,'e/D') #FT1Energy
trm.Branch('TIME',t,'t/D') #EvtElapsedTime
trm.Branch('LIVETIME',lt,'lt/D') #EvtLiveTime
trm.Branch('RA',ra,'ra/D') #FT1Ra
trm.Branch('DEC',dec,'dec/D') #FT1Dec
trm.Branch('L',l,'l/D') #FT1L
trm.Branch('B',b,'b/D') #FT1B
trm.Branch('ZENITH_ANGLE',z,'z/D') #FT1ZenithTheta
trm.Branch('EARTH_AZIMUTH_ANGLE',az,'az/D') #FT1EarthAzimuth
trm.Branch('WP8CTCalOnlyBestEnergyProb',bep,'bep/D') # (WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0
trm.Branch('WP8CTCalOnlyProb',p,'p/D') # (S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0
trm.Branch('WP8CTAllProb',ctp,'ctp/D') #WP8CTAllProb
trm.Branch('CalEnergyRaw',rawe,'rawe/D') #CalEnergyRaw
trm.Branch('CosTHETA',cth,'cth/D') #
trm.Branch('THETA',th,'th/D') # FT1Theta or -Cal1MomZDir
trm.Branch('PHI',phi,'phi/D') # FT1Phi or Cal1MomYDir/Cal1MomXDir
trm.Branch('DIST',dist,'dist/D')
trm.Branch('GRB_TIME',grbt,'grbt/D')
trm.Branch('FLAG',flag,'flag/I') #flag for this GRB, 0: On, 1,2,3,4,...: Off, -1: Other
timeStart = datetime.datetime.now()
#print timeStart
vecTgt = []
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc)), cos(radians(decSrc))*sin(radians(raSrc)), sin(radians(decSrc))]))
vecTgt.append(np.array([cos(radians(decSrc-degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc-degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc-degOffOffset))]))
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc-degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc-degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))
vecTgt.append(np.array([cos(radians(decSrc+degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc+degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc+degOffOffset))]))
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc+degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc+degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))
for iEvent in range(nEvent):
chainData.GetEntry(iEvent)
flag[0] = -1;
e[0] = chainData.EvtJointLogEnergy
rawe[0] = chainData.CalEnergyRaw
c[0] = 0
s[0] = 0
ty[0] = 0
ngrb[0] = float(nameGrb)
evid[0] = chainData.EvtEventId
run[0] = chainData.EvtRun
t[0] = chainData.EvtElapsedTime
grbt[0] = t[0] - trigger_time
lt[0] = chainData.EvtLiveTime
bep[0] = (chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0
p[0] = (chainData.S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0
ctp[0] = chainData.WP8CTAllProb
binEnergy = max(min(nEnergyBin-1, int((e[0]-vEnergyLow)/vEnergyBinWidth * (int(e[0]<vEnergyLow)*(-2)+1)) ), 0)
if (chainData.TkrNumTracks>0) and (math.log10(max(chainData.CalTrackAngle,1E-4)) <= (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000)) and chainData.EvtCalCsIRLn>4 and chainData.WP8CTPSFTail>0.05 and chainData.WP8CTBestEnergyProb>0.1 and chainData.FswGamState == 0: # CalTkr
c[0] = 1
z[0] = chainData.FT1ZenithTheta
az[0] = chainData.FT1EarthAzimuth
ra[0] = chainData.FT1Ra
dec[0] = chainData.FT1Dec
l[0] = chainData.FT1L
b[0] = chainData.FT1B
cth[0] = chainData.Cal1MomZDir
th[0] = chainData.FT1Theta
phi[0] = chainData.FT1Phi
if ( -math.log10(1.0-ctp[0]) >= (0.010000)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.010000)*(1.0)+(0.000000)*(math.pow((e[0]-1.250000)/0.500000,1))+(0.018669)*(math.pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.028669)*(1.0)+(0.037338)*(math.pow((e[0]-1.750000)/0.500000,1))+(-0.017111)*(math.pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((0.048897)*(1.0)+(0.003117)*(math.pow((e[0]-2.250000)/0.500000,1))+(0.001967)*(math.pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 3.250000))*((0.053980)*(1.0)+(0.007050)*(math.pow((e[0]-2.750000)/0.500000,1))+(-0.003525)*(math.pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((0.057505)*(1.0)+(0.000000)*(math.pow((e[0]-3.250000)/0.500000,1))+(0.121963)*(math.pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((0.179468)*(1.0)+(0.243925)*(math.pow((e[0]-3.750000)/0.500000,1))+(0.493075)*(math.pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((0.916468)*(1.0)+(1.230076)*(math.pow((e[0]-4.250000)/0.500000,1))+(-0.501532)*(math.pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((1.645012)*(1.0)+(0.227011)*(math.pow((e[0]-4.750000)/0.500000,1))+(0.029483)*(math.pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (2.216967)*(e[0] > 5.750000) ): #P8R1_TRANSIENT_R100
if ( -math.log10(1.0-ctp[0]) >= (0.080914)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.080914)*(1.0)+(0.108897)*(pow((e[0]-1.250000)/0.500000,1))+(0.377870)*(pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.567682)*(1.0)+(0.864637)*(pow((e[0]-1.750000)/0.500000,1))+(-0.182318)*(pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((1.250000)*(1.0)+(0.500000)*(pow((e[0]-2.250000)/0.500000,1))+(-0.085000)*(pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 3.250000))*((1.665000)*(1.0)+(0.330000)*(pow((e[0]-2.750000)/0.500000,1))+(-0.165000)*(pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((1.830000)*(1.0)+(0.000000)*(pow((e[0]-3.250000)/0.500000,1))+(0.285000)*(pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((2.115000)*(1.0)+(0.570000)*(pow((e[0]-3.750000)/0.500000,1))+(-0.185000)*(pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((2.500000)*(1.0)+(0.200000)*(pow((e[0]-4.250000)/0.500000,1))+(0.100000)*(pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((2.800000)*(1.0)+(0.400000)*(pow((e[0]-4.750000)/0.500000,1))+(-0.112171)*(pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (3.151318)*(e[0] > 5.750000) ) and ( chainData.WP8CTAllBkProb >= (0.366167)*(e[0] < 1.250000) + ((e[0] <= 1.541667)*((0.366167)*(1.0)+(0.028500)*(pow((e[0]-1.250000)/0.291667,1))+(-0.056500)*(pow((e[0]-1.250000)/0.291667,2))+(0.106667)*(pow((e[0]-1.250000)/0.291667,3)))+((e[0] > 1.541667)*(e[0] <= 1.833333))*((0.444833)*(1.0)+(0.235500)*(pow((e[0]-1.541667)/0.291667,1))+(0.263500)*(pow((e[0]-1.541667)/0.291667,2))+(-0.162667)*(pow((e[0]-1.541667)/0.291667,3)))+((e[0] > 1.833333)*(e[0] <= 2.125000))*((0.781167)*(1.0)+(0.274500)*(pow((e[0]-1.833333)/0.291667,1))+(-0.224500)*(pow((e[0]-1.833333)/0.291667,2))+(0.072667)*(pow((e[0]-1.833333)/0.291667,3)))+(e[0] > 
2.125000)*((0.903833)*(1.0)+(0.043500)*(pow((e[0]-2.125000)/0.291667,1))+(-0.006500)*(pow((e[0]-2.125000)/0.291667,2))+(-0.000333)*(pow((e[0]-2.125000)/0.291667,3))))*(e[0] >= 1.250000 and e[0] <= 3.000000) + (0.966833)*(e[0] > 3.000000) ): #P8R1_SOURCE_AllProbFilter&&P8R1_SOURCE_AllBkProbFilter
s[0] = 128#3
aaNumEventClass[0][1] = aaNumEventClass[0][1]+1
else:
s[0] = 4#1
aaNumEventClass[0][0] = aaNumEventClass[0][0]+1
#trm.Fill()
elif chainData.Cal1RawEnergySum>=20000 and chainData.Cal1MomZDir>=0.1 and chainData.Cal1MomZCrossSide840>=0.0 and (chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0>0.06 and (chainData.TkrNumTracks==0 or (math.log10(max(chainData.CalTrackAngle,1E-4)) > (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000))) and chainData.Acd2Cal1VetoSigmaHit>0 and chainData.Cal1TransRms>=10 and chainData.Cal1TransRms<70 and chainData.Cal1MomNumIterations>0 and chainData.FswGamState == 0: # CalOnly
c[0] = 2
z[0] = chainData.FT1CalZenithTheta
az[0] = chainData.FT1CalEarthAzimuth
ra[0] = chainData.FT1CalRa
dec[0] = chainData.FT1CalDec
l[0] = chainData.FT1CalL
b[0] = chainData.FT1CalB
cth[0] = chainData.Cal1MomZDir
th[0] = math.degrees(math.acos(chainData.Cal1MomZDir))
phi[0] = math.degrees(math.atan2(chainData.Cal1MomYDir, chainData.Cal1MomXDir))
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][0]: #CalOnly_R100
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][1]: #CalOnly_R30
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][2]: #CalOnly_R10
s[0]=16384#3
aaNumEventClass[1][2] = aaNumEventClass[1][2]+1
else:
s[0] = 8192#2
aaNumEventClass[1][1] = aaNumEventClass[1][1]+1
else:
s[0] = 4096#1
aaNumEventClass[1][0] = aaNumEventClass[1][0]+1
if(e[0]<4.55 or cth[0]<0.6):
ty[0] = ty[0]+2 #BACK
else:
ty[0] = ty[0]+1 #FRONT
vecEvt = np.array([cos(radians(dec[0]))*cos(radians(ra[0])), cos(radians(dec[0]))*sin(radians(ra[0])), sin(radians(dec[0]))])
aDist = []
distCut = htgPerf.getPSF95_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]) + err_rad
for iRegio in range(1+nOff):
radTheta = acos(np.dot(vecTgt[iRegio], vecEvt))
aDist.append(degrees(radTheta))
if iRegio==0:
dist[0] = aDist[0]
if aDist[iRegio] < distCut:
grZenith[iRegio].SetPoint(grZenith[iRegio].GetN(), t[0]-trigger_time, z[0])
#if s[0]>0:
#print "============================"
if iRegio==0:
flag[0] = 0
if s[0]>0:
print ""
print "== ON photon candidate!!! =="
if c[0] == 1:
if s[0] == 4:
greOn[0][0].SetPoint(greOn[0][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 128:
greOn[0][1].SetPoint(greOn[0][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif c[0] == 2:
if s[0] == 4096:
greOn[1][0].SetPoint(greOn[1][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 8192:
greOn[1][1].SetPoint(greOn[1][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 16384:
greOn[1][2].SetPoint(greOn[1][2].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif iRegio>0:
flag[0] = iRegio
if s[0]>0:
#print ""
#print "== OFF{0} photon candidate! ==".format(iRegio)
if c[0] == 1:
if s[0] == 4:
greOff[0][0][iRegio-1].SetPoint(greOff[0][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 128:
greOff[0][1][iRegio-1].SetPoint(greOff[0][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif c[0] == 2:
if s[0] == 4096:
greOff[1][0][iRegio-1].SetPoint(greOff[1][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 8192:
greOff[1][1][iRegio-1].SetPoint(greOff[1][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 16384:
greOff[1][2][iRegio-1].SetPoint(greOff[1][2][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
if s[0]>0 and iRegio==0:
print "Event No.", iEvent
print "Event category:", cfg.aStrSelect[c[0]-1]
print "Event class:", s[0]
print "Time from the trigger:", t[0]-trigger_time, "s"
print "Anglular distance:", dist[0], "deg"
print "PSF68:", htgPerf.getPSF68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), "deg"
print "Energy:", pow(10,e[0]-3), "GeV"
print "Edisp68:", 100*htgPerf.getEdisp68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), "%"
print "Cos( inclination angle ):", cth[0]
print "Zenith angle:", z[0], "deg"
print "Run ID:", run[0]
print "Event ID:", evid[0]
trm.Fill()
# trm.Fill()
if iEvent%(nEvent/200)==0:
rate = int((iEvent*100.)/nEvent+0.5)
if rate>0:
nt = (datetime.datetime.now() - timeStart).seconds * (100.-rate)/rate
meter = "\r[{0}{1}] {2} Wait {3} hr {4} min".format("=" * rate, ' ' * (100-rate), aaNumEventClass, int(nt/3600), (int(nt)%3600)/60+1)
else:
meter = "\r[{0}{1}]".format("=" * rate, ' ' * (100-rate))
sys.stdout.write(meter)
sys.stdout.flush()
cEvent = ROOT.TCanvas("cEvent", "GRB {0} gamma-like events within {1} deg".format(nameGrb, distCut))
cEvent.cd()
mgr.Draw("AP")
mgr.GetXaxis().SetTitle("Time [s]")
mgr.GetYaxis().SetTitle("Energy [GeV]")
leg = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)
for pD in range(len(aaStrSelect)):
for qD in range(len(aaStrSelect[pD])):
leg.AddEntry(greOn[pD][qD], greOn[pD][qD].GetTitle(), "p")
for rr in range(nOff):
leg.AddEntry(greOff[1][0][rr], "OFF{0}".format(rr+1), "p")
leg.Draw("same")
cZenith = ROOT.TCanvas("cZenith", "Zenith angle of ON/OFF events")
cZenith.cd()
mgrZenith.Draw("AP")
mgrZenith.GetXaxis().SetTitle("Time [s]")
mgrZenith.GetYaxis().SetTitle("Zenith angle [deg]")
legZenith = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)
for rz in range(nOff+1):
legZenith.AddEntry(grZenith[rz], grZenith[rz].GetTitle(), "p")
legZenith.Draw("same")
print ""
fileOut.cd()
trm.Write()
cEvent.Write()
cZenith.Write()
print "Finished!"
|
normal
|
{
"blob_id": "66cdeaa106a8f22dbfd64c12c4cb04fdb9f5b453",
"index": 5160,
"step-1": "#!/usr/bin/env python\n\nimport sys\nimport ROOT\nfrom ROOT import TTree\nfrom ROOT import TChain\nimport numpy as np\nimport yaml\nimport xml.etree.ElementTree as ET\nimport datetime\n#sys.path.append(\"/disk/gamma/cta/store/takhsm/FermiMVA/AllSky\")\n#sys.path.append(\"/home/takhsm/FermiMVA/python\")\nROOT.gROOT.SetBatch()\nfrom array import array\nimport math\nfrom math import cos, sin, tan, acos, asin, atan, radians, degrees\nfrom pColor import *\n\nROOT.gStyle.SetPadGridX(True)\nROOT.gStyle.SetPadGridY(True)\nROOT.gStyle.SetPadTickX(True)\nROOT.gStyle.SetPadTickY(True)\n\n#from pCutBDT import cutBDT\nfrom pAnalysisConfig import *\n\n# ----- Event class setup -----\npar = sys.argv\ncfg = ClassConfig('Both', [10, 3, 1], 1)\naCutEGB = cfg.aCutEGB\naaStrSelect = cfg.aaStrSelect\nnStartBin = cfg.nStartBin\n\nnameFileRoc = \"/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S18/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZCS000wwoTRKwoMCZDIR00woRW_15_S11D_catTwoZDIR050Log_roc.root\" #par[2]\nnameVarBDT = \"S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06\"\nnameVarBDT = par[1]\nnameFileSuffix = par[1]\ncutMVA = CutBDT(nameFileRoc, aCutEGB)\naaValCutBDT = cutMVA.aaValCutBDT[0:]\nprint aaValCutBDT\nnEnergyBin = cutMVA.aaValCutBDT[0]['numBin'] - nStartBin\nvEnergyBinWidth = cutMVA.aaValCutBDT[0]['widthBin']\nvEnergyLow = cutMVA.aaValCutBDT[0]['edgeLow'] + nStartBin*vEnergyBinWidth\nvEnergyUp = vEnergyLow + nEnergyBin*vEnergyBinWidth\naaNumEventClass=[]\n#aColor = []\nfor hS in range(len(aaStrSelect)):\n aaNumEventClass.append([])\n for iS in range(len(aaStrSelect[hS])):\n aaNumEventClass[hS].append(0)\n\n#IRF\nlistPathFilePerf = [['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_TRANSIENT100_P8R2_TRANSIENT100_perf.root', 
'/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_SOURCE_P8R2_SOURCE_perf.root'], \n ['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R100_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R30_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R10_perf.root']]\nhtgPerf = CutPerformanceHtg(listPathFilePerf)\n\n# Data\npathList = \"/nfs/farm/g/glast/u/mtakahas/data/catalogue/PublicTableGRBs.xml\"\nfileList = ET.parse(pathList)\nrtXml = fileList.getroot()\n\n# OFF regions\nnOff = 4;\ndegOffOffset = 14.0;\n\nprint \"====================\"\n# Making all sky map\nlistFileIn = par[2:]\nprint listFileIn\n\naliasSelections = yaml.load(open('/afs/slac.stanford.edu/u/gl/mtakahas/eventSelect/config/pass8_event_selections.yaml','r'))\nfor nameFileIn in listFileIn:\n print \"\"\n print \"========================================================================\"\n fileIn = ROOT.TFile(nameFileIn, \"READ\")\n print fileIn.GetName()\n print \"========================================================================\"\n chainData = fileIn.Get(\"MeritTuple\")\n nameFileFriend = nameFileIn.replace(\".root\", \"_\" + nameVarBDT + \".root\")\n chainData.AddFriend(\"friendTemp=MeritTuple\", nameFileFriend)\n for k,v in aliasSelections.iteritems(): \n chainData.SetAlias(k,v)\n\n nameFileOut = nameFileIn[:-5] + \"_PHOTON_\" + nameVarBDT + nameFileSuffix + \".root\"\n fileOut = 
ROOT.TFile(nameFileOut, 'UPDATE')\n\n #------ Source data -----\n indexGrbName = nameFileIn.rindex('GRB') + 3\n indexGrbNameEnd = indexGrbName + 9\n nameGrb = nameFileIn[indexGrbName:indexGrbNameEnd]\n for grb in rtXml: #for iGrb in range(trList.GetEntries())\n if grb.findtext(\"./GRBNAME\")==nameGrb: \n trigger_time = float(grb.findtext(\"./MET\"))\n if grb.findtext(\"./ERROR\") == \"--\" or grb.findtext(\"./ERROR\") == \"\":\n if grb.findtext(\"./LATERROR\") == \"--\" or grb.findtext(\"./LATERROR\") == \"\":\n err_rad = 0.\n else:\n err_rad = float(grb.findtext(\"./LATERROR\"))\n else:\n if float(grb.findtext(\"./ERROR\")) > float(grb.findtext(\"./LATERROR\")):\n err_rad = float(grb.findtext(\"./LATERROR\")) \n raSrc = float(grb.findtext(\"./LATRA\"))\n decSrc = float(grb.findtext(\"./LATDEC\"))\n else:\n err_rad = float(grb.findtext(\"./ERROR\")) \n raSrc = float(grb.findtext(\"./RA\"))\n decSrc = float(grb.findtext(\"./DEC\"))\n print \"\"\n print \"===============\"\n print \"GRB\", nameGrb\n print \"===============\"\n print \"(\", raSrc, \",\", decSrc, \"), Error radius:\", err_rad, \"Trigger MET:\", trigger_time \n nEvent = chainData.GetEntries()\n print \"Total number of events:\", nEvent\n #distCut = min(7.0, 5.0+err_rad)\n\n # Plot\n #mgr = ROOT.TMultiGraph(\"mgr\", \"Gamma-like events within {0} deg\".format(distCut))\n mgr = ROOT.TMultiGraph(\"mgr\", \"Gamma-like events around the GRB\")\n greOn = []\n greOff=[]\n for pC in range(len(aaStrSelect)):\n greOn.append([])\n greOff.append([])\n for qC in range(len(aaStrSelect[pC])):\n greOn[-1].append(ROOT.TGraphErrors())\n greOn[-1][-1].SetName(\"greOn_{0}_{1}\".format(pC, qC))\n greOn[-1][-1].SetTitle(\"{0} ON\".format(aaStrSelect[pC][qC]))\n greOn[-1][-1].SetMarkerStyle(20)\n if pC==0:\n greOn[-1][-1].SetMarkerColor(13-12*qC)\n elif pC==1:\n greOn[-1][-1].SetMarkerColor(kRed+3*(qC-2))\n greOn[-1][-1].SetMarkerStyle(20)\n mgr.Add(greOn[-1][-1])\n greOff[-1].append([])\n for hRegio in range(nOff):\n 
greOff[-1][-1].append(ROOT.TGraphErrors())\n greOff[-1][-1][-1].SetName(\"greOff_{0}_{1}_{2}\".format(pC, qC, hRegio+1))\n greOff[-1][-1][-1].SetTitle(\"{0} Off{1} events\".format(aaStrSelect[pC][qC], hRegio+1))\n if pC==0:\n greOff[-1][-1][-1].SetMarkerColor(13-12*qC)\n elif pC==1:\n greOff[-1][-1][-1].SetMarkerColor(kRed+3*(qC-2))\n greOff[-1][-1][-1].SetMarkerStyle(25+hRegio)\n mgr.Add(greOff[-1][-1][-1])\n mgrZenith = ROOT.TMultiGraph(\"mgrZenith\", \"Zenith angle within ON/OFF regions\")\n grZenith = []\n for gRegio in range(nOff+1):\n grZenith.append(ROOT.TGraph())\n grZenith[-1].SetName(\"grZenith{0}\".format(gRegio))\n if gRegio==0:\n grZenith[0].SetTitle(\"ON\")\n else:\n grZenith[gRegio].SetTitle(\"OFF{0}\".format(gRegio))\n grZenith[gRegio].SetMarkerStyle(7)\n grZenith[gRegio].SetMarkerColor(akColor(gRegio))\n mgrZenith.Add(grZenith[-1])\n\n #------ TTree setting -----\n trm = []\n c = np.zeros(1, dtype=np.int32)\n s = np.zeros(1, dtype=np.int32)\n ty = np.zeros(1, dtype=np.int32)\n ngrb = np.zeros(1, dtype=int)\n evid = np.zeros(1, dtype=int)\n run = np.zeros(1, dtype=int)\n e = np.zeros(1, dtype=float)\n t = np.zeros(1, dtype=float)\n lt = np.zeros(1, dtype=float)\n ra = np.zeros(1, dtype=float)\n dec = np.zeros(1, dtype=float)\n l = np.zeros(1, dtype=float)\n b = np.zeros(1, dtype=float)\n z = np.zeros(1, dtype=float)\n az = np.zeros(1, dtype=float)\n bep = np.zeros(1, dtype=float)\n p = np.zeros(1, dtype=float)\n ctp = np.zeros(1, dtype=float)\n rawe = np.zeros(1, dtype=float)\n cth = np.zeros(1, dtype=float)\n th = np.zeros(1, dtype=float)\n phi = np.zeros(1, dtype=float)\n dist = np.zeros(1, dtype=float)\n grbt = np.zeros(1, dtype=float)\n flag = np.zeros(1, dtype=int)\n\n# for iRegio in range(1+nOff):\n #if iRegio==0:\n #trm.append(ROOT.TTree(\"trGammas\", \"Gamma-like events\"))\n trm = ROOT.TTree(\"trGammas\", \"Gamma-like events\")\n #else:\n # trm.append(ROOT.TTree(\"trGammasOFF{0}\".format(iRegio), \"Gamma-like events in the OFF region 
{0}\".format(iRegio)))\n trm.Branch('Category',c,'c/I') # 1:CalTkr or 2:CalOnly\n trm.Branch('EVENT_CLASS',s,'s/I') # 4: TRANSIENT100, 128: SOURCE, 4096: CalOnly_10xEGB, 8192: CalOnly_3xEGB, 16384: CalOnly_1xEGB\n trm.Branch('EVENT_TYPE',ty,'ty/I') # 1: FRONT, 2: BACK, 4, PSF0, ... , 32: PSF3, 64: EDISP0, ... , 512: EDISP3\n trm.Branch('GRB_NAME',ngrb,'ngrb/I') #EvtEventId\n trm.Branch('EVENT_ID',evid,'evid/I') #EvtEventId\n trm.Branch('RUN_ID',run,'run/I') #EvtRun\n trm.Branch('ENERGY',e,'e/D') #FT1Energy\n trm.Branch('TIME',t,'t/D') #EvtElapsedTime\n trm.Branch('LIVETIME',lt,'lt/D') #EvtLiveTime\n trm.Branch('RA',ra,'ra/D') #FT1Ra\n trm.Branch('DEC',dec,'dec/D') #FT1Dec\n trm.Branch('L',l,'l/D') #FT1L\n trm.Branch('B',b,'b/D') #FT1B\n trm.Branch('ZENITH_ANGLE',z,'z/D') #FT1ZenithTheta\n trm.Branch('EARTH_AZIMUTH_ANGLE',az,'az/D') #FT1EarthAzimuth\n trm.Branch('WP8CTCalOnlyBestEnergyProb',bep,'bep/D') # (WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0\n trm.Branch('WP8CTCalOnlyProb',p,'p/D') # (S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0\n trm.Branch('WP8CTAllProb',ctp,'ctp/D') #WP8CTAllProb\n trm.Branch('CalEnergyRaw',rawe,'rawe/D') #CalEnergyRaw\n trm.Branch('CosTHETA',cth,'cth/D') #\n trm.Branch('THETA',th,'th/D') # FT1Theta or -Cal1MomZDir\n trm.Branch('PHI',phi,'phi/D') # FT1Phi or Cal1MomYDir/Cal1MomXDir\n trm.Branch('DIST',dist,'dist/D')\n trm.Branch('GRB_TIME',grbt,'grbt/D') \n trm.Branch('FLAG',flag,'flag/I') #flag for this GRB, 0: On, 1,2,3,4,...: Off, -1: Other\n\n timeStart = datetime.datetime.now()\n #print timeStart\n\n vecTgt = []\n vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc)), cos(radians(decSrc))*sin(radians(raSrc)), sin(radians(decSrc))]))\n vecTgt.append(np.array([cos(radians(decSrc-degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc-degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc-degOffOffset))]))\n 
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc-degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc-degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))\n vecTgt.append(np.array([cos(radians(decSrc+degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc+degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc+degOffOffset))]))\n vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc+degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc+degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))\n\n for iEvent in range(nEvent):\n chainData.GetEntry(iEvent)\n flag[0] = -1;\n e[0] = chainData.EvtJointLogEnergy\n rawe[0] = chainData.CalEnergyRaw\n c[0] = 0\n s[0] = 0\n ty[0] = 0\n ngrb[0] = float(nameGrb)\n evid[0] = chainData.EvtEventId\n run[0] = chainData.EvtRun\n t[0] = chainData.EvtElapsedTime\n grbt[0] = t[0] - trigger_time\n lt[0] = chainData.EvtLiveTime\n bep[0] = (chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0\n p[0] = (chainData.S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0\n ctp[0] = chainData.WP8CTAllProb\n binEnergy = max(min(nEnergyBin-1, int((e[0]-vEnergyLow)/vEnergyBinWidth * (int(e[0]<vEnergyLow)*(-2)+1)) ), 0)\n if (chainData.TkrNumTracks>0) and (math.log10(max(chainData.CalTrackAngle,1E-4)) <= (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000)) and chainData.EvtCalCsIRLn>4 and chainData.WP8CTPSFTail>0.05 and chainData.WP8CTBestEnergyProb>0.1 and chainData.FswGamState == 0: # CalTkr\n c[0] = 1\n z[0] = chainData.FT1ZenithTheta\n az[0] = chainData.FT1EarthAzimuth\n ra[0] = chainData.FT1Ra\n dec[0] = chainData.FT1Dec\n l[0] = chainData.FT1L\n b[0] = chainData.FT1B\n cth[0] = chainData.Cal1MomZDir\n th[0] = chainData.FT1Theta\n 
phi[0] = chainData.FT1Phi\n if ( -math.log10(1.0-ctp[0]) >= (0.010000)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.010000)*(1.0)+(0.000000)*(math.pow((e[0]-1.250000)/0.500000,1))+(0.018669)*(math.pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.028669)*(1.0)+(0.037338)*(math.pow((e[0]-1.750000)/0.500000,1))+(-0.017111)*(math.pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((0.048897)*(1.0)+(0.003117)*(math.pow((e[0]-2.250000)/0.500000,1))+(0.001967)*(math.pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 3.250000))*((0.053980)*(1.0)+(0.007050)*(math.pow((e[0]-2.750000)/0.500000,1))+(-0.003525)*(math.pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((0.057505)*(1.0)+(0.000000)*(math.pow((e[0]-3.250000)/0.500000,1))+(0.121963)*(math.pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((0.179468)*(1.0)+(0.243925)*(math.pow((e[0]-3.750000)/0.500000,1))+(0.493075)*(math.pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((0.916468)*(1.0)+(1.230076)*(math.pow((e[0]-4.250000)/0.500000,1))+(-0.501532)*(math.pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((1.645012)*(1.0)+(0.227011)*(math.pow((e[0]-4.750000)/0.500000,1))+(0.029483)*(math.pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (2.216967)*(e[0] > 5.750000) ): #P8R1_TRANSIENT_R100\n if ( -math.log10(1.0-ctp[0]) >= (0.080914)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.080914)*(1.0)+(0.108897)*(pow((e[0]-1.250000)/0.500000,1))+(0.377870)*(pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.567682)*(1.0)+(0.864637)*(pow((e[0]-1.750000)/0.500000,1))+(-0.182318)*(pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((1.250000)*(1.0)+(0.500000)*(pow((e[0]-2.250000)/0.500000,1))+(-0.085000)*(pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 
3.250000))*((1.665000)*(1.0)+(0.330000)*(pow((e[0]-2.750000)/0.500000,1))+(-0.165000)*(pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((1.830000)*(1.0)+(0.000000)*(pow((e[0]-3.250000)/0.500000,1))+(0.285000)*(pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((2.115000)*(1.0)+(0.570000)*(pow((e[0]-3.750000)/0.500000,1))+(-0.185000)*(pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((2.500000)*(1.0)+(0.200000)*(pow((e[0]-4.250000)/0.500000,1))+(0.100000)*(pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((2.800000)*(1.0)+(0.400000)*(pow((e[0]-4.750000)/0.500000,1))+(-0.112171)*(pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (3.151318)*(e[0] > 5.750000) ) and ( chainData.WP8CTAllBkProb >= (0.366167)*(e[0] < 1.250000) + ((e[0] <= 1.541667)*((0.366167)*(1.0)+(0.028500)*(pow((e[0]-1.250000)/0.291667,1))+(-0.056500)*(pow((e[0]-1.250000)/0.291667,2))+(0.106667)*(pow((e[0]-1.250000)/0.291667,3)))+((e[0] > 1.541667)*(e[0] <= 1.833333))*((0.444833)*(1.0)+(0.235500)*(pow((e[0]-1.541667)/0.291667,1))+(0.263500)*(pow((e[0]-1.541667)/0.291667,2))+(-0.162667)*(pow((e[0]-1.541667)/0.291667,3)))+((e[0] > 1.833333)*(e[0] <= 2.125000))*((0.781167)*(1.0)+(0.274500)*(pow((e[0]-1.833333)/0.291667,1))+(-0.224500)*(pow((e[0]-1.833333)/0.291667,2))+(0.072667)*(pow((e[0]-1.833333)/0.291667,3)))+(e[0] > 2.125000)*((0.903833)*(1.0)+(0.043500)*(pow((e[0]-2.125000)/0.291667,1))+(-0.006500)*(pow((e[0]-2.125000)/0.291667,2))+(-0.000333)*(pow((e[0]-2.125000)/0.291667,3))))*(e[0] >= 1.250000 and e[0] <= 3.000000) + (0.966833)*(e[0] > 3.000000) ): #P8R1_SOURCE_AllProbFilter&&P8R1_SOURCE_AllBkProbFilter\n s[0] = 128#3\n aaNumEventClass[0][1] = aaNumEventClass[0][1]+1\n else:\n s[0] = 4#1\n aaNumEventClass[0][0] = aaNumEventClass[0][0]+1\n #trm.Fill()\n elif chainData.Cal1RawEnergySum>=20000 and chainData.Cal1MomZDir>=0.1 and chainData.Cal1MomZCrossSide840>=0.0 and 
(chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0>0.06 and (chainData.TkrNumTracks==0 or (math.log10(max(chainData.CalTrackAngle,1E-4)) > (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000))) and chainData.Acd2Cal1VetoSigmaHit>0 and chainData.Cal1TransRms>=10 and chainData.Cal1TransRms<70 and chainData.Cal1MomNumIterations>0 and chainData.FswGamState == 0: # CalOnly\n c[0] = 2\n z[0] = chainData.FT1CalZenithTheta\n az[0] = chainData.FT1CalEarthAzimuth\n ra[0] = chainData.FT1CalRa\n dec[0] = chainData.FT1CalDec\n l[0] = chainData.FT1CalL\n b[0] = chainData.FT1CalB\n cth[0] = chainData.Cal1MomZDir\n th[0] = math.degrees(math.acos(chainData.Cal1MomZDir))\n phi[0] = math.degrees(math.atan2(chainData.Cal1MomYDir, chainData.Cal1MomXDir))\n if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][0]: #CalOnly_R100\n if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][1]: #CalOnly_R30\n if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][2]: #CalOnly_R10\n s[0]=16384#3\n aaNumEventClass[1][2] = aaNumEventClass[1][2]+1\n else:\n s[0] = 8192#2\n aaNumEventClass[1][1] = aaNumEventClass[1][1]+1\n else:\n s[0] = 4096#1\n aaNumEventClass[1][0] = aaNumEventClass[1][0]+1\n if(e[0]<4.55 or cth[0]<0.6):\n ty[0] = ty[0]+2 #BACK\n else:\n ty[0] = ty[0]+1 #FRONT\n\n vecEvt = np.array([cos(radians(dec[0]))*cos(radians(ra[0])), cos(radians(dec[0]))*sin(radians(ra[0])), sin(radians(dec[0]))])\n aDist = []\n distCut = htgPerf.getPSF95_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]) + err_rad\n for iRegio in range(1+nOff):\n radTheta = acos(np.dot(vecTgt[iRegio], vecEvt))\n aDist.append(degrees(radTheta))\n if iRegio==0:\n dist[0] = aDist[0]\n if aDist[iRegio] < distCut:\n grZenith[iRegio].SetPoint(grZenith[iRegio].GetN(), 
t[0]-trigger_time, z[0])\n #if s[0]>0:\n #print \"============================\"\n if iRegio==0:\n flag[0] = 0\n if s[0]>0:\n print \"\"\n print \"== ON photon candidate!!! ==\"\n if c[0] == 1:\n if s[0] == 4:\n greOn[0][0].SetPoint(greOn[0][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 128:\n greOn[0][1].SetPoint(greOn[0][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif c[0] == 2:\n if s[0] == 4096:\n greOn[1][0].SetPoint(greOn[1][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 8192:\n greOn[1][1].SetPoint(greOn[1][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 16384:\n greOn[1][2].SetPoint(greOn[1][2].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif iRegio>0:\n flag[0] = iRegio\n if s[0]>0:\n #print \"\"\n #print \"== OFF{0} photon candidate! ==\".format(iRegio)\n if c[0] == 1:\n if s[0] == 4:\n greOff[0][0][iRegio-1].SetPoint(greOff[0][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 128:\n greOff[0][1][iRegio-1].SetPoint(greOff[0][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif c[0] == 2:\n if s[0] == 4096:\n greOff[1][0][iRegio-1].SetPoint(greOff[1][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 8192:\n greOff[1][1][iRegio-1].SetPoint(greOff[1][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n elif s[0] == 16384:\n greOff[1][2][iRegio-1].SetPoint(greOff[1][2][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))\n if s[0]>0 and iRegio==0:\n print \"Event No.\", iEvent\n print \"Event category:\", cfg.aStrSelect[c[0]-1]\n print \"Event class:\", s[0]\n print \"Time from the trigger:\", t[0]-trigger_time, \"s\"\n print \"Anglular distance:\", dist[0], \"deg\"\n print \"PSF68:\", htgPerf.getPSF68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), \"deg\"\n print \"Energy:\", pow(10,e[0]-3), \"GeV\"\n print \"Edisp68:\", 100*htgPerf.getEdisp68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 
or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), \"%\"\n print \"Cos( inclination angle ):\", cth[0]\n print \"Zenith angle:\", z[0], \"deg\"\n print \"Run ID:\", run[0]\n print \"Event ID:\", evid[0]\n trm.Fill()\n# trm.Fill()\n if iEvent%(nEvent/200)==0:\n rate = int((iEvent*100.)/nEvent+0.5)\n if rate>0:\n nt = (datetime.datetime.now() - timeStart).seconds * (100.-rate)/rate\n meter = \"\\r[{0}{1}] {2} Wait {3} hr {4} min\".format(\"=\" * rate, ' ' * (100-rate), aaNumEventClass, int(nt/3600), (int(nt)%3600)/60+1)\n else:\n meter = \"\\r[{0}{1}]\".format(\"=\" * rate, ' ' * (100-rate))\n sys.stdout.write(meter)\n sys.stdout.flush()\n cEvent = ROOT.TCanvas(\"cEvent\", \"GRB {0} gamma-like events within {1} deg\".format(nameGrb, distCut))\n cEvent.cd()\n mgr.Draw(\"AP\")\n mgr.GetXaxis().SetTitle(\"Time [s]\")\n mgr.GetYaxis().SetTitle(\"Energy [GeV]\")\n leg = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)\n for pD in range(len(aaStrSelect)):\n for qD in range(len(aaStrSelect[pD])):\n leg.AddEntry(greOn[pD][qD], greOn[pD][qD].GetTitle(), \"p\")\n for rr in range(nOff):\n leg.AddEntry(greOff[1][0][rr], \"OFF{0}\".format(rr+1), \"p\")\n leg.Draw(\"same\")\n cZenith = ROOT.TCanvas(\"cZenith\", \"Zenith angle of ON/OFF events\")\n cZenith.cd()\n mgrZenith.Draw(\"AP\")\n mgrZenith.GetXaxis().SetTitle(\"Time [s]\")\n mgrZenith.GetYaxis().SetTitle(\"Zenith angle [deg]\")\n legZenith = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)\n for rz in range(nOff+1):\n legZenith.AddEntry(grZenith[rz], grZenith[rz].GetTitle(), \"p\")\n legZenith.Draw(\"same\")\n print \"\"\n fileOut.cd()\n trm.Write()\n cEvent.Write()\n cZenith.Write()\n print \"Finished!\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import collections
def solution(genres, plays):
    """Return song indices for a "best album" compilation.

    Songs are grouped by genre; genres are ordered by total play count
    (descending), and within each genre up to two songs are picked, ordered
    by play count (descending) with ties broken by the lower song index.

    Args:
        genres: genre label per song index.
        plays: play count per song index (extra entries beyond ``len(genres)``
            are ignored, matching the original behavior).

    Returns:
        List of song indices in album order.
    """
    # genre -> list of (index, plays); genre -> total plays.
    # NOTE: the previous implementation inverted totals into a
    # total -> genre dict, so two genres with the SAME total collided and
    # one genre's songs were silently dropped. Sorting the genre keys
    # directly fixes that; dict insertion order gives a deterministic
    # first-appearance tie-break between genres with equal totals.
    songs_by_genre = collections.defaultdict(list)
    total_by_genre = collections.defaultdict(int)
    for idx in range(len(genres)):
        songs_by_genre[genres[idx]].append((idx, plays[idx]))
        total_by_genre[genres[idx]] += plays[idx]

    answer = []
    # sorted() is stable even with reverse=True, so equal totals keep
    # first-appearance order.
    for genre in sorted(total_by_genre, key=total_by_genre.get, reverse=True):
        # Stable sort on plays (descending) keeps lower indices first on ties.
        ranked = sorted(songs_by_genre[genre], key=lambda s: s[1], reverse=True)
        answer.extend(idx for idx, _ in ranked[:2])
    return answer
# Smoke test: genre "A" totals 7 plays vs "B"'s 3, so A's top two songs come
# first; the two extra play counts beyond len(genres) are ignored.
print(solution(["A", "A", "B", "A", "B", "B", "A", "A", "A", "A"],
               [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]))
'''
[베스트 앨범] hash
문제설명:
노래는 인덱스(고유번호)로 구분
각 노래들의 재생 횟수를 더해서 가장 횟수가 높은 장르의 곡들을 먼저 수록
해당 장르 내에서는 재생 횟수가 높은 노래를 먼저 수록
한 장르에서 최대 2곡까지 수록할 수 있고 1곡밖에 없다면 1곡만 수록
만약 재생횟수가 같은 노래가 있다면 고유번호가 낮은 것을 우선 수록
풀이과정:
해쉬를 어디에 적용해야 할지 고민을 많이 한 문제
결국 고유번호 배열, 장르 배열, 재생횟수 배열 이렇게 3가지 배열이 있는 것이고
해쉬도 3개가 만들어져야함.
그리고 최고 많이 재생된 장르, 최고 많이 재생된 노래 들이라서 정렬을 선택했는데
노래의 재생횟수는 중첩이 가능해서 같은 경우 고유번호가 낮아야한다.
이를 위해 고유번호와 재생횟수를 함께 배열화 해서 하나의 딕셔너리에 장르별로(키) 묶어두고
람다로 정렬 진행, 다만 리벌스로 정렬해야 같은 경우 고유번호가 낮은게 앞으로 감.
또, 최대 많이 재생된 장르도 해쉬로 만들어두면 쉽게 될 것 같지만
해쉬 구조 자체를 정렬해야해서 힘듬.
그래서 해쉬가 만들어진 후에는 역으로 int: string 형태로 또 하나의 해쉬를 만들어서 해결했음.
'''
|
normal
|
{
"blob_id": "d56c80b4822b1bd0f2d4d816ed29a4da9d19a625",
"index": 3040,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef solution(genres, plays):\n answer = []\n cache = collections.defaultdict(list)\n genre_order = collections.defaultdict(int)\n order = collections.defaultdict()\n for i in range(len(genres)):\n cache[genres[i]].append([i, plays[i]])\n genre_order[genres[i]] += plays[i]\n for k in genre_order:\n order[genre_order[k]] = k\n key_list = sorted(order)\n for g in cache:\n cache[g].sort(key=lambda x: x[1], reverse=True)\n while key_list:\n od = order[key_list.pop()]\n if len(cache[od]) == 1:\n answer.append(cache[od][0][0])\n else:\n answer.append(cache[od][0][0])\n answer.append(cache[od][1][0])\n return answer\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef solution(genres, plays):\n answer = []\n cache = collections.defaultdict(list)\n genre_order = collections.defaultdict(int)\n order = collections.defaultdict()\n for i in range(len(genres)):\n cache[genres[i]].append([i, plays[i]])\n genre_order[genres[i]] += plays[i]\n for k in genre_order:\n order[genre_order[k]] = k\n key_list = sorted(order)\n for g in cache:\n cache[g].sort(key=lambda x: x[1], reverse=True)\n while key_list:\n od = order[key_list.pop()]\n if len(cache[od]) == 1:\n answer.append(cache[od][0][0])\n else:\n answer.append(cache[od][0][0])\n answer.append(cache[od][1][0])\n return answer\n\n\nprint(solution(['A', 'A', 'B', 'A', 'B', 'B', 'A', 'A', 'A', 'A'], [1, 1, 1,\n 1, 1, 1, 1, 1, 1, 1, 1, 1]))\n<mask token>\n",
"step-4": "import collections\n\n\ndef solution(genres, plays):\n answer = []\n cache = collections.defaultdict(list)\n genre_order = collections.defaultdict(int)\n order = collections.defaultdict()\n for i in range(len(genres)):\n cache[genres[i]].append([i, plays[i]])\n genre_order[genres[i]] += plays[i]\n for k in genre_order:\n order[genre_order[k]] = k\n key_list = sorted(order)\n for g in cache:\n cache[g].sort(key=lambda x: x[1], reverse=True)\n while key_list:\n od = order[key_list.pop()]\n if len(cache[od]) == 1:\n answer.append(cache[od][0][0])\n else:\n answer.append(cache[od][0][0])\n answer.append(cache[od][1][0])\n return answer\n\n\nprint(solution(['A', 'A', 'B', 'A', 'B', 'B', 'A', 'A', 'A', 'A'], [1, 1, 1,\n 1, 1, 1, 1, 1, 1, 1, 1, 1]))\n<mask token>\n",
"step-5": "import collections\ndef solution(genres, plays):\n answer = []\n cache = collections.defaultdict(list) # 장르 : [고유번호, 재생횟수]\n genre_order = collections.defaultdict(int) # 장르 : 전체재생횟수\n order = collections.defaultdict() # 전체재생횟수 : 장르\n # 첫번째 딕셔너리와 두번째 딕셔너리 생성\n for i in range(len(genres)):\n cache[genres[i]].append([i, plays[i]])\n genre_order[genres[i]] += plays[i]\n # 두번째 딕셔너리를 기반으로 세번째 딕셔너리 생성 -> key 와 value 를 뒤바꾼 구조임.\n for k in genre_order:\n order[genre_order[k]] = k\n key_list = sorted(order) # Key 가 int 가 됐으므로 key 기반 정렬이 가능해짐.\n # 첫번째 딕셔너리안에 value 에서 재생횟수를 기반으로 정렬, 같다면 고유번호 기반 정렬\n for g in cache:\n cache[g].sort(key=lambda x: (x[1]), reverse=True)\n # 정렬된 장르 순서를 담고 있는 key_list 에서 하나씩 장르를 꺼내서 order 에 키값으로 주어, 고유번호 부분을 꺼내서 answer 에 추가. (이미 재생횟수 기반으로 정렬한 거라서 바로 꺼낼 수 있음)\n while key_list:\n od = order[key_list.pop()]\n if len(cache[od]) == 1:\n answer.append(cache[od][0][0])\n else:\n answer.append(cache[od][0][0])\n answer.append(cache[od][1][0])\n return answer\n\n\nprint(solution([\"A\", \"A\", \"B\", \"A\", \"B\", \"B\", \"A\", \"A\", \"A\", \"A\"],\n [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]))\n\n'''\n[베스트 앨범] hash\n문제설명:\n 노래는 인덱스(고유번호)로 구분\n 각 노래들의 재생 횟수를 더해서 가장 횟수가 높은 장르의 곡들을 먼저 수록\n 해당 장르 내에서는 재생 횟수가 높은 노래를 먼저 수록\n 한 장르에서 최대 2곡까지 수록할 수 있고 1곡밖에 없다면 1곡만 수록\n 만약 재생횟수가 같은 노래가 있다면 고유번호가 낮은 것을 우선 수록\n풀이과정:\n 해쉬를 어디에 적용해야 할지 고민을 많이 한 문제\n 결국 고유번호 배열, 장르 배열, 재생횟수 배열 이렇게 3가지 배열이 있는 것이고\n 해쉬도 3개가 만들어져야함.\n 그리고 최고 많이 재생된 장르, 최고 많이 재생된 노래 들이라서 정렬을 선택했는데\n 노래의 재생횟수는 중첩이 가능해서 같은 경우 고유번호가 낮아야한다.\n 이를 위해 고유번호와 재생횟수를 함께 배열화 해서 하나의 딕셔너리에 장르별로(키) 묶어두고\n 람다로 정렬 진행, 다만 리벌스로 정렬해야 같은 경우 고유번호가 낮은게 앞으로 감.\n 또, 최대 많이 재생된 장르도 해쉬로 만들어두면 쉽게 될 것 같지만\n 해쉬 구조 자체를 정렬해야해서 힘듬.\n 그래서 해쉬가 만들어진 후에는 역으로 int: string 형태로 또 하나의 해쉬를 만들어서 해결했음.\n'''",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def custom_exception_handler(exc, context):
response = exception_handler(exc, context)
if response is not None:
response.data['status_code'] = response.status_code
return response
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from rest_framework.views import exception_handler
def custom_exception_handler(exc, context):
response = exception_handler(exc, context)
if response is not None:
response.data['status_code'] = response.status_code
return response
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
'''
File Name: bubustatus/utils.py
Author: JackeyGao
mail: junqi.gao@shuyun.com
Created Time: 一 9/14 12:51:37 2015
'''
from rest_framework.views import exception_handler
def custom_exception_handler(exc, context):
# Call REST framework's default exception handler first,
# to get the standard error response.
response = exception_handler(exc, context)
# Now add the HTTP status code to the response.
if response is not None:
response.data['status_code'] = response.status_code
return response
|
flexible
|
{
"blob_id": "4e6e4917aee2385fe118d6e58c359a4c9fc50943",
"index": 8617,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef custom_exception_handler(exc, context):\n response = exception_handler(exc, context)\n if response is not None:\n response.data['status_code'] = response.status_code\n return response\n",
"step-3": "<mask token>\nfrom rest_framework.views import exception_handler\n\n\ndef custom_exception_handler(exc, context):\n response = exception_handler(exc, context)\n if response is not None:\n response.data['status_code'] = response.status_code\n return response\n",
"step-4": "# -*- coding: utf-8 -*-\n'''\nFile Name: bubustatus/utils.py\nAuthor: JackeyGao\nmail: junqi.gao@shuyun.com\nCreated Time: 一 9/14 12:51:37 2015\n'''\nfrom rest_framework.views import exception_handler\n\ndef custom_exception_handler(exc, context):\n # Call REST framework's default exception handler first,\n # to get the standard error response.\n response = exception_handler(exc, context)\n\n # Now add the HTTP status code to the response.\n if response is not None:\n response.data['status_code'] = response.status_code\n\n return response\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Hour(object):
def __init__(self):
self.dayOfTheWeek = fake.day_of_week()
self.openingTime = str(random.randint(1, 12)) + 'AM'
self.closingTime = str(random.randint(1, 12)) + 'PM'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
fake.add_provider(date_time)
class Hour(object):
def __init__(self):
self.dayOfTheWeek = fake.day_of_week()
self.openingTime = str(random.randint(1, 12)) + 'AM'
self.closingTime = str(random.randint(1, 12)) + 'PM'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
fake = Faker()
<|reserved_special_token_0|>
fake.add_provider(date_time)
class Hour(object):
def __init__(self):
self.dayOfTheWeek = fake.day_of_week()
self.openingTime = str(random.randint(1, 12)) + 'AM'
self.closingTime = str(random.randint(1, 12)) + 'PM'
<|reserved_special_token_1|>
import json
from faker import Faker
import random
fake = Faker()
from faker.providers import date_time
fake.add_provider(date_time)
class Hour(object):
def __init__(self):
self.dayOfTheWeek = fake.day_of_week()
self.openingTime = str(random.randint(1, 12)) + 'AM'
self.closingTime = str(random.randint(1, 12)) + 'PM'
|
flexible
|
{
"blob_id": "e3386b01bb0bdc7064a2e3e9f3edce8a3231721b",
"index": 3664,
"step-1": "<mask token>\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-2": "<mask token>\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-3": "<mask token>\nfake = Faker()\n<mask token>\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-4": "import json\nfrom faker import Faker\nimport random\nfake = Faker()\nfrom faker.providers import date_time\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import pickle
class myPickle:
def make(self, obj,fileName):
print("myPickle make file",fileName)
pickle.dump( obj, open(fileName,'wb') )
print(" DONE")
def load(self, fileName):
print("myPickle load file",fileName)
tr = pickle.load( open(fileName,'rb') )
print(" DONE")
return tr
|
normal
|
{
"blob_id": "e50feccd583d7e33877d5fcc377a1d79dc247d3a",
"index": 3117,
"step-1": "<mask token>\n\n\nclass myPickle:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n\n def load(self, fileName):\n print('myPickle load file', fileName)\n tr = pickle.load(open(fileName, 'rb'))\n print(' DONE')\n return tr\n",
"step-4": "import pickle\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n\n def load(self, fileName):\n print('myPickle load file', fileName)\n tr = pickle.load(open(fileName, 'rb'))\n print(' DONE')\n return tr\n",
"step-5": "\nimport pickle\n\nclass myPickle:\n \n def make(self, obj,fileName):\n print(\"myPickle make file\",fileName)\n pickle.dump( obj, open(fileName,'wb') )\n print(\" DONE\")\n \n def load(self, fileName):\n print(\"myPickle load file\",fileName)\n tr = pickle.load( open(fileName,'rb') )\n print(\" DONE\")\n return tr\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def Msort(left, right):
if left + 1 < right:
mid = int((left + right) / 2)
Msort(left, mid)
Msort(mid, right)
merge(left, mid, right)
def main():
global ans
global A
n = int(input())
A = list(map(int, input().split()))
Msort(0, n)
print(' '.join(list(map(str, A))))
print(ans)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(left, mid, right):
global A
global ans
n1 = mid - left
n2 = right - mid
l = []
r = []
for i in range(n1):
l += [A[left + i]]
for i in range(n2):
r += [A[mid + i]]
l += [10 ** 18]
r += [10 ** 18]
i = 0
j = 0
ans += right - left
for k in range(left, right):
if l[i] <= r[j]:
A[k] = l[i]
i += 1
else:
A[k] = r[j]
j += 1
def Msort(left, right):
if left + 1 < right:
mid = int((left + right) / 2)
Msort(left, mid)
Msort(mid, right)
merge(left, mid, right)
def main():
global ans
global A
n = int(input())
A = list(map(int, input().split()))
Msort(0, n)
print(' '.join(list(map(str, A))))
print(ans)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(left, mid, right):
global A
global ans
n1 = mid - left
n2 = right - mid
l = []
r = []
for i in range(n1):
l += [A[left + i]]
for i in range(n2):
r += [A[mid + i]]
l += [10 ** 18]
r += [10 ** 18]
i = 0
j = 0
ans += right - left
for k in range(left, right):
if l[i] <= r[j]:
A[k] = l[i]
i += 1
else:
A[k] = r[j]
j += 1
def Msort(left, right):
if left + 1 < right:
mid = int((left + right) / 2)
Msort(left, mid)
Msort(mid, right)
merge(left, mid, right)
def main():
global ans
global A
n = int(input())
A = list(map(int, input().split()))
Msort(0, n)
print(' '.join(list(map(str, A))))
print(ans)
main()
<|reserved_special_token_1|>
A = []
ans = 0
def merge(left, mid, right):
global A
global ans
n1 = mid - left
n2 = right - mid
l = []
r = []
for i in range(n1):
l += [A[left + i]]
for i in range(n2):
r += [A[mid + i]]
l += [10 ** 18]
r += [10 ** 18]
i = 0
j = 0
ans += right - left
for k in range(left, right):
if l[i] <= r[j]:
A[k] = l[i]
i += 1
else:
A[k] = r[j]
j += 1
def Msort(left, right):
if left + 1 < right:
mid = int((left + right) / 2)
Msort(left, mid)
Msort(mid, right)
merge(left, mid, right)
def main():
global ans
global A
n = int(input())
A = list(map(int, input().split()))
Msort(0, n)
print(' '.join(list(map(str, A))))
print(ans)
main()
<|reserved_special_token_1|>
A = []
ans = 0
def merge(left, mid, right):
global A
global ans
n1 = mid - left
n2 = right - mid
l = []
r = []
for i in range(n1):
l += [A[left + i]]
for i in range(n2):
r += [A[mid + i]]
l += [10**18]
r += [10**18]
i = 0
j = 0
ans += right - left
for k in range(left, right):
if l[i] <= r[j]:
A[k] = l[i]
i += 1
else:
A[k] = r[j]
j += 1
def Msort(left, right):
if left + 1 < right:
mid = int((left + right)/2)
Msort(left, mid)
Msort(mid,right)
merge(left,mid,right)
def main():
global ans
global A
n = int(input())
A = list(map(int,input().split()))
Msort(0,n)
print(" ".join(list(map(str,A))))
print(ans)
main()
|
flexible
|
{
"blob_id": "dc81ab808720c3a2c76174264c9be9bcdd99c292",
"index": 1265,
"step-1": "<mask token>\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\nmain()\n",
"step-4": "A = []\nans = 0\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\nmain()\n",
"step-5": "A = []\nans = 0\n\ndef merge(left, mid, right):\n\tglobal A\n\tglobal ans\n\tn1 = mid - left\n\tn2 = right - mid\n\tl = []\n\tr = []\n\tfor i in range(n1):\n\t\tl += [A[left + i]]\n\tfor i in range(n2):\n\t\tr += [A[mid + i]]\n\tl += [10**18]\n\tr += [10**18]\n\ti = 0\n\tj = 0\n\tans += right - left\n\tfor k in range(left, right):\n\t\tif l[i] <= r[j]:\n\t\t\tA[k] = l[i]\n\t\t\ti += 1\n\t\telse:\n\t\t\tA[k] = r[j]\n\t\t\tj += 1\n\n\ndef Msort(left, right):\n\tif left + 1 < right:\n\t\tmid = int((left + right)/2)\n\t\tMsort(left, mid)\n\t\tMsort(mid,right)\n\t\tmerge(left,mid,right)\n\ndef main():\n\tglobal ans\n\tglobal A\n\tn = int(input())\n\tA = list(map(int,input().split()))\n\tMsort(0,n)\n\tprint(\" \".join(list(map(str,A))))\n\tprint(ans)\n\nmain()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""4. Начните работу над проектом «Склад оргтехники». Создайте класс, описывающий склад. А также класс «Оргтехника»,
который будет базовым для классов-наследников. Эти классы — конкретные типы оргтехники (принтер, сканер, ксерокс).
В базовом классе определить параметры, общие для приведенных типов. В классах-наследниках реализовать параметры,
уникальные для каждого типа оргтехники.
5. Продолжить работу над первым заданием. Разработать методы, отвечающие за приём оргтехники на склад и передачу в
определенное подразделение компании. Для хранения данных о наименовании и количестве единиц оргтехники, а также других
данных, можно использовать любую подходящую структуру, например словарь.
6. Продолжить работу над вторым заданием. Реализуйте механизм валидации вводимых пользователем данных. Например, для
указания количества принтеров, отправленных на склад, нельзя использовать строковый тип данных.
Подсказка: постарайтесь по возможности реализовать в проекте «Склад оргтехники» максимум возможностей, изученных на
уроках по ООП.
"""
class Equipment():
def __init__(self, c_name, model, sn):
self.c_name = c_name # название фирмы
self.model = model # модель устройства
self.sn = sn
self.holder = None # местонахождение
def _move(self, holder):
self.holder = holder
def add(self, qnt):
pass
class Whouse:
def __init__(self, max_volume):
self.max_volume = max_volume
self.total = 0
self.storage = {'printers': set()}
self.add_mapper = {Printer: 'printers'}
def get_tech_to_whouse(self, equip: Equipment):
if self.total == self.max_volume:
raise OverflowError('Склад заполнен!')
self.storage[self.add_mapper[type(equip)]].add(equip)
print(type(equip))
equip._move('whouse')
self.total += 1
def move_holder(self, tech_type, holder):
print(self.storage[tech_type] )
tech_to_holder = self.storage[tech_type].pop()
tech_to_holder._move(holder)
self.total -= 1
def __call__(self, *args, **kwargs):
self.get_tech_to_whouse(*args, **kwargs)
class Printer(Equipment):
def __init__(self, c_name, model, sn, ptype, color):
super().__init__(c_name, model, sn)
self.ptype = ptype
self.color = color
def add(self):
return f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} ' \
f'Color: {self.color} Holder: {self.holder}'
def __call__(self, *args, **kwargs):
self.add()
def __str__(self):
return f'Company: {self.c_name}\nModel: {self.model}\ns/n {self.sn}\nPaper type: {self.ptype}\n' \
f'Color: {self.color}\nHolder: {self.holder}'
printer1 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')
printer2 = Printer('hp', 'lj 1100', '1212224', 'A4', 'BW')
printer3 = Printer('hp', 'lj 1100', '1212225', 'A4', 'BW')
printer4 = Printer('hp', 'lj 1100', '1212226', 'A4', 'BW')
printer5 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')
"""Почему set() не отрабатывает? 1 и 5 одинаковые"""
warehouse = Whouse(5)
print(warehouse.total)
warehouse.get_tech_to_whouse(printer1)
warehouse.get_tech_to_whouse(printer2)
warehouse.get_tech_to_whouse(printer3)
warehouse.get_tech_to_whouse(printer4)
warehouse.get_tech_to_whouse(printer5)
warehouse.move_holder('printers', 'IT')
"""как в данном примере переместить printer3, а не последний созданный?"""
print(warehouse.total)
print(printer1)
print(printer2)
print(printer3)
print(printer4)
print(printer5)
|
normal
|
{
"blob_id": "03bc377bef1de7d512b7982a09c255af1d82fb7d",
"index": 3905,
"step-1": "<mask token>\n\n\nclass Whouse:\n <mask token>\n\n def get_tech_to_whouse(self, equip: Equipment):\n if self.total == self.max_volume:\n raise OverflowError('Склад заполнен!')\n self.storage[self.add_mapper[type(equip)]].add(equip)\n print(type(equip))\n equip._move('whouse')\n self.total += 1\n\n def move_holder(self, tech_type, holder):\n print(self.storage[tech_type])\n tech_to_holder = self.storage[tech_type].pop()\n tech_to_holder._move(holder)\n self.total -= 1\n\n def __call__(self, *args, **kwargs):\n self.get_tech_to_whouse(*args, **kwargs)\n\n\nclass Printer(Equipment):\n\n def __init__(self, c_name, model, sn, ptype, color):\n super().__init__(c_name, model, sn)\n self.ptype = ptype\n self.color = color\n\n def add(self):\n return (\n f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} Color: {self.color} Holder: {self.holder}'\n )\n\n def __call__(self, *args, **kwargs):\n self.add()\n\n def __str__(self):\n return f\"\"\"Company: {self.c_name}\nModel: {self.model}\ns/n {self.sn}\nPaper type: {self.ptype}\nColor: {self.color}\nHolder: {self.holder}\"\"\"\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Equipment:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Whouse:\n\n def __init__(self, max_volume):\n self.max_volume = max_volume\n self.total = 0\n self.storage = {'printers': set()}\n self.add_mapper = {Printer: 'printers'}\n\n def get_tech_to_whouse(self, equip: Equipment):\n if self.total == self.max_volume:\n raise OverflowError('Склад заполнен!')\n self.storage[self.add_mapper[type(equip)]].add(equip)\n print(type(equip))\n equip._move('whouse')\n self.total += 1\n\n def move_holder(self, tech_type, holder):\n print(self.storage[tech_type])\n tech_to_holder = self.storage[tech_type].pop()\n tech_to_holder._move(holder)\n self.total -= 1\n\n def __call__(self, *args, **kwargs):\n self.get_tech_to_whouse(*args, **kwargs)\n\n\nclass Printer(Equipment):\n\n def __init__(self, c_name, model, sn, ptype, color):\n super().__init__(c_name, model, sn)\n self.ptype = ptype\n self.color = color\n\n def add(self):\n return (\n f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} Color: {self.color} Holder: {self.holder}'\n )\n\n def __call__(self, *args, **kwargs):\n self.add()\n\n def __str__(self):\n return f\"\"\"Company: {self.c_name}\nModel: {self.model}\ns/n {self.sn}\nPaper type: {self.ptype}\nColor: {self.color}\nHolder: {self.holder}\"\"\"\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Equipment:\n\n def __init__(self, c_name, model, sn):\n self.c_name = c_name\n self.model = model\n self.sn = sn\n self.holder = None\n <mask token>\n <mask token>\n\n\nclass Whouse:\n\n def __init__(self, max_volume):\n self.max_volume = max_volume\n self.total = 0\n self.storage = {'printers': set()}\n self.add_mapper = {Printer: 'printers'}\n\n def get_tech_to_whouse(self, equip: Equipment):\n if self.total == self.max_volume:\n raise OverflowError('Склад заполнен!')\n self.storage[self.add_mapper[type(equip)]].add(equip)\n print(type(equip))\n equip._move('whouse')\n self.total += 1\n\n def move_holder(self, tech_type, holder):\n print(self.storage[tech_type])\n tech_to_holder = self.storage[tech_type].pop()\n tech_to_holder._move(holder)\n self.total -= 1\n\n def __call__(self, *args, **kwargs):\n self.get_tech_to_whouse(*args, **kwargs)\n\n\nclass Printer(Equipment):\n\n def __init__(self, c_name, model, sn, ptype, color):\n super().__init__(c_name, model, sn)\n self.ptype = ptype\n self.color = color\n\n def add(self):\n return (\n f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} Color: {self.color} Holder: {self.holder}'\n )\n\n def __call__(self, *args, **kwargs):\n self.add()\n\n def __str__(self):\n return f\"\"\"Company: {self.c_name}\nModel: {self.model}\ns/n {self.sn}\nPaper type: {self.ptype}\nColor: {self.color}\nHolder: {self.holder}\"\"\"\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Equipment:\n\n def __init__(self, c_name, model, sn):\n self.c_name = c_name\n self.model = model\n self.sn = sn\n self.holder = None\n\n def _move(self, holder):\n self.holder = holder\n\n def add(self, qnt):\n pass\n\n\nclass Whouse:\n\n def __init__(self, max_volume):\n self.max_volume = max_volume\n self.total = 0\n self.storage = {'printers': set()}\n self.add_mapper = {Printer: 'printers'}\n\n def get_tech_to_whouse(self, equip: Equipment):\n if self.total == self.max_volume:\n raise OverflowError('Склад заполнен!')\n self.storage[self.add_mapper[type(equip)]].add(equip)\n print(type(equip))\n equip._move('whouse')\n self.total += 1\n\n def move_holder(self, tech_type, holder):\n print(self.storage[tech_type])\n tech_to_holder = self.storage[tech_type].pop()\n tech_to_holder._move(holder)\n self.total -= 1\n\n def __call__(self, *args, **kwargs):\n self.get_tech_to_whouse(*args, **kwargs)\n\n\nclass Printer(Equipment):\n\n def __init__(self, c_name, model, sn, ptype, color):\n super().__init__(c_name, model, sn)\n self.ptype = ptype\n self.color = color\n\n def add(self):\n return (\n f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} Color: {self.color} Holder: {self.holder}'\n )\n\n def __call__(self, *args, **kwargs):\n self.add()\n\n def __str__(self):\n return f\"\"\"Company: {self.c_name}\nModel: {self.model}\ns/n {self.sn}\nPaper type: {self.ptype}\nColor: {self.color}\nHolder: {self.holder}\"\"\"\n\n\nprinter1 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')\nprinter2 = Printer('hp', 'lj 1100', '1212224', 'A4', 'BW')\nprinter3 = Printer('hp', 'lj 1100', '1212225', 'A4', 'BW')\nprinter4 = Printer('hp', 'lj 1100', '1212226', 'A4', 'BW')\nprinter5 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')\n<mask token>\nwarehouse = 
Whouse(5)\nprint(warehouse.total)\nwarehouse.get_tech_to_whouse(printer1)\nwarehouse.get_tech_to_whouse(printer2)\nwarehouse.get_tech_to_whouse(printer3)\nwarehouse.get_tech_to_whouse(printer4)\nwarehouse.get_tech_to_whouse(printer5)\nwarehouse.move_holder('printers', 'IT')\n<mask token>\nprint(warehouse.total)\nprint(printer1)\nprint(printer2)\nprint(printer3)\nprint(printer4)\nprint(printer5)\n",
"step-5": "\"\"\"4. Начните работу над проектом «Склад оргтехники». Создайте класс, описывающий склад. А также класс «Оргтехника»,\nкоторый будет базовым для классов-наследников. Эти классы — конкретные типы оргтехники (принтер, сканер, ксерокс).\nВ базовом классе определить параметры, общие для приведенных типов. В классах-наследниках реализовать параметры,\nуникальные для каждого типа оргтехники.\n\n5. Продолжить работу над первым заданием. Разработать методы, отвечающие за приём оргтехники на склад и передачу в\nопределенное подразделение компании. Для хранения данных о наименовании и количестве единиц оргтехники, а также других\nданных, можно использовать любую подходящую структуру, например словарь.\n\n6. Продолжить работу над вторым заданием. Реализуйте механизм валидации вводимых пользователем данных. Например, для\nуказания количества принтеров, отправленных на склад, нельзя использовать строковый тип данных.\n\nПодсказка: постарайтесь по возможности реализовать в проекте «Склад оргтехники» максимум возможностей, изученных на\nуроках по ООП.\n\"\"\"\n\nclass Equipment():\n\n def __init__(self, c_name, model, sn):\n self.c_name = c_name # название фирмы\n self.model = model # модель устройства\n self.sn = sn\n self.holder = None # местонахождение\n\n def _move(self, holder):\n self.holder = holder\n\n def add(self, qnt):\n pass\n\nclass Whouse:\n def __init__(self, max_volume):\n self.max_volume = max_volume\n self.total = 0\n self.storage = {'printers': set()}\n self.add_mapper = {Printer: 'printers'}\n\n\n def get_tech_to_whouse(self, equip: Equipment):\n if self.total == self.max_volume:\n raise OverflowError('Склад заполнен!')\n self.storage[self.add_mapper[type(equip)]].add(equip)\n print(type(equip))\n equip._move('whouse')\n self.total += 1\n\n def move_holder(self, tech_type, holder):\n print(self.storage[tech_type] )\n tech_to_holder = self.storage[tech_type].pop()\n tech_to_holder._move(holder)\n self.total -= 1\n\n def __call__(self, *args, 
**kwargs):\n self.get_tech_to_whouse(*args, **kwargs)\n\nclass Printer(Equipment):\n def __init__(self, c_name, model, sn, ptype, color):\n super().__init__(c_name, model, sn)\n self.ptype = ptype\n self.color = color\n\n def add(self):\n return f'Company: {self.c_name} Model: {self.model} s/n {self.sn} Paper type: {self.ptype} ' \\\n f'Color: {self.color} Holder: {self.holder}'\n\n def __call__(self, *args, **kwargs):\n self.add()\n\n def __str__(self):\n return f'Company: {self.c_name}\\nModel: {self.model}\\ns/n {self.sn}\\nPaper type: {self.ptype}\\n' \\\n f'Color: {self.color}\\nHolder: {self.holder}'\n\nprinter1 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')\nprinter2 = Printer('hp', 'lj 1100', '1212224', 'A4', 'BW')\nprinter3 = Printer('hp', 'lj 1100', '1212225', 'A4', 'BW')\nprinter4 = Printer('hp', 'lj 1100', '1212226', 'A4', 'BW')\nprinter5 = Printer('hp', 'lj 1100', '1212223', 'A4', 'BW')\n\"\"\"Почему set() не отрабатывает? 1 и 5 одинаковые\"\"\"\n\nwarehouse = Whouse(5)\nprint(warehouse.total)\nwarehouse.get_tech_to_whouse(printer1)\nwarehouse.get_tech_to_whouse(printer2)\nwarehouse.get_tech_to_whouse(printer3)\nwarehouse.get_tech_to_whouse(printer4)\nwarehouse.get_tech_to_whouse(printer5)\nwarehouse.move_holder('printers', 'IT')\n\"\"\"как в данном примере переместить printer3, а не последний созданный?\"\"\"\n\nprint(warehouse.total)\nprint(printer1)\nprint(printer2)\nprint(printer3)\nprint(printer4)\nprint(printer5)\n\n",
"step-ids": [
9,
11,
12,
16,
17
]
}
|
[
9,
11,
12,
16,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(y)
plt.figure(figsize=(10, 1))
plt.bar(range(len(y)), y)
plt.savefig('test.png')
plt.clf()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
y = np.array([2, 2, 3, 4, 5])
print(y)
plt.figure(figsize=(10, 1))
plt.bar(range(len(y)), y)
plt.savefig('test.png')
plt.clf()
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
import numpy as np
y = np.array([2, 2, 3, 4, 5])
print(y)
plt.figure(figsize=(10, 1))
plt.bar(range(len(y)), y)
plt.savefig('test.png')
plt.clf()
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
import numpy as np
# 描画用サンプルデータ
#x= np.array([0,1,2,3,4])
y = np.array([2, 2, 3, 4, 5])
print(y)
#print(range(y))
plt.figure(figsize=(10,1))
plt.bar(range(len(y)), y)
plt.savefig('test.png')
plt.clf()
|
flexible
|
{
"blob_id": "2f714ed54a19ec26d7ecb1979e79366721b3d0fe",
"index": 6682,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(y)\nplt.figure(figsize=(10, 1))\nplt.bar(range(len(y)), y)\nplt.savefig('test.png')\nplt.clf()\n",
"step-3": "<mask token>\ny = np.array([2, 2, 3, 4, 5])\nprint(y)\nplt.figure(figsize=(10, 1))\nplt.bar(range(len(y)), y)\nplt.savefig('test.png')\nplt.clf()\n",
"step-4": "import matplotlib.pyplot as plt\nimport numpy as np\ny = np.array([2, 2, 3, 4, 5])\nprint(y)\nplt.figure(figsize=(10, 1))\nplt.bar(range(len(y)), y)\nplt.savefig('test.png')\nplt.clf()\n",
"step-5": "import matplotlib.pyplot as plt\nimport numpy as np\n# 描画用サンプルデータ\n#x= np.array([0,1,2,3,4])\ny = np.array([2, 2, 3, 4, 5])\nprint(y)\n#print(range(y))\n\nplt.figure(figsize=(10,1))\nplt.bar(range(len(y)), y)\nplt.savefig('test.png')\nplt.clf()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 29 15:10:34 2018
@author: nit_n
"""
from gaussxw import gaussxwab
from numpy import linspace, arange
from pylab import plot, show, xlabel, ylabel
from math import pi, exp, sqrt
k = 1.38065e-23 # joules/kelvin
h = 6.626e-34 # joules
lam1 = 390e-9 # meters
lam2 = 750e-9 # meters
c = 3e8 # meters/second
T = linspace(300, 10000, 7000)
part = str(input("what part would you like to do? (a, b, or c) "))
def n(T):
k = 1.38065e-23 # joules/kelvin
c = 3e8 # meters/second
N = 100
a = h*c/(lam2*k*T)
b = h*c/(lam1*k*T)
x,w = gaussxwab(N,a,b)
s = 0.0
for k in range(N):
s += w[k]*(x[k]**3/(exp(x[k])-1))
s = s*(15/(pi*pi*pi*pi))
return s
if part in ['a'] or ['b']:
lol = linspace(0, 7000, 7000)
for i in range(len(T)):
print("i =",i)
lol = n(T[i])
plot(T[i], lol, 'k-')
show()
if part in ['b']:
z = (1 + sqrt(5))/2
accuracy = 1e-6
x1 = 1/10
x4 = 1*10
x2 = x4 - (x4 - x1)/z
x3 = x1 + (x4 - x1)/z
f1 = n(x1)
f2 = n(x2)
f3 = n(x3)
f4 = n(x4)
while x4-x1>accuracy:
if f2<f3:
x4,f4 = x3,f3
x3,f3 = x2,f2
x2 = x4 - (x4-x1)/z
f2 = n(x2)
else:
x1,f1 = x2,f2
x2,f2 = x3,f3
x3 = x1 - (x4-x1)/z
f3 = n(x3)
print("minimum falls at", 0.5*(x1+x4),"K")
|
normal
|
{
"blob_id": "9b88a3976d522bdfd38502e29eefc1f1a0c29ed2",
"index": 2884,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-3": "<mask token>\nk = 1.38065e-23\nh = 6.626e-34\nlam1 = 3.9e-07\nlam2 = 7.5e-07\nc = 300000000.0\nT = linspace(300, 10000, 7000)\npart = str(input('what part would you like to do? (a, b, or c) '))\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-4": "<mask token>\nfrom gaussxw import gaussxwab\nfrom numpy import linspace, arange\nfrom pylab import plot, show, xlabel, ylabel\nfrom math import pi, exp, sqrt\nk = 1.38065e-23\nh = 6.626e-34\nlam1 = 3.9e-07\nlam2 = 7.5e-07\nc = 300000000.0\nT = linspace(300, 10000, 7000)\npart = str(input('what part would you like to do? (a, b, or c) '))\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Apr 29 15:10:34 2018\r\n\r\n@author: nit_n\r\n\"\"\"\r\n\r\nfrom gaussxw import gaussxwab\r\nfrom numpy import linspace, arange\r\nfrom pylab import plot, show, xlabel, ylabel\r\nfrom math import pi, exp, sqrt\r\n\r\nk = 1.38065e-23 # joules/kelvin\r\nh = 6.626e-34 # joules\r\nlam1 = 390e-9 # meters\r\nlam2 = 750e-9 # meters\r\nc = 3e8 # meters/second\r\n\r\nT = linspace(300, 10000, 7000)\r\n\r\npart = str(input(\"what part would you like to do? (a, b, or c) \"))\r\n\r\ndef n(T):\r\n \r\n k = 1.38065e-23 # joules/kelvin\r\n c = 3e8 # meters/second\r\n \r\n N = 100\r\n a = h*c/(lam2*k*T)\r\n b = h*c/(lam1*k*T)\r\n x,w = gaussxwab(N,a,b)\r\n s = 0.0\r\n \r\n for k in range(N):\r\n s += w[k]*(x[k]**3/(exp(x[k])-1))\r\n \r\n s = s*(15/(pi*pi*pi*pi))\r\n return s\r\n\r\nif part in ['a'] or ['b']:\r\n lol = linspace(0, 7000, 7000)\r\n for i in range(len(T)):\r\n print(\"i =\",i)\r\n lol = n(T[i])\r\n plot(T[i], lol, 'k-')\r\n show()\r\n\r\nif part in ['b']:\r\n z = (1 + sqrt(5))/2\r\n accuracy = 1e-6\r\n x1 = 1/10\r\n x4 = 1*10\r\n x2 = x4 - (x4 - x1)/z\r\n x3 = x1 + (x4 - x1)/z\r\n \r\n f1 = n(x1)\r\n f2 = n(x2)\r\n f3 = n(x3)\r\n f4 = n(x4)\r\n \r\n while x4-x1>accuracy:\r\n if f2<f3:\r\n x4,f4 = x3,f3\r\n x3,f3 = x2,f2\r\n x2 = x4 - (x4-x1)/z\r\n f2 = n(x2)\r\n else:\r\n x1,f1 = x2,f2\r\n x2,f2 = x3,f3\r\n x3 = x1 - (x4-x1)/z\r\n f3 = n(x3)\r\n\r\n print(\"minimum falls at\", 0.5*(x1+x4),\"K\") \r\n \r\n \r\n ",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
print('-'*100)
print('BIENVENIDOS A TIENDA ELEGANCIA')
print('-'*100)
prendas = ('Remeras', 'Camisas', 'Pantalones', 'Faldas', 'Vestidos', 'Abrigos', 'Calzado')
precioSinPromo = 0
superPuntos = 0
#ARTICULO 1
tipoPrenda1 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))
prendaseleccionada1 = prendas[tipoPrenda1]
print(prendaseleccionada1)
precio1 = float(input('Ingrese precio: $'))
precioinicial1 = precio1
precioSinPromo = precioSinPromo + precio1
print("La prenda: ", tipoPrenda1,"participa de del plan SuperPuntos? s/n")
valor1 = input()
v1 = None
if(valor1 == "s"):
v1 = 's'
valor1 = precio1
superPuntos = superPuntos + precio1
else:
if(valor1 == "n"):
v1 = "n"
valor1 = 0
# ARTICULO 2
tipoPrenda2 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))
prendaseleccionada2 = prendas[tipoPrenda2]
print(prendaseleccionada2)
precio2 = float(input('Ingrese precio: $'))
precioinicial2 = precio2
precioSinPromo = precioSinPromo + precio2
print("La prenda: ", tipoPrenda2, "participa de del plan SuperPuntos? s/n")
valor2 = input()
v2 = None
if (valor2 == "s"):
v2 = "s"
valor2 = precio2
superPuntos = superPuntos + precio2
else:
if (valor2 == "n"):
v2 = "n"
valor2 = 0
# ARTICULO 3
tipoPrenda3 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))
prendaseleccionada3 = prendas[tipoPrenda3]
print(prendaseleccionada3)
precio3 = float(input('Ingrese precio: $'))
precioinicial3 = precio3
precioSinPromo = precioSinPromo + precio3
print("La prenda: ", tipoPrenda3, "participa de del plan SuperPuntos? s/n")
valor3 = input()
v3 = None
if (valor3 == "s"):
v3 = "s"
valor3 = precio3
superPuntos = superPuntos + precio3
else:
if (valor3 == "n"):
v3 = "n"
valor3 = 0
#PROMO 3X2
if tipoPrenda1 == tipoPrenda2 == tipoPrenda3:
if precio1 < precio2 and precio1 < precio3:
precio1 = 0
else:
if precio2 < precio3:
precio2 = 0
else:
precio3 = 0
#PROMO 50%
if tipoPrenda1 == tipoPrenda2 and tipoPrenda1 != tipoPrenda3:
if precio1 > precio2:
precio1 = precio1 / 2
else:
precio2 = precio2 / 2
if tipoPrenda1 == tipoPrenda3 and tipoPrenda1 != tipoPrenda2:
if precio1 > precio3:
precio1 = precio1 / 2
else:
precio3 = precio3 / 2
if tipoPrenda2 == tipoPrenda3 and tipoPrenda2 != tipoPrenda1:
if precio2 > precio3:
precio2 = precio2 / 2
else:
precio3 = precio3 / 2
precioTotal = precio1 + precio2 + precio3
ahorro = precioSinPromo - precioTotal
#FORMA DE PAGO
formaDePago = int(input("Ingrese la forma de pago:/ 1=Contado/ 2=Tarjeta"))
montoAPagar = 0
if formaDePago == 1:
formaDePago = "Contado (%10 de Descuento)"
montoAPagar=precioTotal/100*90
else:
if(formaDePago == 2):
cuotas=int(input("ingrese en cuantas cuotas desea pagar:"))
if(cuotas <= 3):
formaDePago="Tarjeta (%2 de Recarga) cantidad de cuotas:", cuotas
montoAPagar=precioTotal/100*102
else:
if(cuotas > 3):
formaDePago="Tarjeta (%5 de Recarga) cantidad de cuotas:", cuotas
montoAPagar=precioTotal/100*105
else:
if(cuotas <= 0):
formaDePago="Contado (%10 de Descuento)"
montoAPagar=precioTotal/100*90
if valor1 > 0 and valor2 > 0 and valor3 > 0:
superPuntos = superPuntos * 2
print("----------------------------------------------------")
print("Tienda Elegancia")
print("Tipo, Precio, SuperPuntos")
print(prendaseleccionada1 , precioinicial1, v1)
print(prendaseleccionada2 , precioinicial2 , v2)
print(prendaseleccionada3 , precioinicial3 , v3)
print("Total sin promo: ", precioSinPromo)
print("Ahorro: ", ahorro)
print("Total Con Promo: ", precioTotal)
print("Forma de Pago: ", formaDePago)
print("Monto a Pagar: ", montoAPagar)
print("Usted obtiene: ", superPuntos, "SuperPuntos")
print("----------------------------------------------------")
|
normal
|
{
"blob_id": "333d237dd4a203fcfde3668901d725f16fbc402e",
"index": 1684,
"step-1": "<mask token>\n",
"step-2": "print('-' * 100)\nprint('BIENVENIDOS A TIENDA ELEGANCIA')\nprint('-' * 100)\n<mask token>\nprint(prendaseleccionada1)\n<mask token>\nprint('La prenda: ', tipoPrenda1, 'participa de del plan SuperPuntos? s/n')\n<mask token>\nif valor1 == 's':\n v1 = 's'\n valor1 = precio1\n superPuntos = superPuntos + precio1\nelif valor1 == 'n':\n v1 = 'n'\n valor1 = 0\n<mask token>\nprint(prendaseleccionada2)\n<mask token>\nprint('La prenda: ', tipoPrenda2, 'participa de del plan SuperPuntos? s/n')\n<mask token>\nif valor2 == 's':\n v2 = 's'\n valor2 = precio2\n superPuntos = superPuntos + precio2\nelif valor2 == 'n':\n v2 = 'n'\n valor2 = 0\n<mask token>\nprint(prendaseleccionada3)\n<mask token>\nprint('La prenda: ', tipoPrenda3, 'participa de del plan SuperPuntos? s/n')\n<mask token>\nif valor3 == 's':\n v3 = 's'\n valor3 = precio3\n superPuntos = superPuntos + precio3\nelif valor3 == 'n':\n v3 = 'n'\n valor3 = 0\nif tipoPrenda1 == tipoPrenda2 == tipoPrenda3:\n if precio1 < precio2 and precio1 < precio3:\n precio1 = 0\n elif precio2 < precio3:\n precio2 = 0\n else:\n precio3 = 0\nif tipoPrenda1 == tipoPrenda2 and tipoPrenda1 != tipoPrenda3:\n if precio1 > precio2:\n precio1 = precio1 / 2\n else:\n precio2 = precio2 / 2\nif tipoPrenda1 == tipoPrenda3 and tipoPrenda1 != tipoPrenda2:\n if precio1 > precio3:\n precio1 = precio1 / 2\n else:\n precio3 = precio3 / 2\nif tipoPrenda2 == tipoPrenda3 and tipoPrenda2 != tipoPrenda1:\n if precio2 > precio3:\n precio2 = precio2 / 2\n else:\n precio3 = precio3 / 2\n<mask token>\nif formaDePago == 1:\n formaDePago = 'Contado (%10 de Descuento)'\n montoAPagar = precioTotal / 100 * 90\nelif formaDePago == 2:\n cuotas = int(input('ingrese en cuantas cuotas desea pagar:'))\n if cuotas <= 3:\n formaDePago = 'Tarjeta (%2 de Recarga) cantidad de cuotas:', cuotas\n montoAPagar = precioTotal / 100 * 102\n elif cuotas > 3:\n formaDePago = 'Tarjeta (%5 de Recarga) cantidad de cuotas:', cuotas\n montoAPagar = precioTotal / 100 * 105\n elif 
cuotas <= 0:\n formaDePago = 'Contado (%10 de Descuento)'\n montoAPagar = precioTotal / 100 * 90\nif valor1 > 0 and valor2 > 0 and valor3 > 0:\n superPuntos = superPuntos * 2\nprint('----------------------------------------------------')\nprint('Tienda Elegancia')\nprint('Tipo, Precio, SuperPuntos')\nprint(prendaseleccionada1, precioinicial1, v1)\nprint(prendaseleccionada2, precioinicial2, v2)\nprint(prendaseleccionada3, precioinicial3, v3)\nprint('Total sin promo: ', precioSinPromo)\nprint('Ahorro: ', ahorro)\nprint('Total Con Promo: ', precioTotal)\nprint('Forma de Pago: ', formaDePago)\nprint('Monto a Pagar: ', montoAPagar)\nprint('Usted obtiene: ', superPuntos, 'SuperPuntos')\nprint('----------------------------------------------------')\n",
"step-3": "print('-' * 100)\nprint('BIENVENIDOS A TIENDA ELEGANCIA')\nprint('-' * 100)\nprendas = ('Remeras', 'Camisas', 'Pantalones', 'Faldas', 'Vestidos',\n 'Abrigos', 'Calzado')\nprecioSinPromo = 0\nsuperPuntos = 0\ntipoPrenda1 = int(input(\n 'Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '\n ))\nprendaseleccionada1 = prendas[tipoPrenda1]\nprint(prendaseleccionada1)\nprecio1 = float(input('Ingrese precio: $'))\nprecioinicial1 = precio1\nprecioSinPromo = precioSinPromo + precio1\nprint('La prenda: ', tipoPrenda1, 'participa de del plan SuperPuntos? s/n')\nvalor1 = input()\nv1 = None\nif valor1 == 's':\n v1 = 's'\n valor1 = precio1\n superPuntos = superPuntos + precio1\nelif valor1 == 'n':\n v1 = 'n'\n valor1 = 0\ntipoPrenda2 = int(input(\n 'Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '\n ))\nprendaseleccionada2 = prendas[tipoPrenda2]\nprint(prendaseleccionada2)\nprecio2 = float(input('Ingrese precio: $'))\nprecioinicial2 = precio2\nprecioSinPromo = precioSinPromo + precio2\nprint('La prenda: ', tipoPrenda2, 'participa de del plan SuperPuntos? s/n')\nvalor2 = input()\nv2 = None\nif valor2 == 's':\n v2 = 's'\n valor2 = precio2\n superPuntos = superPuntos + precio2\nelif valor2 == 'n':\n v2 = 'n'\n valor2 = 0\ntipoPrenda3 = int(input(\n 'Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '\n ))\nprendaseleccionada3 = prendas[tipoPrenda3]\nprint(prendaseleccionada3)\nprecio3 = float(input('Ingrese precio: $'))\nprecioinicial3 = precio3\nprecioSinPromo = precioSinPromo + precio3\nprint('La prenda: ', tipoPrenda3, 'participa de del plan SuperPuntos? 
s/n')\nvalor3 = input()\nv3 = None\nif valor3 == 's':\n v3 = 's'\n valor3 = precio3\n superPuntos = superPuntos + precio3\nelif valor3 == 'n':\n v3 = 'n'\n valor3 = 0\nif tipoPrenda1 == tipoPrenda2 == tipoPrenda3:\n if precio1 < precio2 and precio1 < precio3:\n precio1 = 0\n elif precio2 < precio3:\n precio2 = 0\n else:\n precio3 = 0\nif tipoPrenda1 == tipoPrenda2 and tipoPrenda1 != tipoPrenda3:\n if precio1 > precio2:\n precio1 = precio1 / 2\n else:\n precio2 = precio2 / 2\nif tipoPrenda1 == tipoPrenda3 and tipoPrenda1 != tipoPrenda2:\n if precio1 > precio3:\n precio1 = precio1 / 2\n else:\n precio3 = precio3 / 2\nif tipoPrenda2 == tipoPrenda3 and tipoPrenda2 != tipoPrenda1:\n if precio2 > precio3:\n precio2 = precio2 / 2\n else:\n precio3 = precio3 / 2\nprecioTotal = precio1 + precio2 + precio3\nahorro = precioSinPromo - precioTotal\nformaDePago = int(input('Ingrese la forma de pago:/ 1=Contado/ 2=Tarjeta'))\nmontoAPagar = 0\nif formaDePago == 1:\n formaDePago = 'Contado (%10 de Descuento)'\n montoAPagar = precioTotal / 100 * 90\nelif formaDePago == 2:\n cuotas = int(input('ingrese en cuantas cuotas desea pagar:'))\n if cuotas <= 3:\n formaDePago = 'Tarjeta (%2 de Recarga) cantidad de cuotas:', cuotas\n montoAPagar = precioTotal / 100 * 102\n elif cuotas > 3:\n formaDePago = 'Tarjeta (%5 de Recarga) cantidad de cuotas:', cuotas\n montoAPagar = precioTotal / 100 * 105\n elif cuotas <= 0:\n formaDePago = 'Contado (%10 de Descuento)'\n montoAPagar = precioTotal / 100 * 90\nif valor1 > 0 and valor2 > 0 and valor3 > 0:\n superPuntos = superPuntos * 2\nprint('----------------------------------------------------')\nprint('Tienda Elegancia')\nprint('Tipo, Precio, SuperPuntos')\nprint(prendaseleccionada1, precioinicial1, v1)\nprint(prendaseleccionada2, precioinicial2, v2)\nprint(prendaseleccionada3, precioinicial3, v3)\nprint('Total sin promo: ', precioSinPromo)\nprint('Ahorro: ', ahorro)\nprint('Total Con Promo: ', precioTotal)\nprint('Forma de Pago: ', 
formaDePago)\nprint('Monto a Pagar: ', montoAPagar)\nprint('Usted obtiene: ', superPuntos, 'SuperPuntos')\nprint('----------------------------------------------------')\n",
"step-4": "print('-'*100)\nprint('BIENVENIDOS A TIENDA ELEGANCIA')\nprint('-'*100)\n\nprendas = ('Remeras', 'Camisas', 'Pantalones', 'Faldas', 'Vestidos', 'Abrigos', 'Calzado')\n\nprecioSinPromo = 0\nsuperPuntos = 0\n\n#ARTICULO 1\ntipoPrenda1 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))\nprendaseleccionada1 = prendas[tipoPrenda1]\nprint(prendaseleccionada1)\nprecio1 = float(input('Ingrese precio: $'))\nprecioinicial1 = precio1\nprecioSinPromo = precioSinPromo + precio1\n\nprint(\"La prenda: \", tipoPrenda1,\"participa de del plan SuperPuntos? s/n\")\nvalor1 = input()\nv1 = None\nif(valor1 == \"s\"):\n v1 = 's'\n valor1 = precio1\n superPuntos = superPuntos + precio1\nelse:\n if(valor1 == \"n\"):\n v1 = \"n\"\n valor1 = 0\n\n# ARTICULO 2\ntipoPrenda2 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))\nprendaseleccionada2 = prendas[tipoPrenda2]\nprint(prendaseleccionada2)\nprecio2 = float(input('Ingrese precio: $'))\nprecioinicial2 = precio2\nprecioSinPromo = precioSinPromo + precio2\n\nprint(\"La prenda: \", tipoPrenda2, \"participa de del plan SuperPuntos? s/n\")\nvalor2 = input()\nv2 = None\nif (valor2 == \"s\"):\n v2 = \"s\"\n valor2 = precio2\n superPuntos = superPuntos + precio2\nelse:\n if (valor2 == \"n\"):\n v2 = \"n\"\n valor2 = 0\n\n# ARTICULO 3\ntipoPrenda3 = int(input('Ingrese Codigo de la prenda seleccionada: 0=Remeras, 1=Camisas, 2=Pantalones, 3=Faldas, 4=Vestidos, 5=Abrigos, 6=Calzado: '))\nprendaseleccionada3 = prendas[tipoPrenda3]\nprint(prendaseleccionada3)\nprecio3 = float(input('Ingrese precio: $'))\nprecioinicial3 = precio3\nprecioSinPromo = precioSinPromo + precio3\n\nprint(\"La prenda: \", tipoPrenda3, \"participa de del plan SuperPuntos? 
s/n\")\nvalor3 = input()\nv3 = None\nif (valor3 == \"s\"):\n v3 = \"s\"\n valor3 = precio3\n superPuntos = superPuntos + precio3\nelse:\n if (valor3 == \"n\"):\n v3 = \"n\"\n valor3 = 0\n\n#PROMO 3X2\nif tipoPrenda1 == tipoPrenda2 == tipoPrenda3:\n if precio1 < precio2 and precio1 < precio3:\n precio1 = 0\n else:\n if precio2 < precio3:\n precio2 = 0\n else:\n precio3 = 0\n\n#PROMO 50%\nif tipoPrenda1 == tipoPrenda2 and tipoPrenda1 != tipoPrenda3:\n if precio1 > precio2:\n precio1 = precio1 / 2\n else:\n precio2 = precio2 / 2\n\nif tipoPrenda1 == tipoPrenda3 and tipoPrenda1 != tipoPrenda2:\n if precio1 > precio3:\n precio1 = precio1 / 2\n else:\n precio3 = precio3 / 2\n\nif tipoPrenda2 == tipoPrenda3 and tipoPrenda2 != tipoPrenda1:\n if precio2 > precio3:\n precio2 = precio2 / 2\n else:\n precio3 = precio3 / 2\n\nprecioTotal = precio1 + precio2 + precio3\nahorro = precioSinPromo - precioTotal\n\n#FORMA DE PAGO\nformaDePago = int(input(\"Ingrese la forma de pago:/ 1=Contado/ 2=Tarjeta\"))\nmontoAPagar = 0\n\nif formaDePago == 1:\n formaDePago = \"Contado (%10 de Descuento)\"\n montoAPagar=precioTotal/100*90\nelse:\n if(formaDePago == 2):\n cuotas=int(input(\"ingrese en cuantas cuotas desea pagar:\"))\n if(cuotas <= 3):\n formaDePago=\"Tarjeta (%2 de Recarga) cantidad de cuotas:\", cuotas\n montoAPagar=precioTotal/100*102\n else:\n if(cuotas > 3):\n formaDePago=\"Tarjeta (%5 de Recarga) cantidad de cuotas:\", cuotas\n montoAPagar=precioTotal/100*105\n else:\n if(cuotas <= 0):\n formaDePago=\"Contado (%10 de Descuento)\"\n montoAPagar=precioTotal/100*90\n\nif valor1 > 0 and valor2 > 0 and valor3 > 0:\n superPuntos = superPuntos * 2\n\nprint(\"----------------------------------------------------\")\nprint(\"Tienda Elegancia\")\nprint(\"Tipo, Precio, SuperPuntos\")\nprint(prendaseleccionada1 , precioinicial1, v1)\nprint(prendaseleccionada2 , precioinicial2 , v2)\nprint(prendaseleccionada3 , precioinicial3 , v3)\nprint(\"Total sin promo: \", 
precioSinPromo)\nprint(\"Ahorro: \", ahorro)\nprint(\"Total Con Promo: \", precioTotal)\nprint(\"Forma de Pago: \", formaDePago)\nprint(\"Monto a Pagar: \", montoAPagar)\nprint(\"Usted obtiene: \", superPuntos, \"SuperPuntos\")\nprint(\"----------------------------------------------------\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append(jsondb_dir)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
root_dir = os.path.dirname(os.path.dirname(__file__))
jsondb_dir = os.path.join(root_dir, 'jsondb')
sys.path.append(jsondb_dir)
<|reserved_special_token_1|>
import sys
import os.path
root_dir = os.path.dirname(os.path.dirname(__file__))
jsondb_dir = os.path.join(root_dir, 'jsondb')
sys.path.append(jsondb_dir)
|
flexible
|
{
"blob_id": "eeb588a162fa222c0f70eb832a0026d0d8adbe9b",
"index": 6769,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append(jsondb_dir)\n",
"step-3": "<mask token>\nroot_dir = os.path.dirname(os.path.dirname(__file__))\njsondb_dir = os.path.join(root_dir, 'jsondb')\nsys.path.append(jsondb_dir)\n",
"step-4": "import sys\nimport os.path\nroot_dir = os.path.dirname(os.path.dirname(__file__))\njsondb_dir = os.path.join(root_dir, 'jsondb')\nsys.path.append(jsondb_dir)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
<|reserved_special_token_0|>
def show(self):
return self.map
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
<|reserved_special_token_1|>
from ipyleaflet import Map, DrawControl, Marker, Rectangle
from sentinelhub import BBox, CRS
from ipywidgets import widgets as w
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
<|reserved_special_token_1|>
from ipyleaflet import Map, DrawControl, Marker, Rectangle
from sentinelhub import BBox, CRS
from ipywidgets import widgets as w
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {
"shapeOptions": {
"fillColor": "#fabd14",
"color": "#fa6814",
"fillOpacity": 0.2
}
}
#Disable the rest of draw options
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
# self.out = w.Output(layout=w.Layout(width='100%', height='50px', overflow_y='scroll'))
# self.vbox = w.VBox([self.map, self.out])
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(
bounds=((min_y, min_x), (max_y, max_x)),
color="#fa6814",
fill=True,
fill_color="#fabd14",
fill_opacity=0.2,
weight=1
)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84).transform(CRS.POP_WEB)
# self.out.append_display_data((min_x, min_y, max_x, max_y))
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution))
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution))
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
# return self.vbox
|
flexible
|
{
"blob_id": "0545aff80e19e47cb9e5b1941e92ff5cb109f9e6",
"index": 1921,
"step-1": "<mask token>\n\n\nclass BBoxSelector:\n\n def __init__(self, bbox, zoom=8, resolution=10):\n center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2\n self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)\n self.resolution = resolution\n control = DrawControl()\n control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',\n 'color': '#fa6814', 'fillOpacity': 0.2}}\n control.polyline = {}\n control.circle = {}\n control.circlemarker = {}\n control.polygon = {}\n control.edit = False\n control.remove = False\n control.on_draw(self._handle_draw)\n self.map.add_control(control)\n self.bbox = None\n self.size = None\n self.rectangle = None\n self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)\n\n def add_rectangle(self, min_x, min_y, max_x, max_y):\n if self.rectangle:\n self.map.remove_layer(self.rectangle)\n self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),\n color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=\n 0.2, weight=1)\n self.map.add_layer(self.rectangle)\n self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84\n ).transform(CRS.POP_WEB)\n size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)\n )\n size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)\n )\n self.size = size_x, size_y\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass BBoxSelector:\n\n def __init__(self, bbox, zoom=8, resolution=10):\n center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2\n self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)\n self.resolution = resolution\n control = DrawControl()\n control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',\n 'color': '#fa6814', 'fillOpacity': 0.2}}\n control.polyline = {}\n control.circle = {}\n control.circlemarker = {}\n control.polygon = {}\n control.edit = False\n control.remove = False\n control.on_draw(self._handle_draw)\n self.map.add_control(control)\n self.bbox = None\n self.size = None\n self.rectangle = None\n self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)\n\n def add_rectangle(self, min_x, min_y, max_x, max_y):\n if self.rectangle:\n self.map.remove_layer(self.rectangle)\n self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),\n color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=\n 0.2, weight=1)\n self.map.add_layer(self.rectangle)\n self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84\n ).transform(CRS.POP_WEB)\n size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)\n )\n size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)\n )\n self.size = size_x, size_y\n <mask token>\n\n def show(self):\n return self.map\n",
"step-3": "<mask token>\n\n\nclass BBoxSelector:\n\n def __init__(self, bbox, zoom=8, resolution=10):\n center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2\n self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)\n self.resolution = resolution\n control = DrawControl()\n control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',\n 'color': '#fa6814', 'fillOpacity': 0.2}}\n control.polyline = {}\n control.circle = {}\n control.circlemarker = {}\n control.polygon = {}\n control.edit = False\n control.remove = False\n control.on_draw(self._handle_draw)\n self.map.add_control(control)\n self.bbox = None\n self.size = None\n self.rectangle = None\n self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)\n\n def add_rectangle(self, min_x, min_y, max_x, max_y):\n if self.rectangle:\n self.map.remove_layer(self.rectangle)\n self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),\n color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=\n 0.2, weight=1)\n self.map.add_layer(self.rectangle)\n self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84\n ).transform(CRS.POP_WEB)\n size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)\n )\n size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)\n )\n self.size = size_x, size_y\n\n def _handle_draw(self, control, action, geo_json):\n control.clear_rectangles()\n bbox_geom = geo_json['geometry']['coordinates'][0]\n min_x, min_y = bbox_geom[0]\n max_x, max_y = bbox_geom[2]\n self.add_rectangle(min_x, min_y, max_x, max_y)\n\n def show(self):\n return self.map\n",
"step-4": "from ipyleaflet import Map, DrawControl, Marker, Rectangle\nfrom sentinelhub import BBox, CRS\nfrom ipywidgets import widgets as w\n\n\nclass BBoxSelector:\n\n def __init__(self, bbox, zoom=8, resolution=10):\n center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2\n self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)\n self.resolution = resolution\n control = DrawControl()\n control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',\n 'color': '#fa6814', 'fillOpacity': 0.2}}\n control.polyline = {}\n control.circle = {}\n control.circlemarker = {}\n control.polygon = {}\n control.edit = False\n control.remove = False\n control.on_draw(self._handle_draw)\n self.map.add_control(control)\n self.bbox = None\n self.size = None\n self.rectangle = None\n self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)\n\n def add_rectangle(self, min_x, min_y, max_x, max_y):\n if self.rectangle:\n self.map.remove_layer(self.rectangle)\n self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),\n color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=\n 0.2, weight=1)\n self.map.add_layer(self.rectangle)\n self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84\n ).transform(CRS.POP_WEB)\n size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)\n )\n size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)\n )\n self.size = size_x, size_y\n\n def _handle_draw(self, control, action, geo_json):\n control.clear_rectangles()\n bbox_geom = geo_json['geometry']['coordinates'][0]\n min_x, min_y = bbox_geom[0]\n max_x, max_y = bbox_geom[2]\n self.add_rectangle(min_x, min_y, max_x, max_y)\n\n def show(self):\n return self.map\n",
"step-5": "from ipyleaflet import Map, DrawControl, Marker, Rectangle\nfrom sentinelhub import BBox, CRS\n\nfrom ipywidgets import widgets as w\n\n\nclass BBoxSelector:\n def __init__(self, bbox, zoom=8, resolution=10):\n center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2\n self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)\n\n self.resolution = resolution\n\n control = DrawControl()\n\n control.rectangle = {\n \"shapeOptions\": {\n \"fillColor\": \"#fabd14\",\n \"color\": \"#fa6814\",\n \"fillOpacity\": 0.2\n }\n }\n\n #Disable the rest of draw options\n control.polyline = {}\n control.circle = {}\n control.circlemarker = {}\n control.polygon = {}\n control.edit = False\n control.remove = False\n\n control.on_draw(self._handle_draw)\n\n self.map.add_control(control)\n\n self.bbox = None\n self.size = None\n self.rectangle = None\n self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)\n\n # self.out = w.Output(layout=w.Layout(width='100%', height='50px', overflow_y='scroll'))\n # self.vbox = w.VBox([self.map, self.out])\n\n def add_rectangle(self, min_x, min_y, max_x, max_y):\n if self.rectangle:\n self.map.remove_layer(self.rectangle)\n\n self.rectangle = Rectangle(\n bounds=((min_y, min_x), (max_y, max_x)),\n color=\"#fa6814\",\n fill=True,\n fill_color=\"#fabd14\",\n fill_opacity=0.2,\n weight=1\n )\n\n self.map.add_layer(self.rectangle)\n\n self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84).transform(CRS.POP_WEB)\n\n # self.out.append_display_data((min_x, min_y, max_x, max_y))\n\n size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution))\n size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution))\n\n self.size = size_x, size_y\n\n def _handle_draw(self, control, action, geo_json):\n control.clear_rectangles()\n\n bbox_geom = geo_json['geometry']['coordinates'][0]\n\n min_x, min_y = bbox_geom[0]\n max_x, max_y = bbox_geom[2]\n\n self.add_rectangle(min_x, min_y, max_x, 
max_y)\n\n def show(self):\n return self.map\n # return self.vbox\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
def search_way(adjacency_list, points):
use = [False for i in range(points.__len__())]
way = [0 for i in range(points.__len__())]
cost = [100000 for i in range(points.__len__())]
cost[0] = 0
checkVar = 0
test = True
while test:
min = 100000
for i in range(points.__len__()):
if (cost[i] < min) and (not use[i]):
checkVar = i
min = cost[i]
for i in range(adjacency_list[checkVar + 1].__len__()):
bestStation = adjacency_list[checkVar + 1][i].arrivalPointId - 1
bestValue = adjacency_list[checkVar + 1][i].price
if(cost[i] + bestValue < cost[bestStation]):
way[bestStation] = adjacency_list[checkVar + 1][i]
cost[bestStation] = cost[checkVar] + bestValue
use[checkVar] = True
test = False
for i in range(adjacency_list[checkVar + 1].__len__()):
if use[i] == False:
test = True
print(cost)
print(points)
return way;
|
normal
|
{
"blob_id": "1e4d21998b9f8915167166e5965b0c8c87fcf61d",
"index": 3060,
"step-1": "<mask token>\n",
"step-2": "def search_way(adjacency_list, points):\n use = [(False) for i in range(points.__len__())]\n way = [(0) for i in range(points.__len__())]\n cost = [(100000) for i in range(points.__len__())]\n cost[0] = 0\n checkVar = 0\n test = True\n while test:\n min = 100000\n for i in range(points.__len__()):\n if cost[i] < min and not use[i]:\n checkVar = i\n min = cost[i]\n for i in range(adjacency_list[checkVar + 1].__len__()):\n bestStation = adjacency_list[checkVar + 1][i].arrivalPointId - 1\n bestValue = adjacency_list[checkVar + 1][i].price\n if cost[i] + bestValue < cost[bestStation]:\n way[bestStation] = adjacency_list[checkVar + 1][i]\n cost[bestStation] = cost[checkVar] + bestValue\n use[checkVar] = True\n test = False\n for i in range(adjacency_list[checkVar + 1].__len__()):\n if use[i] == False:\n test = True\n print(cost)\n print(points)\n return way\n",
"step-3": "def search_way(adjacency_list, points):\n use = [False for i in range(points.__len__())]\n way = [0 for i in range(points.__len__())]\n cost = [100000 for i in range(points.__len__())]\n cost[0] = 0\n checkVar = 0\n test = True\n while test:\n min = 100000\n for i in range(points.__len__()):\n if (cost[i] < min) and (not use[i]):\n checkVar = i\n min = cost[i]\n for i in range(adjacency_list[checkVar + 1].__len__()):\n bestStation = adjacency_list[checkVar + 1][i].arrivalPointId - 1\n bestValue = adjacency_list[checkVar + 1][i].price\n if(cost[i] + bestValue < cost[bestStation]):\n way[bestStation] = adjacency_list[checkVar + 1][i]\n cost[bestStation] = cost[checkVar] + bestValue\n use[checkVar] = True\n test = False\n for i in range(adjacency_list[checkVar + 1].__len__()):\n if use[i] == False:\n test = True\n print(cost)\n print(points)\n return way;",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if A > B:
print(A)
elif B > A:
print(B)
else:
print(AorB)
<|reserved_special_token_1|>
A, B = map(str, input().split())
if A > B:
print(A)
elif B > A:
print(B)
else:
print(AorB)
<|reserved_special_token_1|>
A,B=map(str,input().split())
if(A>B):
print(A)
elif(B>A):
print(B)
else:
print(AorB)
|
flexible
|
{
"blob_id": "8cbe78863de535a5b83eacebe67402569b4015fa",
"index": 9189,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif A > B:\n print(A)\nelif B > A:\n print(B)\nelse:\n print(AorB)\n",
"step-3": "A, B = map(str, input().split())\nif A > B:\n print(A)\nelif B > A:\n print(B)\nelse:\n print(AorB)\n",
"step-4": "A,B=map(str,input().split())\nif(A>B):\n\tprint(A)\nelif(B>A):\n\tprint(B)\nelse:\n\tprint(AorB)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python2
from rpcz import compiler
PROTO = '../index_server.proto'
compiler.generate_proto(PROTO, '.')
compiler.generate_proto(
PROTO, '.',
with_plugin='python_rpcz', suffix='_rpcz.py')
|
normal
|
{
"blob_id": "e868998833774c829b05ae8da3280bed61363be1",
"index": 177,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncompiler.generate_proto(PROTO, '.')\ncompiler.generate_proto(PROTO, '.', with_plugin='python_rpcz', suffix=\n '_rpcz.py')\n",
"step-3": "<mask token>\nPROTO = '../index_server.proto'\ncompiler.generate_proto(PROTO, '.')\ncompiler.generate_proto(PROTO, '.', with_plugin='python_rpcz', suffix=\n '_rpcz.py')\n",
"step-4": "from rpcz import compiler\nPROTO = '../index_server.proto'\ncompiler.generate_proto(PROTO, '.')\ncompiler.generate_proto(PROTO, '.', with_plugin='python_rpcz', suffix=\n '_rpcz.py')\n",
"step-5": "#!/usr/bin/env python2\n\nfrom rpcz import compiler\n\n\nPROTO = '../index_server.proto'\n\ncompiler.generate_proto(PROTO, '.')\ncompiler.generate_proto(\n PROTO, '.',\n with_plugin='python_rpcz', suffix='_rpcz.py')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
def minWindow(self, source, target):
s_char_count = defaultdict(int)
t_char_count = defaultdict(int)
for char in target:
t_char_count[char] += 1
j = 0
min_substr = ''
min_length = float('inf')
for i in range(len(source)):
while j < len(source) and not self.is_contain(s_char_count,
t_char_count):
s_char_count[source[j]] += 1
j += 1
if self.is_contain(s_char_count, t_char_count):
if min_length > j - i:
min_length = j - i
min_substr = source[i:j]
s_char_count[source[i]] -= 1
return min_substr
def is_contain(self, s_char_count, t_char_count):
for char in t_char_count:
if char not in s_char_count or s_char_count[char] < t_char_count[
char]:
return False
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
"""
@param: source : A string
@param: target: A string
@return: A string denote the minimum window, return "" if there is no such a string
"""
def minWindow(self, source, target):
s_char_count = defaultdict(int)
t_char_count = defaultdict(int)
for char in target:
t_char_count[char] += 1
j = 0
min_substr = ''
min_length = float('inf')
for i in range(len(source)):
while j < len(source) and not self.is_contain(s_char_count,
t_char_count):
s_char_count[source[j]] += 1
j += 1
if self.is_contain(s_char_count, t_char_count):
if min_length > j - i:
min_length = j - i
min_substr = source[i:j]
s_char_count[source[i]] -= 1
return min_substr
def is_contain(self, s_char_count, t_char_count):
for char in t_char_count:
if char not in s_char_count or s_char_count[char] < t_char_count[
char]:
return False
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from collections import defaultdict
class Solution:
"""
@param: source : A string
@param: target: A string
@return: A string denote the minimum window, return "" if there is no such a string
"""
def minWindow(self, source, target):
s_char_count = defaultdict(int)
t_char_count = defaultdict(int)
for char in target:
t_char_count[char] += 1
j = 0
min_substr = ''
min_length = float('inf')
for i in range(len(source)):
while j < len(source) and not self.is_contain(s_char_count,
t_char_count):
s_char_count[source[j]] += 1
j += 1
if self.is_contain(s_char_count, t_char_count):
if min_length > j - i:
min_length = j - i
min_substr = source[i:j]
s_char_count[source[i]] -= 1
return min_substr
def is_contain(self, s_char_count, t_char_count):
for char in t_char_count:
if char not in s_char_count or s_char_count[char] < t_char_count[
char]:
return False
return True
<|reserved_special_token_1|>
'''
Given a string S and a string T,
find the minimum window in S which will contain all the characters in T in complexity O(n).
For example,
S = "ADOBECODEBANC"
T = "ABC"
Minimum window is "BANC".
Note:
If there is no such window in S that covers all characters in T, return the empty string "".
If there are multiple such windows,
you are guaranteed that there will always be only one unique minimum window in S.
'''
from collections import defaultdict
class Solution:
"""
@param: source : A string
@param: target: A string
@return: A string denote the minimum window, return "" if there is no such a string
"""
def minWindow(self, source, target):
# create a hashmap/dictionary for target, {key: value = char: count}
s_char_count = defaultdict(int)
t_char_count = defaultdict(int)
for char in target:
t_char_count[char] += 1
j = 0
min_substr = ''
min_length = float('inf')
for i in range(len(source)):
while j < len(source) and not self.is_contain(s_char_count, t_char_count):
s_char_count[source[j]] += 1
j += 1
if self.is_contain(s_char_count, t_char_count):
if min_length > j - i:
min_length = j - i
min_substr = source[i:j]
s_char_count[source[i]] -= 1
return min_substr
def is_contain(self, s_char_count, t_char_count):
for char in t_char_count:
if char not in s_char_count or s_char_count[char] < t_char_count[char]:
return False
return True
|
flexible
|
{
"blob_id": "665a868ee71f247a621d82108e545257296e0427",
"index": 7048,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-3": "<mask token>\n\n\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-4": "<mask token>\nfrom collections import defaultdict\n\n\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-5": "'''\nGiven a string S and a string T,\nfind the minimum window in S which will contain all the characters in T in complexity O(n).\n\nFor example,\nS = \"ADOBECODEBANC\"\nT = \"ABC\"\nMinimum window is \"BANC\".\n\nNote:\nIf there is no such window in S that covers all characters in T, return the empty string \"\".\n\nIf there are multiple such windows,\nyou are guaranteed that there will always be only one unique minimum window in S.\n'''\nfrom collections import defaultdict\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n def minWindow(self, source, target):\n # create a hashmap/dictionary for target, {key: value = char: count}\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n\n for char in target:\n t_char_count[char] += 1\n\n j = 0\n min_substr = ''\n min_length = float('inf')\n\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count, t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[char]:\n return False\n return True\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
import re
text = "Python is an interpreted high-level general-purpose programming language."
fiveWord = re.findall(r"\b\w{5}\b", text)
print("Following are the words with five Letters:")
for strWord in fiveWord:
print(strWord)
|
normal
|
{
"blob_id": "aa15d51760c16181907994d329fb7ceede6a539b",
"index": 5858,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Following are the words with five Letters:')\nfor strWord in fiveWord:\n print(strWord)\n",
"step-3": "<mask token>\ntext = (\n 'Python is an interpreted high-level general-purpose programming language.'\n )\nfiveWord = re.findall('\\\\b\\\\w{5}\\\\b', text)\nprint('Following are the words with five Letters:')\nfor strWord in fiveWord:\n print(strWord)\n",
"step-4": "import re\ntext = (\n 'Python is an interpreted high-level general-purpose programming language.'\n )\nfiveWord = re.findall('\\\\b\\\\w{5}\\\\b', text)\nprint('Following are the words with five Letters:')\nfor strWord in fiveWord:\n print(strWord)\n",
"step-5": "import re\r\ntext = \"Python is an interpreted high-level general-purpose programming language.\"\r\nfiveWord = re.findall(r\"\\b\\w{5}\\b\", text)\r\nprint(\"Following are the words with five Letters:\")\r\nfor strWord in fiveWord:\r\n print(strWord)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
import sys
from static_pipeline import render
from static_pipeline.lib import argparse
if __name__ == "__main__":
""" Use argparse to decide what to do
"""
# set up arg parsing
parser = argparse.ArgumentParser(
description='render and rearrange files ' \
'(presumably for serving them as part of a website)')
subparsers = parser.add_subparsers(title='commands', dest="command")
# render command
parser_render = subparsers.add_parser('render', help='render files ' \
'with static-pipeline (based on contents of the settings file)')
parser_render.add_argument('--settings-file', default='pipeline_settings')
# parse (if no command specified, show help)
if len(sys.argv) < 2:
sys.argv.append('--help')
parsed = parser.parse_args()
if parsed.command == "render":
try:
# for whatever reason, sometimes current dir doesn't end up on the pythonpath
sys.path.append('.')
pipeline_settings = __import__(parsed.settings_file)
pipeline = pipeline_settings.PIPELINE
except ImportError:
print "No settings file: {0}.py".format(parsed.settings_file)
sys.exit(1)
except AttributeError:
print "No PIPELINE in settings file. Nothing to do."
sys.exit(0)
render(pipeline, pipeline_settings)
|
normal
|
{
"blob_id": "348676e43e4dfbbe7cd0c0527acb8c613d3d1ebc",
"index": 6301,
"step-1": "#!/usr/bin/env python\nimport sys\nfrom static_pipeline import render\nfrom static_pipeline.lib import argparse\n\nif __name__ == \"__main__\":\n \"\"\" Use argparse to decide what to do\n \"\"\"\n # set up arg parsing\n parser = argparse.ArgumentParser(\n description='render and rearrange files ' \\\n '(presumably for serving them as part of a website)')\n subparsers = parser.add_subparsers(title='commands', dest=\"command\")\n\n # render command\n parser_render = subparsers.add_parser('render', help='render files ' \\\n 'with static-pipeline (based on contents of the settings file)')\n parser_render.add_argument('--settings-file', default='pipeline_settings')\n\n # parse (if no command specified, show help)\n if len(sys.argv) < 2:\n sys.argv.append('--help')\n parsed = parser.parse_args()\n\n if parsed.command == \"render\":\n try:\n # for whatever reason, sometimes current dir doesn't end up on the pythonpath\n sys.path.append('.')\n pipeline_settings = __import__(parsed.settings_file)\n pipeline = pipeline_settings.PIPELINE\n except ImportError:\n print \"No settings file: {0}.py\".format(parsed.settings_file)\n sys.exit(1)\n except AttributeError:\n print \"No PIPELINE in settings file. Nothing to do.\"\n sys.exit(0)\n\n render(pipeline, pipeline_settings)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cli.run()
<|reserved_special_token_1|>
from . import cli
cli.run()
|
flexible
|
{
"blob_id": "235623c3f557dbc28fbff855a618e4d26932ca65",
"index": 7630,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncli.run()\n",
"step-3": "from . import cli\ncli.run()\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import os
import pubmed_parser as pp
nlpPath = "/Users/kapmayn/Desktop/nlp"
articlesFolderPath = nlpPath + "/articles"
abstractsFilePath = nlpPath + "/abstracts.txt"
articlesFileNameList = os.listdir(articlesFolderPath)
articlesFileNameList(reverse = True)
resultFile = open(abstractsFilePath, 'w')
for fileName in articlesFileNameList:
print(fileName)
dictOut = pp.parse_medline_xml(articlesFolderPath + "/" + fileName)
for item in dictOut:
resultFile.write((item['abstract'] + '\n'))
|
normal
|
{
"blob_id": "32f9b5c32acbb6411fe6ab99616d8459acfd7c74",
"index": 719,
"step-1": "<mask token>\n",
"step-2": "<mask token>\narticlesFileNameList(reverse=True)\n<mask token>\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-3": "<mask token>\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-4": "import os\nimport pubmed_parser as pp\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-5": "import os\nimport pubmed_parser as pp\n\nnlpPath = \"/Users/kapmayn/Desktop/nlp\"\narticlesFolderPath = nlpPath + \"/articles\"\nabstractsFilePath = nlpPath + \"/abstracts.txt\"\n\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse = True)\nresultFile = open(abstractsFilePath, 'w')\n\nfor fileName in articlesFileNameList:\n\tprint(fileName)\n\tdictOut = pp.parse_medline_xml(articlesFolderPath + \"/\" + fileName)\n\tfor item in dictOut:\n\t\tresultFile.write((item['abstract'] + '\\n'))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if os.environ.get('HEROKU') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SECRET_KEY = os.urandom(24)
CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'
TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'
NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SECRET_KEY = os.environ['CSRF_SECRET_KEY']
CONSUMER_KEY = os.environ['POCKET_KEY']
TROVE_KEY = os.environ['TROVE_KEY']
NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
CSRF_ENABLED = True
basedir = os.path.abspath(os.path.dirname(__file__))
if os.environ.get('HEROKU') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SECRET_KEY = os.urandom(24)
CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'
TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'
NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SECRET_KEY = os.environ['CSRF_SECRET_KEY']
CONSUMER_KEY = os.environ['POCKET_KEY']
TROVE_KEY = os.environ['TROVE_KEY']
NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
<|reserved_special_token_1|>
import os
CSRF_ENABLED = True
basedir = os.path.abspath(os.path.dirname(__file__))
if os.environ.get('HEROKU') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SECRET_KEY = os.urandom(24)
CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'
TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'
NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SECRET_KEY = os.environ['CSRF_SECRET_KEY']
CONSUMER_KEY = os.environ['POCKET_KEY']
TROVE_KEY = os.environ['TROVE_KEY']
NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
<|reserved_special_token_1|>
import os
CSRF_ENABLED = True
basedir = os.path.abspath(os.path.dirname(__file__))
# Heroku vs. Local Configs
if os.environ.get('HEROKU') is None:
# Database path
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
# CSRF Key
SECRET_KEY = os.urandom(24)
# Pocket API
CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'
# News API Credentials
TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'
NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'
else:
# Database path
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
# CSRF Key
SECRET_KEY = os.environ['CSRF_SECRET_KEY']
# Pocket API
CONSUMER_KEY = os.environ['POCKET_KEY']
# News API Credentials
TROVE_KEY = os.environ['TROVE_KEY']
NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']
# Path where we store the migration data files
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
|
flexible
|
{
"blob_id": "0656aba517023c003e837d5ad04daeb364f7fda8",
"index": 4688,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif os.environ.get('HEROKU') is None:\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')\n SECRET_KEY = os.urandom(24)\n CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'\n TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'\n NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'\nelse:\n SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']\n SECRET_KEY = os.environ['CSRF_SECRET_KEY']\n CONSUMER_KEY = os.environ['POCKET_KEY']\n TROVE_KEY = os.environ['TROVE_KEY']\n NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']\n<mask token>\n",
"step-3": "<mask token>\nCSRF_ENABLED = True\nbasedir = os.path.abspath(os.path.dirname(__file__))\nif os.environ.get('HEROKU') is None:\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')\n SECRET_KEY = os.urandom(24)\n CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'\n TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'\n NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'\nelse:\n SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']\n SECRET_KEY = os.environ['CSRF_SECRET_KEY']\n CONSUMER_KEY = os.environ['POCKET_KEY']\n TROVE_KEY = os.environ['TROVE_KEY']\n NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']\nSQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')\n",
"step-4": "import os\nCSRF_ENABLED = True\nbasedir = os.path.abspath(os.path.dirname(__file__))\nif os.environ.get('HEROKU') is None:\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')\n SECRET_KEY = os.urandom(24)\n CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'\n TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'\n NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'\nelse:\n SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']\n SECRET_KEY = os.environ['CSRF_SECRET_KEY']\n CONSUMER_KEY = os.environ['POCKET_KEY']\n TROVE_KEY = os.environ['TROVE_KEY']\n NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']\nSQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')\n",
"step-5": "import os\n\nCSRF_ENABLED = True\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n# Heroku vs. Local Configs\nif os.environ.get('HEROKU') is None:\n # Database path\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')\n # CSRF Key\n SECRET_KEY = os.urandom(24)\n # Pocket API\n CONSUMER_KEY = '23571-333bb5dbab872eee6686bf86'\n # News API Credentials\n TROVE_KEY = 'E767C55D-0941-4993-BB3A-1CB81FD2B9E9'\n NYTIMES_SEARCH_KEY = 'b2f1032fbec2cb261c1e153ab6b5a6b8:13:69075429'\nelse:\n # Database path\n SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']\n # CSRF Key\n SECRET_KEY = os.environ['CSRF_SECRET_KEY']\n # Pocket API\n CONSUMER_KEY = os.environ['POCKET_KEY']\n # News API Credentials\n TROVE_KEY = os.environ['TROVE_KEY']\n NYTIMES_SEARCH_KEY = os.environ['NYTIMES_KEY']\n\n# Path where we store the migration data files\nSQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class YoutubeVerifier(base.SimpleVerifier):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_follower_info(self):
"""
Get follower information from youtube api subscriptions
:rtype: dict
:return: follower info
"""
payload = self._get_subscription()
snippet = payload.get('items', [{}])[0].get('snippet', {})
if 'publishedAt' in snippet:
snippet['publishedAt'] = datetime.strptime(snippet[
'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')
else:
snippet['publishedAt'] = datetime.now() + timedelta(days=1)
return snippet
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class YoutubeVerifier(base.SimpleVerifier):
<|reserved_special_token_0|>
def _get_subscription(self):
"""
Gets information if user is subscribed to channel
:rtype: requests.Response
:return: good game player information http api call result
"""
response = requests.get(
'https://www.googleapis.com/youtube/v3/subscriptions', params={
'part': 'snippet', 'mine': 'true', 'forChannelId': self.
channel_id}, headers=self.headers)
if response.status_code == 200:
return response.json()
return {}
def get_follower_info(self):
"""
Get follower information from youtube api subscriptions
:rtype: dict
:return: follower info
"""
payload = self._get_subscription()
snippet = payload.get('items', [{}])[0].get('snippet', {})
if 'publishedAt' in snippet:
snippet['publishedAt'] = datetime.strptime(snippet[
'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')
else:
snippet['publishedAt'] = datetime.now() + timedelta(days=1)
return snippet
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class YoutubeVerifier(base.SimpleVerifier):
def __init__(self, channel_id, access_token):
self.channel_id = channel_id
self.access_token = access_token
self.headers = {'Authorization': 'Bearer {}'.format(self.access_token)}
def _get_subscription(self):
"""
Gets information if user is subscribed to channel
:rtype: requests.Response
:return: good game player information http api call result
"""
response = requests.get(
'https://www.googleapis.com/youtube/v3/subscriptions', params={
'part': 'snippet', 'mine': 'true', 'forChannelId': self.
channel_id}, headers=self.headers)
if response.status_code == 200:
return response.json()
return {}
def get_follower_info(self):
"""
Get follower information from youtube api subscriptions
:rtype: dict
:return: follower info
"""
payload = self._get_subscription()
snippet = payload.get('items', [{}])[0].get('snippet', {})
if 'publishedAt' in snippet:
snippet['publishedAt'] = datetime.strptime(snippet[
'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')
else:
snippet['publishedAt'] = datetime.now() + timedelta(days=1)
return snippet
<|reserved_special_token_1|>
import requests
from datetime import datetime, timedelta
from . import base
class YoutubeVerifier(base.SimpleVerifier):
def __init__(self, channel_id, access_token):
self.channel_id = channel_id
self.access_token = access_token
self.headers = {'Authorization': 'Bearer {}'.format(self.access_token)}
def _get_subscription(self):
"""
Gets information if user is subscribed to channel
:rtype: requests.Response
:return: good game player information http api call result
"""
response = requests.get(
'https://www.googleapis.com/youtube/v3/subscriptions', params={
'part': 'snippet', 'mine': 'true', 'forChannelId': self.
channel_id}, headers=self.headers)
if response.status_code == 200:
return response.json()
return {}
def get_follower_info(self):
"""
Get follower information from youtube api subscriptions
:rtype: dict
:return: follower info
"""
payload = self._get_subscription()
snippet = payload.get('items', [{}])[0].get('snippet', {})
if 'publishedAt' in snippet:
snippet['publishedAt'] = datetime.strptime(snippet[
'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')
else:
snippet['publishedAt'] = datetime.now() + timedelta(days=1)
return snippet
<|reserved_special_token_1|>
import requests
from datetime import datetime, timedelta
from . import base
class YoutubeVerifier(base.SimpleVerifier):
def __init__(self, channel_id, access_token):
self.channel_id = channel_id
self.access_token = access_token
self.headers = {
'Authorization': 'Bearer {}'.format(self.access_token)
}
def _get_subscription(self):
"""
Gets information if user is subscribed to channel
:rtype: requests.Response
:return: good game player information http api call result
"""
response = requests.get(
'https://www.googleapis.com/youtube/v3/subscriptions',
params={
'part': 'snippet',
'mine': 'true',
'forChannelId': self.channel_id
},
headers=self.headers
)
if response.status_code == 200:
return response.json()
return {}
def get_follower_info(self):
"""
Get follower information from youtube api subscriptions
:rtype: dict
:return: follower info
"""
payload = self._get_subscription()
snippet = payload.get('items', [{}])[0].get('snippet', {})
#: re-processing publishedAt
if 'publishedAt' in snippet:
snippet['publishedAt'] = datetime.strptime(
snippet['publishedAt'],
"%Y-%m-%dT%H:%M:%S.%fZ"
)
else:
#: if nothing has been found publishedAt set to future
snippet['publishedAt'] = datetime.now() + timedelta(days=1)
return snippet
|
flexible
|
{
"blob_id": "7ba2377b7d4f8d127cfee63c856d20753da9b7c6",
"index": 4526,
"step-1": "<mask token>\n\n\nclass YoutubeVerifier(base.SimpleVerifier):\n <mask token>\n <mask token>\n\n def get_follower_info(self):\n \"\"\"\n Get follower information from youtube api subscriptions\n\n :rtype: dict\n :return: follower info\n \"\"\"\n payload = self._get_subscription()\n snippet = payload.get('items', [{}])[0].get('snippet', {})\n if 'publishedAt' in snippet:\n snippet['publishedAt'] = datetime.strptime(snippet[\n 'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')\n else:\n snippet['publishedAt'] = datetime.now() + timedelta(days=1)\n return snippet\n",
"step-2": "<mask token>\n\n\nclass YoutubeVerifier(base.SimpleVerifier):\n <mask token>\n\n def _get_subscription(self):\n \"\"\"\n Gets information if user is subscribed to channel\n\n :rtype: requests.Response\n :return: good game player information http api call result\n \"\"\"\n response = requests.get(\n 'https://www.googleapis.com/youtube/v3/subscriptions', params={\n 'part': 'snippet', 'mine': 'true', 'forChannelId': self.\n channel_id}, headers=self.headers)\n if response.status_code == 200:\n return response.json()\n return {}\n\n def get_follower_info(self):\n \"\"\"\n Get follower information from youtube api subscriptions\n\n :rtype: dict\n :return: follower info\n \"\"\"\n payload = self._get_subscription()\n snippet = payload.get('items', [{}])[0].get('snippet', {})\n if 'publishedAt' in snippet:\n snippet['publishedAt'] = datetime.strptime(snippet[\n 'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')\n else:\n snippet['publishedAt'] = datetime.now() + timedelta(days=1)\n return snippet\n",
"step-3": "<mask token>\n\n\nclass YoutubeVerifier(base.SimpleVerifier):\n\n def __init__(self, channel_id, access_token):\n self.channel_id = channel_id\n self.access_token = access_token\n self.headers = {'Authorization': 'Bearer {}'.format(self.access_token)}\n\n def _get_subscription(self):\n \"\"\"\n Gets information if user is subscribed to channel\n\n :rtype: requests.Response\n :return: good game player information http api call result\n \"\"\"\n response = requests.get(\n 'https://www.googleapis.com/youtube/v3/subscriptions', params={\n 'part': 'snippet', 'mine': 'true', 'forChannelId': self.\n channel_id}, headers=self.headers)\n if response.status_code == 200:\n return response.json()\n return {}\n\n def get_follower_info(self):\n \"\"\"\n Get follower information from youtube api subscriptions\n\n :rtype: dict\n :return: follower info\n \"\"\"\n payload = self._get_subscription()\n snippet = payload.get('items', [{}])[0].get('snippet', {})\n if 'publishedAt' in snippet:\n snippet['publishedAt'] = datetime.strptime(snippet[\n 'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')\n else:\n snippet['publishedAt'] = datetime.now() + timedelta(days=1)\n return snippet\n",
"step-4": "import requests\nfrom datetime import datetime, timedelta\nfrom . import base\n\n\nclass YoutubeVerifier(base.SimpleVerifier):\n\n def __init__(self, channel_id, access_token):\n self.channel_id = channel_id\n self.access_token = access_token\n self.headers = {'Authorization': 'Bearer {}'.format(self.access_token)}\n\n def _get_subscription(self):\n \"\"\"\n Gets information if user is subscribed to channel\n\n :rtype: requests.Response\n :return: good game player information http api call result\n \"\"\"\n response = requests.get(\n 'https://www.googleapis.com/youtube/v3/subscriptions', params={\n 'part': 'snippet', 'mine': 'true', 'forChannelId': self.\n channel_id}, headers=self.headers)\n if response.status_code == 200:\n return response.json()\n return {}\n\n def get_follower_info(self):\n \"\"\"\n Get follower information from youtube api subscriptions\n\n :rtype: dict\n :return: follower info\n \"\"\"\n payload = self._get_subscription()\n snippet = payload.get('items', [{}])[0].get('snippet', {})\n if 'publishedAt' in snippet:\n snippet['publishedAt'] = datetime.strptime(snippet[\n 'publishedAt'], '%Y-%m-%dT%H:%M:%S.%fZ')\n else:\n snippet['publishedAt'] = datetime.now() + timedelta(days=1)\n return snippet\n",
"step-5": "import requests\n\nfrom datetime import datetime, timedelta\n\nfrom . import base\n\n\nclass YoutubeVerifier(base.SimpleVerifier):\n def __init__(self, channel_id, access_token):\n self.channel_id = channel_id\n self.access_token = access_token\n self.headers = {\n 'Authorization': 'Bearer {}'.format(self.access_token)\n }\n\n def _get_subscription(self):\n \"\"\"\n Gets information if user is subscribed to channel\n\n :rtype: requests.Response\n :return: good game player information http api call result\n \"\"\"\n response = requests.get(\n 'https://www.googleapis.com/youtube/v3/subscriptions',\n params={\n 'part': 'snippet',\n 'mine': 'true',\n 'forChannelId': self.channel_id\n },\n headers=self.headers\n )\n if response.status_code == 200:\n return response.json()\n return {}\n\n def get_follower_info(self):\n \"\"\"\n Get follower information from youtube api subscriptions\n\n :rtype: dict\n :return: follower info\n \"\"\"\n payload = self._get_subscription()\n snippet = payload.get('items', [{}])[0].get('snippet', {})\n #: re-processing publishedAt\n if 'publishedAt' in snippet:\n snippet['publishedAt'] = datetime.strptime(\n snippet['publishedAt'],\n \"%Y-%m-%dT%H:%M:%S.%fZ\"\n )\n else:\n #: if nothing has been found publishedAt set to future\n snippet['publishedAt'] = datetime.now() + timedelta(days=1)\n return snippet\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from flask import Flask

from sim.toggle import ToggleSensor
from sim.sensor import Sensor

app = Flask(__name__)

# Simulated device registry: ToggleSensor entries can be switched on/off,
# plain Sensor entries only report a value via update().
sensors = [
    ToggleSensor(id="s-01", description="lampadina"),
    ToggleSensor(id="s-02", description="lampadina"),
    ToggleSensor(id="s-03", description="allarme atomico"),
    ToggleSensor(id="s-04", description="porta aperta"),
    Sensor(id="temperature-01", description="sensore di temperatura"),
    Sensor(id="umidita-01", description="sensore di umiditá"),
    Sensor(id="cleancode-01", description="sensore di bellezza del codice"),
    Sensor(id="luce-01", description="sensore di luce"),
]


def search_index_by_id(sensor_id: str) -> int:
    """Return the index of the sensor with *sensor_id*, or -1 if not found."""
    for pos, sensor in enumerate(sensors):
        if sensor.id == sensor_id:
            return pos
    return -1


@app.route("/<sensor_id>", methods=["GET"])
def sensor_details(sensor_id):
    """Return one sensor's details, or an error payload for an unknown id."""
    sensor_pos = search_index_by_id(sensor_id)
    if sensor_pos >= 0:
        return {"sensor": sensors[sensor_pos]}
    return {"kind": "error", "payload": f"Sensor {sensor_id} not found"}


@app.route("/<sensor_id>/toggle", methods=["PUT"])
def set_sensor_value(sensor_id: str):
    """Toggle a ToggleSensor; report success, or an error for unknown/non-toggle ids."""
    sensor_pos = search_index_by_id(sensor_id)
    if sensor_pos >= 0 and isinstance(sensors[sensor_pos], ToggleSensor):
        sensors[sensor_pos].toggle()
        # BUG FIX: the handler previously fell through to the error response
        # even after a successful toggle.
        return {"kind": "ok", "payload": f"Sensor {sensor_id} toggled"}
    return {"kind": "error", "payload": f"Sensor {sensor_id} not found"}


@app.route("/", methods=["GET"])
def hello_world():
    """Refresh every sensor and return the whole registry."""
    for sensor in sensors:
        sensor.update()
    return {"sensors": sensors}
|
normal
|
{
"blob_id": "2843845848747c723d670cd3a5fcb7127153ac7e",
"index": 264,
"step-1": "<mask token>\n\n\n@app.route('/<sensor_id>', methods=['GET'])\ndef sensor_details(sensor_id):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0:\n return {'sensor': sensors[sensor_pos]}\n else:\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef hello_world():\n for sensor in sensors:\n sensor.update()\n return {'sensors': sensors}\n",
"step-2": "<mask token>\n\n\n@app.route('/<sensor_id>', methods=['GET'])\ndef sensor_details(sensor_id):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0:\n return {'sensor': sensors[sensor_pos]}\n else:\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/<sensor_id>/toggle', methods=['PUT'])\ndef set_sensor_value(sensor_id: str):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0 and isinstance(sensors[sensor_pos], ToggleSensor):\n sensors[sensor_pos].toggle()\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/', methods=['GET'])\ndef hello_world():\n for sensor in sensors:\n sensor.update()\n return {'sensors': sensors}\n",
"step-3": "<mask token>\n\n\ndef search_index_by_id(sensor_id: str):\n for pos, sensor in enumerate(sensors):\n if sensor.id == sensor_id:\n return pos\n return -1\n\n\n@app.route('/<sensor_id>', methods=['GET'])\ndef sensor_details(sensor_id):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0:\n return {'sensor': sensors[sensor_pos]}\n else:\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/<sensor_id>/toggle', methods=['PUT'])\ndef set_sensor_value(sensor_id: str):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0 and isinstance(sensors[sensor_pos], ToggleSensor):\n sensors[sensor_pos].toggle()\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/', methods=['GET'])\ndef hello_world():\n for sensor in sensors:\n sensor.update()\n return {'sensors': sensors}\n",
"step-4": "from flask import Flask\nfrom sim.toggle import ToggleSensor\nfrom sim.sensor import Sensor\napp = Flask(__name__)\nsensors = [ToggleSensor(id='s-01', description='lampadina'), ToggleSensor(\n id='s-02', description='lampadina'), ToggleSensor(id='s-03',\n description='allarme atomico'), ToggleSensor(id='s-04', description=\n 'porta aperta'), Sensor(id='temperature-01', description=\n 'sensore di temperatura'), Sensor(id='umidita-01', description=\n 'sensore di umiditá'), Sensor(id='cleancode-01', description=\n 'sensore di bellezza del codice'), Sensor(id='luce-01', description=\n 'sensore di luce')]\n\n\ndef search_index_by_id(sensor_id: str):\n for pos, sensor in enumerate(sensors):\n if sensor.id == sensor_id:\n return pos\n return -1\n\n\n@app.route('/<sensor_id>', methods=['GET'])\ndef sensor_details(sensor_id):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0:\n return {'sensor': sensors[sensor_pos]}\n else:\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/<sensor_id>/toggle', methods=['PUT'])\ndef set_sensor_value(sensor_id: str):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0 and isinstance(sensors[sensor_pos], ToggleSensor):\n sensors[sensor_pos].toggle()\n return {'kind': 'error', 'payload': f'Sensor {sensor_id} not found'}\n\n\n@app.route('/', methods=['GET'])\ndef hello_world():\n for sensor in sensors:\n sensor.update()\n return {'sensors': sensors}\n",
"step-5": "from flask import Flask\n\nfrom sim.toggle import ToggleSensor\nfrom sim.sensor import Sensor\n\napp = Flask(__name__)\n\nsensors = [\n ToggleSensor(id=\"s-01\", description=\"lampadina\"),\n ToggleSensor(id=\"s-02\", description=\"lampadina\"),\n ToggleSensor(id=\"s-03\", description=\"allarme atomico\"),\n ToggleSensor(id=\"s-04\", description=\"porta aperta\"),\n Sensor(id=\"temperature-01\", description=\"sensore di temperatura\"),\n Sensor(id=\"umidita-01\", description=\"sensore di umiditá\"),\n Sensor(id=\"cleancode-01\", description=\"sensore di bellezza del codice\"),\n Sensor(id=\"luce-01\", description=\"sensore di luce\"),\n]\n\n\ndef search_index_by_id(sensor_id: str):\n for pos, sensor in enumerate(sensors):\n if sensor.id == sensor_id:\n return pos\n\n return -1\n\n\n@app.route(\"/<sensor_id>\", methods=[\"GET\"])\ndef sensor_details(sensor_id):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0:\n return {\"sensor\": sensors[sensor_pos]}\n else:\n return {\"kind\": \"error\", \"payload\": f\"Sensor {sensor_id} not found\"}\n\n\n@app.route(\"/<sensor_id>/toggle\", methods=[\"PUT\"])\ndef set_sensor_value(sensor_id: str):\n sensor_pos = search_index_by_id(sensor_id)\n if sensor_pos >= 0 and isinstance(sensors[sensor_pos], ToggleSensor):\n sensors[sensor_pos].toggle()\n return {\"kind\": \"error\", \"payload\": f\"Sensor {sensor_id} not found\"}\n\n\n@app.route(\"/\", methods=[\"GET\"])\ndef hello_world():\n for sensor in sensors:\n sensor.update()\n return {\"sensors\": sensors}\n",
"step-ids": [
2,
3,
4,
6,
7
]
}
|
[
2,
3,
4,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
name = str(input('Name: ')).capitalize().strip()
grade1 = float(input('Grade 1: '))
grade2 = float(input('Grade 2: '))
avgrade = (grade1 + grade2) / 2
student.append([name, [grade1, grade2], avgrade])
resp = ' '
while resp not in 'NnYy':
resp = str(input('Another student? [Y/N]'))
if resp == 'N':
break
print('-=' * 15)
print(f"{'No.':<4}{'Name:':<10}{'Average Grade:':>8}")
print('-=' * 15)
for i, a in enumerate(student):
print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')
while True:
print('-=' * 20)
opt = int(input('Enter the student ID to show the grades: (999 to exit) ')
)
if opt == 999:
print('Exiting...')
break
if opt <= len(student) - 1:
print(f'Grades of {student[opt][0]} are {student[opt][1]}')
print('Have a nice day!!!')
<|reserved_special_token_1|>
student = []
while True:
name = str(input('Name: ')).capitalize().strip()
grade1 = float(input('Grade 1: '))
grade2 = float(input('Grade 2: '))
avgrade = (grade1 + grade2) / 2
student.append([name, [grade1, grade2], avgrade])
resp = ' '
while resp not in 'NnYy':
resp = str(input('Another student? [Y/N]'))
if resp == 'N':
break
print('-=' * 15)
print(f"{'No.':<4}{'Name:':<10}{'Average Grade:':>8}")
print('-=' * 15)
for i, a in enumerate(student):
print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')
while True:
print('-=' * 20)
opt = int(input('Enter the student ID to show the grades: (999 to exit) ')
)
if opt == 999:
print('Exiting...')
break
if opt <= len(student) - 1:
print(f'Grades of {student[opt][0]} are {student[opt][1]}')
print('Have a nice day!!!')
<|reserved_special_token_1|>
# Simple grade book: collect a name and two grades per student, then let the
# user look up an individual student's grades by list position.
student = []
while True:
    name = str(input('Name: ')).capitalize().strip()
    grade1 = float(input('Grade 1: '))
    grade2 = float(input('Grade 2: '))
    avgrade = (grade1 + grade2) / 2
    student.append([name, [grade1, grade2], avgrade])
    resp = ' '
    while resp not in ('Y', 'N'):
        # Normalize to the first upper-cased character.  BUG FIX: the old
        # check `resp not in 'NnYy'` accepted the empty string (every string
        # contains ''), and only an exact 'N' broke the outer loop, so
        # answering 'n' kept asking for more students.
        resp = str(input('Another student? [Y/N]')).strip()[:1].upper()
    if resp == 'N':
        break
print('-=' * 15)
print(f'{"No.":<4}{"Name:":<10}{"Average Grade:":>8}')
print('-=' * 15)
for i, a in enumerate(student):
    print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')
while True:
    print('-=' * 20)
    opt = int(input('Enter the student ID to show the grades: (999 to exit) '))
    if opt == 999:
        print('Exiting...')
        break
    # BUG FIX: guard against negative ids, which previously indexed from the
    # end of the list instead of being rejected.
    if 0 <= opt < len(student):
        print(f'Grades of {student[opt][0]} are {student[opt][1]}')
    else:
        print('Invalid student ID, try again.')
print('Have a nice day!!!')
|
flexible
|
{
"blob_id": "74028a7b317c02c90603ad24c1ddb35a1d5d0e9d",
"index": 8678,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n name = str(input('Name: ')).capitalize().strip()\n grade1 = float(input('Grade 1: '))\n grade2 = float(input('Grade 2: '))\n avgrade = (grade1 + grade2) / 2\n student.append([name, [grade1, grade2], avgrade])\n resp = ' '\n while resp not in 'NnYy':\n resp = str(input('Another student? [Y/N]'))\n if resp == 'N':\n break\nprint('-=' * 15)\nprint(f\"{'No.':<4}{'Name:':<10}{'Average Grade:':>8}\")\nprint('-=' * 15)\nfor i, a in enumerate(student):\n print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')\nwhile True:\n print('-=' * 20)\n opt = int(input('Enter the student ID to show the grades: (999 to exit) ')\n )\n if opt == 999:\n print('Exiting...')\n break\n if opt <= len(student) - 1:\n print(f'Grades of {student[opt][0]} are {student[opt][1]}')\nprint('Have a nice day!!!')\n",
"step-3": "student = []\nwhile True:\n name = str(input('Name: ')).capitalize().strip()\n grade1 = float(input('Grade 1: '))\n grade2 = float(input('Grade 2: '))\n avgrade = (grade1 + grade2) / 2\n student.append([name, [grade1, grade2], avgrade])\n resp = ' '\n while resp not in 'NnYy':\n resp = str(input('Another student? [Y/N]'))\n if resp == 'N':\n break\nprint('-=' * 15)\nprint(f\"{'No.':<4}{'Name:':<10}{'Average Grade:':>8}\")\nprint('-=' * 15)\nfor i, a in enumerate(student):\n print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')\nwhile True:\n print('-=' * 20)\n opt = int(input('Enter the student ID to show the grades: (999 to exit) ')\n )\n if opt == 999:\n print('Exiting...')\n break\n if opt <= len(student) - 1:\n print(f'Grades of {student[opt][0]} are {student[opt][1]}')\nprint('Have a nice day!!!')\n",
"step-4": "student = []\nwhile True:\n name = str(input('Name: ')).capitalize().strip()\n grade1 = float(input('Grade 1: '))\n grade2 = float(input('Grade 2: '))\n avgrade = (grade1 + grade2) / 2\n student.append([name, [grade1, grade2], avgrade])\n resp = ' '\n while resp not in 'NnYy':\n resp = str(input('Another student? [Y/N]'))\n if resp == 'N':\n break\nprint('-=' * 15)\nprint(f'{\"No.\":<4}{\"Name:\":<10}{\"Average Grade:\":>8}')\nprint('-=' * 15)\nfor i, a in enumerate(student):\n print(f'{i:<4}{a[0]:<8}{a[2]:>8.1f}')\nwhile True:\n print('-=' * 20)\n opt = int(input('Enter the student ID to show the grades: (999 to exit) '))\n if opt == 999:\n print('Exiting...')\n break\n if opt <= len(student) - 1:\n print(f'Grades of {student[opt][0]} are {student[opt][1]}')\nprint('Have a nice day!!!')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""Support for Bond covers."""
import asyncio
import logging
from typing import Any, Callable, Dict, List, Optional
from bond import BOND_DEVICE_TYPE_MOTORIZED_SHADES, Bond
from homeassistant.components.cover import DEVICE_CLASS_SHADE, CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
from .utils import BondDevice, get_bond_devices
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: Callable[[List[Entity]], None],
) -> None:
    """Discover Bond devices and register motorized shades as cover entities."""
    bond: Bond = hass.data[DOMAIN][entry.entry_id]

    async def _discover_and_add() -> None:
        # Fetch every device from the hub, then keep only motorized shades.
        all_devices = await get_bond_devices(hass, bond)
        shades: List[Entity] = []
        for dev in all_devices:
            if dev.type == BOND_DEVICE_TYPE_MOTORIZED_SHADES:
                shades.append(BondCover(bond, dev))
        async_add_entities(shades)

    # Run discovery in the background rather than blocking entry setup.
    asyncio.create_task(_discover_and_add())
class BondCover(CoverEntity):
    """Home Assistant cover entity backed by a Bond motorized shade."""

    def __init__(self, bond: Bond, device: BondDevice):
        """Keep a reference to the Bond hub client and the device it controls."""
        self._bond = bond
        self._device = device

    @property
    def device_class(self) -> Optional[str]:
        """Every Bond cover is exposed as a shade."""
        return DEVICE_CLASS_SHADE

    @property
    def unique_id(self) -> Optional[str]:
        """Use the Bond device id as the stable unique id."""
        return self._device.device_id

    @property
    def name(self) -> Optional[str]:
        """Human-readable name reported by the Bond hub."""
        return self._device.name

    @property
    def device_info(self) -> Optional[Dict[str, Any]]:
        """Describe the device-registry entry for this cover."""
        info: Dict[str, Any] = {ATTR_NAME: self.name}
        info["identifiers"] = {(DOMAIN, self._device.device_id)}
        return info

    @property
    def is_closed(self):
        """Position is not tracked here, so the closed state is unknown."""
        return None

    def open_cover(self, **kwargs: Any) -> None:
        """Send the open command for this device to the Bond hub."""
        self._bond.open(self._device.device_id)

    def close_cover(self, **kwargs: Any) -> None:
        """Send the close command for this device to the Bond hub."""
        self._bond.close(self._device.device_id)

    def stop_cover(self, **kwargs):
        """Send the hold command to stop shade movement."""
        self._bond.hold(self._device.device_id)
|
normal
|
{
"blob_id": "ba9d7b877eda3f7469db58e2ee194b601e3c3e08",
"index": 4227,
"step-1": "<mask token>\n\n\nclass BondCover(CoverEntity):\n <mask token>\n\n def __init__(self, bond: Bond, device: BondDevice):\n \"\"\"Create HA entity representing Bond cover.\"\"\"\n self._bond = bond\n self._device = device\n\n @property\n def device_class(self) ->Optional[str]:\n \"\"\"Get device class.\"\"\"\n return DEVICE_CLASS_SHADE\n\n @property\n def unique_id(self) ->Optional[str]:\n \"\"\"Get unique ID for the entity.\"\"\"\n return self._device.device_id\n\n @property\n def name(self) ->Optional[str]:\n \"\"\"Get entity name.\"\"\"\n return self._device.name\n\n @property\n def device_info(self) ->Optional[Dict[str, Any]]:\n \"\"\"Get a an HA device representing this cover.\"\"\"\n return {ATTR_NAME: self.name, 'identifiers': {(DOMAIN, self._device\n .device_id)}}\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed or not.\"\"\"\n return None\n\n def open_cover(self, **kwargs: Any) ->None:\n \"\"\"Open the cover.\"\"\"\n self._bond.open(self._device.device_id)\n\n def close_cover(self, **kwargs: Any) ->None:\n \"\"\"Close cover.\"\"\"\n self._bond.close(self._device.device_id)\n\n def stop_cover(self, **kwargs):\n \"\"\"Hold cover.\"\"\"\n self._bond.hold(self._device.device_id)\n",
"step-2": "<mask token>\n\n\nclass BondCover(CoverEntity):\n \"\"\"Representation of a Bond cover.\"\"\"\n\n def __init__(self, bond: Bond, device: BondDevice):\n \"\"\"Create HA entity representing Bond cover.\"\"\"\n self._bond = bond\n self._device = device\n\n @property\n def device_class(self) ->Optional[str]:\n \"\"\"Get device class.\"\"\"\n return DEVICE_CLASS_SHADE\n\n @property\n def unique_id(self) ->Optional[str]:\n \"\"\"Get unique ID for the entity.\"\"\"\n return self._device.device_id\n\n @property\n def name(self) ->Optional[str]:\n \"\"\"Get entity name.\"\"\"\n return self._device.name\n\n @property\n def device_info(self) ->Optional[Dict[str, Any]]:\n \"\"\"Get a an HA device representing this cover.\"\"\"\n return {ATTR_NAME: self.name, 'identifiers': {(DOMAIN, self._device\n .device_id)}}\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed or not.\"\"\"\n return None\n\n def open_cover(self, **kwargs: Any) ->None:\n \"\"\"Open the cover.\"\"\"\n self._bond.open(self._device.device_id)\n\n def close_cover(self, **kwargs: Any) ->None:\n \"\"\"Close cover.\"\"\"\n self._bond.close(self._device.device_id)\n\n def stop_cover(self, **kwargs):\n \"\"\"Hold cover.\"\"\"\n self._bond.hold(self._device.device_id)\n",
"step-3": "<mask token>\n\n\nasync def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry,\n async_add_entities: Callable[[List[Entity]], None]) ->None:\n \"\"\"Set up Bond cover devices.\"\"\"\n bond: Bond = hass.data[DOMAIN][entry.entry_id]\n\n async def discover():\n devices = await get_bond_devices(hass, bond)\n covers = [BondCover(bond, device) for device in devices if device.\n type == BOND_DEVICE_TYPE_MOTORIZED_SHADES]\n async_add_entities(covers)\n asyncio.create_task(discover())\n\n\nclass BondCover(CoverEntity):\n \"\"\"Representation of a Bond cover.\"\"\"\n\n def __init__(self, bond: Bond, device: BondDevice):\n \"\"\"Create HA entity representing Bond cover.\"\"\"\n self._bond = bond\n self._device = device\n\n @property\n def device_class(self) ->Optional[str]:\n \"\"\"Get device class.\"\"\"\n return DEVICE_CLASS_SHADE\n\n @property\n def unique_id(self) ->Optional[str]:\n \"\"\"Get unique ID for the entity.\"\"\"\n return self._device.device_id\n\n @property\n def name(self) ->Optional[str]:\n \"\"\"Get entity name.\"\"\"\n return self._device.name\n\n @property\n def device_info(self) ->Optional[Dict[str, Any]]:\n \"\"\"Get a an HA device representing this cover.\"\"\"\n return {ATTR_NAME: self.name, 'identifiers': {(DOMAIN, self._device\n .device_id)}}\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed or not.\"\"\"\n return None\n\n def open_cover(self, **kwargs: Any) ->None:\n \"\"\"Open the cover.\"\"\"\n self._bond.open(self._device.device_id)\n\n def close_cover(self, **kwargs: Any) ->None:\n \"\"\"Close cover.\"\"\"\n self._bond.close(self._device.device_id)\n\n def stop_cover(self, **kwargs):\n \"\"\"Hold cover.\"\"\"\n self._bond.hold(self._device.device_id)\n",
"step-4": "<mask token>\nimport asyncio\nimport logging\nfrom typing import Any, Callable, Dict, List, Optional\nfrom bond import BOND_DEVICE_TYPE_MOTORIZED_SHADES, Bond\nfrom homeassistant.components.cover import DEVICE_CLASS_SHADE, CoverEntity\nfrom homeassistant.config_entries import ConfigEntry\nfrom homeassistant.const import ATTR_NAME\nfrom homeassistant.core import HomeAssistant\nfrom homeassistant.helpers.entity import Entity\nfrom .const import DOMAIN\nfrom .utils import BondDevice, get_bond_devices\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry,\n async_add_entities: Callable[[List[Entity]], None]) ->None:\n \"\"\"Set up Bond cover devices.\"\"\"\n bond: Bond = hass.data[DOMAIN][entry.entry_id]\n\n async def discover():\n devices = await get_bond_devices(hass, bond)\n covers = [BondCover(bond, device) for device in devices if device.\n type == BOND_DEVICE_TYPE_MOTORIZED_SHADES]\n async_add_entities(covers)\n asyncio.create_task(discover())\n\n\nclass BondCover(CoverEntity):\n \"\"\"Representation of a Bond cover.\"\"\"\n\n def __init__(self, bond: Bond, device: BondDevice):\n \"\"\"Create HA entity representing Bond cover.\"\"\"\n self._bond = bond\n self._device = device\n\n @property\n def device_class(self) ->Optional[str]:\n \"\"\"Get device class.\"\"\"\n return DEVICE_CLASS_SHADE\n\n @property\n def unique_id(self) ->Optional[str]:\n \"\"\"Get unique ID for the entity.\"\"\"\n return self._device.device_id\n\n @property\n def name(self) ->Optional[str]:\n \"\"\"Get entity name.\"\"\"\n return self._device.name\n\n @property\n def device_info(self) ->Optional[Dict[str, Any]]:\n \"\"\"Get a an HA device representing this cover.\"\"\"\n return {ATTR_NAME: self.name, 'identifiers': {(DOMAIN, self._device\n .device_id)}}\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed or not.\"\"\"\n return None\n\n def open_cover(self, **kwargs: Any) ->None:\n \"\"\"Open the 
cover.\"\"\"\n self._bond.open(self._device.device_id)\n\n def close_cover(self, **kwargs: Any) ->None:\n \"\"\"Close cover.\"\"\"\n self._bond.close(self._device.device_id)\n\n def stop_cover(self, **kwargs):\n \"\"\"Hold cover.\"\"\"\n self._bond.hold(self._device.device_id)\n",
"step-5": "\"\"\"Support for Bond covers.\"\"\"\nimport asyncio\nimport logging\nfrom typing import Any, Callable, Dict, List, Optional\n\nfrom bond import BOND_DEVICE_TYPE_MOTORIZED_SHADES, Bond\n\nfrom homeassistant.components.cover import DEVICE_CLASS_SHADE, CoverEntity\nfrom homeassistant.config_entries import ConfigEntry\nfrom homeassistant.const import ATTR_NAME\nfrom homeassistant.core import HomeAssistant\nfrom homeassistant.helpers.entity import Entity\n\nfrom .const import DOMAIN\nfrom .utils import BondDevice, get_bond_devices\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def async_setup_entry(\n hass: HomeAssistant,\n entry: ConfigEntry,\n async_add_entities: Callable[[List[Entity]], None],\n) -> None:\n \"\"\"Set up Bond cover devices.\"\"\"\n\n bond: Bond = hass.data[DOMAIN][entry.entry_id]\n\n async def discover():\n devices = await get_bond_devices(hass, bond)\n covers = [\n BondCover(bond, device)\n for device in devices\n if device.type == BOND_DEVICE_TYPE_MOTORIZED_SHADES\n ]\n async_add_entities(covers)\n\n asyncio.create_task(discover())\n\n\nclass BondCover(CoverEntity):\n \"\"\"Representation of a Bond cover.\"\"\"\n\n def __init__(self, bond: Bond, device: BondDevice):\n \"\"\"Create HA entity representing Bond cover.\"\"\"\n self._bond = bond\n self._device = device\n\n @property\n def device_class(self) -> Optional[str]:\n \"\"\"Get device class.\"\"\"\n return DEVICE_CLASS_SHADE\n\n @property\n def unique_id(self) -> Optional[str]:\n \"\"\"Get unique ID for the entity.\"\"\"\n return self._device.device_id\n\n @property\n def name(self) -> Optional[str]:\n \"\"\"Get entity name.\"\"\"\n return self._device.name\n\n @property\n def device_info(self) -> Optional[Dict[str, Any]]:\n \"\"\"Get a an HA device representing this cover.\"\"\"\n return {ATTR_NAME: self.name, \"identifiers\": {(DOMAIN, self._device.device_id)}}\n\n @property\n def is_closed(self):\n \"\"\"Return if the cover is closed or not.\"\"\"\n return None\n\n def 
open_cover(self, **kwargs: Any) -> None:\n \"\"\"Open the cover.\"\"\"\n self._bond.open(self._device.device_id)\n\n def close_cover(self, **kwargs: Any) -> None:\n \"\"\"Close cover.\"\"\"\n self._bond.close(self._device.device_id)\n\n def stop_cover(self, **kwargs):\n \"\"\"Hold cover.\"\"\"\n self._bond.hold(self._device.device_id)\n",
"step-ids": [
10,
11,
12,
14,
15
]
}
|
[
10,
11,
12,
14,
15
] |
<|reserved_special_token_0|>
def myFunk():
with open('users.csv', 'w') as fp:
a = csv.writer(fp, delimiter=',')
roles = ['inspector', 'admin']
data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(
'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [
'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(
'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),
'inspector'], ['Sashko', func_hash('Sashko'), roles]]
a.writerows(data)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func_hash(parameter):
hash_object = hashlib.sha384(parameter)
table_hash = hash_object.hexdigest()
return table_hash
def myFunk():
with open('users.csv', 'w') as fp:
a = csv.writer(fp, delimiter=',')
roles = ['inspector', 'admin']
data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(
'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [
'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(
'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),
'inspector'], ['Sashko', func_hash('Sashko'), roles]]
a.writerows(data)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func_hash(parameter):
hash_object = hashlib.sha384(parameter)
table_hash = hash_object.hexdigest()
return table_hash
def myFunk():
with open('users.csv', 'w') as fp:
a = csv.writer(fp, delimiter=',')
roles = ['inspector', 'admin']
data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(
'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [
'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(
'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),
'inspector'], ['Sashko', func_hash('Sashko'), roles]]
a.writerows(data)
myFunk()
<|reserved_special_token_1|>
import csv as csv
import hashlib
from sets import Set
def func_hash(parameter):
hash_object = hashlib.sha384(parameter)
table_hash = hash_object.hexdigest()
return table_hash
def myFunk():
with open('users.csv', 'w') as fp:
a = csv.writer(fp, delimiter=',')
roles = ['inspector', 'admin']
data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(
'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [
'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(
'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),
'inspector'], ['Sashko', func_hash('Sashko'), roles]]
a.writerows(data)
myFunk()
<|reserved_special_token_1|>
import csv as csv
import hashlib
from sets import Set
def func_hash(parameter):
    """Return the SHA-384 hex digest of *parameter*.

    Accepts str or bytes.  BUG FIX: ``hashlib.sha384`` requires bytes on
    Python 3, so str input is UTF-8 encoded first instead of raising
    ``TypeError``.
    """
    if isinstance(parameter, str):
        parameter = parameter.encode('utf-8')
    hash_object = hashlib.sha384(parameter)
    table_hash = hash_object.hexdigest()
    return table_hash
def myFunk():
    """Write users.csv: one row per user with name, SHA-384 password hash, role.

    BUG FIX: the file is opened with ``newline=''`` so the csv module
    controls line endings (avoids blank rows between records on Windows).
    """
    with open('users.csv', 'w', newline='') as fp:
        a = csv.writer(fp, delimiter=',')
        roles = ['inspector', 'admin']
        # NOTE(review): the header says 'Userneme' (sic) and the last row
        # stores the roles *list* itself, which csv serializes as
        # "['inspector', 'admin']" -- kept as-is in case downstream readers
        # depend on it; confirm whether both are intentional.
        data = [['Userneme', 'hash_password', 'role'],
                ['Olya', func_hash('Olya'), 'admin'],
                ['Stas', func_hash('Stas'), 'admin'],
                ['Dima', func_hash('Dima'), 'admin'],
                ['Kyrylo', func_hash('Kyrylo'), 'admin'],
                ['Lubchyk', func_hash('Lubchyk'), 'inspector'],
                ['Sashko', func_hash('Sashko'), roles]]
        a.writerows(data)


myFunk()
|
flexible
|
{
"blob_id": "96d13a883590ca969e997bbb27bcdbee1b24252f",
"index": 2730,
"step-1": "<mask token>\n\n\ndef myFunk():\n with open('users.csv', 'w') as fp:\n a = csv.writer(fp, delimiter=',')\n roles = ['inspector', 'admin']\n data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(\n 'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [\n 'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(\n 'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),\n 'inspector'], ['Sashko', func_hash('Sashko'), roles]]\n a.writerows(data)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef func_hash(parameter):\n hash_object = hashlib.sha384(parameter)\n table_hash = hash_object.hexdigest()\n return table_hash\n\n\ndef myFunk():\n with open('users.csv', 'w') as fp:\n a = csv.writer(fp, delimiter=',')\n roles = ['inspector', 'admin']\n data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(\n 'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [\n 'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(\n 'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),\n 'inspector'], ['Sashko', func_hash('Sashko'), roles]]\n a.writerows(data)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef func_hash(parameter):\n hash_object = hashlib.sha384(parameter)\n table_hash = hash_object.hexdigest()\n return table_hash\n\n\ndef myFunk():\n with open('users.csv', 'w') as fp:\n a = csv.writer(fp, delimiter=',')\n roles = ['inspector', 'admin']\n data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(\n 'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [\n 'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(\n 'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),\n 'inspector'], ['Sashko', func_hash('Sashko'), roles]]\n a.writerows(data)\n\n\nmyFunk()\n",
"step-4": "import csv as csv\nimport hashlib\nfrom sets import Set\n\n\ndef func_hash(parameter):\n hash_object = hashlib.sha384(parameter)\n table_hash = hash_object.hexdigest()\n return table_hash\n\n\ndef myFunk():\n with open('users.csv', 'w') as fp:\n a = csv.writer(fp, delimiter=',')\n roles = ['inspector', 'admin']\n data = [['Userneme', 'hash_password', 'role'], ['Olya', func_hash(\n 'Olya'), 'admin'], ['Stas', func_hash('Stas'), 'admin'], [\n 'Dima', func_hash('Dima'), 'admin'], ['Kyrylo', func_hash(\n 'Kyrylo'), 'admin'], ['Lubchyk', func_hash('Lubchyk'),\n 'inspector'], ['Sashko', func_hash('Sashko'), roles]]\n a.writerows(data)\n\n\nmyFunk()\n",
"step-5": "import csv as csv\nimport hashlib\nfrom sets import Set\n\ndef func_hash(parameter):\n hash_object = hashlib.sha384(parameter)\n table_hash = hash_object.hexdigest()\n return table_hash\n\ndef myFunk():\n\twith open('users.csv', 'w') as fp:\n\t a = csv.writer(fp, delimiter=',')\n\t roles = ['inspector', 'admin']\n\t data = [['Userneme', 'hash_password', 'role'],\n\t ['Olya', func_hash('Olya'), 'admin'],\n\t ['Stas', func_hash('Stas'), 'admin'],\n\t ['Dima', func_hash('Dima'), 'admin'],\n\t ['Kyrylo', func_hash('Kyrylo'), 'admin'],\n\t ['Lubchyk', func_hash('Lubchyk'), 'inspector'],\n\t ['Sashko', func_hash('Sashko'),roles],\n\t ]\n\t a.writerows(data)\n\nmyFunk()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def check_db_exists(opt):
try:
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.close()
print('Database exists.')
return True
except:
print("Database doesn't exist.")
return False
def create_db(opt):
if check_db_exists(opt):
pass
else:
print('Creating new database.')
conn = psycopg2.connect(opt)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute(f'CREATE DATABASE {config.db_name};')
cur.close()
<|reserved_special_token_0|>
def main():
opt = (
f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'
)
create_db(opt)
create_tables(opt)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_db_exists(opt):
try:
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.close()
print('Database exists.')
return True
except:
print("Database doesn't exist.")
return False
def create_db(opt):
if check_db_exists(opt):
pass
else:
print('Creating new database.')
conn = psycopg2.connect(opt)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute(f'CREATE DATABASE {config.db_name};')
cur.close()
def create_tables(opt):
if check_db_exists(opt):
commands = """ CREATE TABLE IF NOT EXISTS stock (
id SERIAL PRIMARY KEY,
ticker VARCHAR NOT NULL,
name VARCHAR NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL
)
""", """ CREATE TABLE IF NOT EXISTS price (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
date_price TIMESTAMP,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
close_price NUMERIC,
volume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS fundamentals (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
longBusinessSummary TEXT,
sector VARCHAR,
sharesOutstanding BIGINT,
marketCap BIGINT,
forwardPE REAL,
dividendYield REAL,
beta REAL,
previousClose REAL,
averageVolume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS news (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
news_date TIMESTAMP NOT NULL,
headline VARCHAR NOT NULL,
url VARCHAR NOT NULL,
sentiment REAL,
FOREIGN KEY (stock_id) REFERENCES stock(id))
"""
try:
for command in commands:
print('Building database tables')
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.execute(command)
conn.commit()
cur.close()
except (Exception, psycopg2.DatabaseError) as e:
print(e)
cur.close()
else:
pass
def main():
opt = (
f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'
)
create_db(opt)
create_tables(opt)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_db_exists(opt):
try:
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.close()
print('Database exists.')
return True
except:
print("Database doesn't exist.")
return False
def create_db(opt):
if check_db_exists(opt):
pass
else:
print('Creating new database.')
conn = psycopg2.connect(opt)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute(f'CREATE DATABASE {config.db_name};')
cur.close()
def create_tables(opt):
if check_db_exists(opt):
commands = """ CREATE TABLE IF NOT EXISTS stock (
id SERIAL PRIMARY KEY,
ticker VARCHAR NOT NULL,
name VARCHAR NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL
)
""", """ CREATE TABLE IF NOT EXISTS price (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
date_price TIMESTAMP,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
close_price NUMERIC,
volume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS fundamentals (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
longBusinessSummary TEXT,
sector VARCHAR,
sharesOutstanding BIGINT,
marketCap BIGINT,
forwardPE REAL,
dividendYield REAL,
beta REAL,
previousClose REAL,
averageVolume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS news (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
news_date TIMESTAMP NOT NULL,
headline VARCHAR NOT NULL,
url VARCHAR NOT NULL,
sentiment REAL,
FOREIGN KEY (stock_id) REFERENCES stock(id))
"""
try:
for command in commands:
print('Building database tables')
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.execute(command)
conn.commit()
cur.close()
except (Exception, psycopg2.DatabaseError) as e:
print(e)
cur.close()
else:
pass
def main():
opt = (
f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'
)
create_db(opt)
create_tables(opt)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import config
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def check_db_exists(opt):
try:
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.close()
print('Database exists.')
return True
except:
print("Database doesn't exist.")
return False
def create_db(opt):
if check_db_exists(opt):
pass
else:
print('Creating new database.')
conn = psycopg2.connect(opt)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute(f'CREATE DATABASE {config.db_name};')
cur.close()
def create_tables(opt):
if check_db_exists(opt):
commands = """ CREATE TABLE IF NOT EXISTS stock (
id SERIAL PRIMARY KEY,
ticker VARCHAR NOT NULL,
name VARCHAR NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL
)
""", """ CREATE TABLE IF NOT EXISTS price (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
date_price TIMESTAMP,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
close_price NUMERIC,
volume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS fundamentals (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
longBusinessSummary TEXT,
sector VARCHAR,
sharesOutstanding BIGINT,
marketCap BIGINT,
forwardPE REAL,
dividendYield REAL,
beta REAL,
previousClose REAL,
averageVolume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""", """ CREATE TABLE IF NOT EXISTS news (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
news_date TIMESTAMP NOT NULL,
headline VARCHAR NOT NULL,
url VARCHAR NOT NULL,
sentiment REAL,
FOREIGN KEY (stock_id) REFERENCES stock(id))
"""
try:
for command in commands:
print('Building database tables')
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.execute(command)
conn.commit()
cur.close()
except (Exception, psycopg2.DatabaseError) as e:
print(e)
cur.close()
else:
pass
def main():
opt = (
f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'
)
create_db(opt)
create_tables(opt)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import config
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def check_db_exists(opt):
try:
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.close()
print('Database exists.')
return True
except:
print("Database doesn't exist.")
return False
def create_db(opt):
if check_db_exists(opt):
pass
else:
print("Creating new database.")
conn = psycopg2.connect(opt)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute(f"CREATE DATABASE {config.db_name};")
cur.close()
def create_tables(opt):
if check_db_exists(opt):
commands = (""" CREATE TABLE IF NOT EXISTS stock (
id SERIAL PRIMARY KEY,
ticker VARCHAR NOT NULL,
name VARCHAR NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL
)
""",
""" CREATE TABLE IF NOT EXISTS price (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
date_price TIMESTAMP,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
close_price NUMERIC,
volume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""",
""" CREATE TABLE IF NOT EXISTS fundamentals (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
created_date TIMESTAMP NOT NULL,
last_updated_date TIMESTAMP NOT NULL,
longBusinessSummary TEXT,
sector VARCHAR,
sharesOutstanding BIGINT,
marketCap BIGINT,
forwardPE REAL,
dividendYield REAL,
beta REAL,
previousClose REAL,
averageVolume BIGINT,
FOREIGN KEY (stock_id) REFERENCES stock(id))
""",
""" CREATE TABLE IF NOT EXISTS news (
id SERIAL PRIMARY KEY,
stock_id INTEGER NOT NULL,
news_date TIMESTAMP NOT NULL,
headline VARCHAR NOT NULL,
url VARCHAR NOT NULL,
sentiment REAL,
FOREIGN KEY (stock_id) REFERENCES stock(id))
"""
)
try:
for command in commands:
print('Building database tables')
conn = psycopg2.connect(opt)
cur = conn.cursor()
cur.execute(command)
conn.commit()
cur.close()
except (Exception, psycopg2.DatabaseError) as e:
print(e)
cur.close()
else:
pass
def main():
opt = f"postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt"
create_db(opt)
create_tables(opt)
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "09792da1c3cc38c7df7def2b487c2078de4e8912",
"index": 9514,
"step-1": "<mask token>\n\n\ndef check_db_exists(opt):\n try:\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.close()\n print('Database exists.')\n return True\n except:\n print(\"Database doesn't exist.\")\n return False\n\n\ndef create_db(opt):\n if check_db_exists(opt):\n pass\n else:\n print('Creating new database.')\n conn = psycopg2.connect(opt)\n conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n cur = conn.cursor()\n cur.execute(f'CREATE DATABASE {config.db_name};')\n cur.close()\n\n\n<mask token>\n\n\ndef main():\n opt = (\n f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'\n )\n create_db(opt)\n create_tables(opt)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_db_exists(opt):\n try:\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.close()\n print('Database exists.')\n return True\n except:\n print(\"Database doesn't exist.\")\n return False\n\n\ndef create_db(opt):\n if check_db_exists(opt):\n pass\n else:\n print('Creating new database.')\n conn = psycopg2.connect(opt)\n conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n cur = conn.cursor()\n cur.execute(f'CREATE DATABASE {config.db_name};')\n cur.close()\n\n\ndef create_tables(opt):\n if check_db_exists(opt):\n commands = \"\"\" CREATE TABLE IF NOT EXISTS stock (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tticker VARCHAR NOT NULL,\n\t\t\tname VARCHAR NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL\n\t\t\t)\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS price (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tdate_price TIMESTAMP,\n\t\t\topen_price NUMERIC,\n\t\t\thigh_price NUMERIC,\n\t\t\tlow_price NUMERIC,\n\t\t\tclose_price NUMERIC,\n\t\t\tvolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS fundamentals (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tlongBusinessSummary TEXT,\n\t\t\tsector VARCHAR,\n\t\t\tsharesOutstanding BIGINT,\n\t\t\tmarketCap BIGINT,\n\t\t\tforwardPE REAL,\n\t\t\tdividendYield REAL,\n\t\t\tbeta REAL,\n\t\t\tpreviousClose REAL,\n\t\t\taverageVolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS news (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tnews_date TIMESTAMP NOT NULL,\n\t\t\theadline VARCHAR NOT NULL,\n\t\t\turl VARCHAR NOT NULL,\n\t\t\tsentiment REAL,\n\t\t\tFOREIGN KEY (stock_id) 
REFERENCES stock(id))\n\t\t\t\"\"\"\n try:\n for command in commands:\n print('Building database tables')\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.execute(command)\n conn.commit()\n cur.close()\n except (Exception, psycopg2.DatabaseError) as e:\n print(e)\n cur.close()\n else:\n pass\n\n\ndef main():\n opt = (\n f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'\n )\n create_db(opt)\n create_tables(opt)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef check_db_exists(opt):\n try:\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.close()\n print('Database exists.')\n return True\n except:\n print(\"Database doesn't exist.\")\n return False\n\n\ndef create_db(opt):\n if check_db_exists(opt):\n pass\n else:\n print('Creating new database.')\n conn = psycopg2.connect(opt)\n conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n cur = conn.cursor()\n cur.execute(f'CREATE DATABASE {config.db_name};')\n cur.close()\n\n\ndef create_tables(opt):\n if check_db_exists(opt):\n commands = \"\"\" CREATE TABLE IF NOT EXISTS stock (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tticker VARCHAR NOT NULL,\n\t\t\tname VARCHAR NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL\n\t\t\t)\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS price (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tdate_price TIMESTAMP,\n\t\t\topen_price NUMERIC,\n\t\t\thigh_price NUMERIC,\n\t\t\tlow_price NUMERIC,\n\t\t\tclose_price NUMERIC,\n\t\t\tvolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS fundamentals (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tlongBusinessSummary TEXT,\n\t\t\tsector VARCHAR,\n\t\t\tsharesOutstanding BIGINT,\n\t\t\tmarketCap BIGINT,\n\t\t\tforwardPE REAL,\n\t\t\tdividendYield REAL,\n\t\t\tbeta REAL,\n\t\t\tpreviousClose REAL,\n\t\t\taverageVolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS news (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tnews_date TIMESTAMP NOT NULL,\n\t\t\theadline VARCHAR NOT NULL,\n\t\t\turl VARCHAR NOT NULL,\n\t\t\tsentiment REAL,\n\t\t\tFOREIGN KEY (stock_id) 
REFERENCES stock(id))\n\t\t\t\"\"\"\n try:\n for command in commands:\n print('Building database tables')\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.execute(command)\n conn.commit()\n cur.close()\n except (Exception, psycopg2.DatabaseError) as e:\n print(e)\n cur.close()\n else:\n pass\n\n\ndef main():\n opt = (\n f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'\n )\n create_db(opt)\n create_tables(opt)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import config\nimport psycopg2\nfrom psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT\n\n\ndef check_db_exists(opt):\n try:\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.close()\n print('Database exists.')\n return True\n except:\n print(\"Database doesn't exist.\")\n return False\n\n\ndef create_db(opt):\n if check_db_exists(opt):\n pass\n else:\n print('Creating new database.')\n conn = psycopg2.connect(opt)\n conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n cur = conn.cursor()\n cur.execute(f'CREATE DATABASE {config.db_name};')\n cur.close()\n\n\ndef create_tables(opt):\n if check_db_exists(opt):\n commands = \"\"\" CREATE TABLE IF NOT EXISTS stock (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tticker VARCHAR NOT NULL,\n\t\t\tname VARCHAR NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL\n\t\t\t)\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS price (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tdate_price TIMESTAMP,\n\t\t\topen_price NUMERIC,\n\t\t\thigh_price NUMERIC,\n\t\t\tlow_price NUMERIC,\n\t\t\tclose_price NUMERIC,\n\t\t\tvolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS fundamentals (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tlongBusinessSummary TEXT,\n\t\t\tsector VARCHAR,\n\t\t\tsharesOutstanding BIGINT,\n\t\t\tmarketCap BIGINT,\n\t\t\tforwardPE REAL,\n\t\t\tdividendYield REAL,\n\t\t\tbeta REAL,\n\t\t\tpreviousClose REAL,\n\t\t\taverageVolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\", \"\"\" CREATE TABLE IF NOT EXISTS news (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tnews_date TIMESTAMP NOT NULL,\n\t\t\theadline VARCHAR NOT 
NULL,\n\t\t\turl VARCHAR NOT NULL,\n\t\t\tsentiment REAL,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\"\n try:\n for command in commands:\n print('Building database tables')\n conn = psycopg2.connect(opt)\n cur = conn.cursor()\n cur.execute(command)\n conn.commit()\n cur.close()\n except (Exception, psycopg2.DatabaseError) as e:\n print(e)\n cur.close()\n else:\n pass\n\n\ndef main():\n opt = (\n f'postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt'\n )\n create_db(opt)\n create_tables(opt)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import config\nimport psycopg2\nfrom psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT\n\ndef check_db_exists(opt):\n\ttry:\n\t\tconn = psycopg2.connect(opt)\n\t\tcur = conn.cursor()\n\t\tcur.close()\n\t\tprint('Database exists.')\n\t\treturn True\n\texcept:\n\t\tprint(\"Database doesn't exist.\")\n\t\treturn False\n\ndef create_db(opt):\n\tif check_db_exists(opt):\n\t\tpass\n\telse:\n\t\tprint(\"Creating new database.\")\n\t\tconn = psycopg2.connect(opt)\n\t\tconn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n\t\tcur = conn.cursor()\n\t\tcur.execute(f\"CREATE DATABASE {config.db_name};\")\n\t\tcur.close()\n\ndef create_tables(opt):\n\tif check_db_exists(opt):\n\t\tcommands = (\"\"\" CREATE TABLE IF NOT EXISTS stock (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tticker VARCHAR NOT NULL,\n\t\t\tname VARCHAR NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL\n\t\t\t)\n\t\t\t\"\"\",\n\t\t\t\"\"\" CREATE TABLE IF NOT EXISTS price (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tdate_price TIMESTAMP,\n\t\t\topen_price NUMERIC,\n\t\t\thigh_price NUMERIC,\n\t\t\tlow_price NUMERIC,\n\t\t\tclose_price NUMERIC,\n\t\t\tvolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\",\n\t\t\t\"\"\" CREATE TABLE IF NOT EXISTS fundamentals (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT NULL,\n\t\t\tcreated_date TIMESTAMP NOT NULL,\n\t\t\tlast_updated_date TIMESTAMP NOT NULL,\n\t\t\tlongBusinessSummary TEXT,\n\t\t\tsector VARCHAR,\n\t\t\tsharesOutstanding BIGINT,\n\t\t\tmarketCap BIGINT,\n\t\t\tforwardPE REAL,\n\t\t\tdividendYield REAL,\n\t\t\tbeta REAL,\n\t\t\tpreviousClose REAL,\n\t\t\taverageVolume BIGINT,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\",\n\t\t\t\"\"\" CREATE TABLE IF NOT EXISTS news (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\tstock_id INTEGER NOT 
NULL,\n\t\t\tnews_date TIMESTAMP NOT NULL,\n\t\t\theadline VARCHAR NOT NULL,\n\t\t\turl VARCHAR NOT NULL,\n\t\t\tsentiment REAL,\n\t\t\tFOREIGN KEY (stock_id) REFERENCES stock(id))\n\t\t\t\"\"\"\n\t\t\t)\n\t\ttry:\n\t\t\tfor command in commands:\n\t\t\t\tprint('Building database tables')\n\t\t\t\tconn = psycopg2.connect(opt)\n\t\t\t\tcur = conn.cursor()\n\t\t\t\tcur.execute(command)\n\t\t\t\tconn.commit()\n\t\t\t\tcur.close()\n\t\texcept (Exception, psycopg2.DatabaseError) as e:\n\t\t\tprint(e)\n\t\t\tcur.close()\n\telse:\n\t\tpass\n\ndef main():\n\topt = f\"postgres://{config.username}:{config.password}@{config.host}:{config.port}/{config.cluster}.{config.db_name}?sslmode=verify-full&sslrootcert={config.cert_dir}/cc-ca.crt\"\n\tcreate_db(opt)\n\tcreate_tables(opt)\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
def inplace_quick_sort(S, start, end):
if start > end:
return
pivot = S[end]
left = start
right = end - 1
while left <= right:
while left <= right and S[left] < pivot:
left += 1
while left <= right and pivot < S[right]:
right -= 1
if left <= right:
S[left], S[right] = S[right], S[left]
left += 1
right -= 1
S[left], S[end] = S[end], S[left]
inplace_quick_sort(S, start, left - 1)
inplace_quick_sort(S, left + 1, end)
S = [4, 2, 6, 8, 3, 9, 5, 11]
inplace_quick_sort(S, 0, 7)
print(S)
|
normal
|
{
"blob_id": "2a09711e3e487c5d7790af592ff2eb03bb53cff2",
"index": 5068,
"step-1": "<mask token>\n",
"step-2": "def inplace_quick_sort(S, start, end):\n if start > end:\n return\n pivot = S[end]\n left = start\n right = end - 1\n while left <= right:\n while left <= right and S[left] < pivot:\n left += 1\n while left <= right and pivot < S[right]:\n right -= 1\n if left <= right:\n S[left], S[right] = S[right], S[left]\n left += 1\n right -= 1\n S[left], S[end] = S[end], S[left]\n inplace_quick_sort(S, start, left - 1)\n inplace_quick_sort(S, left + 1, end)\n\n\n<mask token>\n",
"step-3": "def inplace_quick_sort(S, start, end):\n if start > end:\n return\n pivot = S[end]\n left = start\n right = end - 1\n while left <= right:\n while left <= right and S[left] < pivot:\n left += 1\n while left <= right and pivot < S[right]:\n right -= 1\n if left <= right:\n S[left], S[right] = S[right], S[left]\n left += 1\n right -= 1\n S[left], S[end] = S[end], S[left]\n inplace_quick_sort(S, start, left - 1)\n inplace_quick_sort(S, left + 1, end)\n\n\n<mask token>\ninplace_quick_sort(S, 0, 7)\nprint(S)\n",
"step-4": "def inplace_quick_sort(S, start, end):\n if start > end:\n return\n pivot = S[end]\n left = start\n right = end - 1\n while left <= right:\n while left <= right and S[left] < pivot:\n left += 1\n while left <= right and pivot < S[right]:\n right -= 1\n if left <= right:\n S[left], S[right] = S[right], S[left]\n left += 1\n right -= 1\n S[left], S[end] = S[end], S[left]\n inplace_quick_sort(S, start, left - 1)\n inplace_quick_sort(S, left + 1, end)\n\n\nS = [4, 2, 6, 8, 3, 9, 5, 11]\ninplace_quick_sort(S, 0, 7)\nprint(S)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
# Create your models here.
class Glo_EstadoPlan(models.Model):
descripcion_estado = models.CharField(max_length=100)
def __str__(self):
return '{}'.format(self.descripcion_estado)
|
normal
|
{
"blob_id": "b0a51877b59e14eefdd662bac468e8ce12343e6b",
"index": 3885,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Glo_EstadoPlan(models.Model):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Glo_EstadoPlan(models.Model):\n descripcion_estado = models.CharField(max_length=100)\n\n def __str__(self):\n return '{}'.format(self.descripcion_estado)\n",
"step-4": "from django.db import models\n\n\nclass Glo_EstadoPlan(models.Model):\n descripcion_estado = models.CharField(max_length=100)\n\n def __str__(self):\n return '{}'.format(self.descripcion_estado)\n",
"step-5": "from django.db import models\r\n\r\n# Create your models here.\r\nclass Glo_EstadoPlan(models.Model):\r\n descripcion_estado = models.CharField(max_length=100)\r\n\r\n def __str__(self):\r\n return '{}'.format(self.descripcion_estado)",
"step-ids": [
0,
1,
3,
4,
5
]
}
|
[
0,
1,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
articlesFileNameList(reverse=True)
<|reserved_special_token_0|>
for fileName in articlesFileNameList:
print(fileName)
dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)
for item in dictOut:
resultFile.write(item['abstract'] + '\n')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
nlpPath = '/Users/kapmayn/Desktop/nlp'
articlesFolderPath = nlpPath + '/articles'
abstractsFilePath = nlpPath + '/abstracts.txt'
articlesFileNameList = os.listdir(articlesFolderPath)
articlesFileNameList(reverse=True)
resultFile = open(abstractsFilePath, 'w')
for fileName in articlesFileNameList:
print(fileName)
dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)
for item in dictOut:
resultFile.write(item['abstract'] + '\n')
<|reserved_special_token_1|>
import os
import pubmed_parser as pp
nlpPath = '/Users/kapmayn/Desktop/nlp'
articlesFolderPath = nlpPath + '/articles'
abstractsFilePath = nlpPath + '/abstracts.txt'
articlesFileNameList = os.listdir(articlesFolderPath)
articlesFileNameList(reverse=True)
resultFile = open(abstractsFilePath, 'w')
for fileName in articlesFileNameList:
print(fileName)
dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)
for item in dictOut:
resultFile.write(item['abstract'] + '\n')
<|reserved_special_token_1|>
import os
import pubmed_parser as pp
nlpPath = "/Users/kapmayn/Desktop/nlp"
articlesFolderPath = nlpPath + "/articles"
abstractsFilePath = nlpPath + "/abstracts.txt"
articlesFileNameList = os.listdir(articlesFolderPath)
articlesFileNameList(reverse = True)
resultFile = open(abstractsFilePath, 'w')
for fileName in articlesFileNameList:
print(fileName)
dictOut = pp.parse_medline_xml(articlesFolderPath + "/" + fileName)
for item in dictOut:
resultFile.write((item['abstract'] + '\n'))
|
flexible
|
{
"blob_id": "32f9b5c32acbb6411fe6ab99616d8459acfd7c74",
"index": 719,
"step-1": "<mask token>\n",
"step-2": "<mask token>\narticlesFileNameList(reverse=True)\n<mask token>\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-3": "<mask token>\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-4": "import os\nimport pubmed_parser as pp\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n",
"step-5": "import os\nimport pubmed_parser as pp\n\nnlpPath = \"/Users/kapmayn/Desktop/nlp\"\narticlesFolderPath = nlpPath + \"/articles\"\nabstractsFilePath = nlpPath + \"/abstracts.txt\"\n\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse = True)\nresultFile = open(abstractsFilePath, 'w')\n\nfor fileName in articlesFileNameList:\n\tprint(fileName)\n\tdictOut = pp.parse_medline_xml(articlesFolderPath + \"/\" + fileName)\n\tfor item in dictOut:\n\t\tresultFile.write((item['abstract'] + '\\n'))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def generate_circular_path(radius, offset):
theta = np.deg2rad(np.linspace(0, 360, 360))
x_data = radius * np.sin(theta) + offset[0]
y_data = radius * np.cos(theta) + offset[1]
radii = radius * np.ones((360,))
return x_data, y_data, radii
<|reserved_special_token_0|>
def terrain_state(x, y):
local_normal = np.array([[0], [0], [1]], dtype=np.float64)
hieght = 0
return [local_normal, hieght]
<|reserved_special_token_0|>
def steering_function(t):
R_ch = num_model.Subsystems.CH.R_rbs_chassis
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis
Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis
rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)
r_ax1 = R_ch + A(P_ch) @ rbar_ax1
vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]
delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)
travel = delta * 18
return travel
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append(database_directory)
<|reserved_special_token_0|>
def generate_circular_path(radius, offset):
theta = np.deg2rad(np.linspace(0, 360, 360))
x_data = radius * np.sin(theta) + offset[0]
y_data = radius * np.cos(theta) + offset[1]
radii = radius * np.ones((360,))
return x_data, y_data, radii
<|reserved_special_token_0|>
plt.figure(figsize=(10, 5))
plt.plot(path_data[:, 0], path_data[:, 1])
plt.grid()
plt.show()
<|reserved_special_token_0|>
def terrain_state(x, y):
local_normal = np.array([[0], [0], [1]], dtype=np.float64)
hieght = 0
return [local_normal, hieght]
def torque_function(t):
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd = num_model.Subsystems.CH.Rd_rbs_chassis
factor = logitudinal_controller.get_torque_factor(P_ch, Rd)
return factor
def RR_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def RL_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def steering_function(t):
R_ch = num_model.Subsystems.CH.R_rbs_chassis
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis
Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis
rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)
r_ax1 = R_ch + A(P_ch) @ rbar_ax1
vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]
delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)
travel = delta * 18
return travel
def zero_func(t):
return np.zeros((3, 1), dtype=np.float64)
<|reserved_special_token_0|>
sim.set_time_array(15, dt)
sim.set_initial_states('results/equilibrium_v4.npz')
sim.solve()
sim.save_as_csv('results', 'constant_radius_v8', 'pos')
sim.save_as_npz('results', 'constant_radius_v8')
<|reserved_special_token_0|>
sim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',
grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
database_directory = os.path.abspath('../../')
sys.path.append(database_directory)
<|reserved_special_token_0|>
num_model = num_assm.num_model
dt = num_assm.dt
TR = 254
def generate_circular_path(radius, offset):
theta = np.deg2rad(np.linspace(0, 360, 360))
x_data = radius * np.sin(theta) + offset[0]
y_data = radius * np.cos(theta) + offset[1]
radii = radius * np.ones((360,))
return x_data, y_data, radii
x_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))
path_data = np.zeros((360, 3))
path_data[:, 0] = -1000.0 * x_data
path_data[:, 1] = 1000.0 * y_data
path_data[:, 2] = 1000.0 * radii
plt.figure(figsize=(10, 5))
plt.plot(path_data[:, 0], path_data[:, 1])
plt.grid()
plt.show()
logitudinal_controller = speed_controller(35, dt)
lateral_controller = stanley_controller(path_data, 25)
def terrain_state(x, y):
local_normal = np.array([[0], [0], [1]], dtype=np.float64)
hieght = 0
return [local_normal, hieght]
def torque_function(t):
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd = num_model.Subsystems.CH.Rd_rbs_chassis
factor = logitudinal_controller.get_torque_factor(P_ch, Rd)
return factor
def RR_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def RL_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def steering_function(t):
R_ch = num_model.Subsystems.CH.R_rbs_chassis
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis
Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis
rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)
r_ax1 = R_ch + A(P_ch) @ rbar_ax1
vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]
delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)
travel = delta * 18
return travel
def zero_func(t):
return np.zeros((3, 1), dtype=np.float64)
num_assm.terrain_data.get_state = terrain_state
num_assm.ST1_config.UF_mcs_rack_act = steering_function
num_assm.AX1_config.UF_far_drive = RR_Torque
num_assm.AX1_config.UF_fal_drive = RL_Torque
num_assm.CH_config.UF_fas_aero_drag_F = zero_func
num_assm.CH_config.UF_fas_aero_drag_T = zero_func
sim = simulation('sim', num_model, 'dds')
sim.set_time_array(15, dt)
sim.set_initial_states('results/equilibrium_v4.npz')
sim.solve()
sim.save_as_csv('results', 'constant_radius_v8', 'pos')
sim.save_as_npz('results', 'constant_radius_v8')
<|reserved_special_token_0|>
sim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',
grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)
plt.show()
<|reserved_special_token_1|>
import sys
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from uraeus.nmbd.python import simulation
from uraeus.nmbd.python.engine.numerics.math_funcs import A, B
database_directory = os.path.abspath('../../')
sys.path.append(database_directory)
from uraeus_fsae.simenv.assemblies import asurt_FS17_v1 as num_assm
from controllers import speed_controller, stanley_controller
num_model = num_assm.num_model
dt = num_assm.dt
TR = 254
def generate_circular_path(radius, offset):
theta = np.deg2rad(np.linspace(0, 360, 360))
x_data = radius * np.sin(theta) + offset[0]
y_data = radius * np.cos(theta) + offset[1]
radii = radius * np.ones((360,))
return x_data, y_data, radii
x_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))
path_data = np.zeros((360, 3))
path_data[:, 0] = -1000.0 * x_data
path_data[:, 1] = 1000.0 * y_data
path_data[:, 2] = 1000.0 * radii
plt.figure(figsize=(10, 5))
plt.plot(path_data[:, 0], path_data[:, 1])
plt.grid()
plt.show()
logitudinal_controller = speed_controller(35, dt)
lateral_controller = stanley_controller(path_data, 25)
def terrain_state(x, y):
local_normal = np.array([[0], [0], [1]], dtype=np.float64)
hieght = 0
return [local_normal, hieght]
def torque_function(t):
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd = num_model.Subsystems.CH.Rd_rbs_chassis
factor = logitudinal_controller.get_torque_factor(P_ch, Rd)
return factor
def RR_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def RL_Torque(t):
factor = torque_function(t)
torque = -factor * (70 * 9.81) * 1000000.0 * TR
return torque
def steering_function(t):
R_ch = num_model.Subsystems.CH.R_rbs_chassis
P_ch = num_model.Subsystems.CH.P_rbs_chassis
Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis
Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis
rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)
r_ax1 = R_ch + A(P_ch) @ rbar_ax1
vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]
delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)
travel = delta * 18
return travel
def zero_func(t):
return np.zeros((3, 1), dtype=np.float64)
num_assm.terrain_data.get_state = terrain_state
num_assm.ST1_config.UF_mcs_rack_act = steering_function
num_assm.AX1_config.UF_far_drive = RR_Torque
num_assm.AX1_config.UF_fal_drive = RL_Torque
num_assm.CH_config.UF_fas_aero_drag_F = zero_func
num_assm.CH_config.UF_fas_aero_drag_T = zero_func
sim = simulation('sim', num_model, 'dds')
sim.set_time_array(15, dt)
sim.set_initial_states('results/equilibrium_v4.npz')
sim.solve()
sim.save_as_csv('results', 'constant_radius_v8', 'pos')
sim.save_as_npz('results', 'constant_radius_v8')
import matplotlib.pyplot as plt
sim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',
grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)
plt.show()
<|reserved_special_token_1|>
import sys
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from uraeus.nmbd.python import simulation
from uraeus.nmbd.python.engine.numerics.math_funcs import A, B
# Put the project root on sys.path so the local simulation packages
# below can be imported.
database_directory = os.path.abspath('../../')
sys.path.append(database_directory)
from uraeus_fsae.simenv.assemblies import asurt_FS17_v1 as num_assm
from controllers import speed_controller, stanley_controller
# Numerical model and integration step size exported by the assembly.
num_model = num_assm.num_model
dt = num_assm.dt
# Tyre radius used in the drive-torque scaling -- presumably mm; TODO confirm.
TR = 254
def generate_circular_path(radius, offset):
    """Sample 360 points of a circle of *radius* centred at *offset*.

    Returns (x, y, r) arrays of length 360; r repeats the radius per
    point. Note that 0 and 360 degrees are both sampled, so the first
    and last points coincide.
    """
    angles = np.deg2rad(np.linspace(0, 360, 360))
    xs = offset[0] + radius * np.sin(angles)
    ys = offset[1] + radius * np.cos(angles)
    rs = np.full((360,), radius, dtype=np.float64)
    return xs, ys, rs
# Reference path: 10.5 m radius circle passing through the origin.
x_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))

# Pack as (x, y, radius) rows, scaled to model length units
# (presumably metres -> millimetres; TODO confirm).
path_data = np.zeros((360, 3))
path_data[:, 0] = -1e3 * x_data
path_data[:, 1] = 1e3 * y_data
path_data[:, 2] = 1e3 * radii

# Visual sanity check of the reference path.
plt.figure(figsize=(10, 5))
plt.plot(path_data[:, 0], path_data[:, 1])
plt.grid()
plt.show()

# Longitudinal (speed) and lateral (Stanley path-tracking) controllers.
logitudinal_controller = speed_controller(35, dt)
lateral_controller = stanley_controller(path_data, 25)
def terrain_state(x, y):
    """Flat-terrain model: unit-Z surface normal and zero height everywhere."""
    normal = np.zeros((3, 1), dtype=np.float64)
    normal[2, 0] = 1.0
    return [normal, 0]
def torque_function(t):
    """Unitless drive-torque factor from the longitudinal speed controller.

    *t* is unused; the controller reads the chassis orientation (P) and
    translational velocity (Rd) from the current model state.
    """
    chassis = num_model.Subsystems.CH
    return logitudinal_controller.get_torque_factor(
        chassis.P_rbs_chassis, chassis.Rd_rbs_chassis)
def RR_Torque(t):
    """Rear-right wheel drive torque at time *t*.

    Scales the speed controller's unitless factor by the constant
    70*9.81*1e6*TR -- presumably a weight-based force converted to a
    wheel torque in model units; TODO confirm the unit chain.
    """
    factor = torque_function(t)
    torque = -factor*(70*9.81)*1e6*TR
    return torque
def RL_Torque(t):
    """Rear-left wheel drive torque at time *t*.

    Same scaling as RR_Torque: the controller factor times
    70*9.81*1e6*TR -- units presumed model-internal; TODO confirm.
    """
    factor = torque_function(t)
    torque = -factor*(70*9.81)*1e6*TR
    return torque
def steering_function(t):
    """Rack-actuator travel at time *t* from the Stanley lateral controller.

    Reads the chassis generalized coordinates/velocities from the model,
    evaluates the controller at a body-fixed reference point, and scales
    the resulting steering factor to rack travel.
    """
    R_ch = num_model.Subsystems.CH.R_rbs_chassis
    P_ch = num_model.Subsystems.CH.P_rbs_chassis
    Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis
    Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis

    # Body-fixed reference point 800 (model length units) behind the
    # chassis origin, mapped to the global frame via the rotation A(P).
    rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)
    r_ax1 = R_ch + A(P_ch)@rbar_ax1
    # Longitudinal (x) component of the point's velocity expressed in
    # the chassis body frame.
    vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1)@Pd_ch))[0,0]

    delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)

    # Gain from steering factor to rack travel -- TODO confirm units.
    travel = delta * 18
    return travel
def zero_func(t):
    """Constant 3x1 zero load vector; *t* is ignored."""
    return np.full((3, 1), 0.0, dtype=np.float64)
# Wire the user-defined forcing functions into the numerical assembly.
num_assm.terrain_data.get_state = terrain_state

num_assm.ST1_config.UF_mcs_rack_act = steering_function

num_assm.AX1_config.UF_far_drive = RR_Torque
num_assm.AX1_config.UF_fal_drive = RL_Torque

# Alternate drive-axle wiring left disabled:
#num_assm.DR2_config.UF_far_drive = RR_Torque
#num_assm.DR2_config.UF_fal_drive = RL_Torque

# Aerodynamic drag force/torque disabled (constant zero loads).
num_assm.CH_config.UF_fas_aero_drag_F = zero_func
num_assm.CH_config.UF_fas_aero_drag_T = zero_func
# =============================================================================
# Setting and Starting Simulation
# =============================================================================

# 'dds' simulation mode over 15 s with the assembly's step size dt.
sim = simulation('sim', num_model, 'dds')
sim.set_time_array(15, dt)

# Getting Equilibrium results as initial conditions to this simulation
# ====================================================================
sim.set_initial_states('results/equilibrium_v4.npz')

sim.solve()

sim.save_as_csv('results', 'constant_radius_v8', 'pos')
sim.save_as_npz('results', 'constant_radius_v8')

#=============================================================================
# Plotting Simulation Results
# =============================================================================

# NOTE(review): pyplot is already imported at the top of the file; this
# re-import is redundant but harmless.
import matplotlib.pyplot as plt

# Chassis trajectory in the ground plane (should trace the circle).
sim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y', grid=True)

sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)

# Vertical position/velocity/acceleration over time.
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)
sim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)

# Chassis orientation (Euler-parameter components) over time.
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)
sim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)

plt.show()
|
flexible
|
{
"blob_id": "e0541c377eb6631e4ef5eb79b1204612ce8af48c",
"index": 6107,
"step-1": "<mask token>\n\n\ndef generate_circular_path(radius, offset):\n theta = np.deg2rad(np.linspace(0, 360, 360))\n x_data = radius * np.sin(theta) + offset[0]\n y_data = radius * np.cos(theta) + offset[1]\n radii = radius * np.ones((360,))\n return x_data, y_data, radii\n\n\n<mask token>\n\n\ndef terrain_state(x, y):\n local_normal = np.array([[0], [0], [1]], dtype=np.float64)\n hieght = 0\n return [local_normal, hieght]\n\n\n<mask token>\n\n\ndef steering_function(t):\n R_ch = num_model.Subsystems.CH.R_rbs_chassis\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis\n Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis\n rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)\n r_ax1 = R_ch + A(P_ch) @ rbar_ax1\n vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]\n delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)\n travel = delta * 18\n return travel\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append(database_directory)\n<mask token>\n\n\ndef generate_circular_path(radius, offset):\n theta = np.deg2rad(np.linspace(0, 360, 360))\n x_data = radius * np.sin(theta) + offset[0]\n y_data = radius * np.cos(theta) + offset[1]\n radii = radius * np.ones((360,))\n return x_data, y_data, radii\n\n\n<mask token>\nplt.figure(figsize=(10, 5))\nplt.plot(path_data[:, 0], path_data[:, 1])\nplt.grid()\nplt.show()\n<mask token>\n\n\ndef terrain_state(x, y):\n local_normal = np.array([[0], [0], [1]], dtype=np.float64)\n hieght = 0\n return [local_normal, hieght]\n\n\ndef torque_function(t):\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd = num_model.Subsystems.CH.Rd_rbs_chassis\n factor = logitudinal_controller.get_torque_factor(P_ch, Rd)\n return factor\n\n\ndef RR_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef RL_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef steering_function(t):\n R_ch = num_model.Subsystems.CH.R_rbs_chassis\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis\n Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis\n rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)\n r_ax1 = R_ch + A(P_ch) @ rbar_ax1\n vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]\n delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)\n travel = delta * 18\n return travel\n\n\ndef zero_func(t):\n return np.zeros((3, 1), dtype=np.float64)\n\n\n<mask token>\nsim.set_time_array(15, dt)\nsim.set_initial_states('results/equilibrium_v4.npz')\nsim.solve()\nsim.save_as_csv('results', 'constant_radius_v8', 'pos')\nsim.save_as_npz('results', 'constant_radius_v8')\n<mask token>\nsim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',\n grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', 
grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)\nplt.show()\n",
"step-3": "<mask token>\ndatabase_directory = os.path.abspath('../../')\nsys.path.append(database_directory)\n<mask token>\nnum_model = num_assm.num_model\ndt = num_assm.dt\nTR = 254\n\n\ndef generate_circular_path(radius, offset):\n theta = np.deg2rad(np.linspace(0, 360, 360))\n x_data = radius * np.sin(theta) + offset[0]\n y_data = radius * np.cos(theta) + offset[1]\n radii = radius * np.ones((360,))\n return x_data, y_data, radii\n\n\nx_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))\npath_data = np.zeros((360, 3))\npath_data[:, 0] = -1000.0 * x_data\npath_data[:, 1] = 1000.0 * y_data\npath_data[:, 2] = 1000.0 * radii\nplt.figure(figsize=(10, 5))\nplt.plot(path_data[:, 0], path_data[:, 1])\nplt.grid()\nplt.show()\nlogitudinal_controller = speed_controller(35, dt)\nlateral_controller = stanley_controller(path_data, 25)\n\n\ndef terrain_state(x, y):\n local_normal = np.array([[0], [0], [1]], dtype=np.float64)\n hieght = 0\n return [local_normal, hieght]\n\n\ndef torque_function(t):\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd = num_model.Subsystems.CH.Rd_rbs_chassis\n factor = logitudinal_controller.get_torque_factor(P_ch, Rd)\n return factor\n\n\ndef RR_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef RL_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef steering_function(t):\n R_ch = num_model.Subsystems.CH.R_rbs_chassis\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis\n Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis\n rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)\n r_ax1 = R_ch + A(P_ch) @ rbar_ax1\n vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]\n delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)\n travel = delta * 18\n return travel\n\n\ndef zero_func(t):\n return np.zeros((3, 1), 
dtype=np.float64)\n\n\nnum_assm.terrain_data.get_state = terrain_state\nnum_assm.ST1_config.UF_mcs_rack_act = steering_function\nnum_assm.AX1_config.UF_far_drive = RR_Torque\nnum_assm.AX1_config.UF_fal_drive = RL_Torque\nnum_assm.CH_config.UF_fas_aero_drag_F = zero_func\nnum_assm.CH_config.UF_fas_aero_drag_T = zero_func\nsim = simulation('sim', num_model, 'dds')\nsim.set_time_array(15, dt)\nsim.set_initial_states('results/equilibrium_v4.npz')\nsim.solve()\nsim.save_as_csv('results', 'constant_radius_v8', 'pos')\nsim.save_as_npz('results', 'constant_radius_v8')\n<mask token>\nsim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',\n grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)\nplt.show()\n",
"step-4": "import sys\nimport os\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom uraeus.nmbd.python import simulation\nfrom uraeus.nmbd.python.engine.numerics.math_funcs import A, B\ndatabase_directory = os.path.abspath('../../')\nsys.path.append(database_directory)\nfrom uraeus_fsae.simenv.assemblies import asurt_FS17_v1 as num_assm\nfrom controllers import speed_controller, stanley_controller\nnum_model = num_assm.num_model\ndt = num_assm.dt\nTR = 254\n\n\ndef generate_circular_path(radius, offset):\n theta = np.deg2rad(np.linspace(0, 360, 360))\n x_data = radius * np.sin(theta) + offset[0]\n y_data = radius * np.cos(theta) + offset[1]\n radii = radius * np.ones((360,))\n return x_data, y_data, radii\n\n\nx_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))\npath_data = np.zeros((360, 3))\npath_data[:, 0] = -1000.0 * x_data\npath_data[:, 1] = 1000.0 * y_data\npath_data[:, 2] = 1000.0 * radii\nplt.figure(figsize=(10, 5))\nplt.plot(path_data[:, 0], path_data[:, 1])\nplt.grid()\nplt.show()\nlogitudinal_controller = speed_controller(35, dt)\nlateral_controller = stanley_controller(path_data, 25)\n\n\ndef terrain_state(x, y):\n local_normal = np.array([[0], [0], [1]], dtype=np.float64)\n hieght = 0\n return [local_normal, hieght]\n\n\ndef torque_function(t):\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd = num_model.Subsystems.CH.Rd_rbs_chassis\n factor = logitudinal_controller.get_torque_factor(P_ch, Rd)\n return factor\n\n\ndef RR_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef RL_Torque(t):\n factor = torque_function(t)\n torque = -factor * (70 * 9.81) * 1000000.0 * TR\n return torque\n\n\ndef steering_function(t):\n R_ch = num_model.Subsystems.CH.R_rbs_chassis\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis\n Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis\n rbar_ax1 = np.array([[-800], [0], [0]], 
dtype=np.float64)\n r_ax1 = R_ch + A(P_ch) @ rbar_ax1\n vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1) @ Pd_ch))[0, 0]\n delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)\n travel = delta * 18\n return travel\n\n\ndef zero_func(t):\n return np.zeros((3, 1), dtype=np.float64)\n\n\nnum_assm.terrain_data.get_state = terrain_state\nnum_assm.ST1_config.UF_mcs_rack_act = steering_function\nnum_assm.AX1_config.UF_far_drive = RR_Torque\nnum_assm.AX1_config.UF_fal_drive = RL_Torque\nnum_assm.CH_config.UF_fas_aero_drag_F = zero_func\nnum_assm.CH_config.UF_fas_aero_drag_T = zero_func\nsim = simulation('sim', num_model, 'dds')\nsim.set_time_array(15, dt)\nsim.set_initial_states('results/equilibrium_v4.npz')\nsim.solve()\nsim.save_as_csv('results', 'constant_radius_v8', 'pos')\nsim.save_as_npz('results', 'constant_radius_v8')\nimport matplotlib.pyplot as plt\nsim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y',\n grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)\nplt.show()\n",
"step-5": "import sys\nimport os\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nfrom uraeus.nmbd.python import simulation\nfrom uraeus.nmbd.python.engine.numerics.math_funcs import A, B\n\ndatabase_directory = os.path.abspath('../../')\nsys.path.append(database_directory)\n\nfrom uraeus_fsae.simenv.assemblies import asurt_FS17_v1 as num_assm\nfrom controllers import speed_controller, stanley_controller\n\nnum_model = num_assm.num_model\n\ndt = num_assm.dt\nTR = 254\n\ndef generate_circular_path(radius, offset):\n theta = np.deg2rad(np.linspace(0, 360, 360))\n x_data = radius * np.sin(theta) + offset[0]\n y_data = radius * np.cos(theta) + offset[1]\n radii = radius * np.ones((360,))\n return x_data, y_data, radii\n\n\nx_data, y_data, radii = generate_circular_path(10.5, (0, -10.5))\n\npath_data = np.zeros((360, 3))\npath_data[:, 0] = -1e3 * x_data\npath_data[:, 1] = 1e3 * y_data\npath_data[:, 2] = 1e3 * radii\n\nplt.figure(figsize=(10, 5))\nplt.plot(path_data[:, 0], path_data[:, 1])\nplt.grid()\nplt.show()\n\nlogitudinal_controller = speed_controller(35, dt)\nlateral_controller = stanley_controller(path_data, 25)\n\n\ndef terrain_state(x, y):\n local_normal = np.array([[0],[0],[1]], dtype=np.float64)\n hieght = 0\n return [local_normal, hieght]\n\n\ndef torque_function(t):\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd = num_model.Subsystems.CH.Rd_rbs_chassis\n factor = logitudinal_controller.get_torque_factor(P_ch, Rd)\n return factor\n\ndef RR_Torque(t):\n factor = torque_function(t)\n torque = -factor*(70*9.81)*1e6*TR\n return torque\n\ndef RL_Torque(t):\n factor = torque_function(t)\n torque = -factor*(70*9.81)*1e6*TR\n return torque\n\ndef steering_function(t):\n R_ch = num_model.Subsystems.CH.R_rbs_chassis\n P_ch = num_model.Subsystems.CH.P_rbs_chassis\n Rd_ch = num_model.Subsystems.CH.Rd_rbs_chassis\n Pd_ch = num_model.Subsystems.CH.Pd_rbs_chassis\n\n rbar_ax1 = np.array([[-800], [0], [0]], dtype=np.float64)\n r_ax1 = R_ch 
+ A(P_ch)@rbar_ax1\n vel = (A(P_ch).T @ (Rd_ch + B(P_ch, rbar_ax1)@Pd_ch))[0,0]\n\n delta = lateral_controller.get_steer_factor(r_ax1, P_ch, Pd_ch, vel)\n\n travel = delta * 18\n #print('Travel = %s'%travel)\n return travel\n\n\ndef zero_func(t):\n return np.zeros((3,1), dtype=np.float64)\n\n\nnum_assm.terrain_data.get_state = terrain_state\n\nnum_assm.ST1_config.UF_mcs_rack_act = steering_function\n\nnum_assm.AX1_config.UF_far_drive = RR_Torque\nnum_assm.AX1_config.UF_fal_drive = RL_Torque\n\n#num_assm.DR2_config.UF_far_drive = RR_Torque\n#num_assm.DR2_config.UF_fal_drive = RL_Torque\n\nnum_assm.CH_config.UF_fas_aero_drag_F = zero_func\nnum_assm.CH_config.UF_fas_aero_drag_T = zero_func\n# =============================================================================\n# Setting and Starting Simulation\n# =============================================================================\n\nsim = simulation('sim', num_model, 'dds')\nsim.set_time_array(15, dt)\n\n# Getting Equilibrium results as initial conditions to this simulation\n# ====================================================================\nsim.set_initial_states('results/equilibrium_v4.npz')\n\nsim.solve()\n\nsim.save_as_csv('results', 'constant_radius_v8', 'pos')\nsim.save_as_npz('results', 'constant_radius_v8')\n\n#=============================================================================\n# Plotting Simulation Results\n# =============================================================================\n\nimport matplotlib.pyplot as plt\n\nsim.soln.pos_dataframe.plot(x='CH.rbs_chassis.x', y='CH.rbs_chassis.y', grid=True)\n\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.x', grid=True)\n\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.vel_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\nsim.soln.acc_dataframe.plot(x='time', y='CH.rbs_chassis.z', grid=True)\n\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e0', 
grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e1', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e2', grid=True)\nsim.soln.pos_dataframe.plot(x='time', y='CH.rbs_chassis.e3', grid=True)\n\nplt.show()\n",
"step-ids": [
3,
8,
9,
10,
11
]
}
|
[
3,
8,
9,
10,
11
] |
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
class SENDMAIL(object):
    """SMTP helpers for sending 12306 ticket-availability alert emails."""

    def __init__(self):
        # NOTE(review): credentials are hard-coded placeholders; load them
        # from configuration or environment variables in real use.
        self.smtpserver = 'smtp.qq.com'
        self.username = 'wu_chang_hao@qq.com'  # sender account, e.g. a QQ mailbox
        self.password = 'xxxxxxxxxxxxxxxx'  # SMTP authorization code, not the login password
        self.sender = 'wu_chang_hao@qq.com'

    def sendmail(self, receiver, lch, type, cfsj):
        """Send a ticket-availability alert to *receiver*.

        Args:
            receiver: destination email address (a single address string).
            lch: train number interpolated into the message body.
            type: ticket/seat type text (parameter name shadows the
                builtin; kept for backward compatibility with callers).
            cfsj: departure time text.
        """
        subject = '【 抢票提醒通知 】'
        receiver = ['%s' % receiver]

        msg = MIMEMultipart('mixed')
        msg['Subject'] = subject
        msg['From'] = 'Ncoreqp-Server <wu_chang_hao@qq.com>'
        msg['To'] = ";".join(receiver)

        # Plain-text alert body (runtime Chinese text kept verbatim).
        text = """Hi!\n
        十万火急, 探子来报! \n
        目前, %s号列车, %s当前有票! - 出发时间为:[ %s ]
        快去12306网站支付买票吧!! 快速通道链接https://www.12306.cn/index/\n
        http://www.northcorezh.com\n
        北芯众合, 改变生活!
        """ % (lch, type, cfsj)
        text_plain = MIMEText(text, 'plain', 'utf-8')
        msg.attach(text_plain)

        # Plain (non-SSL) SMTP session; smtp.set_debuglevel(1) would dump
        # the whole conversation with the server when debugging.
        smtp = smtplib.SMTP()
        smtp.connect('smtp.qq.com')
        smtp.login(self.username, self.password)
        smtp.sendmail(self.sender, receiver, msg.as_string())
        smtp.quit()
        print('邮件发送成功 !!!')

    def send_email_by_smtp(self):
        """Send a fixed test email through QQ mail over SMTP-SSL (port 465)."""
        # Sender account and its SMTP authorization code.
        sender_email_address = "wu_chang_hao@qq.com"
        sender_email_password = "xxxxxxxxxxxxxxxxxx"
        # SSL endpoint; qq.com defaults -- change host/port for other providers.
        smtp_server_host = "smtp.qq.com"
        smtp_server_port = 465
        receiver_email = "2083969687@qq.com"
        message_subject = "Python smtp测试邮件"
        message_context = "这是一封通过Python smtp发送的测试邮件..."

        message = MIMEText(message_context, 'plain', 'utf-8')
        message["From"] = Header(sender_email_address, "utf-8")
        message["To"] = Header(receiver_email, "utf-8")
        message["Subject"] = Header(message_subject, "utf-8")

        email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)
        try:
            email_client.login(sender_email_address, sender_email_password)
            # BUG FIX: the original printed the literal "{receiver_email}"
            # because the f-string prefix was missing.
            print(f"smtp----login success, now will send an email to {receiver_email}")
        except Exception:
            print("smtp----sorry, username or password not correct or another problem occur")
        else:
            email_client.sendmail(sender_email_address, receiver_email, message.as_string())
            print(f"smtp----send email to {receiver_email} finish")
        finally:
            email_client.close()
|
normal
|
{
"blob_id": "bcab83e0ae6ee4925393b50bdefdfeb85c42ad2c",
"index": 1914,
"step-1": "<mask token>\n\n\nclass SENDMAIL(object):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SENDMAIL(object):\n\n def __init__(self):\n self.smtpserver = 'smtp.qq.com'\n self.username = 'wu_chang_hao@qq.com'\n self.password = 'xxxxxxxxxxxxxxxx'\n self.sender = 'wu_chang_hao@qq.com'\n\n def sendmail(self, receiver, lch, type, cfsj):\n subject = '【 抢票提醒通知 】'\n receiver = ['%s' % receiver]\n msg = MIMEMultipart('mixed')\n msg['Subject'] = subject\n msg['From'] = 'Ncoreqp-Server <wu_chang_hao@qq.com>'\n msg['To'] = ';'.join(receiver)\n text = (\n \"\"\"Hi!\n\n 十万火急, 探子来报! \n\n \n 目前, %s号列车, %s当前有票! - 出发时间为:[ %s ]\n 快去12306网站支付买票吧!! 快速通道链接https://www.12306.cn/index/\n\n \n http://www.northcorezh.com\n\n 北芯众合, 改变生活!\n \"\"\"\n % (lch, type, cfsj))\n text_plain = MIMEText(text, 'plain', 'utf-8')\n msg.attach(text_plain)\n smtp = smtplib.SMTP()\n smtp.connect('smtp.qq.com')\n smtp.login(self.username, self.password)\n smtp.sendmail(self.sender, receiver, msg.as_string())\n smtp.quit()\n print('邮件发送成功 !!!')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SENDMAIL(object):\n\n def __init__(self):\n self.smtpserver = 'smtp.qq.com'\n self.username = 'wu_chang_hao@qq.com'\n self.password = 'xxxxxxxxxxxxxxxx'\n self.sender = 'wu_chang_hao@qq.com'\n\n def sendmail(self, receiver, lch, type, cfsj):\n subject = '【 抢票提醒通知 】'\n receiver = ['%s' % receiver]\n msg = MIMEMultipart('mixed')\n msg['Subject'] = subject\n msg['From'] = 'Ncoreqp-Server <wu_chang_hao@qq.com>'\n msg['To'] = ';'.join(receiver)\n text = (\n \"\"\"Hi!\n\n 十万火急, 探子来报! \n\n \n 目前, %s号列车, %s当前有票! - 出发时间为:[ %s ]\n 快去12306网站支付买票吧!! 快速通道链接https://www.12306.cn/index/\n\n \n http://www.northcorezh.com\n\n 北芯众合, 改变生活!\n \"\"\"\n % (lch, type, cfsj))\n text_plain = MIMEText(text, 'plain', 'utf-8')\n msg.attach(text_plain)\n smtp = smtplib.SMTP()\n smtp.connect('smtp.qq.com')\n smtp.login(self.username, self.password)\n smtp.sendmail(self.sender, receiver, msg.as_string())\n smtp.quit()\n print('邮件发送成功 !!!')\n\n def send_email_by_smtp(self):\n sender_email_address = 'wu_chang_hao@qq.com'\n sender_email_password = 'xxxxxxxxxxxxxxxxxx'\n smtp_server_host = 'smtp.qq.com'\n smtp_server_port = 465\n receiver_email = '2083969687@qq.com'\n message_subject = 'Python smtp测试邮件'\n message_context = '这是一封通过Python smtp发送的测试邮件...'\n message = MIMEText(message_context, 'plain', 'utf-8')\n message['From'] = Header(sender_email_address, 'utf-8')\n message['To'] = Header(receiver_email, 'utf-8')\n message['Subject'] = Header(message_subject, 'utf-8')\n email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)\n try:\n email_client.login(sender_email_address, sender_email_password)\n print(\n 'smtp----login success, now will send an email to {receiver_email}'\n )\n except Exception:\n print(\n 'smtp----sorry, username or password not correct or another problem occur'\n )\n else:\n email_client.sendmail(sender_email_address, receiver_email,\n message.as_string())\n print(f'smtp----send email to {receiver_email} finish')\n finally:\n 
email_client.close()\n",
"step-4": "import smtplib\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom email.header import Header\n\n\nclass SENDMAIL(object):\n\n def __init__(self):\n self.smtpserver = 'smtp.qq.com'\n self.username = 'wu_chang_hao@qq.com'\n self.password = 'xxxxxxxxxxxxxxxx'\n self.sender = 'wu_chang_hao@qq.com'\n\n def sendmail(self, receiver, lch, type, cfsj):\n subject = '【 抢票提醒通知 】'\n receiver = ['%s' % receiver]\n msg = MIMEMultipart('mixed')\n msg['Subject'] = subject\n msg['From'] = 'Ncoreqp-Server <wu_chang_hao@qq.com>'\n msg['To'] = ';'.join(receiver)\n text = (\n \"\"\"Hi!\n\n 十万火急, 探子来报! \n\n \n 目前, %s号列车, %s当前有票! - 出发时间为:[ %s ]\n 快去12306网站支付买票吧!! 快速通道链接https://www.12306.cn/index/\n\n \n http://www.northcorezh.com\n\n 北芯众合, 改变生活!\n \"\"\"\n % (lch, type, cfsj))\n text_plain = MIMEText(text, 'plain', 'utf-8')\n msg.attach(text_plain)\n smtp = smtplib.SMTP()\n smtp.connect('smtp.qq.com')\n smtp.login(self.username, self.password)\n smtp.sendmail(self.sender, receiver, msg.as_string())\n smtp.quit()\n print('邮件发送成功 !!!')\n\n def send_email_by_smtp(self):\n sender_email_address = 'wu_chang_hao@qq.com'\n sender_email_password = 'xxxxxxxxxxxxxxxxxx'\n smtp_server_host = 'smtp.qq.com'\n smtp_server_port = 465\n receiver_email = '2083969687@qq.com'\n message_subject = 'Python smtp测试邮件'\n message_context = '这是一封通过Python smtp发送的测试邮件...'\n message = MIMEText(message_context, 'plain', 'utf-8')\n message['From'] = Header(sender_email_address, 'utf-8')\n message['To'] = Header(receiver_email, 'utf-8')\n message['Subject'] = Header(message_subject, 'utf-8')\n email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)\n try:\n email_client.login(sender_email_address, sender_email_password)\n print(\n 'smtp----login success, now will send an email to {receiver_email}'\n )\n except Exception:\n print(\n 'smtp----sorry, username or password not correct or another problem occur'\n )\n else:\n 
email_client.sendmail(sender_email_address, receiver_email,\n message.as_string())\n print(f'smtp----send email to {receiver_email} finish')\n finally:\n email_client.close()\n",
"step-5": "import smtplib\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom email.header import Header\n\nclass SENDMAIL(object):\n\n def __init__(self):\n self.smtpserver = 'smtp.qq.com'\n self.username = 'wu_chang_hao@qq.com' # 比如QQ邮箱\n self.password = 'xxxxxxxxxxxxxxxx' # 生成授权码\n self.sender = 'wu_chang_hao@qq.com'\n\n def sendmail(self, receiver, lch, type, cfsj):\n\n subject = '【 抢票提醒通知 】'\n receiver = ['%s' % receiver]\n\n msg = MIMEMultipart('mixed')\n msg['Subject'] = subject\n msg['From'] = 'Ncoreqp-Server <wu_chang_hao@qq.com>'\n\n msg['To'] = \";\".join(receiver)\n\n # 构造文字内容\n text = \"\"\"Hi!\\n\n 十万火急, 探子来报! \\n\n \n 目前, %s号列车, %s当前有票! - 出发时间为:[ %s ]\n 快去12306网站支付买票吧!! 快速通道链接https://www.12306.cn/index/\\n\n \n http://www.northcorezh.com\\n\n 北芯众合, 改变生活!\n \"\"\" % (lch, type, cfsj)\n text_plain = MIMEText(text, 'plain', 'utf-8')\n msg.attach(text_plain)\n\n # 发送邮件\n smtp = smtplib.SMTP()\n smtp.connect('smtp.qq.com')\n\n # 我们用set_debuglevel(1)就可以打印出和SMTP服务器交互的所有信息。\n # smtp.set_debuglevel(1)\n smtp.login(self.username, self.password)\n smtp.sendmail(self.sender, receiver, msg.as_string())\n smtp.quit()\n\n print('邮件发送成功 !!!')\n\n def send_email_by_smtp(self):\n # 用于发送邮件的邮箱。修改成自己的邮箱\n sender_email_address = \"wu_chang_hao@qq.com\"\n # 用于发送邮件的邮箱的密码。修改成自己的邮箱的密码\n sender_email_password = \"xxxxxxxxxxxxxxxxxx\"\n # 用于发送邮件的邮箱的smtp服务器,也可以直接是IP地址\n # 修改成自己邮箱的sntp服务器地址;qq邮箱不需要修改此值\n smtp_server_host = \"smtp.qq.com\"\n # 修改成自己邮箱的sntp服务器监听的端口;qq邮箱不需要修改此值\n smtp_server_port = 465\n # 要发往的邮箱\n receiver_email = \"2083969687@qq.com\"\n # 要发送的邮件主题\n message_subject = \"Python smtp测试邮件\"\n # 要发送的邮件内容\n message_context = \"这是一封通过Python smtp发送的测试邮件...\"\n\n # 邮件对象,用于构建邮件\n message = MIMEText(message_context, 'plain', 'utf-8')\n # 设置发件人(声称的)\n message[\"From\"] = Header(sender_email_address, \"utf-8\")\n # 设置收件人(声称的)\n message[\"To\"] = Header(receiver_email, \"utf-8\")\n # 设置邮件主题\n message[\"Subject\"] = Header(message_subject, 
\"utf-8\")\n\n # 连接smtp服务器。如果没有使用SSL,将SMTP_SSL()改成SMTP()即可其他都不需要做改动\n email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)\n try:\n # 验证邮箱及密码是否正确\n email_client.login(sender_email_address, sender_email_password)\n print(\"smtp----login success, now will send an email to {receiver_email}\")\n\n except Exception:\n print(\"smtp----sorry, username or password not correct or another problem occur\")\n\n else:\n # 发送邮件\n email_client.sendmail(sender_email_address, receiver_email, message.as_string())\n print(f\"smtp----send email to {receiver_email} finish\")\n finally:\n # 关闭连接\n email_client.close()\n\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 14 20:35:10 2020
@author: Johanna
"""
import numpy as np
###############################################################################
# Complex Visibility Functions
###############################################################################
def compute_vis(X, F):
    """Return the complex visibilities of image vector X under DFT matrix F."""
    return np.matmul(X, F.T).astype(np.complex64)
def compute_vis_grad(vis, Z, F):
    """Gradient (real part) of the complex-visibility chi-squared w.r.t. image Z."""
    residual = vis - compute_vis(Z, F)
    return np.real(-np.matmul(np.conjugate(F.T), residual))
def chisq_vis(vis, Z, F, sigma):
    """Mean chi-squared between measured visibilities and those of image Z."""
    model = compute_vis(Z, F)
    weighted = np.abs((model - vis) / sigma) ** 2
    return np.sum(weighted) / (2 * len(vis))
###############################################################################
# Visibility Amplitude Functions
###############################################################################
def compute_amp(X, F):
    """Return the visibility amplitudes |F X| of image vector X."""
    return np.abs(F @ X)
def compute_amp_grad(amp, Z, A, sigma):
    """Gradient of the visibility-amplitude chi-squared with respect to image Z.

    amp : measured amplitudes; Z : current image estimate;
    A : DFT matrix; sigma : per-point noise.
    """
    model_vis = np.dot(A, Z)
    model_amp = np.abs(model_vis)
    # Weighted residual, divided back through the complex model visibilities.
    weight = (amp - model_amp) * model_amp / (sigma ** 2) / model_vis
    return (-2.0 / len(amp)) * np.real(np.dot(weight, A))
def chisq_amp(amp, Z, F, sigma):
    """Reduced chi-squared between measured amplitudes and those of image Z."""
    model = compute_amp(Z, F)
    return np.sum(np.abs((amp - model) / sigma) ** 2) / len(amp)
###############################################################################
# Closure Phase Functions
###############################################################################
def compute_cphase(X, F_cphase):
    """Closure phase of image vector X over a triangle of baselines.

    F_cphase stacks the three baseline DFT matrices along its last axis.
    Returns the angle of the bispectrum (product of the three visibilities).
    """
    row = np.asarray(X).reshape((1, -1))
    v1 = np.matmul(row, F_cphase[:, :, 0].T).astype(np.complex64)
    v2 = np.matmul(row, F_cphase[:, :, 1].T).astype(np.complex64)
    v3 = np.matmul(row, F_cphase[:, :, 2].T).astype(np.complex64)
    return np.angle(v1 * v2 * v3)
def compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):
    """Gradient of the closure-phase chi-squared with respect to image Z.

    cphase   : measured closure phases
    Z        : current image estimate (flattened, npix**2 pixels)
    F_cphase : three baseline DFT matrices stacked on the last axis
    sigma    : closure-phase uncertainties
    """
    row = Z.reshape((1, -1))
    A1 = F_cphase[:, :, 0]
    A2 = F_cphase[:, :, 1]
    A3 = F_cphase[:, :, 2]
    v1 = np.matmul(row, A1.T).astype(np.complex64)
    v2 = np.matmul(row, A2.T).astype(np.complex64)
    v3 = np.matmul(row, A3.T).astype(np.complex64)
    model_cphase = np.angle(v1 * v2 * v3)
    weight = np.sin(cphase - model_cphase) / (sigma ** 2)
    total = np.dot(weight / v1, A1) + np.dot(weight / v2, A2) + np.dot(weight / v3, A3)
    grad = -(2.0 / len(cphase)) * np.imag(total)
    return grad.reshape(npix ** 2)
def chisq_cphase(cphase, Z, F_cphase, sigma_cphase):
    """Closure-phase reduced chi-squared in its (1 - cos) form."""
    model = compute_cphase(Z, F_cphase)
    mismatch = (1.0 - np.cos(cphase - model)) / (sigma_cphase ** 2)
    return (2.0 / len(cphase)) * np.sum(mismatch)
###############################################################################
# Closure Amplitude Functions
###############################################################################
def compute_camp(X, Amatrices):
    """Closure amplitude |v1 v2 / (v3 v4)| of image vector X.

    Amatrices holds the DFT matrices of the four baselines in the quadrangle.
    """
    v = [np.dot(A, X) for A in Amatrices[:4]]
    return np.abs(v[0] * v[1] / (v[2] * v[3]))
def compute_camp_grad(camp, Z, Amatrices, sigma):
    """Gradient of the closure-amplitude chi-squared with respect to image Z.

    camp      : measured closure amplitudes
    Z         : current image estimate
    Amatrices : DFT matrices of the four baselines
    sigma     : closure-amplitude uncertainties
    """
    v1, v2, v3, v4 = (np.dot(A, Z) for A in Amatrices[:4])
    model = np.abs(v1 * v2 / (v3 * v4))
    w = (camp - model) * model / (sigma ** 2)
    # Numerator baselines enter with +, denominator baselines with -.
    total = (np.dot(w / v1, Amatrices[0])
             + np.dot(w / v2, Amatrices[1])
             - np.dot(w / v3, Amatrices[2])
             - np.dot(w / v4, Amatrices[3]))
    return (-2.0 / len(camp)) * np.real(total)
def chisq_camp(camp, Z, Amatrices, sigma):
    """Reduced chi-squared between measured and model closure amplitudes."""
    v1, v2, v3, v4 = (np.dot(A, Z) for A in Amatrices[:4])
    model = np.abs(v1 * v2 / (v3 * v4))
    return np.sum(np.abs((camp - model) / sigma) ** 2) / len(camp)
###############################################################################
# Log Closure Amplitude Functions
###############################################################################
def compute_lgcamp(X, Amatrices):
    """Log closure amplitude of image vector X: log a1 + log a2 - log a3 - log a4."""
    a = [np.abs(np.dot(A, X)) for A in Amatrices[:4]]
    return np.log(a[0]) + np.log(a[1]) - np.log(a[2]) - np.log(a[3])
def compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):
    """Gradient of the log-closure-amplitude chi-squared with respect to image Z."""
    v1, v2, v3, v4 = (np.dot(A, Z) for A in Amatrices[:4])
    model = (np.log(np.abs(v1)) + np.log(np.abs(v2))
             - np.log(np.abs(v3)) - np.log(np.abs(v4)))
    w = (lgcamp - model) / (sigma ** 2)
    # Numerator baselines contribute with +, denominator baselines with -.
    total = (np.dot(w / v1, Amatrices[0])
             + np.dot(w / v2, Amatrices[1])
             - np.dot(w / v3, Amatrices[2])
             - np.dot(w / v4, Amatrices[3]))
    return (-2.0 / len(lgcamp)) * np.real(total)
def chisq_lgcamp(lgcamp, X, Amatrices, sigma):
    """Reduced chi-squared between measured and model log closure amplitudes."""
    a = [np.abs(np.dot(A, X)) for A in Amatrices[:4]]
    model = np.log(a[0]) + np.log(a[1]) - np.log(a[2]) - np.log(a[3])
    return np.sum(np.abs((lgcamp - model) / sigma) ** 2) / len(lgcamp)
|
normal
|
{
"blob_id": "ea3217be80b6d1d3a400139bc4a91870cd2f1d87",
"index": 5118,
"step-1": "<mask token>\n\n\ndef compute_vis(X, F):\n vis = np.matmul(X, np.transpose(F)).astype(np.complex64)\n return vis\n\n\ndef compute_vis_grad(vis, Z, F):\n Z_vis = compute_vis(Z, F)\n grad = -np.matmul(np.conjugate(F.T), vis - Z_vis)\n return grad.real\n\n\n<mask token>\n\n\ndef compute_amp_grad(amp, Z, A, sigma):\n \"\"\" \n Compute gradient of visibility amplitude.\n \"\"\"\n i1 = np.dot(A, Z)\n amp_samples = np.abs(i1)\n pp = (amp - amp_samples) * amp_samples / sigma ** 2 / i1\n out = -2.0 / len(amp) * np.real(np.dot(pp, A))\n return out\n\n\ndef chisq_amp(amp, Z, F, sigma):\n \"\"\" Compute and return chi-squared of amplitude between X and Z. \"\"\"\n amp_Z = compute_amp(Z, F)\n chisq = np.sum(np.abs((amp - amp_Z) / sigma) ** 2) / len(amp)\n return chisq\n\n\ndef compute_cphase(X, F_cphase):\n \"\"\" Given an image X and the DFT matrices from three baselines,\n compute and return its closure phase. \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n X = np.array(X)\n vis1 = np.matmul(X.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n vis2 = np.matmul(X.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n vis3 = np.matmul(X.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase = np.angle(vis1 * vis2 * vis3)\n return cphase\n\n\ndef compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):\n \"\"\" \n Compute gradient of closure phase chi-squared\n \n cphase : closure phase of true image \n Z : predicted image vector\n F_cphase : 3 DFT matrices from three baselines in a closure triangle\n \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n i1 = np.matmul(Z.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n i2 = np.matmul(Z.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n i3 = np.matmul(Z.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase_samples = np.angle(i1 * i2 * i3)\n pref = np.sin(cphase - cphase_samples) / sigma ** 2\n pt1 = pref 
/ i1\n pt2 = pref / i2\n pt3 = pref / i3\n out = -(2.0 / len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) +\n np.dot(pt3, A3))\n return out.reshape(npix ** 2)\n\n\ndef chisq_cphase(cphase, Z, F_cphase, sigma_cphase):\n \"\"\"Closure Phase reduced chi-squared loss.\"\"\"\n cphase_samples = compute_cphase(Z, F_cphase)\n chisq = 2.0 / len(cphase) * np.sum((1.0 - np.cos(cphase -\n cphase_samples)) / sigma_cphase ** 2)\n return chisq\n\n\ndef compute_camp(X, Amatrices):\n \"\"\"\n Compute closure amplitude of image vector X.\n \"\"\"\n i1 = np.dot(Amatrices[0], X)\n i2 = np.dot(Amatrices[1], X)\n i3 = np.dot(Amatrices[2], X)\n i4 = np.dot(Amatrices[3], X)\n camp = np.abs(i1 * i2 / (i3 * i4))\n return camp\n\n\ndef compute_camp_grad(camp, Z, Amatrices, sigma):\n \"\"\"\n The gradient of the closure amplitude chi-squared\n \n camp: Closure amplitudes of true image\n Z: Predicted image vector\n Amatrices: DFT matrices of four baselines\n \"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n pp = (camp - camp_samples) * camp_samples / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(camp) * np.real(out)\n\n\n<mask token>\n\n\ndef compute_lgcamp(X, Amatrices):\n \"\"\" Compute log closure amplitude of image vector X \"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n lgcamp = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n return lgcamp\n\n\n<mask token>\n\n\ndef chisq_lgcamp(lgcamp, X, Amatrices, sigma):\n \"\"\"Log Closure Amplitudes reduced chi-squared\"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = 
np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n samples = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n chisq = np.sum(np.abs((lgcamp - samples) / sigma) ** 2) / len(lgcamp)\n return chisq\n",
"step-2": "<mask token>\n\n\ndef compute_vis(X, F):\n vis = np.matmul(X, np.transpose(F)).astype(np.complex64)\n return vis\n\n\ndef compute_vis_grad(vis, Z, F):\n Z_vis = compute_vis(Z, F)\n grad = -np.matmul(np.conjugate(F.T), vis - Z_vis)\n return grad.real\n\n\n<mask token>\n\n\ndef compute_amp_grad(amp, Z, A, sigma):\n \"\"\" \n Compute gradient of visibility amplitude.\n \"\"\"\n i1 = np.dot(A, Z)\n amp_samples = np.abs(i1)\n pp = (amp - amp_samples) * amp_samples / sigma ** 2 / i1\n out = -2.0 / len(amp) * np.real(np.dot(pp, A))\n return out\n\n\ndef chisq_amp(amp, Z, F, sigma):\n \"\"\" Compute and return chi-squared of amplitude between X and Z. \"\"\"\n amp_Z = compute_amp(Z, F)\n chisq = np.sum(np.abs((amp - amp_Z) / sigma) ** 2) / len(amp)\n return chisq\n\n\ndef compute_cphase(X, F_cphase):\n \"\"\" Given an image X and the DFT matrices from three baselines,\n compute and return its closure phase. \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n X = np.array(X)\n vis1 = np.matmul(X.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n vis2 = np.matmul(X.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n vis3 = np.matmul(X.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase = np.angle(vis1 * vis2 * vis3)\n return cphase\n\n\ndef compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):\n \"\"\" \n Compute gradient of closure phase chi-squared\n \n cphase : closure phase of true image \n Z : predicted image vector\n F_cphase : 3 DFT matrices from three baselines in a closure triangle\n \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n i1 = np.matmul(Z.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n i2 = np.matmul(Z.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n i3 = np.matmul(Z.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase_samples = np.angle(i1 * i2 * i3)\n pref = np.sin(cphase - cphase_samples) / sigma ** 2\n pt1 = pref 
/ i1\n pt2 = pref / i2\n pt3 = pref / i3\n out = -(2.0 / len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) +\n np.dot(pt3, A3))\n return out.reshape(npix ** 2)\n\n\ndef chisq_cphase(cphase, Z, F_cphase, sigma_cphase):\n \"\"\"Closure Phase reduced chi-squared loss.\"\"\"\n cphase_samples = compute_cphase(Z, F_cphase)\n chisq = 2.0 / len(cphase) * np.sum((1.0 - np.cos(cphase -\n cphase_samples)) / sigma_cphase ** 2)\n return chisq\n\n\ndef compute_camp(X, Amatrices):\n \"\"\"\n Compute closure amplitude of image vector X.\n \"\"\"\n i1 = np.dot(Amatrices[0], X)\n i2 = np.dot(Amatrices[1], X)\n i3 = np.dot(Amatrices[2], X)\n i4 = np.dot(Amatrices[3], X)\n camp = np.abs(i1 * i2 / (i3 * i4))\n return camp\n\n\ndef compute_camp_grad(camp, Z, Amatrices, sigma):\n \"\"\"\n The gradient of the closure amplitude chi-squared\n \n camp: Closure amplitudes of true image\n Z: Predicted image vector\n Amatrices: DFT matrices of four baselines\n \"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n pp = (camp - camp_samples) * camp_samples / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(camp) * np.real(out)\n\n\ndef chisq_camp(camp, Z, Amatrices, sigma):\n \"\"\"Closure Amplitudes reduced chi-squared loss.\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n chisq = np.sum(np.abs((camp - camp_samples) / sigma) ** 2) / len(camp)\n return chisq\n\n\ndef compute_lgcamp(X, Amatrices):\n \"\"\" Compute log closure amplitude of image vector X \"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n 
a4 = np.abs(np.dot(Amatrices[3], X))\n lgcamp = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n return lgcamp\n\n\ndef compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):\n \"\"\"The gradient of the Log closure amplitude chi-squared\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n lgcamp_samples = np.log(np.abs(i1)) + np.log(np.abs(i2)) - np.log(np.\n abs(i3)) - np.log(np.abs(i4))\n pp = (lgcamp - lgcamp_samples) / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(lgcamp) * np.real(out)\n\n\ndef chisq_lgcamp(lgcamp, X, Amatrices, sigma):\n \"\"\"Log Closure Amplitudes reduced chi-squared\"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n samples = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n chisq = np.sum(np.abs((lgcamp - samples) / sigma) ** 2) / len(lgcamp)\n return chisq\n",
"step-3": "<mask token>\n\n\ndef compute_vis(X, F):\n vis = np.matmul(X, np.transpose(F)).astype(np.complex64)\n return vis\n\n\ndef compute_vis_grad(vis, Z, F):\n Z_vis = compute_vis(Z, F)\n grad = -np.matmul(np.conjugate(F.T), vis - Z_vis)\n return grad.real\n\n\ndef chisq_vis(vis, Z, F, sigma):\n \"\"\" \n Compute mean chi-squared of visibilities of Z.\n \"\"\"\n samples = compute_vis(Z, F)\n chisq = np.sum(np.abs((samples - vis) / sigma) ** 2) / (2 * len(vis))\n return chisq\n\n\ndef compute_amp(X, F):\n \"\"\" Given an image X and DFT matrix F, compute and return its \n visibility amplitude. \"\"\"\n amp = np.abs(np.dot(F, X))\n return amp\n\n\ndef compute_amp_grad(amp, Z, A, sigma):\n \"\"\" \n Compute gradient of visibility amplitude.\n \"\"\"\n i1 = np.dot(A, Z)\n amp_samples = np.abs(i1)\n pp = (amp - amp_samples) * amp_samples / sigma ** 2 / i1\n out = -2.0 / len(amp) * np.real(np.dot(pp, A))\n return out\n\n\ndef chisq_amp(amp, Z, F, sigma):\n \"\"\" Compute and return chi-squared of amplitude between X and Z. \"\"\"\n amp_Z = compute_amp(Z, F)\n chisq = np.sum(np.abs((amp - amp_Z) / sigma) ** 2) / len(amp)\n return chisq\n\n\ndef compute_cphase(X, F_cphase):\n \"\"\" Given an image X and the DFT matrices from three baselines,\n compute and return its closure phase. 
\"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n X = np.array(X)\n vis1 = np.matmul(X.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n vis2 = np.matmul(X.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n vis3 = np.matmul(X.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase = np.angle(vis1 * vis2 * vis3)\n return cphase\n\n\ndef compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):\n \"\"\" \n Compute gradient of closure phase chi-squared\n \n cphase : closure phase of true image \n Z : predicted image vector\n F_cphase : 3 DFT matrices from three baselines in a closure triangle\n \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n i1 = np.matmul(Z.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n i2 = np.matmul(Z.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n i3 = np.matmul(Z.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase_samples = np.angle(i1 * i2 * i3)\n pref = np.sin(cphase - cphase_samples) / sigma ** 2\n pt1 = pref / i1\n pt2 = pref / i2\n pt3 = pref / i3\n out = -(2.0 / len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) +\n np.dot(pt3, A3))\n return out.reshape(npix ** 2)\n\n\ndef chisq_cphase(cphase, Z, F_cphase, sigma_cphase):\n \"\"\"Closure Phase reduced chi-squared loss.\"\"\"\n cphase_samples = compute_cphase(Z, F_cphase)\n chisq = 2.0 / len(cphase) * np.sum((1.0 - np.cos(cphase -\n cphase_samples)) / sigma_cphase ** 2)\n return chisq\n\n\ndef compute_camp(X, Amatrices):\n \"\"\"\n Compute closure amplitude of image vector X.\n \"\"\"\n i1 = np.dot(Amatrices[0], X)\n i2 = np.dot(Amatrices[1], X)\n i3 = np.dot(Amatrices[2], X)\n i4 = np.dot(Amatrices[3], X)\n camp = np.abs(i1 * i2 / (i3 * i4))\n return camp\n\n\ndef compute_camp_grad(camp, Z, Amatrices, sigma):\n \"\"\"\n The gradient of the closure amplitude chi-squared\n \n camp: Closure amplitudes of true image\n Z: Predicted image vector\n Amatrices: DFT 
matrices of four baselines\n \"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n pp = (camp - camp_samples) * camp_samples / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(camp) * np.real(out)\n\n\ndef chisq_camp(camp, Z, Amatrices, sigma):\n \"\"\"Closure Amplitudes reduced chi-squared loss.\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n chisq = np.sum(np.abs((camp - camp_samples) / sigma) ** 2) / len(camp)\n return chisq\n\n\ndef compute_lgcamp(X, Amatrices):\n \"\"\" Compute log closure amplitude of image vector X \"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n lgcamp = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n return lgcamp\n\n\ndef compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):\n \"\"\"The gradient of the Log closure amplitude chi-squared\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n lgcamp_samples = np.log(np.abs(i1)) + np.log(np.abs(i2)) - np.log(np.\n abs(i3)) - np.log(np.abs(i4))\n pp = (lgcamp - lgcamp_samples) / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(lgcamp) * np.real(out)\n\n\ndef chisq_lgcamp(lgcamp, X, Amatrices, sigma):\n \"\"\"Log Closure Amplitudes reduced chi-squared\"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = 
np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n samples = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n chisq = np.sum(np.abs((lgcamp - samples) / sigma) ** 2) / len(lgcamp)\n return chisq\n",
"step-4": "<mask token>\nimport numpy as np\n\n\ndef compute_vis(X, F):\n vis = np.matmul(X, np.transpose(F)).astype(np.complex64)\n return vis\n\n\ndef compute_vis_grad(vis, Z, F):\n Z_vis = compute_vis(Z, F)\n grad = -np.matmul(np.conjugate(F.T), vis - Z_vis)\n return grad.real\n\n\ndef chisq_vis(vis, Z, F, sigma):\n \"\"\" \n Compute mean chi-squared of visibilities of Z.\n \"\"\"\n samples = compute_vis(Z, F)\n chisq = np.sum(np.abs((samples - vis) / sigma) ** 2) / (2 * len(vis))\n return chisq\n\n\ndef compute_amp(X, F):\n \"\"\" Given an image X and DFT matrix F, compute and return its \n visibility amplitude. \"\"\"\n amp = np.abs(np.dot(F, X))\n return amp\n\n\ndef compute_amp_grad(amp, Z, A, sigma):\n \"\"\" \n Compute gradient of visibility amplitude.\n \"\"\"\n i1 = np.dot(A, Z)\n amp_samples = np.abs(i1)\n pp = (amp - amp_samples) * amp_samples / sigma ** 2 / i1\n out = -2.0 / len(amp) * np.real(np.dot(pp, A))\n return out\n\n\ndef chisq_amp(amp, Z, F, sigma):\n \"\"\" Compute and return chi-squared of amplitude between X and Z. \"\"\"\n amp_Z = compute_amp(Z, F)\n chisq = np.sum(np.abs((amp - amp_Z) / sigma) ** 2) / len(amp)\n return chisq\n\n\ndef compute_cphase(X, F_cphase):\n \"\"\" Given an image X and the DFT matrices from three baselines,\n compute and return its closure phase. 
\"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n X = np.array(X)\n vis1 = np.matmul(X.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n vis2 = np.matmul(X.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n vis3 = np.matmul(X.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase = np.angle(vis1 * vis2 * vis3)\n return cphase\n\n\ndef compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):\n \"\"\" \n Compute gradient of closure phase chi-squared\n \n cphase : closure phase of true image \n Z : predicted image vector\n F_cphase : 3 DFT matrices from three baselines in a closure triangle\n \"\"\"\n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n i1 = np.matmul(Z.reshape((1, -1)), np.transpose(A1)).astype(np.complex64)\n i2 = np.matmul(Z.reshape((1, -1)), np.transpose(A2)).astype(np.complex64)\n i3 = np.matmul(Z.reshape((1, -1)), np.transpose(A3)).astype(np.complex64)\n cphase_samples = np.angle(i1 * i2 * i3)\n pref = np.sin(cphase - cphase_samples) / sigma ** 2\n pt1 = pref / i1\n pt2 = pref / i2\n pt3 = pref / i3\n out = -(2.0 / len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) +\n np.dot(pt3, A3))\n return out.reshape(npix ** 2)\n\n\ndef chisq_cphase(cphase, Z, F_cphase, sigma_cphase):\n \"\"\"Closure Phase reduced chi-squared loss.\"\"\"\n cphase_samples = compute_cphase(Z, F_cphase)\n chisq = 2.0 / len(cphase) * np.sum((1.0 - np.cos(cphase -\n cphase_samples)) / sigma_cphase ** 2)\n return chisq\n\n\ndef compute_camp(X, Amatrices):\n \"\"\"\n Compute closure amplitude of image vector X.\n \"\"\"\n i1 = np.dot(Amatrices[0], X)\n i2 = np.dot(Amatrices[1], X)\n i3 = np.dot(Amatrices[2], X)\n i4 = np.dot(Amatrices[3], X)\n camp = np.abs(i1 * i2 / (i3 * i4))\n return camp\n\n\ndef compute_camp_grad(camp, Z, Amatrices, sigma):\n \"\"\"\n The gradient of the closure amplitude chi-squared\n \n camp: Closure amplitudes of true image\n Z: Predicted image vector\n Amatrices: DFT 
matrices of four baselines\n \"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n pp = (camp - camp_samples) * camp_samples / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(camp) * np.real(out)\n\n\ndef chisq_camp(camp, Z, Amatrices, sigma):\n \"\"\"Closure Amplitudes reduced chi-squared loss.\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs(i1 * i2 / (i3 * i4))\n chisq = np.sum(np.abs((camp - camp_samples) / sigma) ** 2) / len(camp)\n return chisq\n\n\ndef compute_lgcamp(X, Amatrices):\n \"\"\" Compute log closure amplitude of image vector X \"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n lgcamp = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n return lgcamp\n\n\ndef compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):\n \"\"\"The gradient of the Log closure amplitude chi-squared\"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n lgcamp_samples = np.log(np.abs(i1)) + np.log(np.abs(i2)) - np.log(np.\n abs(i3)) - np.log(np.abs(i4))\n pp = (lgcamp - lgcamp_samples) / sigma ** 2\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = np.dot(pt1, Amatrices[0]) + np.dot(pt2, Amatrices[1]) + np.dot(pt3,\n Amatrices[2]) + np.dot(pt4, Amatrices[3])\n return -2.0 / len(lgcamp) * np.real(out)\n\n\ndef chisq_lgcamp(lgcamp, X, Amatrices, sigma):\n \"\"\"Log Closure Amplitudes reduced chi-squared\"\"\"\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = 
np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n samples = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n chisq = np.sum(np.abs((lgcamp - samples) / sigma) ** 2) / len(lgcamp)\n return chisq\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Jun 14 20:35:10 2020\n\n@author: Johanna\n\"\"\"\nimport numpy as np\n\n###############################################################################\n# Complex Visibility Functions\n###############################################################################\n\ndef compute_vis(X, F):\n vis = np.matmul(X, np.transpose(F)).astype(np.complex64)\n return vis\n\ndef compute_vis_grad(vis, Z, F):\n Z_vis = compute_vis(Z, F)\n grad = -np.matmul(np.conjugate(F.T), vis - Z_vis)\n return grad.real\n\ndef chisq_vis(vis, Z, F, sigma):\n ''' \n Compute mean chi-squared of visibilities of Z.\n '''\n samples = compute_vis(Z, F)\n chisq = np.sum(np.abs((samples-vis)/sigma)**2)/(2*len(vis))\n return chisq\n\n###############################################################################\n# Visibility Amplitude Functions\n###############################################################################\n \ndef compute_amp(X, F):\n ''' Given an image X and DFT matrix F, compute and return its \n visibility amplitude. '''\n amp = np.abs(np.dot(F, X))\n return amp\n\ndef compute_amp_grad(amp, Z, A, sigma):\n ''' \n Compute gradient of visibility amplitude.\n '''\n i1 = np.dot(A, Z)\n amp_samples = np.abs(i1)\n\n pp = ((amp - amp_samples) * amp_samples) / (sigma**2) / i1\n out = (-2.0/len(amp)) * np.real(np.dot(pp, A))\n return out\n\ndef chisq_amp(amp, Z, F, sigma):\n ''' Compute and return chi-squared of amplitude between X and Z. '''\n amp_Z = compute_amp(Z, F)\n chisq = np.sum(np.abs((amp - amp_Z)/sigma)**2)/len(amp)\n return chisq \n\n###############################################################################\n# Closure Phase Functions\n###############################################################################\n\ndef compute_cphase(X, F_cphase):\n ''' Given an image X and the DFT matrices from three baselines,\n compute and return its closure phase. 
'''\n # Get fourier matrices of each baseline \n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n \n X = np.array(X)\n \n # Compute observed closure phase of image\n vis1 = np.matmul(X.reshape((1,-1)), np.transpose(A1)).astype(np.complex64)\n vis2 = np.matmul(X.reshape((1,-1)), np.transpose(A2)).astype(np.complex64)\n vis3 = np.matmul(X.reshape((1,-1)), np.transpose(A3)).astype(np.complex64)\n \n cphase = np.angle(vis1 * vis2 * vis3) \n \n return cphase\n\ndef compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):\n ''' \n Compute gradient of closure phase chi-squared\n \n cphase : closure phase of true image \n Z : predicted image vector\n F_cphase : 3 DFT matrices from three baselines in a closure triangle\n '''\n # Get fourier matrices of each baseline \n A1 = F_cphase[:, :, 0]\n A2 = F_cphase[:, :, 1]\n A3 = F_cphase[:, :, 2]\n \n i1 = np.matmul(Z.reshape((1,-1)), np.transpose(A1)).astype(np.complex64)\n i2 = np.matmul(Z.reshape((1,-1)), np.transpose(A2)).astype(np.complex64)\n i3 = np.matmul(Z.reshape((1,-1)), np.transpose(A3)).astype(np.complex64)\n cphase_samples = np.angle(i1 * i2 * i3)\n \n pref = np.sin(cphase - cphase_samples)/(sigma**2)\n pt1 = pref/i1\n pt2 = pref/i2\n pt3 = pref/i3\n out = -(2.0/len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) + np.dot(pt3, A3))\n \n return out.reshape(npix**2)\n\ndef chisq_cphase(cphase, Z, F_cphase, sigma_cphase):\n \"\"\"Closure Phase reduced chi-squared loss.\"\"\"\n cphase_samples = compute_cphase(Z, F_cphase)\n chisq= (2.0/len(cphase)) * np.sum((1.0 - np.cos(cphase-cphase_samples))/(sigma_cphase**2))\n return chisq \n \n###############################################################################\n# Closure Amplitude Functions\n###############################################################################\n \ndef compute_camp(X, Amatrices):\n '''\n Compute closure amplitude of image vector X.\n '''\n i1 = np.dot(Amatrices[0], X)\n i2 = np.dot(Amatrices[1], X)\n i3 = 
np.dot(Amatrices[2], X)\n i4 = np.dot(Amatrices[3], X)\n \n camp = np.abs((i1 * i2)/(i3 * i4))\n return camp\n\ndef compute_camp_grad(camp, Z, Amatrices, sigma):\n \"\"\"\n The gradient of the closure amplitude chi-squared\n \n camp: Closure amplitudes of true image\n Z: Predicted image vector\n Amatrices: DFT matrices of four baselines\n \"\"\"\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs((i1 * i2)/(i3 * i4))\n\n pp = ((camp - camp_samples) * camp_samples)/(sigma**2)\n pt1 = pp/i1\n pt2 = pp/i2\n pt3 = -pp/i3\n pt4 = -pp/i4\n out = (np.dot(pt1, Amatrices[0]) +\n np.dot(pt2, Amatrices[1]) +\n np.dot(pt3, Amatrices[2]) +\n np.dot(pt4, Amatrices[3]))\n\n return (-2.0/len(camp)) * np.real(out)\n \ndef chisq_camp(camp, Z, Amatrices, sigma):\n \"\"\"Closure Amplitudes reduced chi-squared loss.\"\"\"\n\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n camp_samples = np.abs((i1 * i2)/(i3 * i4))\n\n chisq = np.sum(np.abs((camp - camp_samples)/sigma)**2)/len(camp)\n return chisq \n\n \n###############################################################################\n# Log Closure Amplitude Functions\n###############################################################################\n \ndef compute_lgcamp(X, Amatrices):\n ''' Compute log closure amplitude of image vector X '''\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n \n lgcamp = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n return lgcamp\n\ndef compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):\n \"\"\"The gradient of the Log closure amplitude chi-squared\"\"\"\n\n i1 = np.dot(Amatrices[0], Z)\n i2 = np.dot(Amatrices[1], Z)\n i3 = np.dot(Amatrices[2], Z)\n i4 = np.dot(Amatrices[3], Z)\n lgcamp_samples = (np.log(np.abs(i1)) +\n 
np.log(np.abs(i2)) - \n np.log(np.abs(i3)) -\n np.log(np.abs(i4)))\n\n pp = (lgcamp - lgcamp_samples) / (sigma**2)\n pt1 = pp / i1\n pt2 = pp / i2\n pt3 = -pp / i3\n pt4 = -pp / i4\n out = (np.dot(pt1, Amatrices[0]) +\n np.dot(pt2, Amatrices[1]) +\n np.dot(pt3, Amatrices[2]) +\n np.dot(pt4, Amatrices[3]))\n\n return (-2.0/len(lgcamp)) * np.real(out)\n\ndef chisq_lgcamp(lgcamp, X, Amatrices, sigma):\n \"\"\"Log Closure Amplitudes reduced chi-squared\"\"\"\n\n a1 = np.abs(np.dot(Amatrices[0], X))\n a2 = np.abs(np.dot(Amatrices[1], X))\n a3 = np.abs(np.dot(Amatrices[2], X))\n a4 = np.abs(np.dot(Amatrices[3], X))\n\n samples = np.log(a1) + np.log(a2) - np.log(a3) - np.log(a4)\n chisq = np.sum(np.abs((lgcamp - samples)/sigma)**2) / (len(lgcamp))\n return chisq \n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
11,
13,
15,
16,
17
]
}
|
[
11,
13,
15,
16,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
os.system(
"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png "
)
os.system("sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS")
os.system(
"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS"
)
os.system('sudo umount /media/NAS')
os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')
os.system(
"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg"
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
os.system(
"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png "
)
os.system("sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS")
os.system(
"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS"
)
os.system('sudo umount /media/NAS')
os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')
os.system(
"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg"
)
main()
<|reserved_special_token_1|>
import os
def main():
os.system(
"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png "
)
os.system("sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS")
os.system(
"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS"
)
os.system('sudo umount /media/NAS')
os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')
os.system(
"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg"
)
main()
<|reserved_special_token_1|>
#!/usr/bin/python
# Weekly backup script: mounts a Samba NAS share, mirrors the home
# directory onto it with rsync, then unmounts and notifies the desktop.

import os

def main():
    """Run the NAS backup: notify, mount, rsync, unmount, notify."""
    # Desktop notification that the backup is starting.
    os.system("notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png ")
    # Mount the NAS Samba share at /media/NAS.
    # NOTE(review): assumes passwordless sudo for this user — confirm.
    os.system("sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS")
    # Mirror /home/monster onto the share: keep shell profile files,
    # drop all other dotfiles plus VirtualBox/BurpSuite trees; --delete
    # makes the destination an exact mirror.
    os.system("sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS")
    # Unmount the share once the sync is finished.
    os.system("sudo umount /media/NAS")
    # Follow-up job: run the mail-client weekly backup script as well.
    os.system("python /home/monster/Scripts/emre-mailclient-weeklybackup.py")
    # Completion notification plus an audible chime.
    os.system("notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg")

main()
|
flexible
|
{
"blob_id": "b6dd04219de1d4526d175254da539107362772d6",
"index": 9229,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n os.system(\n \"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png \"\n )\n os.system(\"sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS\")\n os.system(\n \"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS\"\n )\n os.system('sudo umount /media/NAS')\n os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')\n os.system(\n \"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg\"\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n os.system(\n \"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png \"\n )\n os.system(\"sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS\")\n os.system(\n \"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS\"\n )\n os.system('sudo umount /media/NAS')\n os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')\n os.system(\n \"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg\"\n )\n\n\nmain()\n",
"step-4": "import os\n\n\ndef main():\n os.system(\n \"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png \"\n )\n os.system(\"sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS\")\n os.system(\n \"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS\"\n )\n os.system('sudo umount /media/NAS')\n os.system('python /home/monster/Scripts/emre-mailclient-weeklybackup.py')\n os.system(\n \"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg\"\n )\n\n\nmain()\n",
"step-5": "#!/usr/bin/python\n\nimport os\n\ndef main():\n os.system(\"notify-send 'Backup' 'NAS Backup Starting...' -i /usr/share/pixmaps/xarchiver/xarchiver-extract.png \")\n os.system(\"sudo mount -o username='emre' //192.168.1.2/Samba /media/NAS\")\n os.system(\"sudo rsync -av --include='.profile' --include='.bash*' --exclude='.*' --exclude='VirtualBox*' --exclude='BurpSuite*' --delete /home/monster /media/NAS\")\n os.system(\"sudo umount /media/NAS\")\n os.system(\"python /home/monster/Scripts/emre-mailclient-weeklybackup.py\")\n os.system(\"notify-send 'Backup' 'NAS Backup Completed!' -i /usr/share/pixmaps/xarchiver/xarchiver-add.png & paplay /usr/share/sounds/KDE-Im-User-Auth.ogg\")\n\nmain()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!
# SCons build script: builds the vjunit and XML-check tools, then
# assembles a fresh ovj_qa/ distribution tree from the VJQA sources.
# NOTE(review): Environment and Execute are SCons globals — this file is
# expected to be run by scons, not plain Python.

import os
import sys

# Current working directory: root of the build tree.
cwd = os.getcwd()
ovjtools = os.getenv('OVJ_TOOLS')
# Java toolchain shipped under $OVJ_TOOLS/java/bin.
javaBinDir = os.path.join(ovjtools, 'java', 'bin')

# Build environment: point JAVA_HOME at the bundled JDK and put its bin
# directory first on PATH so the makefiles pick it up.
env = Environment(ENV = {'JAVA_HOME' : javaBinDir,
                 'PATH' : javaBinDir + ':' + os.environ['PATH']})

# Compile the helper jars via their makefiles.
env.Execute('cd vjunit && make -f makevjunit')
env.Execute('cd VJQA/src && make -f makexmlcheck')

# Start from a clean output tree.
Execute('rm -rf ovj_qa')
path = os.path.join(cwd, 'ovj_qa')
if not os.path.exists(path):
    os.makedirs(path)

# Copy the QA tree, drop the sources, and install the freshly built
# jars and class files into the distribution layout.
Execute('cp -r VJQA ovj_qa/OVJQA')
Execute('rm -rf ovj_qa/OVJQA/src')
Execute('rm -f ovj_qa/OVJQA/bin/lib/vjunit.jar')
Execute('cp vjunit/vjunit.jar ovj_qa/OVJQA/bin/lib/')
Execute('rm -f ovj_qa/OVJQA/java/*')
Execute('cp VJQA/src/XmlTest.jar VJQA/src/*.class ovj_qa/OVJQA/java/')
Execute('cp -r vjtest ovj_qa/ovjtest')
normal
|
{
"blob_id": "549d7368d49cf2f4d2c6e83e300f31db981b62bd",
"index": 6285,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nenv.Execute('cd vjunit && make -f makevjunit')\nenv.Execute('cd VJQA/src && make -f makexmlcheck')\nExecute('rm -rf ovj_qa')\n<mask token>\nif not os.path.exists(path):\n os.makedirs(path)\nExecute('cp -r VJQA ovj_qa/OVJQA')\nExecute('rm -rf ovj_qa/OVJQA/src')\nExecute('rm -f ovj_qa/OVJQA/bin/lib/vjunit.jar')\nExecute('cp vjunit/vjunit.jar ovj_qa/OVJQA/bin/lib/')\nExecute('rm -f ovj_qa/OVJQA/java/*')\nExecute('cp VJQA/src/XmlTest.jar VJQA/src/*.class ovj_qa/OVJQA/java/')\nExecute('cp -r vjtest ovj_qa/ovjtest')\n",
"step-3": "<mask token>\ncwd = os.getcwd()\novjtools = os.getenv('OVJ_TOOLS')\njavaBinDir = os.path.join(ovjtools, 'java', 'bin')\nenv = Environment(ENV={'JAVA_HOME': javaBinDir, 'PATH': javaBinDir + ':' +\n os.environ['PATH']})\nenv.Execute('cd vjunit && make -f makevjunit')\nenv.Execute('cd VJQA/src && make -f makexmlcheck')\nExecute('rm -rf ovj_qa')\npath = os.path.join(cwd, 'ovj_qa')\nif not os.path.exists(path):\n os.makedirs(path)\nExecute('cp -r VJQA ovj_qa/OVJQA')\nExecute('rm -rf ovj_qa/OVJQA/src')\nExecute('rm -f ovj_qa/OVJQA/bin/lib/vjunit.jar')\nExecute('cp vjunit/vjunit.jar ovj_qa/OVJQA/bin/lib/')\nExecute('rm -f ovj_qa/OVJQA/java/*')\nExecute('cp VJQA/src/XmlTest.jar VJQA/src/*.class ovj_qa/OVJQA/java/')\nExecute('cp -r vjtest ovj_qa/ovjtest')\n",
"step-4": "import os\nimport sys\ncwd = os.getcwd()\novjtools = os.getenv('OVJ_TOOLS')\njavaBinDir = os.path.join(ovjtools, 'java', 'bin')\nenv = Environment(ENV={'JAVA_HOME': javaBinDir, 'PATH': javaBinDir + ':' +\n os.environ['PATH']})\nenv.Execute('cd vjunit && make -f makevjunit')\nenv.Execute('cd VJQA/src && make -f makexmlcheck')\nExecute('rm -rf ovj_qa')\npath = os.path.join(cwd, 'ovj_qa')\nif not os.path.exists(path):\n os.makedirs(path)\nExecute('cp -r VJQA ovj_qa/OVJQA')\nExecute('rm -rf ovj_qa/OVJQA/src')\nExecute('rm -f ovj_qa/OVJQA/bin/lib/vjunit.jar')\nExecute('cp vjunit/vjunit.jar ovj_qa/OVJQA/bin/lib/')\nExecute('rm -f ovj_qa/OVJQA/java/*')\nExecute('cp VJQA/src/XmlTest.jar VJQA/src/*.class ovj_qa/OVJQA/java/')\nExecute('cp -r vjtest ovj_qa/ovjtest')\n",
"step-5": "#!\n\nimport os\nimport sys\n\n#get current working directory\ncwd = os.getcwd()\novjtools = os.getenv('OVJ_TOOLS')\njavaBinDir = os.path.join(ovjtools, 'java', 'bin')\n\n# get the envirionment\nenv = Environment(ENV = {'JAVA_HOME' : javaBinDir,\n 'PATH' : javaBinDir + ':' + os.environ['PATH']})\n\nenv.Execute('cd vjunit && make -f makevjunit')\nenv.Execute('cd VJQA/src && make -f makexmlcheck')\n\nExecute('rm -rf ovj_qa')\npath = os.path.join(cwd, 'ovj_qa')\nif not os.path.exists(path):\n os.makedirs(path)\n\nExecute('cp -r VJQA ovj_qa/OVJQA')\nExecute('rm -rf ovj_qa/OVJQA/src')\nExecute('rm -f ovj_qa/OVJQA/bin/lib/vjunit.jar')\nExecute('cp vjunit/vjunit.jar ovj_qa/OVJQA/bin/lib/')\nExecute('rm -f ovj_qa/OVJQA/java/*')\nExecute('cp VJQA/src/XmlTest.jar VJQA/src/*.class ovj_qa/OVJQA/java/')\nExecute('cp -r vjtest ovj_qa/ovjtest')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Leg:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def smelly(self):
return self.__smelly
<|reserved_special_token_0|>
def is_smelly(self):
return self.__smelly
<|reserved_special_token_1|>
class Leg:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def smelly(self):
return self.__smelly
@smelly.setter
def smelly(self, smell):
self.__smelly = smell
def is_smelly(self):
return self.__smelly
<|reserved_special_token_1|>
class Leg:
<|reserved_special_token_0|>
def bend_knee(self):
print('knee bent')
@property
def smelly(self):
return self.__smelly
@smelly.setter
def smelly(self, smell):
self.__smelly = smell
def is_smelly(self):
return self.__smelly
<|reserved_special_token_1|>
class Leg:
__smelly = True
def bend_knee(self):
print('knee bent')
@property
def smelly(self):
return self.__smelly
@smelly.setter
def smelly(self, smell):
self.__smelly = smell
def is_smelly(self):
return self.__smelly
<|reserved_special_token_1|>
class Leg():
    """A leg that tracks whether it is smelly."""

    # Class-level default: every leg starts out smelly until a value is
    # explicitly assigned through the ``smelly`` setter.
    __smelly = True

    def bend_knee(self):
        """Bend the knee and report it on stdout."""
        print("knee bent")

    @property
    def smelly(self):
        """bool: current smelliness of this leg."""
        return self.__smelly

    @smelly.setter
    def smelly(self, smell):
        # Assigning through the property shadows the class-level default
        # with a per-instance attribute.
        self.__smelly = smell

    def is_smelly(self):
        """Legacy accessor; returns the same value as ``smelly``."""
        return self.smelly
|
flexible
|
{
"blob_id": "a4ecc578a163ee4657a2c9302f79f15c2e4e39de",
"index": 672,
"step-1": "class Leg:\n <mask token>\n <mask token>\n\n @property\n def smelly(self):\n return self.__smelly\n <mask token>\n\n def is_smelly(self):\n return self.__smelly\n",
"step-2": "class Leg:\n <mask token>\n <mask token>\n\n @property\n def smelly(self):\n return self.__smelly\n\n @smelly.setter\n def smelly(self, smell):\n self.__smelly = smell\n\n def is_smelly(self):\n return self.__smelly\n",
"step-3": "class Leg:\n <mask token>\n\n def bend_knee(self):\n print('knee bent')\n\n @property\n def smelly(self):\n return self.__smelly\n\n @smelly.setter\n def smelly(self, smell):\n self.__smelly = smell\n\n def is_smelly(self):\n return self.__smelly\n",
"step-4": "class Leg:\n __smelly = True\n\n def bend_knee(self):\n print('knee bent')\n\n @property\n def smelly(self):\n return self.__smelly\n\n @smelly.setter\n def smelly(self, smell):\n self.__smelly = smell\n\n def is_smelly(self):\n return self.__smelly\n",
"step-5": "class Leg():\n __smelly = True\n\n def bend_knee(self):\n print(\"knee bent\")\n\n\n @property\n def smelly(self):\n return self.__smelly\n\n @smelly.setter\n def smelly(self,smell):\n self.__smelly = smell\n \n\n def is_smelly(self):\n return self.__smelly",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from collections import defaultdict, Counter
import numpy as np
import sys
import re
def parseFile(file, frequency_tree):
    """Parse a SAM file and compute summary features for its reads.

    file: path to a SAM-formatted alignment file. Every line is parsed as
        a read record, so the file is assumed to have no header lines
        (NOTE(review): confirm upstream strips '@' headers).
    frequency_tree: mapping from chromosome number to an interval tree
        supporting find_overlap(start, end) over gene annotations.

    Returns a 12-element feature list:
        [gene_annotation_percent, read_lengths_average, read_frequency,
         std_of_pos_diff, mean_of_pos_diffs, num_chromosomes,
         max_position_difference, min_position_difference,
         fraction_A, fraction_C, fraction_G, fraction_T]
    or None when position statistics cannot be computed (e.g. no mapped
    reads, so the position-difference list is empty).
    """
    # NOTE(review): readnumber and colon_spliter are compiled but never
    # used in this function.
    readnumber = re.compile('[r]+\d+')
    line_spliter = re.compile('\t+')
    colon_spliter = re.compile(':')
    # Read classification counters, derived from the SAM FLAG field.
    forward_reads = 0
    reverse_reads = 0
    unmatched_reads = 0
    # chromosome number -> list of mapped read start positions.
    read_positions = defaultdict(list)
    position_differences = []
    # NOTE(review): the two lists below are never used.
    position_differences_stdv_list = []
    total_position_diffs = []
    read_lengths_count = 0
    read_lengths_total = 0
    read_frequency = 0
    read_lengths_average = 0
    num_chromosomes = 0
    # Per-base counts accumulated across all read sequences.
    num_a = 0
    num_c = 0
    num_g = 0
    num_t = 0


    print("############# OPENING SAM FILE", file=sys.stderr)
    with open(file, 'rt') as fp:
        line = fp.readline()
        while line:
            subline = line_spliter.split(line)
            line = fp.readline()
            # FLAG bit 0x4: segment unmapped; bit 0x10: reverse strand.
            if (int(subline[1]) & 4 == 4):
                unmatched_reads += 1
            elif (int(subline[1]) & 16 == 16):
                reverse_reads += 1
            else:
                forward_reads += 1
            # Column 10 (index 9) is the read sequence.
            read = subline[9]
            read_lengths_count += 1
            read_lengths_total += len(read)
            bases_count = Counter(read)
            num_a += bases_count["A"]
            num_c += bases_count["C"]
            num_g += bases_count["G"]
            num_t += bases_count["T"]
            # Column 3 (index 2) is the reference name (e.g. "chr7");
            # -1 means unmapped or non-numeric chromosome -> skipped.
            chromosome = getChromosome(subline[2])
            if chromosome != -1:
                read_positions[chromosome].append(int(subline[3]))
    if read_lengths_count != 0:
        read_lengths_average = read_lengths_total / read_lengths_count
    # Fraction of reads that mapped (forward or reverse strand).
    if (forward_reads + reverse_reads + unmatched_reads) != 0:
        read_frequency = (forward_reads + reverse_reads) / (forward_reads + reverse_reads + unmatched_reads)

    # Fraction of mapped positions overlapping at least one annotation.
    gene_annotation_match = 0
    gene_annotation_total = 0
    gene_annotation_percent = 0
    for key in read_positions.keys():
        for position in read_positions[key]:
            #TODO there is for sure a better way to do this than with a break
            # The break counts at most one overlap per position.
            for _ in frequency_tree[key].find_overlap(position, position):
                gene_annotation_match += 1
                break
            gene_annotation_total += 1
    if gene_annotation_total != 0:
        gene_annotation_percent = gene_annotation_match / gene_annotation_total
    print("gene_annotation_percent = " + str(gene_annotation_percent))

    # Gaps between consecutive sorted read positions, per chromosome.
    for _, position_list in read_positions.items():
        position_list.sort()
        num_chromosomes += 1
        for i in range(len(position_list) - 1):
            position_differences.append(position_list[i + 1] - position_list[i])
    try:
        std_of_pos_diff = np.std(position_differences)
        mean_of_pos_diffs = np.nanmean(position_differences)
        max_position_difference = np.amax(position_differences)
        min_position_difference = np.amin(position_differences)
    except:
        # np.amax / np.amin raise on an empty list (no mapped reads).
        return None
    return [gene_annotation_percent, read_lengths_average, read_frequency, std_of_pos_diff, mean_of_pos_diffs, num_chromosomes, max_position_difference, min_position_difference, num_a/ read_lengths_total, num_c/ read_lengths_total, num_g / read_lengths_total, num_t / read_lengths_total]
def parseString(txt, frequency_tree):
    """Parse SAM records from a string and compute summary features.

    Same feature extraction as parseFile, but over an in-memory string of
    newline-separated SAM records, and without the per-base composition
    features.

    txt: newline-separated SAM record lines (no header lines expected).
    frequency_tree: mapping from chromosome number to an interval tree
        supporting find_overlap(start, end) over gene annotations.

    Returns an 8-element feature list:
        [gene_annotation_percent, read_lengths_average, read_frequency,
         std_of_pos_diff, mean_of_pos_diffs, num_chromosomes,
         max_position_difference, min_position_difference]
    or None when position statistics cannot be computed.
    """
    spliter = re.compile('\n+')
    # NOTE(review): readnumber and colon_spliter are compiled but never
    # used in this function.
    readnumber = re.compile('[r]+\d+')
    line_spliter = re.compile('\t+')
    colon_spliter = re.compile(':')
    # Read classification counters, derived from the SAM FLAG field.
    forward_reads = 0
    reverse_reads = 0
    unmatched_reads = 0
    # chromosome number -> list of mapped read start positions.
    read_positions = defaultdict(list)
    position_differences = []
    # NOTE(review): the two lists below are never used.
    position_differences_stdv_list = []
    total_position_diffs = []
    read_lengths_count = 0
    read_lengths_total = 0
    read_frequency = 0
    read_lengths_average = 0
    num_chromosomes = 0

    lines = spliter.split(txt)
    # Iterate over every record; the final split element is skipped
    # (presumably the empty string left by a trailing newline —
    # NOTE(review): confirm input always ends with a newline).
    for i in range(len(lines) - 1):
        subline = line_spliter.split(lines[i])
        # FLAG bit 0x4: segment unmapped; bit 0x10: reverse strand.
        if (int(subline[1]) & 4 == 4):
            unmatched_reads += 1
        elif (int(subline[1]) & 16 == 16):
            reverse_reads += 1
        else:
            forward_reads += 1
        # Column 10 (index 9) is the read sequence.
        read = subline[9]
        read_lengths_count += 1
        read_lengths_total += len(read)
        # Column 3 (index 2) is the reference name; -1 -> skipped.
        chromosome = getChromosome(subline[2])
        if chromosome != -1:
            read_positions[chromosome].append(int(subline[3]))
    if read_lengths_count != 0:
        read_lengths_average = read_lengths_total / read_lengths_count
    # Fraction of reads that mapped (forward or reverse strand).
    if (forward_reads + reverse_reads + unmatched_reads) != 0:
        read_frequency = (forward_reads + reverse_reads) / (forward_reads + reverse_reads + unmatched_reads)

    # Fraction of mapped positions overlapping at least one annotation.
    gene_annotation_match = 0
    gene_annotation_total = 0
    gene_annotation_percent = 0
    for key in read_positions.keys():
        for position in read_positions[key]:
            #TODO there is for sure a better way to do this than with a break
            # The break counts at most one overlap per position.
            for _ in frequency_tree[key].find_overlap(position, position):
                gene_annotation_match += 1
                break
            gene_annotation_total += 1
    if gene_annotation_total != 0:
        gene_annotation_percent = gene_annotation_match / gene_annotation_total
    print("gene_annotation_percent = " + str(gene_annotation_percent))

    # Gaps between consecutive sorted read positions, per chromosome.
    for _, position_list in read_positions.items():
        position_list.sort()
        num_chromosomes += 1
        for i in range(len(position_list) - 1):
            position_differences.append(position_list[i + 1] - position_list[i])
    try:
        std_of_pos_diff = np.std(position_differences)
        mean_of_pos_diffs = np.nanmean(position_differences)
        max_position_difference = np.amax(position_differences)
        min_position_difference = np.amin(position_differences)
    except:
        # np.amax / np.amin raise on an empty list (no mapped reads).
        return None
    return [gene_annotation_percent, read_lengths_average, read_frequency, std_of_pos_diff, mean_of_pos_diffs, num_chromosomes, max_position_difference, min_position_difference]
def getChromosome(name):
    """Parse a chromosome number from a reference name like ``chr12``.

    name: SAM reference-name column value, e.g. "chr7", "chrX", or "*"
        for an unmapped read.

    Returns the integer chromosome number, or -1 for unmapped reads
    ("*"), the X chromosome, or any non-numeric chromosome name.

    Note: the parameter was renamed from ``str`` (which shadowed the
    builtin); all in-file callers pass it positionally.
    """
    # "*" marks an unmapped read in SAM; "chrX" has no numeric id.
    if name == "*" or name[3:] == 'X':
        return -1
    try:
        return int(name[3:])
    except ValueError:
        # Non-numeric chromosome (e.g. "chrY", "chrM") or a name too
        # short to carry a number after the "chr" prefix.
        return -1
|
normal
|
{
"blob_id": "227b71cb6d4cde8f498ad19c1c5f95f7fc572752",
"index": 6995,
"step-1": "<mask token>\n\n\ndef getChromosome(str):\n if str == '*' or str[3:] == 'X':\n return -1\n try:\n return int(str[3:])\n except:\n return -1\n",
"step-2": "<mask token>\n\n\ndef parseFile(file, frequency_tree):\n readnumber = re.compile('[r]+\\\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n num_a = 0\n num_c = 0\n num_g = 0\n num_t = 0\n print('############# OPENING SAM FILE', file=sys.stderr)\n with open(file, 'rt') as fp:\n line = fp.readline()\n while line:\n subline = line_spliter.split(line)\n line = fp.readline()\n if int(subline[1]) & 4 == 4:\n unmatched_reads += 1\n elif int(subline[1]) & 16 == 16:\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n bases_count = Counter(read)\n num_a += bases_count['A']\n num_c += bases_count['C']\n num_g += bases_count['G']\n num_t += bases_count['T']\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if forward_reads + reverse_reads + unmatched_reads != 0:\n read_frequency = (forward_reads + reverse_reads) / (\n forward_reads + reverse_reads + unmatched_reads)\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = (gene_annotation_match /\n gene_annotation_total)\n print('gene_annotation_percent = ' + str(gene_annotation_percent))\n for _, position_list in read_positions.items():\n 
position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] -\n position_list[i])\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average,\n read_frequency, std_of_pos_diff, mean_of_pos_diffs,\n num_chromosomes, max_position_difference,\n min_position_difference, num_a / read_lengths_total, num_c /\n read_lengths_total, num_g / read_lengths_total, num_t /\n read_lengths_total]\n\n\n<mask token>\n\n\ndef getChromosome(str):\n if str == '*' or str[3:] == 'X':\n return -1\n try:\n return int(str[3:])\n except:\n return -1\n",
"step-3": "<mask token>\n\n\ndef parseFile(file, frequency_tree):\n readnumber = re.compile('[r]+\\\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n num_a = 0\n num_c = 0\n num_g = 0\n num_t = 0\n print('############# OPENING SAM FILE', file=sys.stderr)\n with open(file, 'rt') as fp:\n line = fp.readline()\n while line:\n subline = line_spliter.split(line)\n line = fp.readline()\n if int(subline[1]) & 4 == 4:\n unmatched_reads += 1\n elif int(subline[1]) & 16 == 16:\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n bases_count = Counter(read)\n num_a += bases_count['A']\n num_c += bases_count['C']\n num_g += bases_count['G']\n num_t += bases_count['T']\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if forward_reads + reverse_reads + unmatched_reads != 0:\n read_frequency = (forward_reads + reverse_reads) / (\n forward_reads + reverse_reads + unmatched_reads)\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = (gene_annotation_match /\n gene_annotation_total)\n print('gene_annotation_percent = ' + str(gene_annotation_percent))\n for _, position_list in read_positions.items():\n 
position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] -\n position_list[i])\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average,\n read_frequency, std_of_pos_diff, mean_of_pos_diffs,\n num_chromosomes, max_position_difference,\n min_position_difference, num_a / read_lengths_total, num_c /\n read_lengths_total, num_g / read_lengths_total, num_t /\n read_lengths_total]\n\n\ndef parseString(txt, frequency_tree):\n spliter = re.compile('\\n+')\n readnumber = re.compile('[r]+\\\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n lines = spliter.split(txt)\n for i in range(len(lines) - 1):\n subline = line_spliter.split(lines[i])\n if int(subline[1]) & 4 == 4:\n unmatched_reads += 1\n elif int(subline[1]) & 16 == 16:\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if forward_reads + reverse_reads + unmatched_reads != 0:\n read_frequency = (forward_reads + reverse_reads) / (forward_reads +\n reverse_reads + unmatched_reads)\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key 
in read_positions.keys():\n for position in read_positions[key]:\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = gene_annotation_match / gene_annotation_total\n print('gene_annotation_percent = ' + str(gene_annotation_percent))\n for _, position_list in read_positions.items():\n position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] - position_list[i]\n )\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average, read_frequency,\n std_of_pos_diff, mean_of_pos_diffs, num_chromosomes,\n max_position_difference, min_position_difference]\n\n\ndef getChromosome(str):\n if str == '*' or str[3:] == 'X':\n return -1\n try:\n return int(str[3:])\n except:\n return -1\n",
"step-4": "from collections import defaultdict, Counter\nimport numpy as np\nimport sys\nimport re\n\n\ndef parseFile(file, frequency_tree):\n readnumber = re.compile('[r]+\\\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n num_a = 0\n num_c = 0\n num_g = 0\n num_t = 0\n print('############# OPENING SAM FILE', file=sys.stderr)\n with open(file, 'rt') as fp:\n line = fp.readline()\n while line:\n subline = line_spliter.split(line)\n line = fp.readline()\n if int(subline[1]) & 4 == 4:\n unmatched_reads += 1\n elif int(subline[1]) & 16 == 16:\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n bases_count = Counter(read)\n num_a += bases_count['A']\n num_c += bases_count['C']\n num_g += bases_count['G']\n num_t += bases_count['T']\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if forward_reads + reverse_reads + unmatched_reads != 0:\n read_frequency = (forward_reads + reverse_reads) / (\n forward_reads + reverse_reads + unmatched_reads)\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = (gene_annotation_match /\n gene_annotation_total)\n print('gene_annotation_percent = ' + 
str(gene_annotation_percent))\n for _, position_list in read_positions.items():\n position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] -\n position_list[i])\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average,\n read_frequency, std_of_pos_diff, mean_of_pos_diffs,\n num_chromosomes, max_position_difference,\n min_position_difference, num_a / read_lengths_total, num_c /\n read_lengths_total, num_g / read_lengths_total, num_t /\n read_lengths_total]\n\n\ndef parseString(txt, frequency_tree):\n spliter = re.compile('\\n+')\n readnumber = re.compile('[r]+\\\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n lines = spliter.split(txt)\n for i in range(len(lines) - 1):\n subline = line_spliter.split(lines[i])\n if int(subline[1]) & 4 == 4:\n unmatched_reads += 1\n elif int(subline[1]) & 16 == 16:\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if forward_reads + reverse_reads + unmatched_reads != 0:\n read_frequency = (forward_reads + reverse_reads) / (forward_reads +\n reverse_reads + unmatched_reads)\n 
gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = gene_annotation_match / gene_annotation_total\n print('gene_annotation_percent = ' + str(gene_annotation_percent))\n for _, position_list in read_positions.items():\n position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] - position_list[i]\n )\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average, read_frequency,\n std_of_pos_diff, mean_of_pos_diffs, num_chromosomes,\n max_position_difference, min_position_difference]\n\n\ndef getChromosome(str):\n if str == '*' or str[3:] == 'X':\n return -1\n try:\n return int(str[3:])\n except:\n return -1\n",
"step-5": "from collections import defaultdict, Counter\nimport numpy as np\nimport sys\nimport re\n\ndef parseFile(file, frequency_tree):\n readnumber = re.compile('[r]+\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n num_a = 0\n num_c = 0\n num_g = 0\n num_t = 0\n\n\n print(\"############# OPENING SAM FILE\", file=sys.stderr)\n with open(file, 'rt') as fp:\n line = fp.readline()\n while line:\n subline = line_spliter.split(line)\n line = fp.readline()\n if (int(subline[1]) & 4 == 4):\n unmatched_reads += 1\n elif (int(subline[1]) & 16 == 16):\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n bases_count = Counter(read)\n num_a += bases_count[\"A\"]\n num_c += bases_count[\"C\"]\n num_g += bases_count[\"G\"]\n num_t += bases_count[\"T\"]\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if (forward_reads + reverse_reads + unmatched_reads) != 0:\n read_frequency = (forward_reads + reverse_reads) / (forward_reads + reverse_reads + unmatched_reads)\n\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n #TODO there is for sure a better way to do this than with a break\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = gene_annotation_match / 
gene_annotation_total\n print(\"gene_annotation_percent = \" + str(gene_annotation_percent))\n\n for _, position_list in read_positions.items():\n position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] - position_list[i])\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average, read_frequency, std_of_pos_diff, mean_of_pos_diffs, num_chromosomes, max_position_difference, min_position_difference, num_a/ read_lengths_total, num_c/ read_lengths_total, num_g / read_lengths_total, num_t / read_lengths_total]\n\n\n\ndef parseString(txt, frequency_tree):\n spliter = re.compile('\\n+')\n readnumber = re.compile('[r]+\\d+')\n line_spliter = re.compile('\\t+')\n colon_spliter = re.compile(':')\n forward_reads = 0\n reverse_reads = 0\n unmatched_reads = 0\n read_positions = defaultdict(list)\n position_differences = []\n position_differences_stdv_list = []\n total_position_diffs = []\n read_lengths_count = 0\n read_lengths_total = 0\n read_frequency = 0\n read_lengths_average = 0\n num_chromosomes = 0\n\n lines = spliter.split(txt)\n #Itterating though everyline\n for i in range(len(lines) - 1):\n subline = line_spliter.split(lines[i])\n if (int(subline[1]) & 4 == 4):\n unmatched_reads += 1\n elif (int(subline[1]) & 16 == 16):\n reverse_reads += 1\n else:\n forward_reads += 1\n read = subline[9]\n read_lengths_count += 1\n read_lengths_total += len(read)\n chromosome = getChromosome(subline[2])\n if chromosome != -1:\n read_positions[chromosome].append(int(subline[3]))\n if read_lengths_count != 0:\n read_lengths_average = read_lengths_total / read_lengths_count\n if (forward_reads + reverse_reads + unmatched_reads) != 0:\n read_frequency = 
(forward_reads + reverse_reads) / (forward_reads + reverse_reads + unmatched_reads)\n\n gene_annotation_match = 0\n gene_annotation_total = 0\n gene_annotation_percent = 0\n for key in read_positions.keys():\n for position in read_positions[key]:\n #TODO there is for sure a better way to do this than with a break\n for _ in frequency_tree[key].find_overlap(position, position):\n gene_annotation_match += 1\n break\n gene_annotation_total += 1\n if gene_annotation_total != 0:\n gene_annotation_percent = gene_annotation_match / gene_annotation_total\n print(\"gene_annotation_percent = \" + str(gene_annotation_percent))\n\n for _, position_list in read_positions.items():\n position_list.sort()\n num_chromosomes += 1\n for i in range(len(position_list) - 1):\n position_differences.append(position_list[i + 1] - position_list[i])\n try:\n std_of_pos_diff = np.std(position_differences)\n mean_of_pos_diffs = np.nanmean(position_differences)\n max_position_difference = np.amax(position_differences)\n min_position_difference = np.amin(position_differences)\n except:\n return None\n return [gene_annotation_percent, read_lengths_average, read_frequency, std_of_pos_diff, mean_of_pos_diffs, num_chromosomes, max_position_difference, min_position_difference]\n\ndef getChromosome(str):\n if str == \"*\" or str[3:] == 'X':\n return -1\n try:\n return int(str[3:])\n except:\n return -1\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class QualityPatch:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def getReleventPatches(self):
relevent_patches = []
for i, coor in enumerate(self.patch_coors):
percent = self.patchQualityInsurance(coor)
if percent > 0.5:
relevent_patches.append([coor, percent])
if i % 10000 == 0:
print(i, '/', len(self.patch_coors), 'dic len', len(
relevent_patches), ' from', len(self.patch_coors))
return relevent_patches
def checkingfunction(self, checking_coors=(40000, 90000)):
if checking_coors[0] < 0 or checking_coors[0
] < 0 or self.slide.level_dimensions[self.patch_level][0
] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0
] or self.slide.level_dimensions[self.patch_level][1
] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:
raise ValueError('the patch location with patch size is not valid.'
)
image = self.slide.read_region(checking_coors, self.patch_level,
self.patch_size)
percent = self.patchQualityInsurance(checking_coors)
fig, ax = plt.subplots(nrows=1, ncols=3)
plt.tight_layout()
ax[0].set_title('tissue percentage %.02f' % percent)
ax[0].axis('off')
ax[0].imshow(image)
ax[1].set_title('tissue label')
ax[1].axis('off')
ax[1].imshow(self.label.T, cmap='gray')
ax[2].set_title('label with patch')
ax[2].axis('off')
ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))
plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')
plt.close('all')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class QualityPatch:
def __init__(self, original_img_path, label_img_path, patch_level,
patch_size):
"""
parameter:
original_img_path(str): the source of image
label_img_path(str): label image
patch_level(int): the level that the patch belongs to
patch_size(tuple): size of patch(x,y)
attributes:
self.slide(Openslide): the slide that the patch belongs to
self.original_img_path(str) : the path of the lide
self.label_img_path(str) : label_img_path
self.patch_level(int) : the level that the patch belongs to
self.patch_size = patch_size
self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline
self.label(np array) : the image of label
self.label_size(tuple) : the size of label
self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1
"""
self.slide = openslide.OpenSlide(original_img_path)
slide_width, slide_height = self.slide.dimensions
self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255
self.patch_coors = [(w, h) for w in range(0, slide_width -
patch_size[0], patch_size[0]) for h in range(0, slide_height -
patch_size[1], patch_size[1])]
self.original_img_path = original_img_path
self.label_img_path = label_img_path
self.patch_level = patch_level
self.patch_size = patch_size
self.label = self.label.T
self.level_dim = self.slide.level_dimensions[patch_level]
self.label_size = self.label.shape
self.scale = self.label_size[0] / self.level_dim[0], self.label_size[1
] / self.level_dim[1]
self.adj_patch_size_label = self.calculateAdjPatchSize()
<|reserved_special_token_0|>
def calculateAdjPatchSize(self):
return int(self.scale[0] * self.patch_size[0]) + 1, int(self.scale[
1] * self.patch_size[1]) + 1
def patchQualityInsurance(self, patch_location):
label_coordinates = self.calculateLabelCoordinates(patch_location)
percent = np.sum(self.label[label_coordinates[0]:label_coordinates[
0] + self.adj_patch_size_label[0], label_coordinates[1]:
label_coordinates[1] + self.adj_patch_size_label[1]]) / (self.
adj_patch_size_label[0] * self.adj_patch_size_label[1])
return percent
<|reserved_special_token_0|>
def getReleventPatches(self):
relevent_patches = []
for i, coor in enumerate(self.patch_coors):
percent = self.patchQualityInsurance(coor)
if percent > 0.5:
relevent_patches.append([coor, percent])
if i % 10000 == 0:
print(i, '/', len(self.patch_coors), 'dic len', len(
relevent_patches), ' from', len(self.patch_coors))
return relevent_patches
def checkingfunction(self, checking_coors=(40000, 90000)):
if checking_coors[0] < 0 or checking_coors[0
] < 0 or self.slide.level_dimensions[self.patch_level][0
] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0
] or self.slide.level_dimensions[self.patch_level][1
] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:
raise ValueError('the patch location with patch size is not valid.'
)
image = self.slide.read_region(checking_coors, self.patch_level,
self.patch_size)
percent = self.patchQualityInsurance(checking_coors)
fig, ax = plt.subplots(nrows=1, ncols=3)
plt.tight_layout()
ax[0].set_title('tissue percentage %.02f' % percent)
ax[0].axis('off')
ax[0].imshow(image)
ax[1].set_title('tissue label')
ax[1].axis('off')
ax[1].imshow(self.label.T, cmap='gray')
ax[2].set_title('label with patch')
ax[2].axis('off')
ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))
plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')
plt.close('all')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class QualityPatch:
def __init__(self, original_img_path, label_img_path, patch_level,
patch_size):
"""
parameter:
original_img_path(str): the source of image
label_img_path(str): label image
patch_level(int): the level that the patch belongs to
patch_size(tuple): size of patch(x,y)
attributes:
self.slide(Openslide): the slide that the patch belongs to
self.original_img_path(str) : the path of the lide
self.label_img_path(str) : label_img_path
self.patch_level(int) : the level that the patch belongs to
self.patch_size = patch_size
self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline
self.label(np array) : the image of label
self.label_size(tuple) : the size of label
self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1
"""
self.slide = openslide.OpenSlide(original_img_path)
slide_width, slide_height = self.slide.dimensions
self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255
self.patch_coors = [(w, h) for w in range(0, slide_width -
patch_size[0], patch_size[0]) for h in range(0, slide_height -
patch_size[1], patch_size[1])]
self.original_img_path = original_img_path
self.label_img_path = label_img_path
self.patch_level = patch_level
self.patch_size = patch_size
self.label = self.label.T
self.level_dim = self.slide.level_dimensions[patch_level]
self.label_size = self.label.shape
self.scale = self.label_size[0] / self.level_dim[0], self.label_size[1
] / self.level_dim[1]
self.adj_patch_size_label = self.calculateAdjPatchSize()
def calculateLabelCoordinates(self, patch_location):
return int(self.scale[0] * patch_location[0] / 2 ** self.patch_level
), int(self.scale[1] * patch_location[1] / 2 ** self.patch_level)
def calculateAdjPatchSize(self):
return int(self.scale[0] * self.patch_size[0]) + 1, int(self.scale[
1] * self.patch_size[1]) + 1
def patchQualityInsurance(self, patch_location):
label_coordinates = self.calculateLabelCoordinates(patch_location)
percent = np.sum(self.label[label_coordinates[0]:label_coordinates[
0] + self.adj_patch_size_label[0], label_coordinates[1]:
label_coordinates[1] + self.adj_patch_size_label[1]]) / (self.
adj_patch_size_label[0] * self.adj_patch_size_label[1])
return percent
<|reserved_special_token_0|>
def getReleventPatches(self):
relevent_patches = []
for i, coor in enumerate(self.patch_coors):
percent = self.patchQualityInsurance(coor)
if percent > 0.5:
relevent_patches.append([coor, percent])
if i % 10000 == 0:
print(i, '/', len(self.patch_coors), 'dic len', len(
relevent_patches), ' from', len(self.patch_coors))
return relevent_patches
def checkingfunction(self, checking_coors=(40000, 90000)):
if checking_coors[0] < 0 or checking_coors[0
] < 0 or self.slide.level_dimensions[self.patch_level][0
] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0
] or self.slide.level_dimensions[self.patch_level][1
] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:
raise ValueError('the patch location with patch size is not valid.'
)
image = self.slide.read_region(checking_coors, self.patch_level,
self.patch_size)
percent = self.patchQualityInsurance(checking_coors)
fig, ax = plt.subplots(nrows=1, ncols=3)
plt.tight_layout()
ax[0].set_title('tissue percentage %.02f' % percent)
ax[0].axis('off')
ax[0].imshow(image)
ax[1].set_title('tissue label')
ax[1].axis('off')
ax[1].imshow(self.label.T, cmap='gray')
ax[2].set_title('label with patch')
ax[2].axis('off')
ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))
plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')
plt.close('all')
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import cv2
import openslide
class QualityPatch:
    """Find tissue-bearing patches in a whole-slide image using a binary tissue mask."""

    def __init__(self, original_img_path, label_img_path, patch_level,
        patch_size):
        """
        parameter:
            original_img_path(str): path to the whole-slide image (OpenSlide-readable)
            label_img_path(str): path to the binary tissue-label image
            patch_level(int): OpenSlide pyramid level the patches are read at
            patch_size(tuple): size of patch (x, y) in pixels at patch_level

        attributes:
            self.slide(OpenSlide) : the opened slide
            self.patch_coors(list) : non-overlapping grid of candidate patch origins (level 0)
            self.scale(tuple) : label-image size divided by level size, per axis
            self.label(np.ndarray) : tissue mask normalized to [0, 1], transposed to (x, y)
            self.label_size(tuple) : shape of the transposed label
            self.adj_patch_size_label(tuple) : patch footprint in label pixels, at least 1 per axis
        """
        self.slide = openslide.OpenSlide(original_img_path)
        slide_width, slide_height = self.slide.dimensions
        # Normalize the 8-bit mask to [0, 1] so a sum counts tissue pixels directly.
        self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255
        # Tile the level-0 slide with non-overlapping patch-sized cells.
        self.patch_coors = [(w, h) for w in range(0, slide_width -
            patch_size[0], patch_size[0]) for h in range(0, slide_height -
            patch_size[1], patch_size[1])]
        self.original_img_path = original_img_path
        self.label_img_path = label_img_path
        self.patch_level = patch_level
        self.patch_size = patch_size
        # Transpose so the mask is indexed (x, y) like slide coordinates.
        self.label = self.label.T
        self.level_dim = self.slide.level_dimensions[patch_level]
        self.label_size = self.label.shape
        self.scale = (self.label_size[0] / self.level_dim[0],
                      self.label_size[1] / self.level_dim[1])
        self.adj_patch_size_label = self.calculateAdjPatchSize()

    def calculateLabelCoordinates(self, patch_location):
        """Map a level-0 patch origin to the corresponding label-image pixel.

        Divides by 2**patch_level (assumes power-of-two level downsampling --
        TODO confirm against the slide's actual level_downsamples) before
        applying the label scale.
        """
        return (int(self.scale[0] * patch_location[0] / 2 ** self.patch_level),
                int(self.scale[1] * patch_location[1] / 2 ** self.patch_level))

    def calculateAdjPatchSize(self):
        """Return the patch footprint in label pixels, rounded up so it is never zero."""
        return (int(self.scale[0] * self.patch_size[0]) + 1,
                int(self.scale[1] * self.patch_size[1]) + 1)

    def patchQualityInsurance(self, patch_location):
        """Return the fraction of tissue (label == 1) inside the patch at patch_location."""
        x, y = self.calculateLabelCoordinates(patch_location)
        w, h = self.adj_patch_size_label
        percent = np.sum(self.label[x:x + w, y:y + h]) / (w * h)
        return percent

    def getLabelWithPatchLocation(self, patch_location):
        """Return a copy of the label (back in image orientation) with the patch area set to 0.5 gray."""
        patch_image = np.ones(self.adj_patch_size_label) / 2
        label_with_patch_location = self.label.copy()
        x, y = self.calculateLabelCoordinates(patch_location)
        w, h = self.adj_patch_size_label
        label_with_patch_location[x:x + w, y:y + h] = patch_image
        return label_with_patch_location.T

    def getReleventPatches(self):
        """Return [[coor, tissue_fraction], ...] for grid patches covering more than 50% tissue."""
        relevent_patches = []
        for i, coor in enumerate(self.patch_coors):
            percent = self.patchQualityInsurance(coor)
            if percent > 0.5:
                relevent_patches.append([coor, percent])
            if i % 10000 == 0:
                # Progress report: candidates scanned vs. kept so far.
                print(i, '/', len(self.patch_coors), 'dic len', len(
                    relevent_patches), ' from', len(self.patch_coors))
        return relevent_patches

    def checkingfunction(self, checking_coors=(40000, 90000)):
        """Visual sanity check: save a 3-panel figure of the patch, the label, and the patch location.

        Raises:
            ValueError: if the patch at checking_coors falls outside the slide level.
        """
        # BUG FIX: the original tested checking_coors[0] < 0 twice and never
        # validated the y coordinate; validate both axes here.
        if (checking_coors[0] < 0 or checking_coors[1] < 0 or
                self.slide.level_dimensions[self.patch_level][0] <
                checking_coors[0] / 2 ** self.patch_level + self.patch_size[0] or
                self.slide.level_dimensions[self.patch_level][1] <
                checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]):
            raise ValueError('the patch location with patch size is not valid.'
                )
        image = self.slide.read_region(checking_coors, self.patch_level,
            self.patch_size)
        percent = self.patchQualityInsurance(checking_coors)
        fig, ax = plt.subplots(nrows=1, ncols=3)
        plt.tight_layout()
        ax[0].set_title('tissue percentage %.02f' % percent)
        ax[0].axis('off')
        ax[0].imshow(image)
        ax[1].set_title('tissue label')
        ax[1].axis('off')
        ax[1].imshow(self.label.T, cmap='gray')
        ax[2].set_title('label with patch')
        ax[2].axis('off')
        ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))
        plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')
        plt.close('all')
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import cv2
import openslide
class QualityPatch():
    """Find tissue-bearing patches in a whole-slide image using a binary tissue mask."""

    def __init__(self, original_img_path, label_img_path, patch_level, patch_size):
        """
        parameter:
            original_img_path(str): path to the whole-slide image (OpenSlide-readable)
            label_img_path(str): path to the binary tissue-label image
            patch_level(int): OpenSlide pyramid level the patches are read at
            patch_size(tuple): size of patch (x, y) in pixels at patch_level

        attributes:
            self.slide(OpenSlide) : the opened slide
            self.patch_coors(list) : non-overlapping grid of candidate patch origins (level 0)
            self.scale(tuple) : label-image size divided by level size, per axis
            self.label(np.ndarray) : tissue mask normalized to [0, 1], transposed to (x, y)
            self.label_size(tuple) : shape of the transposed label
            self.adj_patch_size_label(tuple) : patch footprint in label pixels, at least 1 per axis
        """
        self.slide = openslide.OpenSlide(original_img_path)
        slide_width, slide_height = self.slide.dimensions
        # Normalize the 8-bit mask to [0, 1] so a sum counts tissue pixels directly.
        self.label = (cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE)/255)
        # Tile the level-0 slide with non-overlapping patch-sized cells.
        self.patch_coors = [(w, h)
                            for w in range(0, slide_width - patch_size[0], patch_size[0])
                            for h in range(0, slide_height - patch_size[1], patch_size[1])]

        self.original_img_path = original_img_path
        self.label_img_path = label_img_path
        self.patch_level = patch_level
        self.patch_size = patch_size
        # Transpose so the mask is indexed (x, y) like slide coordinates.
        self.label = self.label.T
        self.level_dim = self.slide.level_dimensions[patch_level]

        self.label_size = self.label.shape
        self.scale = (self.label_size[0]/self.level_dim[0], self.label_size[1]/self.level_dim[1])
        self.adj_patch_size_label = self.calculateAdjPatchSize()

    def calculateLabelCoordinates(self, patch_location):
        """Map a level-0 patch origin to the corresponding label-image pixel.

        Divides by 2**patch_level (assumes power-of-two level downsampling --
        TODO confirm against the slide's actual level_downsamples) before
        applying the label scale.
        """
        return (int(self.scale[0]*patch_location[0]/2**(self.patch_level)),
                int(self.scale[1]*patch_location[1]/2**(self.patch_level)))

    def calculateAdjPatchSize(self):
        """Return the patch footprint in label pixels, rounded up so it is never zero."""
        return (int(self.scale[0] * self.patch_size[0])+1,
                int(self.scale[1] * self.patch_size[1])+1)

    def patchQualityInsurance(self, patch_location):
        """Return the fraction of tissue (label == 1) inside the patch at patch_location."""
        x, y = self.calculateLabelCoordinates(patch_location)
        w, h = self.adj_patch_size_label
        percent = np.sum(self.label[x:x+w, y:y+h]) / (w*h)
        return percent

    def getLabelWithPatchLocation(self, patch_location):
        """Return a copy of the label (back in image orientation) with the patch area set to 0.5 gray."""
        patch_image = np.ones(self.adj_patch_size_label)/2
        label_with_patch_location = self.label.copy()
        x, y = self.calculateLabelCoordinates(patch_location)
        w, h = self.adj_patch_size_label
        label_with_patch_location[x:x+w, y:y+h] = patch_image
        return label_with_patch_location.T

    def getReleventPatches(self):
        """Return [[coor, tissue_fraction], ...] for grid patches covering more than 50% tissue."""
        relevent_patches = []
        for i, coor in enumerate(self.patch_coors):
            percent = self.patchQualityInsurance(coor)
            if percent > .5:
                relevent_patches.append([coor, percent])
            if i % 10000 == 0:
                # Progress report: candidates scanned vs. kept so far.
                print(i, "/", len(self.patch_coors), "dic len", len(relevent_patches), " from", len(self.patch_coors))
        return relevent_patches

    def checkingfunction(self, checking_coors=(40000, 90000)):
        """Visual sanity check: save a 3-panel figure of the patch, the label, and the patch location.

        Raises:
            ValueError: if the patch at checking_coors falls outside the slide level.
        """
        # BUG FIX: the original tested checking_coors[0] < 0 twice and never
        # validated the y coordinate; validate both axes here.
        if checking_coors[0] < 0 or checking_coors[1] < 0 or\
           self.slide.level_dimensions[self.patch_level][0] < (checking_coors[0] / 2**(self.patch_level) + self.patch_size[0]) or\
           self.slide.level_dimensions[self.patch_level][1] < ((checking_coors[1] / 2**(self.patch_level) + self.patch_size[1])):
            raise ValueError("the patch location with patch size is not valid.")
        image = self.slide.read_region(checking_coors, self.patch_level, self.patch_size)
        percent = self.patchQualityInsurance(checking_coors)
        fig, ax = plt.subplots(nrows=1, ncols=3)
        plt.tight_layout()
        ax[0].set_title("tissue percentage %.02f"%percent)
        ax[0].axis('off')
        ax[0].imshow(image)
        ax[1].set_title("tissue label")
        ax[1].axis('off')
        ax[1].imshow(self.label.T, cmap='gray')
        ax[2].set_title("label with patch")
        ax[2].axis('off')
        ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))
        plt.savefig("test/check_read_region"+str(self.patch_level)+'.png')
        plt.close('all')
|
flexible
|
{
"blob_id": "0ad71f02e37f2744036b134c33e037a724fd38a6",
"index": 8049,
"step-1": "<mask token>\n\n\nclass QualityPatch:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def getReleventPatches(self):\n relevent_patches = []\n for i, coor in enumerate(self.patch_coors):\n percent = self.patchQualityInsurance(coor)\n if percent > 0.5:\n relevent_patches.append([coor, percent])\n if i % 10000 == 0:\n print(i, '/', len(self.patch_coors), 'dic len', len(\n relevent_patches), ' from', len(self.patch_coors))\n return relevent_patches\n\n def checkingfunction(self, checking_coors=(40000, 90000)):\n if checking_coors[0] < 0 or checking_coors[0\n ] < 0 or self.slide.level_dimensions[self.patch_level][0\n ] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0\n ] or self.slide.level_dimensions[self.patch_level][1\n ] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:\n raise ValueError('the patch location with patch size is not valid.'\n )\n image = self.slide.read_region(checking_coors, self.patch_level,\n self.patch_size)\n percent = self.patchQualityInsurance(checking_coors)\n fig, ax = plt.subplots(nrows=1, ncols=3)\n plt.tight_layout()\n ax[0].set_title('tissue percentage %.02f' % percent)\n ax[0].axis('off')\n ax[0].imshow(image)\n ax[1].set_title('tissue label')\n ax[1].axis('off')\n ax[1].imshow(self.label.T, cmap='gray')\n ax[2].set_title('label with patch')\n ax[2].axis('off')\n ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))\n plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')\n plt.close('all')\n",
"step-2": "<mask token>\n\n\nclass QualityPatch:\n\n def __init__(self, original_img_path, label_img_path, patch_level,\n patch_size):\n \"\"\"\n parameter:\n original_img_path(str): the source of image\n label_img_path(str): label image\n patch_level(int): the level that the patch belongs to\n patch_size(tuple): size of patch(x,y)\n\n attributes:\n self.slide(Openslide): the slide that the patch belongs to \n self.original_img_path(str) : the path of the lide\n self.label_img_path(str) : label_img_path\n self.patch_level(int) : the level that the patch belongs to\n self.patch_size = patch_size\n\n self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline\n self.label(np array) : the image of label\n self.label_size(tuple) : the size of label\n self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1\n \"\"\"\n self.slide = openslide.OpenSlide(original_img_path)\n slide_width, slide_height = self.slide.dimensions\n self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255\n self.patch_coors = [(w, h) for w in range(0, slide_width -\n patch_size[0], patch_size[0]) for h in range(0, slide_height -\n patch_size[1], patch_size[1])]\n self.original_img_path = original_img_path\n self.label_img_path = label_img_path\n self.patch_level = patch_level\n self.patch_size = patch_size\n self.label = self.label.T\n self.level_dim = self.slide.level_dimensions[patch_level]\n self.label_size = self.label.shape\n self.scale = self.label_size[0] / self.level_dim[0], self.label_size[1\n ] / self.level_dim[1]\n self.adj_patch_size_label = self.calculateAdjPatchSize()\n <mask token>\n\n def calculateAdjPatchSize(self):\n return int(self.scale[0] * self.patch_size[0]) + 1, int(self.scale[\n 1] * self.patch_size[1]) + 1\n\n def patchQualityInsurance(self, patch_location):\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n percent = 
np.sum(self.label[label_coordinates[0]:label_coordinates[\n 0] + self.adj_patch_size_label[0], label_coordinates[1]:\n label_coordinates[1] + self.adj_patch_size_label[1]]) / (self.\n adj_patch_size_label[0] * self.adj_patch_size_label[1])\n return percent\n <mask token>\n\n def getReleventPatches(self):\n relevent_patches = []\n for i, coor in enumerate(self.patch_coors):\n percent = self.patchQualityInsurance(coor)\n if percent > 0.5:\n relevent_patches.append([coor, percent])\n if i % 10000 == 0:\n print(i, '/', len(self.patch_coors), 'dic len', len(\n relevent_patches), ' from', len(self.patch_coors))\n return relevent_patches\n\n def checkingfunction(self, checking_coors=(40000, 90000)):\n if checking_coors[0] < 0 or checking_coors[0\n ] < 0 or self.slide.level_dimensions[self.patch_level][0\n ] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0\n ] or self.slide.level_dimensions[self.patch_level][1\n ] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:\n raise ValueError('the patch location with patch size is not valid.'\n )\n image = self.slide.read_region(checking_coors, self.patch_level,\n self.patch_size)\n percent = self.patchQualityInsurance(checking_coors)\n fig, ax = plt.subplots(nrows=1, ncols=3)\n plt.tight_layout()\n ax[0].set_title('tissue percentage %.02f' % percent)\n ax[0].axis('off')\n ax[0].imshow(image)\n ax[1].set_title('tissue label')\n ax[1].axis('off')\n ax[1].imshow(self.label.T, cmap='gray')\n ax[2].set_title('label with patch')\n ax[2].axis('off')\n ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))\n plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')\n plt.close('all')\n",
"step-3": "<mask token>\n\n\nclass QualityPatch:\n\n def __init__(self, original_img_path, label_img_path, patch_level,\n patch_size):\n \"\"\"\n parameter:\n original_img_path(str): the source of image\n label_img_path(str): label image\n patch_level(int): the level that the patch belongs to\n patch_size(tuple): size of patch(x,y)\n\n attributes:\n self.slide(Openslide): the slide that the patch belongs to \n self.original_img_path(str) : the path of the lide\n self.label_img_path(str) : label_img_path\n self.patch_level(int) : the level that the patch belongs to\n self.patch_size = patch_size\n\n self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline\n self.label(np array) : the image of label\n self.label_size(tuple) : the size of label\n self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1\n \"\"\"\n self.slide = openslide.OpenSlide(original_img_path)\n slide_width, slide_height = self.slide.dimensions\n self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255\n self.patch_coors = [(w, h) for w in range(0, slide_width -\n patch_size[0], patch_size[0]) for h in range(0, slide_height -\n patch_size[1], patch_size[1])]\n self.original_img_path = original_img_path\n self.label_img_path = label_img_path\n self.patch_level = patch_level\n self.patch_size = patch_size\n self.label = self.label.T\n self.level_dim = self.slide.level_dimensions[patch_level]\n self.label_size = self.label.shape\n self.scale = self.label_size[0] / self.level_dim[0], self.label_size[1\n ] / self.level_dim[1]\n self.adj_patch_size_label = self.calculateAdjPatchSize()\n\n def calculateLabelCoordinates(self, patch_location):\n return int(self.scale[0] * patch_location[0] / 2 ** self.patch_level\n ), int(self.scale[1] * patch_location[1] / 2 ** self.patch_level)\n\n def calculateAdjPatchSize(self):\n return int(self.scale[0] * self.patch_size[0]) + 1, int(self.scale[\n 1] * 
self.patch_size[1]) + 1\n\n def patchQualityInsurance(self, patch_location):\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n percent = np.sum(self.label[label_coordinates[0]:label_coordinates[\n 0] + self.adj_patch_size_label[0], label_coordinates[1]:\n label_coordinates[1] + self.adj_patch_size_label[1]]) / (self.\n adj_patch_size_label[0] * self.adj_patch_size_label[1])\n return percent\n <mask token>\n\n def getReleventPatches(self):\n relevent_patches = []\n for i, coor in enumerate(self.patch_coors):\n percent = self.patchQualityInsurance(coor)\n if percent > 0.5:\n relevent_patches.append([coor, percent])\n if i % 10000 == 0:\n print(i, '/', len(self.patch_coors), 'dic len', len(\n relevent_patches), ' from', len(self.patch_coors))\n return relevent_patches\n\n def checkingfunction(self, checking_coors=(40000, 90000)):\n if checking_coors[0] < 0 or checking_coors[0\n ] < 0 or self.slide.level_dimensions[self.patch_level][0\n ] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0\n ] or self.slide.level_dimensions[self.patch_level][1\n ] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:\n raise ValueError('the patch location with patch size is not valid.'\n )\n image = self.slide.read_region(checking_coors, self.patch_level,\n self.patch_size)\n percent = self.patchQualityInsurance(checking_coors)\n fig, ax = plt.subplots(nrows=1, ncols=3)\n plt.tight_layout()\n ax[0].set_title('tissue percentage %.02f' % percent)\n ax[0].axis('off')\n ax[0].imshow(image)\n ax[1].set_title('tissue label')\n ax[1].axis('off')\n ax[1].imshow(self.label.T, cmap='gray')\n ax[2].set_title('label with patch')\n ax[2].axis('off')\n ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))\n plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')\n plt.close('all')\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom PIL import Image\nimport cv2\nimport openslide\n\n\nclass QualityPatch:\n\n def __init__(self, original_img_path, label_img_path, patch_level,\n patch_size):\n \"\"\"\n parameter:\n original_img_path(str): the source of image\n label_img_path(str): label image\n patch_level(int): the level that the patch belongs to\n patch_size(tuple): size of patch(x,y)\n\n attributes:\n self.slide(Openslide): the slide that the patch belongs to \n self.original_img_path(str) : the path of the lide\n self.label_img_path(str) : label_img_path\n self.patch_level(int) : the level that the patch belongs to\n self.patch_size = patch_size\n\n self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline\n self.label(np array) : the image of label\n self.label_size(tuple) : the size of label\n self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1\n \"\"\"\n self.slide = openslide.OpenSlide(original_img_path)\n slide_width, slide_height = self.slide.dimensions\n self.label = cv2.imread(label_img_path, cv2.IMREAD_GRAYSCALE) / 255\n self.patch_coors = [(w, h) for w in range(0, slide_width -\n patch_size[0], patch_size[0]) for h in range(0, slide_height -\n patch_size[1], patch_size[1])]\n self.original_img_path = original_img_path\n self.label_img_path = label_img_path\n self.patch_level = patch_level\n self.patch_size = patch_size\n self.label = self.label.T\n self.level_dim = self.slide.level_dimensions[patch_level]\n self.label_size = self.label.shape\n self.scale = self.label_size[0] / self.level_dim[0], self.label_size[1\n ] / self.level_dim[1]\n self.adj_patch_size_label = self.calculateAdjPatchSize()\n\n def calculateLabelCoordinates(self, patch_location):\n return int(self.scale[0] * patch_location[0] / 2 ** self.patch_level\n ), int(self.scale[1] * patch_location[1] / 2 ** self.patch_level)\n\n def 
calculateAdjPatchSize(self):\n return int(self.scale[0] * self.patch_size[0]) + 1, int(self.scale[\n 1] * self.patch_size[1]) + 1\n\n def patchQualityInsurance(self, patch_location):\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n percent = np.sum(self.label[label_coordinates[0]:label_coordinates[\n 0] + self.adj_patch_size_label[0], label_coordinates[1]:\n label_coordinates[1] + self.adj_patch_size_label[1]]) / (self.\n adj_patch_size_label[0] * self.adj_patch_size_label[1])\n return percent\n\n def getLabelWithPatchLocation(self, patch_location):\n patch_image = np.ones(self.adj_patch_size_label) / 2\n label_with_patch_location = self.label.copy()\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n label_with_patch_location[label_coordinates[0]:label_coordinates[0] +\n self.adj_patch_size_label[0], label_coordinates[1]:\n label_coordinates[1] + self.adj_patch_size_label[1]] = patch_image\n return label_with_patch_location.T\n\n def getReleventPatches(self):\n relevent_patches = []\n for i, coor in enumerate(self.patch_coors):\n percent = self.patchQualityInsurance(coor)\n if percent > 0.5:\n relevent_patches.append([coor, percent])\n if i % 10000 == 0:\n print(i, '/', len(self.patch_coors), 'dic len', len(\n relevent_patches), ' from', len(self.patch_coors))\n return relevent_patches\n\n def checkingfunction(self, checking_coors=(40000, 90000)):\n if checking_coors[0] < 0 or checking_coors[0\n ] < 0 or self.slide.level_dimensions[self.patch_level][0\n ] < checking_coors[0] / 2 ** self.patch_level + self.patch_size[0\n ] or self.slide.level_dimensions[self.patch_level][1\n ] < checking_coors[1] / 2 ** self.patch_level + self.patch_size[1]:\n raise ValueError('the patch location with patch size is not valid.'\n )\n image = self.slide.read_region(checking_coors, self.patch_level,\n self.patch_size)\n percent = self.patchQualityInsurance(checking_coors)\n fig, ax = plt.subplots(nrows=1, ncols=3)\n plt.tight_layout()\n 
ax[0].set_title('tissue percentage %.02f' % percent)\n ax[0].axis('off')\n ax[0].imshow(image)\n ax[1].set_title('tissue label')\n ax[1].axis('off')\n ax[1].imshow(self.label.T, cmap='gray')\n ax[2].set_title('label with patch')\n ax[2].axis('off')\n ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))\n plt.savefig('test/check_read_region' + str(self.patch_level) + '.png')\n plt.close('all')\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom PIL import Image\nimport cv2\nimport openslide\n\nclass QualityPatch():\n def __init__(self, original_img_path,label_img_path,patch_level,patch_size):\n \"\"\"\n parameter:\n original_img_path(str): the source of image\n label_img_path(str): label image\n patch_level(int): the level that the patch belongs to\n patch_size(tuple): size of patch(x,y)\n\n attributes:\n self.slide(Openslide): the slide that the patch belongs to \n self.original_img_path(str) : the path of the lide\n self.label_img_path(str) : label_img_path\n self.patch_level(int) : the level that the patch belongs to\n self.patch_size = patch_size\n\n self.scale(int) : the magnification of the slide that the patch belongs to with level_max baseline\n self.label(np array) : the image of label\n self.label_size(tuple) : the size of label\n self.adj_patch_size_label(tuple) : considering the slide is rescaled to self.label_size the size is zero, it is 1\n \"\"\"\n self.slide = openslide.OpenSlide(original_img_path)\n slide_width, slide_height = self.slide.dimensions\n self.label = (cv2.imread(label_img_path,cv2.IMREAD_GRAYSCALE)/255)\n self.patch_coors = [(w,h) for w in range(0, slide_width - patch_size[0], patch_size[0]) for h in range(0, slide_height - patch_size[1],patch_size[1])]\n\n self.original_img_path = original_img_path\n self.label_img_path = label_img_path\n self.patch_level = patch_level\n self.patch_size = patch_size\n self.label = self.label.T\n self.level_dim = self.slide.level_dimensions[patch_level]\n\n self.label_size = self.label.shape\n self.scale = (self.label_size[0]/self.level_dim[0], self.label_size[1]/self.level_dim[1])\n self.adj_patch_size_label = self.calculateAdjPatchSize()\n\n def calculateLabelCoordinates(self, patch_location):\n return (int(self.scale[0]*patch_location[0]/2**(self.patch_level)), int(self.scale[1]*patch_location[1]/2**(self.patch_level)))\n \n def calculateAdjPatchSize(self):\n return 
(int(self.scale[0] * self.patch_size[0])+1, int(self.scale[1] * self.patch_size[1])+1)\n\n def patchQualityInsurance(self, patch_location):\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n percent = (np.sum(self.label[label_coordinates[0]:label_coordinates[0]+self.adj_patch_size_label[0],label_coordinates[1]:label_coordinates[1]+self.adj_patch_size_label[1]]))/(self.adj_patch_size_label[0]*self.adj_patch_size_label[1])\n\n return percent\n\n def getLabelWithPatchLocation(self, patch_location):\n patch_image = np.ones(self.adj_patch_size_label)/2\n label_with_patch_location = self.label.copy()\n label_coordinates = self.calculateLabelCoordinates(patch_location)\n label_with_patch_location[label_coordinates[0]:label_coordinates[0]+self.adj_patch_size_label[0],label_coordinates[1]:label_coordinates[1]+self.adj_patch_size_label[1]] = patch_image\n return label_with_patch_location.T\n \n def getReleventPatches(self):\n relevent_patches = []\n\n\n for i, coor in enumerate(self.patch_coors):\n percent = self.patchQualityInsurance(coor)\n if percent > .5:\n relevent_patches.append([coor,percent])\n if i % 10000 == 0:\n print(i, \"/\",len(self.patch_coors), \"dic len\", len(relevent_patches), \" from\", len(self.patch_coors) )\n return relevent_patches\n\n def checkingfunction(self, checking_coors=(40000,90000)):\n if checking_coors[0] < 0 or checking_coors[0] < 0 or\\\n self.slide.level_dimensions[self.patch_level][0] < (checking_coors[0] / 2**(self.patch_level) + self.patch_size[0]) or\\\n self.slide.level_dimensions[self.patch_level][1] < ((checking_coors[1] / 2**(self.patch_level) + self.patch_size[1])):\n raise ValueError(\"the patch location with patch size is not valid.\")\n \n image = self.slide.read_region(checking_coors, self.patch_level, self.patch_size)\n percent = self.patchQualityInsurance(checking_coors)\n\n fig, ax = plt.subplots(nrows=1, ncols=3)\n plt.tight_layout()\n ax[0].set_title(\"tissue percentage %.02f\"%percent)\n 
ax[0].axis('off')\n ax[0].imshow(image)\n ax[1].set_title(\"tissue label\")\n ax[1].axis('off')\n ax[1].imshow(self.label.T, cmap='gray')\n ax[2].set_title(\"label with patch\")\n ax[2].axis('off')\n ax[2].imshow(self.getLabelWithPatchLocation(checking_coors))\n plt.savefig(\"test/check_read_region\"+str(self.patch_level)+'.png')\n plt.close('all')\n",
"step-ids": [
3,
6,
7,
9,
10
]
}
|
[
3,
6,
7,
9,
10
] |
#!/usr/bin/env python3
# Created by: Khang Le
# Created on: Dec 2019
# This program uses lists and rotation
def rotation(list_of_number, ratating_time):
numbers = list_of_number[0]
numbers = [list_of_number[(i + ratating_time) % len(list_of_number)]
for i, x in enumerate(list_of_number)]
return numbers
def main():
lst = []
# number of elemetns as input
user_input = int(input("Enter number of elements : "))
rotating_time = int(input("Enter how many times you want to rotate: "))
print("The numbers are:")
for i in range(0, user_input):
ele = int(input())
lst.append(ele) # adding the element
numbers = rotation(lst, rotating_time)
print("Rotated by {0}: {1}".format(rotating_time, numbers))
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "74de0da708c7eb792dea15afb23713d9d71af520",
"index": 5491,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n lst = []\n user_input = int(input('Enter number of elements : '))\n rotating_time = int(input('Enter how many times you want to rotate: '))\n print('The numbers are:')\n for i in range(0, user_input):\n ele = int(input())\n lst.append(ele)\n numbers = rotation(lst, rotating_time)\n print('Rotated by {0}: {1}'.format(rotating_time, numbers))\n\n\n<mask token>\n",
"step-3": "def rotation(list_of_number, ratating_time):\n numbers = list_of_number[0]\n numbers = [list_of_number[(i + ratating_time) % len(list_of_number)] for\n i, x in enumerate(list_of_number)]\n return numbers\n\n\ndef main():\n lst = []\n user_input = int(input('Enter number of elements : '))\n rotating_time = int(input('Enter how many times you want to rotate: '))\n print('The numbers are:')\n for i in range(0, user_input):\n ele = int(input())\n lst.append(ele)\n numbers = rotation(lst, rotating_time)\n print('Rotated by {0}: {1}'.format(rotating_time, numbers))\n\n\n<mask token>\n",
"step-4": "def rotation(list_of_number, ratating_time):\n numbers = list_of_number[0]\n numbers = [list_of_number[(i + ratating_time) % len(list_of_number)] for\n i, x in enumerate(list_of_number)]\n return numbers\n\n\ndef main():\n lst = []\n user_input = int(input('Enter number of elements : '))\n rotating_time = int(input('Enter how many times you want to rotate: '))\n print('The numbers are:')\n for i in range(0, user_input):\n ele = int(input())\n lst.append(ele)\n numbers = rotation(lst, rotating_time)\n print('Rotated by {0}: {1}'.format(rotating_time, numbers))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python3\n\n# Created by: Khang Le\n# Created on: Dec 2019\n# This program uses lists and rotation\n\n\ndef rotation(list_of_number, ratating_time):\n\n numbers = list_of_number[0]\n numbers = [list_of_number[(i + ratating_time) % len(list_of_number)]\n for i, x in enumerate(list_of_number)]\n return numbers\n\n\ndef main():\n\n lst = []\n # number of elemetns as input\n user_input = int(input(\"Enter number of elements : \"))\n rotating_time = int(input(\"Enter how many times you want to rotate: \"))\n print(\"The numbers are:\")\n for i in range(0, user_input):\n ele = int(input())\n lst.append(ele) # adding the element\n numbers = rotation(lst, rotating_time)\n print(\"Rotated by {0}: {1}\".format(rotating_time, numbers))\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import unittest
import tempfile
from bpython import config
TEST_THEME_PATH = os.path.join(os.path.dirname(__file__), "test.theme")
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {"keyword": "y"}
self.assertEquals(struct.color_scheme, expected)
defaults = {"name": "c"}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, defaults)
self.assertEquals(struct.color_scheme, expected)
def test_load_config(self):
struct = config.Struct()
with tempfile.NamedTemporaryFile() as f:
f.write(''.encode('utf8'))
f.write('[keyboard]\nhelp = C-h\n'.encode('utf8'))
f.flush()
config.loadini(struct, f.name)
self.assertEqual(struct.help_key, 'C-h')
self.assertEqual(struct.backspace_key, '')
|
normal
|
{
"blob_id": "d5efbbb6e818e797652f304f3d022e04be245778",
"index": 4931,
"step-1": "<mask token>\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-3": "<mask token>\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-4": "import os\nimport unittest\nimport tempfile\nfrom bpython import config\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-5": "import os\nimport unittest\nimport tempfile\n\nfrom bpython import config\n\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), \"test.theme\")\n\nclass TestConfig(unittest.TestCase):\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {\"keyword\": \"y\"}\n self.assertEquals(struct.color_scheme, expected)\n\n defaults = {\"name\": \"c\"}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
207. Course Schedule
Some courses may have prerequisites, for example to take course 0 you have to first take course 1,
which is expressed as a pair: [0,1]
Given the total number of courses and a list of prerequisite pairs, is it possible for you to finish all courses?
For example:
2, [[1,0]]
There are a total of 2 courses to take. To take course 1 you should have finished course 0. So it is possible.
2, [[1,0],[0,1]]
There are a total of 2 courses to take. To take course 1 you should have finished course 0,
and to take course 0 you should also have finished course 1. So it is impossible.
Note:
The input prerequisites is a graph represented by a list of edges, not adjacency matrices.
Read more about how a graph is represented.
You may assume that there are no duplicate edges in the input prerequisites.
1. intuition: as long as there is no cycle, return true
how do we look for cycle?
hashmap to track which pairs weve seen
hashmap to track pairs [a,b]
class Solution(object):
def canFinish(self, numCourses, prerequisites):
"""
:type numCourses: int
:type prerequisites: List[List[int]]
:rtype: bool
"""
graph = [[] for _ in range(numCourses)]
visit = [0 for _ in range(numCourses)]
for x, y in prerequisites:
graph[x].append(y)
def dfs(i):
if visit[i] == -1: return False
if visit[i] == 1: return True
visit[i] = -1
for j in graph[i]:
if not dfs(j): return False
visit[i] = 1
return True
for i in range(numCourses):
if not dfs(i):
return False
return True
|
normal
|
{
"blob_id": "34aa08b9a5a89d3fca129271a9e812e2382ca88e",
"index": 4196,
"step-1": "207. Course Schedule \n\nSome courses may have prerequisites, for example to take course 0 you have to first take course 1, \nwhich is expressed as a pair: [0,1]\n\nGiven the total number of courses and a list of prerequisite pairs, is it possible for you to finish all courses?\n\nFor example:\n\n2, [[1,0]]\nThere are a total of 2 courses to take. To take course 1 you should have finished course 0. So it is possible.\n\n2, [[1,0],[0,1]]\nThere are a total of 2 courses to take. To take course 1 you should have finished course 0, \nand to take course 0 you should also have finished course 1. So it is impossible.\n\nNote:\nThe input prerequisites is a graph represented by a list of edges, not adjacency matrices. \nRead more about how a graph is represented.\nYou may assume that there are no duplicate edges in the input prerequisites.\n\n1. intuition: as long as there is no cycle, return true\nhow do we look for cycle?\nhashmap to track which pairs weve seen\nhashmap to track pairs [a,b]\n\n\nclass Solution(object):\n\tdef canFinish(self, numCourses, prerequisites):\n\t\t\"\"\"\n\t\t:type numCourses: int\n\t\t:type prerequisites: List[List[int]]\n\t\t:rtype: bool\n\t\t\"\"\"\n\t\tgraph = [[] for _ in range(numCourses)]\n\t\tvisit = [0 for _ in range(numCourses)]\n\t\tfor x, y in prerequisites:\n\t\t\tgraph[x].append(y)\n\n\t\tdef dfs(i):\n\t\t\tif visit[i] == -1: return False\n\t\t\tif visit[i] == 1: return True\n\t\t\tvisit[i] = -1\n\t\t\tfor j in graph[i]:\n\t\t\t\tif not dfs(j): return False\n\t\t\tvisit[i] = 1\n\t\t\treturn True\n\n\t\tfor i in range(numCourses):\n\t\t\tif not dfs(i):\n\t\t\t\treturn False\n\n\t\treturn True\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class WbsAdmin(admin.ModelAdmin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = 'type',
list_filter = 'type',
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WbsAdmin(admin.ModelAdmin):
list_display = 'code', 'description', 'equipment_type'
list_filter = 'code', 'description', 'equipment_type'
readonly_fields = 'code', 'description'
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = 'type',
list_filter = 'type',
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WbsAdmin(admin.ModelAdmin):
list_display = 'code', 'description', 'equipment_type'
list_filter = 'code', 'description', 'equipment_type'
readonly_fields = 'code', 'description'
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = 'type',
list_filter = 'type',
admin.site.register(Wbs, WbsAdmin)
admin.site.register(Equipment_Type, Equipment_TypeAdmin)
<|reserved_special_token_1|>
from django.contrib import admin
from .models import Wbs, Equipment_Type
class WbsAdmin(admin.ModelAdmin):
list_display = 'code', 'description', 'equipment_type'
list_filter = 'code', 'description', 'equipment_type'
readonly_fields = 'code', 'description'
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = 'type',
list_filter = 'type',
admin.site.register(Wbs, WbsAdmin)
admin.site.register(Equipment_Type, Equipment_TypeAdmin)
<|reserved_special_token_1|>
from django.contrib import admin
from .models import Wbs, Equipment_Type
class WbsAdmin(admin.ModelAdmin):
list_display = ('code','description','equipment_type')
list_filter = ('code','description','equipment_type')
readonly_fields = ('code','description')
class Equipment_TypeAdmin(admin.ModelAdmin):
list_display = ('type',)
list_filter = ('type',)
admin.site.register(Wbs,WbsAdmin)
admin.site.register(Equipment_Type,Equipment_TypeAdmin)
|
flexible
|
{
"blob_id": "292c66bd5b7f56ee8c27cabff01cd97ff36a79dc",
"index": 8885,
"step-1": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\nadmin.site.register(Wbs, WbsAdmin)\nadmin.site.register(Equipment_Type, Equipment_TypeAdmin)\n",
"step-4": "from django.contrib import admin\nfrom .models import Wbs, Equipment_Type\n\n\nclass WbsAdmin(admin.ModelAdmin):\n list_display = 'code', 'description', 'equipment_type'\n list_filter = 'code', 'description', 'equipment_type'\n readonly_fields = 'code', 'description'\n\n\nclass Equipment_TypeAdmin(admin.ModelAdmin):\n list_display = 'type',\n list_filter = 'type',\n\n\nadmin.site.register(Wbs, WbsAdmin)\nadmin.site.register(Equipment_Type, Equipment_TypeAdmin)\n",
"step-5": "from django.contrib import admin\r\nfrom .models import Wbs, Equipment_Type\r\n\r\nclass WbsAdmin(admin.ModelAdmin):\r\n list_display = ('code','description','equipment_type')\r\n list_filter = ('code','description','equipment_type')\r\n readonly_fields = ('code','description')\r\n\r\nclass Equipment_TypeAdmin(admin.ModelAdmin):\r\n list_display = ('type',)\r\n list_filter = ('type',)\r\n\r\nadmin.site.register(Wbs,WbsAdmin)\r\nadmin.site.register(Equipment_Type,Equipment_TypeAdmin)\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
'''
这部分理解参考:
https://www.bilibili.com/video/BV1QA411H7tK?from=search&seid=17305042509580602672
图文代码地址: https://blog.csdn.net/qq_30758629/article/details/112527763
'''
import threading
import time
data=0
lock=threading.Lock() #创建一个锁对象
def func():
global data
print("%s is acquire lock..\n" %threading.current_thread().getName())
if lock.acquire():
print("%s get lock "%threading.current_thread().getName())
data+=1
time.sleep(2)
print("%s release lock "%threading.current_thread().getName())
print(data)
lock.release()
t1=threading.Thread(target=func)
t2=threading.Thread(target=func)
t3=threading.Thread(target=func)
t1.start()
t2.start()
t3.start()
|
normal
|
{
"blob_id": "7aa426723f5311b5abec4a7ace9d3ec1e5e31d9a",
"index": 5966,
"step-1": "<mask token>\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\n<mask token>\nt1.start()\nt2.start()\nt3.start()\n",
"step-3": "<mask token>\ndata = 0\nlock = threading.Lock()\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\nt1 = threading.Thread(target=func)\nt2 = threading.Thread(target=func)\nt3 = threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()\n",
"step-4": "<mask token>\nimport threading\nimport time\ndata = 0\nlock = threading.Lock()\n\n\ndef func():\n global data\n print('%s is acquire lock..\\n' % threading.current_thread().getName())\n if lock.acquire():\n print('%s get lock ' % threading.current_thread().getName())\n data += 1\n time.sleep(2)\n print('%s release lock ' % threading.current_thread().getName())\n print(data)\n lock.release()\n\n\nt1 = threading.Thread(target=func)\nt2 = threading.Thread(target=func)\nt3 = threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()\n",
"step-5": "'''\n\n这部分理解参考:\n\nhttps://www.bilibili.com/video/BV1QA411H7tK?from=search&seid=17305042509580602672\n\n图文代码地址: https://blog.csdn.net/qq_30758629/article/details/112527763\n\n'''\n\nimport threading\nimport time\n\ndata=0\nlock=threading.Lock() #创建一个锁对象\n\ndef func():\n global data\n print(\"%s is acquire lock..\\n\" %threading.current_thread().getName())\n\n if lock.acquire():\n print(\"%s get lock \"%threading.current_thread().getName())\n data+=1\n time.sleep(2)\n print(\"%s release lock \"%threading.current_thread().getName())\n print(data)\n lock.release()\n\nt1=threading.Thread(target=func)\nt2=threading.Thread(target=func)\nt3=threading.Thread(target=func)\nt1.start()\nt2.start()\nt3.start()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from flask import Flask,request,Response
from spamapp.spam import SpamIdentify
from json import dumps,loads
app = Flask(__name__)
spam = SpamIdentify()
@app.route("/",methods=['GET'])
def home():
return Response(response=dumps({"msg":"App successfull"}), status=200, mimetype='application/json')
@app.route("/spamapi/",methods=['GET','POST'])
def apicall():
try:
predTxt = loads(request.data)
predTxt = predTxt['input']
response = spam.predict_data(predTxt)
return Response(response=dumps(response), status=200, mimetype='application/json')
except Exception as e:
print("Error",e)
return Response(response=dumps({"result": 6}), status=200, mimetype='application/json')
if __name__ == "__main__":
app.run(
host="192.168.2.240",
port=5000,
debug=True
)
|
normal
|
{
"blob_id": "1552d862d3b9df45eda8c08256e8b4437ab08740",
"index": 2641,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef home():\n return Response(response=dumps({'msg': 'App successfull'}), status=200,\n mimetype='application/json')\n\n\n@app.route('/spamapi/', methods=['GET', 'POST'])\ndef apicall():\n try:\n predTxt = loads(request.data)\n predTxt = predTxt['input']\n response = spam.predict_data(predTxt)\n return Response(response=dumps(response), status=200, mimetype=\n 'application/json')\n except Exception as e:\n print('Error', e)\n return Response(response=dumps({'result': 6}), status=200, mimetype\n ='application/json')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef home():\n return Response(response=dumps({'msg': 'App successfull'}), status=200,\n mimetype='application/json')\n\n\n@app.route('/spamapi/', methods=['GET', 'POST'])\ndef apicall():\n try:\n predTxt = loads(request.data)\n predTxt = predTxt['input']\n response = spam.predict_data(predTxt)\n return Response(response=dumps(response), status=200, mimetype=\n 'application/json')\n except Exception as e:\n print('Error', e)\n return Response(response=dumps({'result': 6}), status=200, mimetype\n ='application/json')\n\n\nif __name__ == '__main__':\n app.run(host='192.168.2.240', port=5000, debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\nspam = SpamIdentify()\n\n\n@app.route('/', methods=['GET'])\ndef home():\n return Response(response=dumps({'msg': 'App successfull'}), status=200,\n mimetype='application/json')\n\n\n@app.route('/spamapi/', methods=['GET', 'POST'])\ndef apicall():\n try:\n predTxt = loads(request.data)\n predTxt = predTxt['input']\n response = spam.predict_data(predTxt)\n return Response(response=dumps(response), status=200, mimetype=\n 'application/json')\n except Exception as e:\n print('Error', e)\n return Response(response=dumps({'result': 6}), status=200, mimetype\n ='application/json')\n\n\nif __name__ == '__main__':\n app.run(host='192.168.2.240', port=5000, debug=True)\n",
"step-4": "from flask import Flask, request, Response\nfrom spamapp.spam import SpamIdentify\nfrom json import dumps, loads\napp = Flask(__name__)\nspam = SpamIdentify()\n\n\n@app.route('/', methods=['GET'])\ndef home():\n return Response(response=dumps({'msg': 'App successfull'}), status=200,\n mimetype='application/json')\n\n\n@app.route('/spamapi/', methods=['GET', 'POST'])\ndef apicall():\n try:\n predTxt = loads(request.data)\n predTxt = predTxt['input']\n response = spam.predict_data(predTxt)\n return Response(response=dumps(response), status=200, mimetype=\n 'application/json')\n except Exception as e:\n print('Error', e)\n return Response(response=dumps({'result': 6}), status=200, mimetype\n ='application/json')\n\n\nif __name__ == '__main__':\n app.run(host='192.168.2.240', port=5000, debug=True)\n",
"step-5": "from flask import Flask,request,Response\nfrom spamapp.spam import SpamIdentify\nfrom json import dumps,loads\napp = Flask(__name__)\n\nspam = SpamIdentify()\n\n@app.route(\"/\",methods=['GET'])\ndef home():\n return Response(response=dumps({\"msg\":\"App successfull\"}), status=200, mimetype='application/json')\n\n@app.route(\"/spamapi/\",methods=['GET','POST'])\ndef apicall():\n try:\n predTxt = loads(request.data)\n predTxt = predTxt['input']\n response = spam.predict_data(predTxt)\n return Response(response=dumps(response), status=200, mimetype='application/json')\n except Exception as e:\n print(\"Error\",e)\n return Response(response=dumps({\"result\": 6}), status=200, mimetype='application/json')\n\nif __name__ == \"__main__\":\n app.run(\n host=\"192.168.2.240\", \n port=5000,\n debug=True\n )",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import pygame
from Actor import Actor
import PlayerInput
class TestActor(Actor):
def __init__(self):
super(TestActor, self).__init__()
def act(self):
self.key_commands()
def key_commands(self):
if PlayerInput.is_key_down(pygame.K_LEFT):
self.set_location(self.x - 1, self.y)
if PlayerInput.is_key_down(pygame.K_RIGHT):
self.set_location(self.x + 1, self.y)
if PlayerInput.is_key_down(pygame.K_UP):
self.set_location(self.x, self.y - 1)
if PlayerInput.is_key_down(pygame.K_DOWN):
self.set_location(self.x, self.y + 1)
|
normal
|
{
"blob_id": "9cb11c2bf032aa16abd3463ecdb8997addedc912",
"index": 1570,
"step-1": "<mask token>\n\n\nclass TestActor(Actor):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestActor(Actor):\n <mask token>\n\n def act(self):\n self.key_commands()\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestActor(Actor):\n\n def __init__(self):\n super(TestActor, self).__init__()\n\n def act(self):\n self.key_commands()\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass TestActor(Actor):\n\n def __init__(self):\n super(TestActor, self).__init__()\n\n def act(self):\n self.key_commands()\n\n def key_commands(self):\n if PlayerInput.is_key_down(pygame.K_LEFT):\n self.set_location(self.x - 1, self.y)\n if PlayerInput.is_key_down(pygame.K_RIGHT):\n self.set_location(self.x + 1, self.y)\n if PlayerInput.is_key_down(pygame.K_UP):\n self.set_location(self.x, self.y - 1)\n if PlayerInput.is_key_down(pygame.K_DOWN):\n self.set_location(self.x, self.y + 1)\n",
"step-5": "import pygame\nfrom Actor import Actor\nimport PlayerInput\n\n\nclass TestActor(Actor):\n\n def __init__(self):\n super(TestActor, self).__init__()\n\n def act(self):\n self.key_commands()\n\n def key_commands(self):\n if PlayerInput.is_key_down(pygame.K_LEFT):\n self.set_location(self.x - 1, self.y)\n if PlayerInput.is_key_down(pygame.K_RIGHT):\n self.set_location(self.x + 1, self.y)\n if PlayerInput.is_key_down(pygame.K_UP):\n self.set_location(self.x, self.y - 1)\n if PlayerInput.is_key_down(pygame.K_DOWN):\n self.set_location(self.x, self.y + 1)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import random
from elment.login_registration_element import LoginRegistration
from page.test_verification_code_page import VerificationCodeAction
public_number_vip = ['17800000000','17800000001','17800000002','17800000003','17800000004','17800000005','17800000006',
'17800000007','17800000008','17800000009']
public_number_not_vip = ['18381939440', '18381939441', '18381939445', '18381939446']
class LoginRegistrationAction(LoginRegistration):
    """Page-object actions for the login/registration screen."""

    def check_welcome_xunyou(self):
        # Text of the "welcome to Xunyou" banner.
        return self.welcome_xunyou().text

    def click_welcome_xunyou(self):
        # Tap the banner (also dismisses the soft keyboard).
        self.welcome_xunyou().click()
        return self

    def logged_in_random(self):
        # Type a random 183-prefixed phone number into the phone field.
        self.phone_id().send_keys('1831111{}'.format(random.randint(1000, 9999)))
        return self

    def logged_in_appoint(self):
        # Log in with a random VIP account.
        # Fix: random.sample returns a list, so str() typed "['178...']" into
        # the field; random.choice yields the bare phone-number string.
        self.phone_id().send_keys(random.choice(public_number_vip))
        return self

    def logged_in_not_vip_appoint(self):
        # Log in with a random non-VIP account (same random.choice fix).
        self.phone_id().send_keys(random.choice(public_number_not_vip))
        return self

    def logged_in_appoint_183(self):
        # Log in with the fixed account 18333334444.
        self.phone_id().send_keys('18333334444')
        return self

    def click_verification_code(self):
        # Request an SMS verification code; navigates to the code-entry page.
        self.verification_code().click()
        return VerificationCodeAction(self._driver)

    def check_verification_code_enabled(self):
        # Whether the "get verification code" button is clickable.
        return self.verification_code().is_enabled()

    def write_in_error_quantity(self):
        # Type an over-long / invalid phone number.
        self.phone_id().send_keys('1399999219392s我!3')
        return self

    def number_quantity(self):
        # Number of characters currently in the phone field.
        return len(self.phone_id().text)

    def click_privacy_agreement(self):
        # Open the privacy-agreement entry on the login page.
        self.privacy_agreement().click()
        return self

    def click_service_agreement(self):
        # Open the service-agreement entry on the login page.
        self.service_agreement().click()
        return self

    def click_exit_privacy_agreement(self):
        # Back (<) from the privacy-agreement detail page.
        self.exit_privacy_agreement().click()
        return self

    def click_exit_service_agreement(self):
        # Back (<) from the service-agreement detail page.
        self.exit_service_agreement().click()
        return self

    def check_keyboard_Delete(self):
        # Text of the keyboard Delete key; used to detect keyboard presence.
        return self.keyboard_Delete().text

    def logged_in_assert(self):
        # Assert we are on the login page. The banner text is a runtime
        # string the app renders, so it stays untranslated.
        assert "欢迎登录迅游" in self.check_welcome_xunyou()
        return self

    def click_exit_logged_in(self):
        # Back (<) from login; returns to the acceleration home page.
        self.exit_logged_in().click()
        from page.test_accelerate_page import AccelerateHomeAction
        return AccelerateHomeAction(self._driver)

    def click_default_area_code(self):
        # Tap the area-code selector button.
        self.default_area_code().click()
        return self

    def click_exit_area_code(self):
        # Back (<) from the area-code page to the login page.
        self.exit_area_code().click()
        return self

    def click_switch_area_code(self):
        # Pick the Afghanistan entry on the area-code page.
        self.switch_area_code().click()
        return self

    def check_switch_area_code(self):
        # Text of the newly selected area code.
        return self.switch_area_code().text

    def check_memory_logged_in_number(self):
        # Text of the remembered-account hint.
        return self.memory_logged_in_number().text
|
normal
|
{
"blob_id": "e5a698979bc84fe733a9bf5cd51e2f078956d468",
"index": 2461,
"step-1": "<mask token>\n\n\nclass LoginRegistrationAction(LoginRegistration):\n\n def check_welcome_xunyou(self):\n return self.welcome_xunyou().text\n <mask token>\n\n def logged_in_random(self):\n self.phone_id().send_keys('1831111{}'.format(random.randint(1000, \n 9999)))\n return self\n <mask token>\n <mask token>\n\n def logged_in_appoint_183(self):\n self.phone_id().send_keys('18333334444')\n return self\n\n def click_verification_code(self):\n self.verification_code().click()\n return VerificationCodeAction(self._driver)\n <mask token>\n <mask token>\n\n def number_quantity(self):\n return len(self.phone_id().text)\n\n def click_privacy_agreement(self):\n self.privacy_agreement().click()\n return self\n\n def click_service_agreement(self):\n self.service_agreement().click()\n return self\n\n def click_exit_privacy_agreement(self):\n self.exit_privacy_agreement().click()\n return self\n\n def click_exit_service_agreement(self):\n self.exit_service_agreement().click()\n return self\n\n def check_keyboard_Delete(self):\n return self.keyboard_Delete().text\n <mask token>\n\n def click_exit_logged_in(self):\n self.exit_logged_in().click()\n from page.test_accelerate_page import AccelerateHomeAction\n return AccelerateHomeAction(self._driver)\n\n def click_default_area_code(self):\n self.default_area_code().click()\n return self\n <mask token>\n <mask token>\n\n def check_switch_area_code(self):\n return self.switch_area_code().text\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LoginRegistrationAction(LoginRegistration):\n\n def check_welcome_xunyou(self):\n return self.welcome_xunyou().text\n\n def click_welcome_xunyou(self):\n self.welcome_xunyou().click()\n return self\n\n def logged_in_random(self):\n self.phone_id().send_keys('1831111{}'.format(random.randint(1000, \n 9999)))\n return self\n\n def logged_in_appoint(self):\n self.phone_id().send_keys(str(random.sample(public_number_vip, 1)))\n return self\n <mask token>\n\n def logged_in_appoint_183(self):\n self.phone_id().send_keys('18333334444')\n return self\n\n def click_verification_code(self):\n self.verification_code().click()\n return VerificationCodeAction(self._driver)\n\n def check_verification_code_enabled(self):\n return self.verification_code().is_enabled()\n <mask token>\n\n def number_quantity(self):\n return len(self.phone_id().text)\n\n def click_privacy_agreement(self):\n self.privacy_agreement().click()\n return self\n\n def click_service_agreement(self):\n self.service_agreement().click()\n return self\n\n def click_exit_privacy_agreement(self):\n self.exit_privacy_agreement().click()\n return self\n\n def click_exit_service_agreement(self):\n self.exit_service_agreement().click()\n return self\n\n def check_keyboard_Delete(self):\n return self.keyboard_Delete().text\n <mask token>\n\n def click_exit_logged_in(self):\n self.exit_logged_in().click()\n from page.test_accelerate_page import AccelerateHomeAction\n return AccelerateHomeAction(self._driver)\n\n def click_default_area_code(self):\n self.default_area_code().click()\n return self\n <mask token>\n\n def click_switch_area_code(self):\n self.switch_area_code().click()\n return self\n\n def check_switch_area_code(self):\n return self.switch_area_code().text\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LoginRegistrationAction(LoginRegistration):\n\n def check_welcome_xunyou(self):\n return self.welcome_xunyou().text\n\n def click_welcome_xunyou(self):\n self.welcome_xunyou().click()\n return self\n\n def logged_in_random(self):\n self.phone_id().send_keys('1831111{}'.format(random.randint(1000, \n 9999)))\n return self\n\n def logged_in_appoint(self):\n self.phone_id().send_keys(str(random.sample(public_number_vip, 1)))\n return self\n\n def logged_in_not_vip_appoint(self):\n self.phone_id().send_keys(str(random.sample(public_number_not_vip, 1)))\n return self\n\n def logged_in_appoint_183(self):\n self.phone_id().send_keys('18333334444')\n return self\n\n def click_verification_code(self):\n self.verification_code().click()\n return VerificationCodeAction(self._driver)\n\n def check_verification_code_enabled(self):\n return self.verification_code().is_enabled()\n\n def write_in_error_quantity(self):\n self.phone_id().send_keys('1399999219392s我!3')\n return self\n\n def number_quantity(self):\n return len(self.phone_id().text)\n\n def click_privacy_agreement(self):\n self.privacy_agreement().click()\n return self\n\n def click_service_agreement(self):\n self.service_agreement().click()\n return self\n\n def click_exit_privacy_agreement(self):\n self.exit_privacy_agreement().click()\n return self\n\n def click_exit_service_agreement(self):\n self.exit_service_agreement().click()\n return self\n\n def check_keyboard_Delete(self):\n return self.keyboard_Delete().text\n <mask token>\n\n def click_exit_logged_in(self):\n self.exit_logged_in().click()\n from page.test_accelerate_page import AccelerateHomeAction\n return AccelerateHomeAction(self._driver)\n\n def click_default_area_code(self):\n self.default_area_code().click()\n return self\n <mask token>\n\n def click_switch_area_code(self):\n self.switch_area_code().click()\n return self\n\n def check_switch_area_code(self):\n return self.switch_area_code().text\n <mask token>\n",
"step-4": "import random\nfrom elment.login_registration_element import LoginRegistration\nfrom page.test_verification_code_page import VerificationCodeAction\npublic_number_vip = ['17800000000', '17800000001', '17800000002',\n '17800000003', '17800000004', '17800000005', '17800000006',\n '17800000007', '17800000008', '17800000009']\npublic_number_not_vip = ['18381939440', '18381939441', '18381939445',\n '18381939446']\n\n\nclass LoginRegistrationAction(LoginRegistration):\n\n def check_welcome_xunyou(self):\n return self.welcome_xunyou().text\n\n def click_welcome_xunyou(self):\n self.welcome_xunyou().click()\n return self\n\n def logged_in_random(self):\n self.phone_id().send_keys('1831111{}'.format(random.randint(1000, \n 9999)))\n return self\n\n def logged_in_appoint(self):\n self.phone_id().send_keys(str(random.sample(public_number_vip, 1)))\n return self\n\n def logged_in_not_vip_appoint(self):\n self.phone_id().send_keys(str(random.sample(public_number_not_vip, 1)))\n return self\n\n def logged_in_appoint_183(self):\n self.phone_id().send_keys('18333334444')\n return self\n\n def click_verification_code(self):\n self.verification_code().click()\n return VerificationCodeAction(self._driver)\n\n def check_verification_code_enabled(self):\n return self.verification_code().is_enabled()\n\n def write_in_error_quantity(self):\n self.phone_id().send_keys('1399999219392s我!3')\n return self\n\n def number_quantity(self):\n return len(self.phone_id().text)\n\n def click_privacy_agreement(self):\n self.privacy_agreement().click()\n return self\n\n def click_service_agreement(self):\n self.service_agreement().click()\n return self\n\n def click_exit_privacy_agreement(self):\n self.exit_privacy_agreement().click()\n return self\n\n def click_exit_service_agreement(self):\n self.exit_service_agreement().click()\n return self\n\n def check_keyboard_Delete(self):\n return self.keyboard_Delete().text\n\n def logged_in_assert(self):\n assert '欢迎登录迅游' in 
self.check_welcome_xunyou()\n return self\n\n def click_exit_logged_in(self):\n self.exit_logged_in().click()\n from page.test_accelerate_page import AccelerateHomeAction\n return AccelerateHomeAction(self._driver)\n\n def click_default_area_code(self):\n self.default_area_code().click()\n return self\n\n def click_exit_area_code(self):\n self.exit_area_code().click()\n return self\n\n def click_switch_area_code(self):\n self.switch_area_code().click()\n return self\n\n def check_switch_area_code(self):\n return self.switch_area_code().text\n\n def check_memory_logged_in_number(self):\n return self.memory_logged_in_number().text\n",
"step-5": "import random\n\nfrom elment.login_registration_element import LoginRegistration\nfrom page.test_verification_code_page import VerificationCodeAction\npublic_number_vip = ['17800000000','17800000001','17800000002','17800000003','17800000004','17800000005','17800000006',\n '17800000007','17800000008','17800000009']\n\npublic_number_not_vip = ['18381939440', '18381939441', '18381939445', '18381939446']\n\nclass LoginRegistrationAction(LoginRegistration): # 登录页操作\n\n def check_welcome_xunyou(self): # 欢迎登陆迅游text\n return self.welcome_xunyou().text\n\n def click_welcome_xunyou(self): # 点击欢迎登录迅游(可以将键盘降下去)\n self.welcome_xunyou().click()\n return self\n\n def logged_in_random(self): # 点击号码栏输入随机账号\n self.phone_id().send_keys('1831111{}'.format(random.randint(1000,9999)))\n return self\n\n def logged_in_appoint(self): # 登录随机vip\n self.phone_id().send_keys(str(random.sample(public_number_vip,1)))\n return self\n\n def logged_in_not_vip_appoint(self): # 登录随机非会员账号\n self.phone_id().send_keys(str(random.sample(public_number_not_vip,1)))\n return self\n\n def logged_in_appoint_183(self): # 登录18333334444\n self.phone_id().send_keys('18333334444')\n return self\n\n # def check_logged_in_title(self): # 查看更多页已登录账号元素展示\n\n def click_verification_code(self): # 点击获取验证码\n self.verification_code().click()\n return VerificationCodeAction(self._driver)\n\n def check_verification_code_enabled(self): # 获取验证码按钮是否可点击\n return self.verification_code().is_enabled()\n\n def write_in_error_quantity(self): # 输入多位手机号\n self.phone_id().send_keys('1399999219392s我!3')\n return self\n\n def number_quantity(self): # 判断手机号位数\n return len(self.phone_id().text)\n\n def click_privacy_agreement(self): # 点击登录页隐私协议入口\n self.privacy_agreement().click()\n return self\n\n def click_service_agreement(self): # 点击登录页服务协议入口\n self.service_agreement().click()\n return self\n\n def click_exit_privacy_agreement(self): # 点击隐私协议详情页左上角<\n self.exit_privacy_agreement().click()\n return self\n\n def 
click_exit_service_agreement(self): # 点击服务协议详情页左上角<\n self.exit_service_agreement().click()\n return self\n\n def check_keyboard_Delete(self): # 检查键盘Delete文本,可用来判断键盘是否存在\n return self.keyboard_Delete().text\n\n def logged_in_assert(self): # 判断是否进入了登录页\n assert \"欢迎登录迅游\" in self.check_welcome_xunyou()\n return self\n\n def click_exit_logged_in(self): # 点击登录页左上角<点击,在加速首页触发的登录,返回加速页\n self.exit_logged_in().click()\n from page.test_accelerate_page import AccelerateHomeAction\n return AccelerateHomeAction(self._driver)\n\n def click_default_area_code(self): # 点击区号按钮\n self.default_area_code().click()\n return self\n\n def click_exit_area_code(self): # 点击区号页左上角<,返回登录页\n self.exit_area_code().click()\n return self\n\n def click_switch_area_code(self): # 点击区号页面阿富汗区号\n self.switch_area_code().click()\n return self\n\n def check_switch_area_code(self): # 查看修改后的区号\n return self.switch_area_code().text\n\n def check_memory_logged_in_number(self): # 查看账号记忆功能文本\n return self.memory_logged_in_number().text",
"step-ids": [
14,
18,
20,
25,
26
]
}
|
[
14,
18,
20,
25,
26
] |
<|reserved_special_token_0|>
class HistoryListView(generics.GenericAPIView):
<|reserved_special_token_0|>
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HistoryListView(generics.GenericAPIView):
serializer_class = HistorySerializer
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HistoryMyList(generics.ListCreateAPIView):
serializer_class = HistorySer
queryset = History.objects.all()
class HistoryListView(generics.GenericAPIView):
serializer_class = HistorySerializer
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
<|reserved_special_token_1|>
from django.shortcuts import render
from rest_framework.response import Response
from .serializers import *
from rest_framework import generics, status
class HistoryMyList(generics.ListCreateAPIView):
    """List all History records or create a new one."""

    queryset = History.objects.all()
    serializer_class = HistorySer
class HistoryListView(generics.GenericAPIView):
    """Validate a posted history payload and echo the cleaned data back."""

    serializer_class = HistorySerializer

    def post(self, request):
        # Bind the incoming data; name the instance distinctly so it does
        # not shadow the class attribute.
        serializer = self.serializer_class(data=request.data)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
    # Retrieve/update/delete a single record.
    # NOTE(review): the serializer targets History but the queryset targets
    # Sentiment — this looks inconsistent; confirm the intended model.
    serializer_class = HistorySerializer
    queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
    """List all Sentiment records or create a new one."""

    queryset = Sentiment.objects.all()
    # Fix: DRF expects the serializer *class* here, not an instance;
    # generic list views pass many=True themselves when serializing a queryset.
    serializer_class = SentimenSerializer
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
    # Retrieve/update/delete a single Sentiment record.
    serializer_class = SentimenSerializer
    queryset = Sentiment.objects.all()
|
flexible
|
{
"blob_id": "8edca4c50e48734073e80de85088964837247696",
"index": 2597,
"step-1": "<mask token>\n\n\nclass HistoryListView(generics.GenericAPIView):\n <mask token>\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-2": "<mask token>\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-3": "<mask token>\n\n\nclass HistoryMyList(generics.ListCreateAPIView):\n serializer_class = HistorySer\n queryset = History.objects.all()\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-4": "from django.shortcuts import render\nfrom rest_framework.response import Response\nfrom .serializers import *\nfrom rest_framework import generics, status\n\n\nclass HistoryMyList(generics.ListCreateAPIView):\n serializer_class = HistorySer\n queryset = History.objects.all()\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-5": null,
"step-ids": [
8,
9,
11,
12
]
}
|
[
8,
9,
11,
12
] |
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 23 15:26:47 2015
@author: tomhope
"""
import cPickle as pickle
from nltk.tokenize import word_tokenize
from sklearn.feature_extraction.text import CountVectorizer
import re
def tokenize_speeches(text):
    """Tokenize a speech string into cleaned word tokens (Python 2 code).

    Strips punctuation/digits, collapses whitespace, tokenizes with NLTK,
    then removes hyphens that sit next to non-ASCII characters (presumably
    Hebrew text — confirm against the corpus). Tokens shorter than two
    characters are dropped at each filtering step.
    """
    # Replace brackets, punctuation marks and digits with spaces.
    text = re.sub('[\[\]<>\'\+\=\/(.?\",&*!_#:;@$%|)0-9]'," ", text)
    # Collapse runs of whitespace into single spaces.
    text = ' '.join(text.split())
    text = word_tokenize(text)
    # Drop a hyphen that directly follows a non-ASCII character.
    tokens = [re.sub(r'(?<![\x00-\x9F])[-]',"", t.encode("UTF-8")).decode("UTF-8") for t in text if len(t)>=2]
    # Drop a hyphen that directly precedes a non-ASCII character.
    tokens = [re.sub(r'[-](?![\x00-\x9F])',"", t.encode("UTF-8")).decode("UTF-8") for t in tokens if len(t)>=2]
    return tokens
# NOTE(review): `bibi_speeches` is not defined in this file — presumably it
# is loaded elsewhere (the cPickle import above suggests an unpickled corpus);
# confirm before running.
ss = tokenize_speeches(bibi_speeches[0])
for s in ss[0:10]:
    print s  # Python 2 print statement: preview the first ten tokens

# Unigram+bigram counts; drop terms in >75% of documents or <10 documents.
count_vect = CountVectorizer(ngram_range = (1,2), max_df = 0.75, min_df = 10,
                             tokenizer = tokenize_speeches)

X_train_counts = count_vect.fit_transform(bibi_speeches)

# Spot-check the learned vocabulary.
h = count_vect.get_feature_names()[0:20]
print h

ss = count_vect.get_feature_names()
for i in range(600,700):
    print ss[i]
|
normal
|
{
"blob_id": "17548459b83fe4dea29f20dc5f91196b2b86ea60",
"index": 4655,
"step-1": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Oct 23 15:26:47 2015\n\n@author: tomhope\n\"\"\"\nimport cPickle as pickle\nfrom nltk.tokenize import word_tokenize\nfrom sklearn.feature_extraction.text import CountVectorizer\nimport re\n\ndef tokenize_speeches(text):\n text = re.sub('[\\[\\]<>\\'\\+\\=\\/(.?\\\",&*!_#:;@$%|)0-9]',\" \", text)\n \n text = ' '.join(text.split())\n text = word_tokenize(text)\n \n tokens = [re.sub(r'(?<![\\x00-\\x9F])[-]',\"\", t.encode(\"UTF-8\")).decode(\"UTF-8\") for t in text if len(t)>=2]\n tokens = [re.sub(r'[-](?![\\x00-\\x9F])',\"\", t.encode(\"UTF-8\")).decode(\"UTF-8\") for t in tokens if len(t)>=2]\n\n \n return tokens\n \nss = tokenize_speeches(bibi_speeches[0]) \nfor s in ss[0:10]:\n print s\n\ncount_vect = CountVectorizer(ngram_range = (1,2), max_df = 0.75, min_df = 10,\n tokenizer = tokenize_speeches)\n \nX_train_counts = count_vect.fit_transform(bibi_speeches)\n \nh = count_vect.get_feature_names()[0:20]\nprint h\n\nss = count_vect.get_feature_names()\nfor i in range(600,700):\n print ss[i]\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
if len(s) == 0:
return ''
if len(s) == 1:
return s
start = -1
end = -2
for i in range(len(s)):
side = 1
while i - side >= 0 and i + side < len(s) and s[i - side] == s[
i + side]:
side += 1
if (side - 1) * 2 + 1 > end - start + 1:
start = i - (side - 1)
end = i + side
out_string = s[start:end]
start = -1
end = -2
for i in range(len(s) - 1):
side = 0
while i - side >= 0 and i + 1 + side < len(s) and s[i - side] == s[
i + 1 + side]:
side += 1
if side * 2 > end - start + 1:
start = i - side + 1
end = i + 1 + side
return out_string if len(out_string) > end - start else s[start:end]
<|reserved_special_token_1|>
# Given a string S, find the longest palindromic substring in S. You may assume that the maximum length of S is 1000, and there exists one unique longest palindromic substring.
class Solution(object):

    def longestPalindrome(self, s):
        """
        Return the longest palindromic substring of s.

        Expand-around-center: every palindrome has either a one-character
        (odd-length) or two-character (even-length) center; grow each
        candidate center outward and keep the longest span. O(n^2) time,
        O(1) extra space.

        :type s: str
        :rtype: str
        """
        # Trivial cases: empty input and a single character.
        if not s:
            return ''
        if len(s) == 1:
            return s

        def expand(lo, hi):
            # Grow outward while the span stays a palindrome; return the
            # half-open bounds of the widest match.
            while lo >= 0 and hi < len(s) and s[lo] == s[hi]:
                lo -= 1
                hi += 1
            return lo + 1, hi

        # Pass 1: odd-length palindromes (single-character centers).
        best_odd = (0, 1)
        for center in range(len(s)):
            lo, hi = expand(center, center)
            if hi - lo > best_odd[1] - best_odd[0]:
                best_odd = (lo, hi)

        # Pass 2: even-length palindromes (two-character centers).
        best_even = (0, 0)
        for center in range(len(s) - 1):
            lo, hi = expand(center, center + 1)
            if hi - lo > best_even[1] - best_even[0]:
                best_even = (lo, hi)

        # Ties between the two passes go to the even-length candidate,
        # matching the original final comparison.
        odd_str = s[best_odd[0]:best_odd[1]]
        even_str = s[best_even[0]:best_even[1]]
        return odd_str if len(odd_str) > len(even_str) else even_str
|
flexible
|
{
"blob_id": "7c39b3927bc0702818c54875785b4657c20c441e",
"index": 2272,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def longestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n if len(s) == 0:\n return ''\n if len(s) == 1:\n return s\n start = -1\n end = -2\n for i in range(len(s)):\n side = 1\n while i - side >= 0 and i + side < len(s) and s[i - side] == s[\n i + side]:\n side += 1\n if (side - 1) * 2 + 1 > end - start + 1:\n start = i - (side - 1)\n end = i + side\n out_string = s[start:end]\n start = -1\n end = -2\n for i in range(len(s) - 1):\n side = 0\n while i - side >= 0 and i + 1 + side < len(s) and s[i - side] == s[\n i + 1 + side]:\n side += 1\n if side * 2 > end - start + 1:\n start = i - side + 1\n end = i + 1 + side\n return out_string if len(out_string) > end - start else s[start:end]\n",
"step-4": "# Given a string S, find the longest palindromic substring in S. You may assume that the maximum length of S is 1000, and there exists one unique longest palindromic substring.\n\nclass Solution(object):\n def longestPalindrome(self, s):\n \"\"\"\n :type s: str\n :rtype: str\n \"\"\"\n if len(s) == 0:\n return \"\"\n if len(s) == 1:\n return s\n \n start = -1\n end = -2\n for i in range(len(s)):\n side = 1\n while i - side >= 0 and i + side < len(s) and s[i - side] == s[i + side]:\n side += 1\n if (side - 1) * 2 + 1 > end - start + 1:\n start = i - (side - 1)\n end = i + side\n out_string = s[start:end]\n start = -1\n end = -2\n for i in range(len(s) - 1):\n side = 0\n while i - side >= 0 and i + 1 + side < len(s) and s[i - side] == s[i + 1 + side]:\n side += 1\n if side * 2 > end - start + 1:\n start = i - side + 1\n end = i + 1 + side\n return out_string if len(out_string) > end - start else s[start:end]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import socket
import threading
# Listening TCP socket for the chat server; binds all interfaces.
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
port = 12321  # fixed chat port
server.bind(('', port))
server.listen()

# Intended as parallel lists: client_names[i] names the socket clients[i].
# NOTE(review): recieve() below appends both the name and the socket to
# client_names, so clients never gets populated — confirm the fix.
client_names = []
clients = []
def broadcast(message):
    """Send *message* (bytes) to every connected client socket."""
    for peer in clients:
        peer.send(message)
def handle(client):
    """Relay every message from *client* to all clients until the socket dies.

    On any receive/send failure the client is removed from the shared lists,
    its socket is closed, and the remaining clients are told it left.
    """
    while True:
        try:
            message = client.recv(1024)
            broadcast(message)
        except Exception:  # was bare except: — avoid swallowing KeyboardInterrupt
            # Fix: look the socket up in the clients list (was client.index(client)).
            index = clients.index(client)
            clients.remove(client)
            client.close()
            name = client_names[index]
            # Fix: the original referenced an undefined `client_name`.
            broadcast(f"{name} left the chat!".encode('ascii'))
            client_names.remove(name)
            break
def recieve():
while True:
client, address = server.accept()
print(f"Connected with {str(address)}!")
client.send('YO'.encode('ascii'))
name = client.recv(1024).decode('ascii')
client_names.append(name)
client_names.append(client)
print(f"Name of the client is {name}")
broadcast(f"{name} joined the chat!".encode("ascii"))
client.send("Connected to the Noob Coder's Server!".encode("ascii"))
thread = threading.Thread(target=handle, args=(client,))
thread.start()
print("SERVER STARTED...")
recieve()
|
normal
|
{
"blob_id": "f1fbbbe4258d0fb0a43505f4718730934fd595ec",
"index": 1831,
"step-1": "<mask token>\n\n\ndef broadcast(message):\n for client in clients:\n client.send(message)\n\n\ndef handle(client):\n while True:\n try:\n message = client.recv(1024)\n broadcast(message)\n except:\n index = client.index(client)\n clients.remove(client)\n client.close()\n name = client_names[index]\n broadcast(f'{client_name} left the chat!'.encode('ascii'))\n client_names.remove(name)\n break\n\n\ndef recieve():\n while True:\n client, address = server.accept()\n print(f'Connected with {str(address)}!')\n client.send('YO'.encode('ascii'))\n name = client.recv(1024).decode('ascii')\n client_names.append(name)\n client_names.append(client)\n print(f'Name of the client is {name}')\n broadcast(f'{name} joined the chat!'.encode('ascii'))\n client.send(\"Connected to the Noob Coder's Server!\".encode('ascii'))\n thread = threading.Thread(target=handle, args=(client,))\n thread.start()\n\n\n<mask token>\n",
"step-2": "<mask token>\nserver.bind(('', port))\nserver.listen()\n<mask token>\n\n\ndef broadcast(message):\n for client in clients:\n client.send(message)\n\n\ndef handle(client):\n while True:\n try:\n message = client.recv(1024)\n broadcast(message)\n except:\n index = client.index(client)\n clients.remove(client)\n client.close()\n name = client_names[index]\n broadcast(f'{client_name} left the chat!'.encode('ascii'))\n client_names.remove(name)\n break\n\n\ndef recieve():\n while True:\n client, address = server.accept()\n print(f'Connected with {str(address)}!')\n client.send('YO'.encode('ascii'))\n name = client.recv(1024).decode('ascii')\n client_names.append(name)\n client_names.append(client)\n print(f'Name of the client is {name}')\n broadcast(f'{name} joined the chat!'.encode('ascii'))\n client.send(\"Connected to the Noob Coder's Server!\".encode('ascii'))\n thread = threading.Thread(target=handle, args=(client,))\n thread.start()\n\n\nprint('SERVER STARTED...')\nrecieve()\n",
"step-3": "<mask token>\nserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nport = 12321\nserver.bind(('', port))\nserver.listen()\nclient_names = []\nclients = []\n\n\ndef broadcast(message):\n for client in clients:\n client.send(message)\n\n\ndef handle(client):\n while True:\n try:\n message = client.recv(1024)\n broadcast(message)\n except:\n index = client.index(client)\n clients.remove(client)\n client.close()\n name = client_names[index]\n broadcast(f'{client_name} left the chat!'.encode('ascii'))\n client_names.remove(name)\n break\n\n\ndef recieve():\n while True:\n client, address = server.accept()\n print(f'Connected with {str(address)}!')\n client.send('YO'.encode('ascii'))\n name = client.recv(1024).decode('ascii')\n client_names.append(name)\n client_names.append(client)\n print(f'Name of the client is {name}')\n broadcast(f'{name} joined the chat!'.encode('ascii'))\n client.send(\"Connected to the Noob Coder's Server!\".encode('ascii'))\n thread = threading.Thread(target=handle, args=(client,))\n thread.start()\n\n\nprint('SERVER STARTED...')\nrecieve()\n",
"step-4": "import socket\nimport threading\nserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nport = 12321\nserver.bind(('', port))\nserver.listen()\nclient_names = []\nclients = []\n\n\ndef broadcast(message):\n for client in clients:\n client.send(message)\n\n\ndef handle(client):\n while True:\n try:\n message = client.recv(1024)\n broadcast(message)\n except:\n index = client.index(client)\n clients.remove(client)\n client.close()\n name = client_names[index]\n broadcast(f'{client_name} left the chat!'.encode('ascii'))\n client_names.remove(name)\n break\n\n\ndef recieve():\n while True:\n client, address = server.accept()\n print(f'Connected with {str(address)}!')\n client.send('YO'.encode('ascii'))\n name = client.recv(1024).decode('ascii')\n client_names.append(name)\n client_names.append(client)\n print(f'Name of the client is {name}')\n broadcast(f'{name} joined the chat!'.encode('ascii'))\n client.send(\"Connected to the Noob Coder's Server!\".encode('ascii'))\n thread = threading.Thread(target=handle, args=(client,))\n thread.start()\n\n\nprint('SERVER STARTED...')\nrecieve()\n",
"step-5": "import socket\nimport threading\n\nserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nport = 12321\n\nserver.bind(('', port))\nserver.listen()\n\nclient_names = []\nclients = []\n\ndef broadcast(message):\n for client in clients:\n client.send(message)\n\n\ndef handle(client):\n while True:\n try:\n message = client.recv(1024)\n broadcast(message)\n except:\n index = client.index(client)\n clients.remove(client)\n client.close()\n name = client_names[index]\n broadcast(f\"{client_name} left the chat!\".encode('ascii'))\n client_names.remove(name)\n break\n\n\ndef recieve():\n while True:\n client, address = server.accept()\n print(f\"Connected with {str(address)}!\")\n \n client.send('YO'.encode('ascii'))\n name = client.recv(1024).decode('ascii')\n client_names.append(name)\n client_names.append(client)\n\n print(f\"Name of the client is {name}\")\n broadcast(f\"{name} joined the chat!\".encode(\"ascii\"))\n client.send(\"Connected to the Noob Coder's Server!\".encode(\"ascii\"))\n\n thread = threading.Thread(target=handle, args=(client,))\n thread.start()\n\nprint(\"SERVER STARTED...\")\nrecieve()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
lc_headers = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15",
"authority": "leetcode.com",
}
lc_all = "https://leetcode.com/api/problems/all/"
lc_submissions = "https://leetcode.com/api/submissions/?offset=%(offset)s&limit=%(limit)s&lastkey=%(lastkey)s"
lc_graphql = "https://leetcode.com/graphql"
query_string = 'query questionData($titleSlug: String!) {\n question(titleSlug: $titleSlug) {\n questionId\n questionFrontendId\n boundTopicId\n title\n titleSlug\n content\n translatedTitle\n translatedContent\n isPaidOnly\n difficulty\n likes\n dislikes\n isLiked\n similarQuestions\n contributors {\n username\n profileUrl\n avatarUrl\n __typename\n }\n topicTags {\n name\n slug\n translatedName\n __typename\n }\n companyTagStats\n codeSnippets {\n lang\n langSlug\n code\n __typename\n }\n stats\n hints\n solution {\n id\n canSeeDetail\n paidOnly\n __typename\n }\n status\n sampleTestCase\n metaData\n judgerAvailable\n judgeType\n mysqlSchemas\n enableRunCode\n enableTestMode\n enableDebugger\n envInfo\n libraryUrl\n adminUrl\n __typename\n }\n}\n'
md_template = '''# [%(id)s] %(title)s (%(difficulty)s)
%(small_tags)s
:+1: %(likes)s :thumbsdown: %(dislikes)s
---
## My Submission
- Language: %(lang)s
- Runtime: %(runtime)s
- Completed time: %(time)s
```%(lang)s
%(code)s
```
## Content
%(contents)s
## Related Problems
%(related_problems)s
## What a(n) %(difficulty)s problem!
Among **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**. <br>
- Likes: %(likes)s
- Dislikes: %(dislikes)s
'''
related_template = "[%(related_title)s](%(link)s) (%(related_difficulty)s) <br>"
tag_template = "[s-%(color)s.svg)](%(URL)s) "
raw_md_template = '''## [%(id)s] %(title)s (%(difficulty)s)
%(small_tags)s
👍 %(likes)s 👎 %(dislikes)s
---
## My Submission
- Language: %(lang)s
- Runtime: %(runtime)s
- Completed time: %(time)s
```%(lang)s
%(code)s
```
## Related Problems
%(related_problems)s
## What a(n) %(difficulty)s problem!
Among **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**.
- Likes: %(likes)s
- Dislikes: %(dislikes)s
'''
|
normal
|
{
"blob_id": "f715628da2f1b950b8fbf8aa5b033e5299d3e224",
"index": 7857,
"step-1": "<mask token>\n",
"step-2": "lc_headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15'\n , 'authority': 'leetcode.com'}\nlc_all = 'https://leetcode.com/api/problems/all/'\nlc_submissions = (\n 'https://leetcode.com/api/submissions/?offset=%(offset)s&limit=%(limit)s&lastkey=%(lastkey)s'\n )\nlc_graphql = 'https://leetcode.com/graphql'\nquery_string = \"\"\"query questionData($titleSlug: String!) {\n question(titleSlug: $titleSlug) {\n questionId\n questionFrontendId\n boundTopicId\n title\n titleSlug\n content\n translatedTitle\n translatedContent\n isPaidOnly\n difficulty\n likes\n dislikes\n isLiked\n similarQuestions\n contributors {\n username\n profileUrl\n avatarUrl\n __typename\n }\n topicTags {\n name\n slug\n translatedName\n __typename\n }\n companyTagStats\n codeSnippets {\n lang\n langSlug\n code\n __typename\n }\n stats\n hints\n solution {\n id\n canSeeDetail\n paidOnly\n __typename\n }\n status\n sampleTestCase\n metaData\n judgerAvailable\n judgeType\n mysqlSchemas\n enableRunCode\n enableTestMode\n enableDebugger\n envInfo\n libraryUrl\n adminUrl\n __typename\n }\n}\n\"\"\"\nmd_template = \"\"\"# [%(id)s] %(title)s (%(difficulty)s)\n\n%(small_tags)s\n\n:+1: %(likes)s :thumbsdown: %(dislikes)s\n\n---\n\n## My Submission\n\n- Language: %(lang)s\n- Runtime: %(runtime)s\n- Completed time: %(time)s\n\n```%(lang)s\n%(code)s\n```\n\n## Content\n%(contents)s\n\n## Related Problems\n%(related_problems)s\n\n## What a(n) %(difficulty)s problem!\nAmong **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**. 
<br>\n\n- Likes: %(likes)s\n- Dislikes: %(dislikes)s\n\n\"\"\"\nrelated_template = (\n '[%(related_title)s](%(link)s) (%(related_difficulty)s) <br>')\ntag_template = (\n '[s-%(color)s.svg)](%(URL)s) '\n )\nraw_md_template = \"\"\"## [%(id)s] %(title)s (%(difficulty)s)\n\n%(small_tags)s\n\n👍 %(likes)s 👎 %(dislikes)s\n\n---\n\n## My Submission\n\n- Language: %(lang)s\n- Runtime: %(runtime)s\n- Completed time: %(time)s\n\n```%(lang)s\n%(code)s\n```\n\n## Related Problems\n%(related_problems)s\n\n## What a(n) %(difficulty)s problem!\nAmong **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**.\n\n- Likes: %(likes)s\n- Dislikes: %(dislikes)s\n\n\"\"\"\n",
"step-3": "lc_headers = {\n \"User-Agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15\",\n \"authority\": \"leetcode.com\",\n }\n\nlc_all = \"https://leetcode.com/api/problems/all/\"\nlc_submissions = \"https://leetcode.com/api/submissions/?offset=%(offset)s&limit=%(limit)s&lastkey=%(lastkey)s\"\nlc_graphql = \"https://leetcode.com/graphql\"\n\nquery_string = 'query questionData($titleSlug: String!) {\\n question(titleSlug: $titleSlug) {\\n questionId\\n questionFrontendId\\n boundTopicId\\n title\\n titleSlug\\n content\\n translatedTitle\\n translatedContent\\n isPaidOnly\\n difficulty\\n likes\\n dislikes\\n isLiked\\n similarQuestions\\n contributors {\\n username\\n profileUrl\\n avatarUrl\\n __typename\\n }\\n topicTags {\\n name\\n slug\\n translatedName\\n __typename\\n }\\n companyTagStats\\n codeSnippets {\\n lang\\n langSlug\\n code\\n __typename\\n }\\n stats\\n hints\\n solution {\\n id\\n canSeeDetail\\n paidOnly\\n __typename\\n }\\n status\\n sampleTestCase\\n metaData\\n judgerAvailable\\n judgeType\\n mysqlSchemas\\n enableRunCode\\n enableTestMode\\n enableDebugger\\n envInfo\\n libraryUrl\\n adminUrl\\n __typename\\n }\\n}\\n'\n\nmd_template = '''# [%(id)s] %(title)s (%(difficulty)s)\n\n%(small_tags)s\n\n:+1: %(likes)s :thumbsdown: %(dislikes)s\n\n---\n\n## My Submission\n\n- Language: %(lang)s\n- Runtime: %(runtime)s\n- Completed time: %(time)s\n\n```%(lang)s\n%(code)s\n```\n\n## Content\n%(contents)s\n\n## Related Problems\n%(related_problems)s\n\n## What a(n) %(difficulty)s problem!\nAmong **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**. 
<br>\n\n- Likes: %(likes)s\n- Dislikes: %(dislikes)s\n\n'''\n\nrelated_template = \"[%(related_title)s](%(link)s) (%(related_difficulty)s) <br>\"\n\ntag_template = \"[s-%(color)s.svg)](%(URL)s) \"\n\nraw_md_template = '''## [%(id)s] %(title)s (%(difficulty)s)\n\n%(small_tags)s\n\n👍 %(likes)s 👎 %(dislikes)s\n\n---\n\n## My Submission\n\n- Language: %(lang)s\n- Runtime: %(runtime)s\n- Completed time: %(time)s\n\n```%(lang)s\n%(code)s\n```\n\n## Related Problems\n%(related_problems)s\n\n## What a(n) %(difficulty)s problem!\nAmong **%(submission)s** total submissions, **%(accepted)s** are accepted, with an acceptance rate of **%(acc_rate)s**.\n\n- Likes: %(likes)s\n- Dislikes: %(dislikes)s\n\n'''\n\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('regions', '0002_auto_20171024_1707')]
operations = [migrations.AlterField(model_name='region', name='email',
field=models.EmailField(max_length=100, null=True, verbose_name=
'email')), migrations.AlterField(model_name='region', name=
'governor', field=models.ForeignKey(null=True, on_delete=django.db.
models.deletion.CASCADE, to='public_servants.PublicServant',
verbose_name='governor')), migrations.AlterField(model_name=
'region', name='phone', field=phonenumber_field.modelfields.
PhoneNumberField(max_length=128, null=True, verbose_name='phone')),
migrations.AlterField(model_name='region', name='twitter', field=
models.CharField(blank=True, max_length=50, null=True))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
dependencies = [('regions', '0002_auto_20171024_1707')]
operations = [migrations.AlterField(model_name='region', name='email',
field=models.EmailField(max_length=100, null=True, verbose_name=
'email')), migrations.AlterField(model_name='region', name=
'governor', field=models.ForeignKey(null=True, on_delete=django.db.
models.deletion.CASCADE, to='public_servants.PublicServant',
verbose_name='governor')), migrations.AlterField(model_name=
'region', name='phone', field=phonenumber_field.modelfields.
PhoneNumberField(max_length=128, null=True, verbose_name='phone')),
migrations.AlterField(model_name='region', name='twitter', field=
models.CharField(blank=True, max_length=50, null=True))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-27 21:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
dependencies = [
('regions', '0002_auto_20171024_1707'),
]
operations = [
migrations.AlterField(
model_name='region',
name='email',
field=models.EmailField(max_length=100, null=True, verbose_name='email'),
),
migrations.AlterField(
model_name='region',
name='governor',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='public_servants.PublicServant', verbose_name='governor'),
),
migrations.AlterField(
model_name='region',
name='phone',
field=phonenumber_field.modelfields.PhoneNumberField(max_length=128, null=True, verbose_name='phone'),
),
migrations.AlterField(
model_name='region',
name='twitter',
field=models.CharField(blank=True, max_length=50, null=True),
),
]
|
flexible
|
{
"blob_id": "1330addd53c6187a41dfea6957bf47aaecca1135",
"index": 7180,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('regions', '0002_auto_20171024_1707')]\n operations = [migrations.AlterField(model_name='region', name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name=\n 'email')), migrations.AlterField(model_name='region', name=\n 'governor', field=models.ForeignKey(null=True, on_delete=django.db.\n models.deletion.CASCADE, to='public_servants.PublicServant',\n verbose_name='governor')), migrations.AlterField(model_name=\n 'region', name='phone', field=phonenumber_field.modelfields.\n PhoneNumberField(max_length=128, null=True, verbose_name='phone')),\n migrations.AlterField(model_name='region', name='twitter', field=\n models.CharField(blank=True, max_length=50, null=True))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport phonenumber_field.modelfields\n\n\nclass Migration(migrations.Migration):\n dependencies = [('regions', '0002_auto_20171024_1707')]\n operations = [migrations.AlterField(model_name='region', name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name=\n 'email')), migrations.AlterField(model_name='region', name=\n 'governor', field=models.ForeignKey(null=True, on_delete=django.db.\n models.deletion.CASCADE, to='public_servants.PublicServant',\n verbose_name='governor')), migrations.AlterField(model_name=\n 'region', name='phone', field=phonenumber_field.modelfields.\n PhoneNumberField(max_length=128, null=True, verbose_name='phone')),\n migrations.AlterField(model_name='region', name='twitter', field=\n models.CharField(blank=True, max_length=50, null=True))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.6 on 2017-10-27 21:59\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport phonenumber_field.modelfields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('regions', '0002_auto_20171024_1707'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='region',\n name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name='email'),\n ),\n migrations.AlterField(\n model_name='region',\n name='governor',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='public_servants.PublicServant', verbose_name='governor'),\n ),\n migrations.AlterField(\n model_name='region',\n name='phone',\n field=phonenumber_field.modelfields.PhoneNumberField(max_length=128, null=True, verbose_name='phone'),\n ),\n migrations.AlterField(\n model_name='region',\n name='twitter',\n field=models.CharField(blank=True, max_length=50, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.