| code (string, lengths 13–6.09M) | order_type (string, 2 classes) | original_example (dict) | step_ids (list, lengths 1–5) |
|---|---|---|---|
#!/usr/bin/python
# Point of origin (connector J3, pad 1, net 3V3)
x = 0.0
y = 0.0
drillDiameter = 1.0
padWidth = 1.6
from os.path import exists
from pad import *
filename="iCEstick.kicad_mod"
header = ""
footer = ""
if exists(filename):
# Read existing footprint
f = open(filename)
footprint = f.read()
f.close()
# Find the end of the header
headerEndIndex = footprint.find("(pad ")
header = footprint[:headerEndIndex]
# Find the end of the pads list
lastPadIndex = headerEndIndex
while (footprint.find("(pad ", lastPadIndex) > -1):
lastPadIndex = footprint.find("(pad ", lastPadIndex) + 5
footerStartIndex = footprint.find("))", lastPadIndex) + 2
footer = footprint[footerStartIndex:]
if header.find("TE-Connectivity") < 0:
header = \
"""(module iCEstick (layer F.Cu) (tedit 5BD73D6F)
(fp_text reference REF** (at 0 -12.7) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value iCEstick (at 0 25.4) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
"""
footer = ")"
#
# Generate pads according to schematic drawing
#
designators_j1 = ["3V3", "GND"] + [str(n) for n in range(112,120)]
designators_j2 = [ \
[str(n) for n in range(78,82)] + ["GND", "3V3"], \
["87", "88", "90", "91", "GND", "3V3"] \
]
designators_j3 = ["3V3", "GND", "62", "61", "60", "56", "48", "47", "45", "44"]
#
# J1 connector pad list
#
pads_j1 = []
oldX = x
oldY = y
y -= 21.81
for i in range(10):
# The first pad is a rectangle, the remaining ones are circular
if (i == 0):
shape = Shape.RECT
else:
shape = Shape.CIRCLE
# Create pad object
newPad = Pad(
designator = designators_j1[i],
through_hole = True,
plated = True,
shape = shape,
at = (x, y),
size = (padWidth, padWidth),
drill = drillDiameter
)
pads_j1 += [newPad]
x -= 2.54
#
# J2 connector pad list
#
pads_j2 = []
x = oldX - 5.80
newY = oldY - 21.81 + 4.49 + 5*2.54
y = newY
for i in range(6):
# The first pad is a rectangle, the remaining ones are circular
if (i == 0):
shape = Shape.RECT
else:
shape = Shape.CIRCLE
# Create pad object
newPad = Pad(
designator = designators_j2[0][i],
through_hole = True,
plated = True,
shape = shape,
at = (x, y),
size = (padWidth, padWidth),
drill = drillDiameter
)
pads_j2 += [newPad]
y -= 2.54
# Second (inner) row of pins of J2
x -= 2.54
y = newY
for i in range(6):
# Create pad object
newPad = Pad(
designator = designators_j2[1][i],
through_hole = True,
plated = True,
shape = Shape.CIRCLE,
at = (x, y),
size = (padWidth, padWidth),
drill = drillDiameter
)
pads_j2 += [newPad]
y -= 2.54
#
# J3 connector pad list
#
pads_j3 = []
x = oldX
y = oldY
for i in range(10):
# The first pad is a rectangle, the remaining ones are circular
if (i == 0):
shape = Shape.RECT
else:
shape = Shape.CIRCLE
# Create pad object
newPad = Pad(
designator = designators_j3[i],
through_hole = True,
plated = True,
shape = shape,
at = (x, y),
size = (padWidth, padWidth),
drill = drillDiameter
)
    pads_j3 += [newPad]
x -= 2.54
# Make a list of all pads
pads = pads_j1 + pads_j2 + pads_j3
# Compose new footprint from header, pads and footer
newFootprint = header
for pad in pads:
newFootprint += str(pad) + "\n"
newFootprint += footer.strip()
# Print generated footprint to screen
print(newFootprint)
# Save generated footprint to file
f = open(filename, "w")
f.write(newFootprint)
f.close()
|
normal
|
{
"blob_id": "c71e367ad320d7eadabbbfda728d94448db6441d",
"index": 2109,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif exists(filename):\n f = open(filename)\n footprint = f.read()\n f.close()\n headerEndIndex = footprint.find('(pad ')\n header = footprint[:headerEndIndex]\n lastPadIndex = headerEndIndex\n while footprint.find('(pad ', lastPadIndex) > -1:\n lastPadIndex = footprint.find('(pad ', lastPadIndex) + 5\n footerStartIndex = footprint.find('))', lastPadIndex) + 2\n footer = footprint[footerStartIndex:]\nif header.find('TE-Connectivity') < 0:\n header = \"\"\"(module iCEstick (layer F.Cu) (tedit 5BD73D6F)\n (fp_text reference REF** (at 0 -12.7) (layer F.SilkS)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n (fp_text value iCEstick (at 0 25.4) (layer F.Fab)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n\"\"\"\n footer = ')'\n<mask token>\ny -= 21.81\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j1[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\n<mask token>\nfor i in range(6):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j2[0][i], through_hole=True, plated\n =True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\nx -= 2.54\n<mask token>\nfor i in range(6):\n newPad = Pad(designator=designators_j2[1][i], through_hole=True, plated\n =True, shape=Shape.CIRCLE, at=(x, y), size=(padWidth, padWidth),\n drill=drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\n<mask token>\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j3[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\n<mask token>\nfor pad in pads:\n newFootprint += str(pad) + '\\n'\nnewFootprint += footer.strip()\nprint(newFootprint)\n<mask token>\nf.write(newFootprint)\nf.close()\n",
"step-3": "x = 0.0\ny = 0.0\ndrillDiameter = 1.0\npadWidth = 1.6\n<mask token>\nfilename = 'iCEstick.kicad_mod'\nheader = ''\nfooter = ''\nif exists(filename):\n f = open(filename)\n footprint = f.read()\n f.close()\n headerEndIndex = footprint.find('(pad ')\n header = footprint[:headerEndIndex]\n lastPadIndex = headerEndIndex\n while footprint.find('(pad ', lastPadIndex) > -1:\n lastPadIndex = footprint.find('(pad ', lastPadIndex) + 5\n footerStartIndex = footprint.find('))', lastPadIndex) + 2\n footer = footprint[footerStartIndex:]\nif header.find('TE-Connectivity') < 0:\n header = \"\"\"(module iCEstick (layer F.Cu) (tedit 5BD73D6F)\n (fp_text reference REF** (at 0 -12.7) (layer F.SilkS)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n (fp_text value iCEstick (at 0 25.4) (layer F.Fab)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n\"\"\"\n footer = ')'\ndesignators_j1 = ['3V3', 'GND'] + [str(n) for n in range(112, 120)]\ndesignators_j2 = [[str(n) for n in range(78, 82)] + ['GND', '3V3'], ['87',\n '88', '90', '91', 'GND', '3V3']]\ndesignators_j3 = ['3V3', 'GND', '62', '61', '60', '56', '48', '47', '45', '44']\npads_j1 = []\noldX = x\noldY = y\ny -= 21.81\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j1[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\npads_j2 = []\nx = oldX - 5.8\nnewY = oldY - 21.81 + 4.49 + 5 * 2.54\ny = newY\nfor i in range(6):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j2[0][i], through_hole=True, plated\n =True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\nx -= 2.54\ny = newY\nfor i in range(6):\n newPad = Pad(designator=designators_j2[1][i], through_hole=True, plated\n =True, shape=Shape.CIRCLE, at=(x, y), size=(padWidth, padWidth),\n drill=drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\npads_j3 = []\nx = oldX\ny = oldY\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j3[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\npads = pads_j1 + pads_j2 + pads_j3\nnewFootprint = header\nfor pad in pads:\n newFootprint += str(pad) + '\\n'\nnewFootprint += footer.strip()\nprint(newFootprint)\nf = open(filename, 'w')\nf.write(newFootprint)\nf.close()\n",
"step-4": "x = 0.0\ny = 0.0\ndrillDiameter = 1.0\npadWidth = 1.6\nfrom os.path import exists\nfrom pad import *\nfilename = 'iCEstick.kicad_mod'\nheader = ''\nfooter = ''\nif exists(filename):\n f = open(filename)\n footprint = f.read()\n f.close()\n headerEndIndex = footprint.find('(pad ')\n header = footprint[:headerEndIndex]\n lastPadIndex = headerEndIndex\n while footprint.find('(pad ', lastPadIndex) > -1:\n lastPadIndex = footprint.find('(pad ', lastPadIndex) + 5\n footerStartIndex = footprint.find('))', lastPadIndex) + 2\n footer = footprint[footerStartIndex:]\nif header.find('TE-Connectivity') < 0:\n header = \"\"\"(module iCEstick (layer F.Cu) (tedit 5BD73D6F)\n (fp_text reference REF** (at 0 -12.7) (layer F.SilkS)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n (fp_text value iCEstick (at 0 25.4) (layer F.Fab)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n\"\"\"\n footer = ')'\ndesignators_j1 = ['3V3', 'GND'] + [str(n) for n in range(112, 120)]\ndesignators_j2 = [[str(n) for n in range(78, 82)] + ['GND', '3V3'], ['87',\n '88', '90', '91', 'GND', '3V3']]\ndesignators_j3 = ['3V3', 'GND', '62', '61', '60', '56', '48', '47', '45', '44']\npads_j1 = []\noldX = x\noldY = y\ny -= 21.81\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j1[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\npads_j2 = []\nx = oldX - 5.8\nnewY = oldY - 21.81 + 4.49 + 5 * 2.54\ny = newY\nfor i in range(6):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j2[0][i], through_hole=True, plated\n =True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\nx -= 2.54\ny = newY\nfor i in range(6):\n newPad = Pad(designator=designators_j2[1][i], through_hole=True, plated\n =True, shape=Shape.CIRCLE, at=(x, y), size=(padWidth, padWidth),\n drill=drillDiameter)\n pads_j2 += [newPad]\n y -= 2.54\npads_j3 = []\nx = oldX\ny = oldY\nfor i in range(10):\n if i == 0:\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n newPad = Pad(designator=designators_j3[i], through_hole=True, plated=\n True, shape=shape, at=(x, y), size=(padWidth, padWidth), drill=\n drillDiameter)\n pads_j1 += [newPad]\n x -= 2.54\npads = pads_j1 + pads_j2 + pads_j3\nnewFootprint = header\nfor pad in pads:\n newFootprint += str(pad) + '\\n'\nnewFootprint += footer.strip()\nprint(newFootprint)\nf = open(filename, 'w')\nf.write(newFootprint)\nf.close()\n",
"step-5": "#!/usr/bin/python\n\n# Point of origin (connector J3, pad 1, net 3V3)\nx = 0.0\ny = 0.0\n\ndrillDiameter = 1.0\npadWidth = 1.6\n\n\nfrom os.path import exists\nfrom pad import *\n\nfilename=\"iCEstick.kicad_mod\"\n\nheader = \"\"\nfooter = \"\"\n\nif exists(filename):\n # Read existing footprint\n f = open(filename)\n footprint = f.read()\n f.close()\n \n # Find the end of the header\n headerEndIndex = footprint.find(\"(pad \")\n header = footprint[:headerEndIndex]\n \n # Find the end of the pads list\n lastPadIndex = headerEndIndex\n while (footprint.find(\"(pad \", lastPadIndex) > -1):\n lastPadIndex = footprint.find(\"(pad \", lastPadIndex) + 5\n \n footerStartIndex = footprint.find(\"))\", lastPadIndex) + 2\n footer = footprint[footerStartIndex:]\n\nif header.find(\"TE-Connectivity\") < 0:\n header = \\\n\"\"\"(module iCEstick (layer F.Cu) (tedit 5BD73D6F)\n (fp_text reference REF** (at 0 -12.7) (layer F.SilkS)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n (fp_text value iCEstick (at 0 25.4) (layer F.Fab)\n (effects (font (size 1 1) (thickness 0.15)))\n )\n\"\"\"\n footer = \")\"\n\n#\n# Generate pads according to schematic drawing\n#\n\ndesignators_j1 = [\"3V3\", \"GND\"] + [str(n) for n in range(112,120)]\n\ndesignators_j2 = [ \\\n [str(n) for n in range(78,82)] + [\"GND\", \"3V3\"], \\\n [\"87\", \"88\", \"90\", \"91\", \"GND\", \"3V3\"] \\\n ]\n\ndesignators_j3 = [\"3V3\", \"GND\", \"62\", \"61\", \"60\", \"56\", \"48\", \"47\", \"45\", \"44\"]\n\n#\n# J1 connector pad list\n#\npads_j1 = []\noldX = x\noldY = y\ny -= 21.81\nfor i in range(10):\n # The first pad is a rectangle, the remaining ones are circular\n if (i == 0):\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n \n # Create pad object\n newPad = Pad(\n designator = designators_j1[i],\n through_hole = True,\n plated = True,\n shape = shape,\n at = (x, y),\n size = (padWidth, padWidth),\n drill = drillDiameter\n )\n pads_j1 += [newPad]\n x -= 2.54\n\n#\n# J2 connector pad list\n#\npads_j2 = []\nx = oldX - 5.80\nnewY = oldY - 21.81 + 4.49 + 5*2.54\ny = newY\nfor i in range(6):\n # The first pad is a rectangle, the remaining ones are circular\n if (i == 0):\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n \n # Create pad object\n newPad = Pad(\n designator = designators_j2[0][i],\n through_hole = True,\n plated = True,\n shape = shape,\n at = (x, y),\n size = (padWidth, padWidth),\n drill = drillDiameter\n )\n pads_j2 += [newPad]\n y -= 2.54\n\n# Second (inner) row of pins of J2\nx -= 2.54\ny = newY\nfor i in range(6):\n # Create pad object\n newPad = Pad(\n designator = designators_j2[1][i],\n through_hole = True,\n plated = True,\n shape = Shape.CIRCLE,\n at = (x, y),\n size = (padWidth, padWidth),\n drill = drillDiameter\n )\n pads_j2 += [newPad]\n y -= 2.54\n\n#\n# J3 connector pad list\n#\npads_j3 = []\nx = oldX\ny = oldY\nfor i in range(10):\n # The first pad is a rectangle, the remaining ones are circular\n if (i == 0):\n shape = Shape.RECT\n else:\n shape = Shape.CIRCLE\n \n # Create pad object\n newPad = Pad(\n designator = designators_j3[i],\n through_hole = True,\n plated = True,\n shape = shape,\n at = (x, y),\n size = (padWidth, padWidth),\n drill = drillDiameter\n )\n pads_j1 += [newPad]\n x -= 2.54\n\n# Make a list of all pads\npads = pads_j1 + pads_j2 + pads_j3\n\n# Compose new footprint from header, pads and footer\nnewFootprint = header\nfor pad in pads:\n newFootprint += str(pad) + \"\\n\"\nnewFootprint += footer.strip()\n\n# Print generated footprint to 
screen\nprint(newFootprint)\n\n# Save generated footprint to file\nf = open(filename, \"w\")\nf.write(newFootprint)\nf.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@gen.coroutine
def pop_promotion_key(promotion_key):
conn = yield connection()
result = yield r.table('promotion_keys').get(promotion_key).delete(
return_changes=True).run(conn)
if result['changes']:
return result['changes'][0]['old_val']
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@gen.coroutine
def get_promotion_keys():
conn = yield connection()
result = yield r.table('promotion_keys').run(conn)
result = yield dump_cursor(result)
return result
@gen.coroutine
def pop_promotion_key(promotion_key):
conn = yield connection()
result = yield r.table('promotion_keys').get(promotion_key).delete(
return_changes=True).run(conn)
if result['changes']:
return result['changes'][0]['old_val']
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@gen.coroutine
def get_promotion_keys():
conn = yield connection()
result = yield r.table('promotion_keys').run(conn)
result = yield dump_cursor(result)
return result
@gen.coroutine
def pop_promotion_key(promotion_key):
conn = yield connection()
result = yield r.table('promotion_keys').get(promotion_key).delete(
return_changes=True).run(conn)
if result['changes']:
return result['changes'][0]['old_val']
return None
@gen.coroutine
def create_promotion_key(showtime_id):
conn = yield connection()
data = {'showtime_id': showtime_id}
result = yield r.table('promotion_keys').insert(data).run(conn)
promotion_key = result['generated_keys'][0]
return promotion_key
<|reserved_special_token_1|>
from tornado import gen
import rethinkdb as r
from .connection import connection
from .utils import dump_cursor
@gen.coroutine
def get_promotion_keys():
conn = yield connection()
result = yield r.table('promotion_keys').run(conn)
result = yield dump_cursor(result)
return result
@gen.coroutine
def pop_promotion_key(promotion_key):
conn = yield connection()
result = yield r.table('promotion_keys').get(promotion_key).delete(
return_changes=True).run(conn)
if result['changes']:
return result['changes'][0]['old_val']
return None
@gen.coroutine
def create_promotion_key(showtime_id):
conn = yield connection()
data = {'showtime_id': showtime_id}
result = yield r.table('promotion_keys').insert(data).run(conn)
promotion_key = result['generated_keys'][0]
return promotion_key
<|reserved_special_token_1|>
from tornado import gen
import rethinkdb as r
from .connection import connection
from .utils import dump_cursor
@gen.coroutine
def get_promotion_keys():
conn = yield connection()
result = yield r.table('promotion_keys').run(conn)
result = yield dump_cursor(result)
return result
@gen.coroutine
def pop_promotion_key(promotion_key):
conn = yield connection()
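    # delete(return_changes=True) makes RethinkDB include the removed document
    # in the result under ['changes'][0]['old_val'], so this is an atomic
    # get-and-delete rather than a separate read followed by a delete.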
result = yield r.table('promotion_keys').\
get(promotion_key).delete(return_changes=True).run(conn)
if result['changes']:
return result['changes'][0]['old_val']
return None
@gen.coroutine
def create_promotion_key(showtime_id):
conn = yield connection()
data = {
'showtime_id': showtime_id
}
result = yield r.table('promotion_keys').insert(data).run(conn)
promotion_key = result['generated_keys'][0]
return promotion_key
|
flexible
|
{
"blob_id": "66cdfdfa797c9991e5cb169c4b94a1e7041ca458",
"index": 4772,
"step-1": "<mask token>\n\n\n@gen.coroutine\ndef pop_promotion_key(promotion_key):\n conn = yield connection()\n result = yield r.table('promotion_keys').get(promotion_key).delete(\n return_changes=True).run(conn)\n if result['changes']:\n return result['changes'][0]['old_val']\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@gen.coroutine\ndef get_promotion_keys():\n conn = yield connection()\n result = yield r.table('promotion_keys').run(conn)\n result = yield dump_cursor(result)\n return result\n\n\n@gen.coroutine\ndef pop_promotion_key(promotion_key):\n conn = yield connection()\n result = yield r.table('promotion_keys').get(promotion_key).delete(\n return_changes=True).run(conn)\n if result['changes']:\n return result['changes'][0]['old_val']\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@gen.coroutine\ndef get_promotion_keys():\n conn = yield connection()\n result = yield r.table('promotion_keys').run(conn)\n result = yield dump_cursor(result)\n return result\n\n\n@gen.coroutine\ndef pop_promotion_key(promotion_key):\n conn = yield connection()\n result = yield r.table('promotion_keys').get(promotion_key).delete(\n return_changes=True).run(conn)\n if result['changes']:\n return result['changes'][0]['old_val']\n return None\n\n\n@gen.coroutine\ndef create_promotion_key(showtime_id):\n conn = yield connection()\n data = {'showtime_id': showtime_id}\n result = yield r.table('promotion_keys').insert(data).run(conn)\n promotion_key = result['generated_keys'][0]\n return promotion_key\n",
"step-4": "from tornado import gen\nimport rethinkdb as r\nfrom .connection import connection\nfrom .utils import dump_cursor\n\n\n@gen.coroutine\ndef get_promotion_keys():\n conn = yield connection()\n result = yield r.table('promotion_keys').run(conn)\n result = yield dump_cursor(result)\n return result\n\n\n@gen.coroutine\ndef pop_promotion_key(promotion_key):\n conn = yield connection()\n result = yield r.table('promotion_keys').get(promotion_key).delete(\n return_changes=True).run(conn)\n if result['changes']:\n return result['changes'][0]['old_val']\n return None\n\n\n@gen.coroutine\ndef create_promotion_key(showtime_id):\n conn = yield connection()\n data = {'showtime_id': showtime_id}\n result = yield r.table('promotion_keys').insert(data).run(conn)\n promotion_key = result['generated_keys'][0]\n return promotion_key\n",
"step-5": "from tornado import gen\nimport rethinkdb as r\n\nfrom .connection import connection\nfrom .utils import dump_cursor\n\n\n@gen.coroutine\ndef get_promotion_keys():\n conn = yield connection()\n result = yield r.table('promotion_keys').run(conn)\n result = yield dump_cursor(result)\n return result\n\n\n@gen.coroutine\ndef pop_promotion_key(promotion_key):\n conn = yield connection()\n result = yield r.table('promotion_keys').\\\n get(promotion_key).delete(return_changes=True).run(conn)\n if result['changes']:\n return result['changes'][0]['old_val']\n return None\n\n\n@gen.coroutine\ndef create_promotion_key(showtime_id):\n conn = yield connection()\n data = {\n 'showtime_id': showtime_id\n }\n result = yield r.table('promotion_keys').insert(data).run(conn)\n promotion_key = result['generated_keys'][0]\n return promotion_key\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#https://codeforces.com/problemset/problem/1321/A
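# x counts problems solved only by the first robot, y those solved only by
# the second. The answer is the smallest k with x*k > y, i.e. y//x + 1
# (min(y%x+1, 1) is always 1 since y%x >= 0), or -1 when x == 0.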
n=int(input())
r=list(map(int,input().split()))
b=list(map(int,input().split()))
l=[0]*n
x=0
y=0
for i in range(n):
if r[i]-b[i]==1:
x+=1
elif r[i]-b[i]==-1:
y+=1
if x==0:
print(-1)
else:
print(y//x+min(y%x+1,1))
|
normal
|
{
"blob_id": "7aa6bba8483082354a94ed5c465e59a0fc97fe23",
"index": 1248,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(n):\n if r[i] - b[i] == 1:\n x += 1\n elif r[i] - b[i] == -1:\n y += 1\nif x == 0:\n print(-1)\nelse:\n print(y // x + min(y % x + 1, 1))\n",
"step-3": "n = int(input())\nr = list(map(int, input().split()))\nb = list(map(int, input().split()))\nl = [0] * n\nx = 0\ny = 0\nfor i in range(n):\n if r[i] - b[i] == 1:\n x += 1\n elif r[i] - b[i] == -1:\n y += 1\nif x == 0:\n print(-1)\nelse:\n print(y // x + min(y % x + 1, 1))\n",
"step-4": "#https://codeforces.com/problemset/problem/1321/A\n\nn=int(input())\nr=list(map(int,input().split()))\nb=list(map(int,input().split()))\nl=[0]*n\nx=0\ny=0\nfor i in range(n):\n if r[i]-b[i]==1:\n x+=1\n elif r[i]-b[i]==-1:\n y+=1\nif x==0:\n print(-1)\nelse:\n print(y//x+min(y%x+1,1))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import requests
import json
import datetime
from bs4 import BeautifulSoup
from pymongo import MongoClient, UpdateOne
import sys
#usage: python freesound_crawler.py [from_page] [to_page]
SOUND_URL = "https://freesound.org/apiv2/sounds/"
SEARCH_URL = "https://freesound.org/apiv2/search/text/"
AUTORIZE_URL = "https://freesound.org/apiv2/oauth2/authorize"
#freesound account information
from freesound_account_info import *
#mongo db information
from mongodb_info import *
error = []
MAX_PAGE = 24086
#connect to mongodb, return None if connection failure
def getDB():
try:
client = MongoClient('mongodb://%s:%s@%s:%s/edudata' % (MONGO_USER, MONGO_PASSWORD, MONGO_HOST, MONGO_PORT))
client.server_info()
db = client.edudata
return db.freesound
except Exception as e:
print "Unexpected error:", e
return None
#send request with access token
def sendRequest(url, token):
try:
header = {'Authorization' : "Bearer " + token};
res = requests.get(url, headers = header);
return json.loads( res.text )
except Exception as e:
print "Failed to send request(" , url, "):", e
error.append({'url':url, 'type':'send request'})
return None
def getMaxPage(token):
data = sendRequest(SEARCH_URL,token)
try:
return data['count']/ 15 + 1
except:
print ("Failed to update max page")
return MAX_PAGE
#get sound info with access token
def getSoundInfo( sound_id, token ):
try:
data = {}
sound_data = sendRequest(SOUND_URL + str(sound_id), token)
if sound_data is None:
raise Exception('json is none')
data['_id'] = sound_data[ 'id' ];
data['url'] = sound_data[ 'url' ];
data['title'] = sound_data[ 'name' ];
data['creator'] = sound_data[ 'username' ];
data['createdate'] = sound_data[ 'created' ];
data['description'] = sound_data[ 'description' ];
data['download_url'] = sound_data['download']
data['keyword'] = []
for tag in sound_data[ 'tags' ]:
data['keyword'].append(tag)
data['previews'] = []
for i in sound_data['previews'].keys():
data['previews'].append({i:sound_data['previews'][i]})
data['type'] = sound_data[ 'type' ];
data['bitrate'] = sound_data[ 'bitrate' ];
data['channels'] = sound_data[ 'channels' ];
data['downlaod'] = sound_data[ 'num_downloads' ];
data['license'] = sound_data[ 'license' ];
data['filesize'] = sound_data[ 'filesize' ];
return data;
except Exception as e:
print "Error occurs while getting sound info", sound_id, ": ", sys.exc_info()
print sound_data
return None
#execute queries
def insertDB( db, query):
if query is not None:
result = db.bulk_write(query, ordered = False)
print result.bulk_api_result
def crawling(token, db, page=1, page_to = MAX_PAGE):
header = {'Authorization' : "Bearer " + token};
print "From page", page, "to page", page_to
for i in range(page, page_to + 1):
if i > MAX_PAGE:
print "Meet max page", MAX_PAGE
break;
url = SEARCH_URL + "?page=" + str(i)
list_data = sendRequest(url, token)
try:
update_queries = []
for d in list_data['results']:
data = getSoundInfo( d['id'], token);
if data is None:
error.append({'id': d['id']});
continue
print data
cuurent_time = datetime.datetime.utcnow();
data['update_at'] = cuurent_time
update_queries.append(UpdateOne({'_id':data['_id']}, {'$set': data, '$setOnInsert':{'created_at':cuurent_time}},True))
if db is not None:
insertDB(db, update_queries)
print "Page", i, "is Done"
except Exception as e:
print "Error in page", i, ":", e
error.append({'Exception':e, 'type':'parse data', 'data':list_data})
print list_data
page += 1
if __name__ == '__main__':
db = getDB();
if db is None:
print "No db connected"
exit()
ACCESS_TOKEN = getAccessToken();
if ACCESS_TOKEN is None:
print "Can't get access token"
exit()
MAX_PAGE = getMaxPage(ACCESS_TOKEN)
from_page = 1
to_page = MAX_PAGE
if len(sys.argv) > 1:
from_page = int(sys.argv[1])
if len(sys.argv) > 2:
to_page = int(sys.argv[2])
crawling(ACCESS_TOKEN, db, from_page, to_page)
print "Error log: ",error
|
normal
|
{
"blob_id": "2294dc21ede759e755e51471705fa8ef784528a7",
"index": 8707,
"step-1": "import requests\nimport json\nimport datetime\nfrom bs4 import BeautifulSoup\nfrom pymongo import MongoClient, UpdateOne\nimport sys\n\n#usage: python freesound_crawler.py [from_page] [to_page]\n\nSOUND_URL = \"https://freesound.org/apiv2/sounds/\"\nSEARCH_URL = \"https://freesound.org/apiv2/search/text/\"\nAUTORIZE_URL = \"https://freesound.org/apiv2/oauth2/authorize\"\n\n#freesound account imformation\nfrom freesound_account_info import * \n\n#mongo db imformation\nfrom mongodb_info import * \n\nerror = []\nMAX_PAGE = 24086\n\n#connect to mongodb, return None if connection failure\ndef getDB():\n\ttry:\n\t\tclient = MongoClient('mongodb://%s:%s@%s:%s/edudata' % (MONGO_USER, MONGO_PASSWORD, MONGO_HOST, MONGO_PORT))\n\t\tclient.server_info()\n\t\tdb = client.edudata\n\t\treturn db.freesound\n\texcept Exception as e:\n\t\tprint \"Unexpected error:\", e\n\t\treturn None\n\n\n\n#send request with access token\ndef sendRequest(url, token):\n\ttry:\n\t\theader = {'Authorization' : \"Bearer \" + token};\n\t\tres = requests.get(url, headers = header);\n\t\treturn json.loads( res.text )\n\texcept Exception as e:\n\t\tprint \"Failed to send request(\" , url, \"):\", e\n\t\terror.append({'url':url, 'type':'send request'})\n\t\treturn None\n\ndef getMaxPage(token):\n\tdata = sendRequest(SEARCH_URL,token)\n\ttry:\n\t\treturn data['count']/ 15 + 1\n\texcept:\n\t\tprint (\"Failed to update max page\")\n\t\treturn MAX_PAGE\n\n#get sound info with access token\ndef getSoundInfo( sound_id, token ):\n\ttry:\n\t\tdata = {}\n\t\tsound_data = sendRequest(SOUND_URL + str(sound_id), token)\n\t\tif sound_data is None:\n\t\t\traise Exception('json is none')\n\t\tdata['_id'] = sound_data[ 'id' ];\n\t\tdata['url'] = sound_data[ 'url' ];\n\t\tdata['title'] = sound_data[ 'name' ];\n\t\tdata['creator'] = sound_data[ 'username' ];\n\t\tdata['createdate'] = sound_data[ 'created' ];\n\t\tdata['description'] = sound_data[ 'description' ];\n\t\tdata['download_url'] = sound_data['download']\n\n\t\tdata['keyword'] = []\n\t\tfor tag in sound_data[ 'tags' ]:\n\t\t\tdata['keyword'].append(tag)\n\t\tdata['previews'] = []\n\t\tfor i in sound_data['previews'].keys():\n\t\t\tdata['previews'].append({i:sound_data['previews'][i]})\n\t\t\t\n\t\tdata['type'] = sound_data[ 'type' ];\n\t\tdata['bitrate'] = sound_data[ 'bitrate' ];\n\t\tdata['channels'] = sound_data[ 'channels' ];\n\t\tdata['downlaod'] = sound_data[ 'num_downloads' ];\n\t\tdata['license'] = sound_data[ 'license' ];\n\t\tdata['filesize'] = sound_data[ 'filesize' ];\n\t\treturn data;\n\texcept Exception as e:\n\t\tprint \"Error occurs while getting sound info\", sound_id, \": \", sys.exc_info()\n\t\tprint sound_data\n\t\treturn None\n\n#execute queries \ndef insertDB( db, query):\n\tif query is not None:\n\t\tresult = db.bulk_write(query, ordered = False)\n\t\tprint result.bulk_api_result\n\n\ndef crawling(token, db, page=1, page_to = MAX_PAGE):\n\theader = {'Authorization' : \"Bearer \" + token};\n\n\tprint \"From page\", page, \"to page\", page_to\n\tfor i in range(page, page_to + 1):\n\t\tif i > MAX_PAGE:\n\t\t\tprint \"Meet max page\", MAX_PAGE\n\t\t\tbreak;\n\t\turl = SEARCH_URL + \"?page=\" + str(i)\n\t\tlist_data = sendRequest(url, token)\n\t\t\n\t\ttry:\n\t\t\tupdate_queries = []\n\t\t\tfor d in list_data['results']:\n\t\t\t\tdata = getSoundInfo( d['id'], token);\n\t\t\t\tif data is None:\n\t\t\t\t\terror.append({'id': d['id']});\n\t\t\t\t\tcontinue\n\t\t\t\tprint data\n\t\t\t\tcuurent_time = datetime.datetime.utcnow();\n\t\t\t\tdata['update_at'] = 
cuurent_time\n\t\t\t\tupdate_queries.append(UpdateOne({'_id':data['_id']}, {'$set': data, '$setOnInsert':{'created_at':cuurent_time}},True))\n\t\t\tif db is not None:\n\t\t\t\tinsertDB(db, update_queries)\n\n\t\t\tprint \"Page\", i, \"is Done\"\n\t\texcept Exception as e:\n\t\t\tprint \"Error in page\", i, \":\", e\n\t\t\terror.append({'Exception':e, 'type':'parse data', 'data':list_data})\n\t\t\tprint list_data\n\t\tpage += 1\n\n\nif __name__ == '__main__':\n\tdb = getDB();\n\tif db is None:\n\t\tprint \"No db connected\"\n\t\texit()\n\tACCESS_TOKEN = getAccessToken();\n\tif ACCESS_TOKEN is None:\n\t\tprint \"Can't get access token\"\n\t\texit()\n\n\tMAX_PAGE = getMaxPage(ACCESS_TOKEN)\n\tfrom_page = 1\n\tto_page = MAX_PAGE\n\tif len(sys.argv) > 1:\n\t\tfrom_page = int(sys.argv[1])\n\tif len(sys.argv) > 2:\n\t\tto_page = int(sys.argv[2])\n\tcrawling(ACCESS_TOKEN, db, from_page, to_page)\n\tprint \"Error log: \",error",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('interface', '0010_auto_20191104_2107')]
operations = [migrations.AlterField(model_name='submission', name=
'review_score', field=models.DecimalField(decimal_places=2,
editable=False, max_digits=5, null=True)), migrations.AlterField(
model_name='submission', name='total_score', field=models.
DecimalField(decimal_places=2, editable=False, max_digits=5, null=
True))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('interface', '0010_auto_20191104_2107')]
operations = [migrations.AlterField(model_name='submission', name=
'review_score', field=models.DecimalField(decimal_places=2,
editable=False, max_digits=5, null=True)), migrations.AlterField(
model_name='submission', name='total_score', field=models.
DecimalField(decimal_places=2, editable=False, max_digits=5, null=
True))]
<|reserved_special_token_1|>
# Generated by Django 2.2.6 on 2019-11-13 13:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('interface', '0010_auto_20191104_2107'),
]
operations = [
migrations.AlterField(
model_name='submission',
name='review_score',
field=models.DecimalField(decimal_places=2, editable=False, max_digits=5, null=True),
),
migrations.AlterField(
model_name='submission',
name='total_score',
field=models.DecimalField(decimal_places=2, editable=False, max_digits=5, null=True),
),
]
|
flexible
|
{
"blob_id": "3b42e218acf1c93fab3a0893efa8bf32a274eb23",
"index": 448,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('interface', '0010_auto_20191104_2107')]\n operations = [migrations.AlterField(model_name='submission', name=\n 'review_score', field=models.DecimalField(decimal_places=2,\n editable=False, max_digits=5, null=True)), migrations.AlterField(\n model_name='submission', name='total_score', field=models.\n DecimalField(decimal_places=2, editable=False, max_digits=5, null=\n True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('interface', '0010_auto_20191104_2107')]\n operations = [migrations.AlterField(model_name='submission', name=\n 'review_score', field=models.DecimalField(decimal_places=2,\n editable=False, max_digits=5, null=True)), migrations.AlterField(\n model_name='submission', name='total_score', field=models.\n DecimalField(decimal_places=2, editable=False, max_digits=5, null=\n True))]\n",
"step-5": "# Generated by Django 2.2.6 on 2019-11-13 13:11\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('interface', '0010_auto_20191104_2107'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='submission',\n name='review_score',\n field=models.DecimalField(decimal_places=2, editable=False, max_digits=5, null=True),\n ),\n migrations.AlterField(\n model_name='submission',\n name='total_score',\n field=models.DecimalField(decimal_places=2, editable=False, max_digits=5, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pull_links(artist)
<|reserved_special_token_0|>
os.remove('./links.json')
shutil.rmtree('./songs')
<|reserved_special_token_0|>
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
artist = sys.argv[1].title()
pull_links(artist)
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
sentimentScores = getSentimentScores(lyrics)
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
<|reserved_special_token_1|>
from pull_links import pull_links
from scrape_lyrics import scrape_lyrics
from vader_sentiment import getSentimentScores
import sys
import os
import shutil
artist = sys.argv[1].title()
pull_links(artist)
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
sentimentScores = getSentimentScores(lyrics)
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
<|reserved_special_token_1|>
from pull_links import pull_links
from scrape_lyrics import scrape_lyrics
from vader_sentiment import getSentimentScores
import sys
import os
import shutil
# Get user input for artist -> capitalize it
artist = sys.argv[1].title()
pull_links(artist)
# Dictionary w/ song name as key and lyrics as value
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
# Dictionary w/ song name as key and sentiment data as value
sentimentScores = getSentimentScores(lyrics)
# Print out sentimentScores
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
|
flexible
|
{
"blob_id": "5055743c9ed8c92bcfab5379162f28315409ff91",
"index": 2200,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npull_links(artist)\n<mask token>\nos.remove('./links.json')\nshutil.rmtree('./songs')\n<mask token>\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-3": "<mask token>\nartist = sys.argv[1].title()\npull_links(artist)\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\nsentimentScores = getSentimentScores(lyrics)\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-4": "from pull_links import pull_links\nfrom scrape_lyrics import scrape_lyrics\nfrom vader_sentiment import getSentimentScores\nimport sys\nimport os\nimport shutil\nartist = sys.argv[1].title()\npull_links(artist)\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\nsentimentScores = getSentimentScores(lyrics)\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-5": "from pull_links import pull_links\nfrom scrape_lyrics import scrape_lyrics\nfrom vader_sentiment import getSentimentScores\nimport sys\nimport os\nimport shutil\n\n# Get user input for artist -> capitalize it\nartist = sys.argv[1].title()\n\npull_links(artist)\n# Dictionary w/ song name as key and lyrics as value\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\n# Dictionary w/ song name as key and sentiment data as value\nsentimentScores = getSentimentScores(lyrics)\n# Print out sentimentScores\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import json
from bottle import request, response, route, get, run, default_app
app = application = default_app()
@route('/candidate/hired', method=['POST'])
def update_delete_handler():
response.content_type = 'application/json'
return json.dumps({"hired": True})
def main():
run(host='localhost', port=8080)
|
normal
|
{
"blob_id": "50e759ff24cdb8fbb5a98d9381afb13ebc1a74f1",
"index": 7317,
"step-1": "<mask token>\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-3": "<mask token>\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-4": "import json\nfrom bottle import request, response, route, get, run, default_app\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-5": "import json\n\nfrom bottle import request, response, route, get, run, default_app\n\n\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({\"hired\": True})\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def group_by_owners(files):
print(files, type(files))
for k, v in files.items():
print(k, v)
for f in files:
print(f[0])
for g in v:
print(g)
_files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)
print('Sorted: ', _files, type(_files))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def group_by_owners(files):
print(files, type(files))
for k, v in files.items():
print(k, v)
for f in files:
print(f[0])
for g in v:
print(g)
_files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)
print('Sorted: ', _files, type(_files))
<|reserved_special_token_0|>
print(group_by_owners(files))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def group_by_owners(files):
print(files, type(files))
for k, v in files.items():
print(k, v)
for f in files:
print(f[0])
for g in v:
print(g)
_files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)
print('Sorted: ', _files, type(_files))
files = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}
print(group_by_owners(files))
<|reserved_special_token_1|>
import operator
def group_by_owners(files):
print(files, type(files))
for k, v in files.items():
print(k, v)
for f in files:
print(f[0])
for g in v:
print(g)
_files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)
print('Sorted: ', _files, type(_files))
files = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}
print(group_by_owners(files))
<|reserved_special_token_1|>
import operator
def group_by_owners(files):
print(files, type(files))
for k, v in files.items():
# for v in k:
print(k, v)
# if k[v] == k[v]:
# print("same", v)
for f in files:
print(f[0])
for g in v:
print(g)
_files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)
print("Sorted: ", _files, type(_files))
# files = files.items()
# print(files, type(files))
# return None
files = {
'Input.txt': 'Randy',
'Code.py': 'Stan',
'Output.txt': 'Randy'
}
print(group_by_owners(files))
|
flexible
|
{
"blob_id": "4843239a41fe1ecff6c8c3a97aceef76a3785647",
"index": 7334,
"step-1": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\n<mask token>\nprint(group_by_owners(files))\n",
"step-3": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\nfiles = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}\nprint(group_by_owners(files))\n",
"step-4": "import operator\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\nfiles = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}\nprint(group_by_owners(files))\n",
"step-5": "import operator\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n # for v in k:\n print(k, v)\n # if k[v] == k[v]:\n # print(\"same\", v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print(\"Sorted: \", _files, type(_files))\n\n # files = files.items()\n # print(files, type(files))\n\n # return None\n\n\nfiles = {\n 'Input.txt': 'Randy',\n 'Code.py': 'Stan',\n 'Output.txt': 'Randy'\n}\nprint(group_by_owners(files))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plt.imshow(img_var, cmap='gray')
<|reserved_special_token_0|>
plt.imshow(filtered_image, cmap='gray')
<|reserved_special_token_0|>
plt.imshow(entropy_img)
plt.hist(entropy_img.flat, bins=100, range=(0, 7))
<|reserved_special_token_0|>
plt.imshow(binary)
<|reserved_special_token_0|>
print('Scratched area is: ', scratch_area, 'Square pixels')
<|reserved_special_token_0|>
print('Scratched area in sq. microns is: ', scratch_area * scale ** 2,
'Square pixels')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
img = io.imread('texture.png', as_gray=True)
<|reserved_special_token_0|>
k = 7
img_mean = ndimage.uniform_filter(img, (k, k))
img_sqr_mean = ndimage.uniform_filter(img ** 2, (k, k))
img_var = img_sqr_mean - img_mean ** 2
plt.imshow(img_var, cmap='gray')
ksize = 45
theta = np.pi / 2
kernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype
=cv2.CV_32F)
filtered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)
plt.imshow(filtered_image, cmap='gray')
<|reserved_special_token_0|>
entropy_img = entropy(img, disk(15))
plt.imshow(entropy_img)
plt.hist(entropy_img.flat, bins=100, range=(0, 7))
thresh = threshold_otsu(entropy_img)
binary = entropy_img <= thresh
plt.imshow(binary)
scratch_area = np.sum(binary == 1)
print('Scratched area is: ', scratch_area, 'Square pixels')
scale = 0.45
print('Scratched area in sq. microns is: ', scratch_area * scale ** 2,
'Square pixels')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import matplotlib.pyplot as plt
from skimage import io
import numpy as np
from skimage.filters import threshold_otsu
import cv2
img = io.imread('texture.png', as_gray=True)
from scipy import ndimage
k = 7
img_mean = ndimage.uniform_filter(img, (k, k))
img_sqr_mean = ndimage.uniform_filter(img ** 2, (k, k))
img_var = img_sqr_mean - img_mean ** 2
plt.imshow(img_var, cmap='gray')
ksize = 45
theta = np.pi / 2
kernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype
=cv2.CV_32F)
filtered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)
plt.imshow(filtered_image, cmap='gray')
from skimage.filters.rank import entropy
from skimage.morphology import disk
entropy_img = entropy(img, disk(15))
plt.imshow(entropy_img)
plt.hist(entropy_img.flat, bins=100, range=(0, 7))
thresh = threshold_otsu(entropy_img)
binary = entropy_img <= thresh
plt.imshow(binary)
scratch_area = np.sum(binary == 1)
print('Scratched area is: ', scratch_area, 'Square pixels')
scale = 0.45
print('Scratched area in sq. microns is: ', scratch_area * scale ** 2,
'Square pixels')
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 14 09:54:28 2020
@author: rushirajsinhparmar
"""
import matplotlib.pyplot as plt
from skimage import io
import numpy as np
from skimage.filters import threshold_otsu
import cv2
img = io.imread("texture.png", as_gray=True)
##################################################
#Variance - not a great way to quantify texture
from scipy import ndimage
k=7
img_mean = ndimage.uniform_filter(img, (k, k))
img_sqr_mean = ndimage.uniform_filter(img**2, (k, k))
img_var = img_sqr_mean - img_mean**2
plt.imshow(img_var, cmap='gray')
#######################################################
#GABOR - A great filter for texture but usually efficient
#if we know exact parameters. Good choice for generating features
#for machine learning
ksize=45
theta=np.pi/2
kernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype=cv2.CV_32F)
filtered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)
plt.imshow(filtered_image, cmap='gray')
###########################################################
#Entropy
#Entropy quantifies disorder.
#Since cell region has high variation in pixel values the entropy would be
#higher compared to scratch region
from skimage.filters.rank import entropy
from skimage.morphology import disk
entropy_img = entropy(img, disk(15))
plt.imshow(entropy_img)
#use otsu to threshold high vs low entropy regions.
plt.hist(entropy_img.flat, bins=100, range=(0,7)) #.flat returns the flattened numpy array (1D)
thresh = threshold_otsu(entropy_img)
#binarize the entropy image
binary = entropy_img <= thresh
plt.imshow(binary)
#Sum all pixels in the scratch region (values =1)
scratch_area = np.sum(binary == 1)
print("Scratched area is: ", scratch_area, "Square pixels")
scale = 0.45 # microns/pixel
print("Scratched area in sq. microns is: ", scratch_area*((scale)**2), "Square pixels")
|
flexible
|
{
"blob_id": "ab6c3d3c6faa2d1fe5e064dbdebd8904b9434f15",
"index": 5214,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.imshow(img_var, cmap='gray')\n<mask token>\nplt.imshow(filtered_image, cmap='gray')\n<mask token>\nplt.imshow(entropy_img)\nplt.hist(entropy_img.flat, bins=100, range=(0, 7))\n<mask token>\nplt.imshow(binary)\n<mask token>\nprint('Scratched area is: ', scratch_area, 'Square pixels')\n<mask token>\nprint('Scratched area in sq. microns is: ', scratch_area * scale ** 2,\n 'Square pixels')\n",
"step-3": "<mask token>\nimg = io.imread('texture.png', as_gray=True)\n<mask token>\nk = 7\nimg_mean = ndimage.uniform_filter(img, (k, k))\nimg_sqr_mean = ndimage.uniform_filter(img ** 2, (k, k))\nimg_var = img_sqr_mean - img_mean ** 2\nplt.imshow(img_var, cmap='gray')\nksize = 45\ntheta = np.pi / 2\nkernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype\n =cv2.CV_32F)\nfiltered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)\nplt.imshow(filtered_image, cmap='gray')\n<mask token>\nentropy_img = entropy(img, disk(15))\nplt.imshow(entropy_img)\nplt.hist(entropy_img.flat, bins=100, range=(0, 7))\nthresh = threshold_otsu(entropy_img)\nbinary = entropy_img <= thresh\nplt.imshow(binary)\nscratch_area = np.sum(binary == 1)\nprint('Scratched area is: ', scratch_area, 'Square pixels')\nscale = 0.45\nprint('Scratched area in sq. microns is: ', scratch_area * scale ** 2,\n 'Square pixels')\n",
"step-4": "<mask token>\nimport matplotlib.pyplot as plt\nfrom skimage import io\nimport numpy as np\nfrom skimage.filters import threshold_otsu\nimport cv2\nimg = io.imread('texture.png', as_gray=True)\nfrom scipy import ndimage\nk = 7\nimg_mean = ndimage.uniform_filter(img, (k, k))\nimg_sqr_mean = ndimage.uniform_filter(img ** 2, (k, k))\nimg_var = img_sqr_mean - img_mean ** 2\nplt.imshow(img_var, cmap='gray')\nksize = 45\ntheta = np.pi / 2\nkernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype\n =cv2.CV_32F)\nfiltered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)\nplt.imshow(filtered_image, cmap='gray')\nfrom skimage.filters.rank import entropy\nfrom skimage.morphology import disk\nentropy_img = entropy(img, disk(15))\nplt.imshow(entropy_img)\nplt.hist(entropy_img.flat, bins=100, range=(0, 7))\nthresh = threshold_otsu(entropy_img)\nbinary = entropy_img <= thresh\nplt.imshow(binary)\nscratch_area = np.sum(binary == 1)\nprint('Scratched area is: ', scratch_area, 'Square pixels')\nscale = 0.45\nprint('Scratched area in sq. microns is: ', scratch_area * scale ** 2,\n 'Square pixels')\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Sep 14 09:54:28 2020\n\n@author: rushirajsinhparmar\n\"\"\"\n\nimport matplotlib.pyplot as plt\nfrom skimage import io\n\nimport numpy as np\nfrom skimage.filters import threshold_otsu\nimport cv2\n\nimg = io.imread(\"texture.png\", as_gray=True)\n\n##################################################\n#Variance - not a great way to quantify texture\nfrom scipy import ndimage \nk=7\nimg_mean = ndimage.uniform_filter(img, (k, k))\nimg_sqr_mean = ndimage.uniform_filter(img**2, (k, k))\nimg_var = img_sqr_mean - img_mean**2\nplt.imshow(img_var, cmap='gray')\n\n#######################################################\n#GABOR - A great filter for texture but usually efficient\n#if we know exact parameters. Good choice for generating features\n#for machine learning\n\nksize=45\ntheta=np.pi/2\nkernel = cv2.getGaborKernel((ksize, ksize), 5.0, theta, 10.0, 0.9, 0, ktype=cv2.CV_32F)\nfiltered_image = cv2.filter2D(img, cv2.CV_8UC3, kernel)\nplt.imshow(filtered_image, cmap='gray')\n\n###########################################################\n#Entropy\n#Entropy quantifies disorder.\n#Since cell region has high variation in pixel values the entropy would be\n#higher compared to scratch region\nfrom skimage.filters.rank import entropy\nfrom skimage.morphology import disk\nentropy_img = entropy(img, disk(15))\nplt.imshow(entropy_img) \n\n#use otsu to threshold high vs low entropy regions.\nplt.hist(entropy_img.flat, bins=100, range=(0,7)) #.flat returns the flattened numpy array (1D)\n\nthresh = threshold_otsu(entropy_img) \n\n#binarize the entropy image \nbinary = entropy_img <= thresh\nplt.imshow(binary)\n\n#Sum all pixels in the scratch region (values =1)\nscratch_area = np.sum(binary == 1)\nprint(\"Scratched area is: \", scratch_area, \"Square pixels\")\n\nscale = 0.45 # microns/pixel\nprint(\"Scratched area in sq. microns is: \", scratch_area*((scale)**2), \"Square pixels\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cursor.execute(sql)
db.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
db = pymysql.connect('localhost', 'root', '', 'order_db', use_unicode=True,
charset='utf8')
cursor = db.cursor()
sql = 'DROP TABLE custdetail'
cursor.execute(sql)
db.close()
<|reserved_special_token_1|>
import pymysql
db = pymysql.connect('localhost', 'root', '', 'order_db', use_unicode=True,
charset='utf8')
cursor = db.cursor()
sql = 'DROP TABLE custdetail'
cursor.execute(sql)
db.close()
<|reserved_special_token_1|>
import pymysql
db = pymysql.connect( "localhost", "root", "", "order_db",
use_unicode=True, charset="utf8")
cursor = db.cursor()
sql = "DROP TABLE custdetail"
cursor.execute(sql)
db.close()
|
flexible
|
{
"blob_id": "1aa2bff245322a34438cc836e23f430926dfac6c",
"index": 3414,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncursor.execute(sql)\ndb.close()\n",
"step-3": "<mask token>\ndb = pymysql.connect('localhost', 'root', '', 'order_db', use_unicode=True,\n charset='utf8')\ncursor = db.cursor()\nsql = 'DROP TABLE custdetail'\ncursor.execute(sql)\ndb.close()\n",
"step-4": "import pymysql\ndb = pymysql.connect('localhost', 'root', '', 'order_db', use_unicode=True,\n charset='utf8')\ncursor = db.cursor()\nsql = 'DROP TABLE custdetail'\ncursor.execute(sql)\ndb.close()\n",
"step-5": "import pymysql\ndb = pymysql.connect( \"localhost\", \"root\", \"\", \"order_db\",\n use_unicode=True, charset=\"utf8\") \ncursor = db.cursor()\nsql = \"DROP TABLE custdetail\"\ncursor.execute(sql)\ndb.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import unittest
import ConvertListToDict as cldf
class MyDictTestCase(unittest.TestCase):
def test_Dict(self):
# Testcase1 (len(keys) == len(values))
actualDict1 = cldf.ConvertListsToDict([1, 2, 3],['a','b','c'])
expectedDict1 = {1: 'a', 2: 'b', 3: 'c'}
self.assertEqual(actualDict1, expectedDict1)
# Testcase2 (len(keys) < len(values))
actualDict2 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c','d','e','f'])
expectedDict2 = {1: 'a', 2: 'b', 3: 'c'}
self.assertEqual(actualDict2, expectedDict2)
# Testcase3 (len(keys) > len(values))
actualDict2 = cldf.ConvertListsToDict([1, 2, 3, 4, 5, 6, 7], ['a', 'b', 'c'])
expectedDict2 = {1: 'a', 2: 'b', 3: 'c', 4: None, 5: None, 6: None, 7: None}
self.assertEqual(actualDict2, expectedDict2)
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "3421c3b839721694945bdbb4f17183bceaed5296",
"index": 786,
"step-1": "<mask token>\n\n\nclass MyDictTestCase(unittest.TestCase):\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyDictTestCase(unittest.TestCase):\n\n def test_Dict(self):\n actualDict1 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c'])\n expectedDict1 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict1, expectedDict1)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c',\n 'd', 'e', 'f'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict2, expectedDict2)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3, 4, 5, 6, 7], ['a',\n 'b', 'c'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c', (4): None, (5): None,\n (6): None, (7): None}\n self.assertEqual(actualDict2, expectedDict2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyDictTestCase(unittest.TestCase):\n\n def test_Dict(self):\n actualDict1 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c'])\n expectedDict1 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict1, expectedDict1)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c',\n 'd', 'e', 'f'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict2, expectedDict2)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3, 4, 5, 6, 7], ['a',\n 'b', 'c'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c', (4): None, (5): None,\n (6): None, (7): None}\n self.assertEqual(actualDict2, expectedDict2)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nimport ConvertListToDict as cldf\n\n\nclass MyDictTestCase(unittest.TestCase):\n\n def test_Dict(self):\n actualDict1 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c'])\n expectedDict1 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict1, expectedDict1)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c',\n 'd', 'e', 'f'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c'}\n self.assertEqual(actualDict2, expectedDict2)\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3, 4, 5, 6, 7], ['a',\n 'b', 'c'])\n expectedDict2 = {(1): 'a', (2): 'b', (3): 'c', (4): None, (5): None,\n (6): None, (7): None}\n self.assertEqual(actualDict2, expectedDict2)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import unittest\nimport ConvertListToDict as cldf\n\nclass MyDictTestCase(unittest.TestCase):\n def test_Dict(self):\n # Testcase1 (len(keys) == len(values))\n actualDict1 = cldf.ConvertListsToDict([1, 2, 3],['a','b','c'])\n expectedDict1 = {1: 'a', 2: 'b', 3: 'c'}\n self.assertEqual(actualDict1, expectedDict1)\n\n # Testcase2 (len(keys) < len(values))\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3], ['a', 'b', 'c','d','e','f'])\n expectedDict2 = {1: 'a', 2: 'b', 3: 'c'}\n self.assertEqual(actualDict2, expectedDict2)\n\n # Testcase3 (len(keys) > len(values))\n actualDict2 = cldf.ConvertListsToDict([1, 2, 3, 4, 5, 6, 7], ['a', 'b', 'c'])\n expectedDict2 = {1: 'a', 2: 'b', 3: 'c', 4: None, 5: None, 6: None, 7: None}\n self.assertEqual(actualDict2, expectedDict2)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
*********************************************************************
* Project : POP1 (Practical Exam)
* Program name : q2.py
* Author : varunk01
* Purpose : Attempts to solve the question 2 from the exam paper
* Date created : 28/05/2018
*
* Date Author Ver Comment
* 28/05/2018 varunk01 0.1 Initial Version
**********************************************************************
Write a program for a number guessing game. The program generates a random
number between 0 and 99, and then asks the user to guess that number. For
each guess the program replies Correct, Too low, or Too high. If the number
is correct, the program prints the number of guesses it took. If not, the program
asks the user to guess again. For example:
Guess a number between 0 and 99: 50
Too low. Guess again: 75
Too high. Guess again: 60
Too high. Guess again: 54
Correct. It took you 4 guesses.
"""
import random
def get_choice(attempt):
"""
return an integer input from the user
"""
try:
        user_text = ''
        if attempt == 1:
            user_text = 'Guess a number between 0 and 99:'
        choice = int(input(user_text))
    except ValueError:
        # Re-prompt on invalid (non-integer) input
        return get_choice(attempt)
return choice
def get_random():
    K_HIGH = 99
    K_LOW = 0
    return random.randint(K_LOW, K_HIGH)

choice = 0
rand = get_random()
attempt = 0
while choice != rand:
    attempt += 1
    choice = get_choice(attempt)
if choice > rand:
print('Too high. Guess again:',end='')
elif choice < rand:
print('Too low. Guess again:',end='')
else:
print('Correct. It took you {0} guesses.'.format(attempt))
|
normal
|
{
"blob_id": "f7d487ec99e2fa901677ab9aec0760a396722e12",
"index": 8245,
"step-1": "<mask token>\n\n\ndef get_choice(attempt):\n \"\"\"\n return an integer input from the user\n \"\"\"\n try:\n user_text = ''\n if attempt == 1:\n user_text = 'Guess a number between 0 and 99:'\n choice = int(input(user_text))\n except ValueError:\n return get_choice()\n return choice\n\n\ndef get_random():\n K_HIGH = 99\n K_LOW = 0\n return random.randint(K_LOW, K_HIGH)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_choice(attempt):\n \"\"\"\n return an integer input from the user\n \"\"\"\n try:\n user_text = ''\n if attempt == 1:\n user_text = 'Guess a number between 0 and 99:'\n choice = int(input(user_text))\n except ValueError:\n return get_choice()\n return choice\n\n\ndef get_random():\n K_HIGH = 99\n K_LOW = 0\n return random.randint(K_LOW, K_HIGH)\n\n\n<mask token>\nwhile choice != rand:\n attempt += 1\n choice = get_choice(attempt)\n if choice > rand:\n print('Too high. Guess again:', end='')\n elif choice < rand:\n print('Too low. Guess again:', end='')\n else:\n print('Correct. It took you {0} guesses.'.format(attempt))\n",
"step-3": "<mask token>\n\n\ndef get_choice(attempt):\n \"\"\"\n return an integer input from the user\n \"\"\"\n try:\n user_text = ''\n if attempt == 1:\n user_text = 'Guess a number between 0 and 99:'\n choice = int(input(user_text))\n except ValueError:\n return get_choice()\n return choice\n\n\ndef get_random():\n K_HIGH = 99\n K_LOW = 0\n return random.randint(K_LOW, K_HIGH)\n\n\nchoice = 0\nrand = get_random()\nattempt = 0\nwhile choice != rand:\n attempt += 1\n choice = get_choice(attempt)\n if choice > rand:\n print('Too high. Guess again:', end='')\n elif choice < rand:\n print('Too low. Guess again:', end='')\n else:\n print('Correct. It took you {0} guesses.'.format(attempt))\n",
"step-4": "<mask token>\nimport random\n\n\ndef get_choice(attempt):\n \"\"\"\n return an integer input from the user\n \"\"\"\n try:\n user_text = ''\n if attempt == 1:\n user_text = 'Guess a number between 0 and 99:'\n choice = int(input(user_text))\n except ValueError:\n return get_choice()\n return choice\n\n\ndef get_random():\n K_HIGH = 99\n K_LOW = 0\n return random.randint(K_LOW, K_HIGH)\n\n\nchoice = 0\nrand = get_random()\nattempt = 0\nwhile choice != rand:\n attempt += 1\n choice = get_choice(attempt)\n if choice > rand:\n print('Too high. Guess again:', end='')\n elif choice < rand:\n print('Too low. Guess again:', end='')\n else:\n print('Correct. It took you {0} guesses.'.format(attempt))\n",
"step-5": "\"\"\"\n*********************************************************************\n* Project : POP1 (Practical Exam)\n* Program name : q2.py\n* Author : varunk01\n* Purpose : Attempts to solve the question 2 from the exam paper\n* Date created : 28/05/2018\n*\n* Date Author Ver Comment\n* 28/05/2018 varunk01 0.1 Initial Version\n**********************************************************************\nWrite a program for a number guessing game. The program generates a random\nnumber between 0 and 99, and then asks the user to guess that number. For\neach guess the program replies Correct, Too low, or Too high. If the number\nis correct, the program prints the number of guesses it took. If not, the program\nasks the user to guess again. For example:\nGuess a number between 0 and 99: 50\nToo low. Guess again: 75\nToo high. Guess again: 60\nToo high. Guess again: 54\nCorrect. It took you 4 guesses.\n\"\"\"\n\nimport random\n\ndef get_choice(attempt):\n \"\"\"\n return an integer input from the user\n \"\"\"\n try:\n user_text=''\n\n if attempt ==1:\n user_text ='Guess a number between 0 and 99:'\n \n choice = int(input(user_text))\n except ValueError:\n return get_choice()\n return choice\n\ndef get_random():\n K_HIGH =99\n K_LOW =0\n return random.randint(K_LOW,K_HIGH)\n\nchoice =0\nrand = get_random()\nattempt =0\n\nwhile (choice != rand):\n attempt += 1\n choice =get_choice(attempt)\n \n if choice > rand:\n print('Too high. Guess again:',end='')\n elif choice < rand:\n print('Too low. Guess again:',end='')\n else:\n print('Correct. It took you {0} guesses.'.format(attempt))\n\n\n#if __name__ == '__main__':\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import os
from NeuralEmulator.Configurators.NormalLeakSourceConfigurator import NormalLeakSourceConfigurator
from NeuralEmulator.Configurators.OZNeuronConfigurator import OZNeuronConfigurator
from NeuralEmulator.Configurators.PulseSynapseConfigurator import PulseSynapseConfigurator
from NeuralEmulator.NormalLeakSource import NormalLeakSource
from NeuralEmulator.OZNeuron import OZNeuron
from NeuralEmulator.Preprocessing.NegPreprocessingBlock import NegPreprocessingBlock
from NeuralEmulator.Preprocessing.PosPreprocessingBlock import PosPreprocessingBlock
from NeuralEmulator.Preprocessing.PreprocessingBlock import PreprocessingBlock
from NeuralEmulator.PulseSynapse import PulseSynapse
from NeuralEmulator.Test.SimpleVoltageSource import SimpleVoltageSource
import random
class NeuronsGenerator:
def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * (10 ** -3), upperBound=800.0 * (10 ** -3), randomVals=False):
noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()
ozNeuronConfigurator = OZNeuronConfigurator()
neurons = []
leaks = []
if randomVals is False:
start = upperBound
delta = (upperBound - lowerBound) / neuronsNumber
for x in range(neuronsNumber):
normalLeakSource = NormalLeakSource(SimpleVoltageSource(start), noramalLeakSourceConfigurator)
ozNeuron = OZNeuron(synapse, normalLeakSource, ozNeuronConfigurator)
leaks.append(normalLeakSource)
neurons.append(ozNeuron)
start -= delta
else:
            lowerBound = int(lowerBound * (10 ** 3))
            upperBound = int(upperBound * (10 ** 3))
            vals = set()
            while len(vals) != neuronsNumber:
                vlk = random.randint(lowerBound, upperBound)
vals.add(vlk)
for x in range(neuronsNumber):
vlk = vals.pop()
vlk = vlk * (10 ** -3)
normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk), noramalLeakSourceConfigurator)
ozNeuron = OZNeuron(synapse, normalLeakSource, ozNeuronConfigurator)
leaks.append(normalLeakSource)
neurons.append(ozNeuron)
self.neurons = neurons
self.leaks = leaks
def getNeurons(self):
return self.neurons
def getLeaks(self):
return self.leaks
if __name__ == "__main__":
os.environ["NERUSIM_CONF"] = r"C:\Users\Avi\Desktop\IntelliSpikesLab\Emulator\config"
vin = SimpleVoltageSource()
preProcessBlock = PreprocessingBlock(vin)
vposPort = PosPreprocessingBlock(preProcessBlock)
g = NeuronsGenerator(50, vposPort, randomVals=True)
neurons = g.getNeurons()
print("sf")
|
normal
|
{
"blob_id": "177401f25471cf1cbd32dd0770acdc12bf271361",
"index": 8030,
"step-1": "<mask token>\n\n\nclass NeuronsGenerator:\n\n def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * 10 ** -3,\n upperBound=800.0 * 10 ** -3, randomVals=False):\n noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()\n ozNeuronConfigurator = OZNeuronConfigurator()\n neurons = []\n leaks = []\n if randomVals is False:\n start = upperBound\n delta = (upperBound - lowerBound) / neuronsNumber\n for x in range(neuronsNumber):\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(\n start), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n start -= delta\n else:\n lowerBound = int(lowerBound * 10 ** 3)\n uppderBound = int(upperBound * 10 ** 3)\n vals = set()\n while len(vals) != neuronsNumber:\n vlk = random.randint(lowerBound, uppderBound)\n vals.add(vlk)\n for x in range(neuronsNumber):\n vlk = vals.pop()\n vlk = vlk * 10 ** -3\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk\n ), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n self.neurons = neurons\n self.leaks = leaks\n\n def getNeurons(self):\n return self.neurons\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass NeuronsGenerator:\n\n def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * 10 ** -3,\n upperBound=800.0 * 10 ** -3, randomVals=False):\n noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()\n ozNeuronConfigurator = OZNeuronConfigurator()\n neurons = []\n leaks = []\n if randomVals is False:\n start = upperBound\n delta = (upperBound - lowerBound) / neuronsNumber\n for x in range(neuronsNumber):\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(\n start), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n start -= delta\n else:\n lowerBound = int(lowerBound * 10 ** 3)\n uppderBound = int(upperBound * 10 ** 3)\n vals = set()\n while len(vals) != neuronsNumber:\n vlk = random.randint(lowerBound, uppderBound)\n vals.add(vlk)\n for x in range(neuronsNumber):\n vlk = vals.pop()\n vlk = vlk * 10 ** -3\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk\n ), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n self.neurons = neurons\n self.leaks = leaks\n\n def getNeurons(self):\n return self.neurons\n\n def getLeaks(self):\n return self.leaks\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass NeuronsGenerator:\n\n def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * 10 ** -3,\n upperBound=800.0 * 10 ** -3, randomVals=False):\n noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()\n ozNeuronConfigurator = OZNeuronConfigurator()\n neurons = []\n leaks = []\n if randomVals is False:\n start = upperBound\n delta = (upperBound - lowerBound) / neuronsNumber\n for x in range(neuronsNumber):\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(\n start), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n start -= delta\n else:\n lowerBound = int(lowerBound * 10 ** 3)\n uppderBound = int(upperBound * 10 ** 3)\n vals = set()\n while len(vals) != neuronsNumber:\n vlk = random.randint(lowerBound, uppderBound)\n vals.add(vlk)\n for x in range(neuronsNumber):\n vlk = vals.pop()\n vlk = vlk * 10 ** -3\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk\n ), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n self.neurons = neurons\n self.leaks = leaks\n\n def getNeurons(self):\n return self.neurons\n\n def getLeaks(self):\n return self.leaks\n\n\nif __name__ == '__main__':\n os.environ['NERUSIM_CONF'\n ] = 'C:\\\\Users\\\\Avi\\\\Desktop\\\\IntelliSpikesLab\\\\Emulator\\\\config'\n vin = SimpleVoltageSource()\n preProcessBlock = PreprocessingBlock(vin)\n vposPort = PosPreprocessingBlock(preProcessBlock)\n g = NeuronsGenerator(50, vposPort, randomVals=True)\n neurons = g.getNeurons()\n print('sf')\n",
"step-4": "import os\nfrom NeuralEmulator.Configurators.NormalLeakSourceConfigurator import NormalLeakSourceConfigurator\nfrom NeuralEmulator.Configurators.OZNeuronConfigurator import OZNeuronConfigurator\nfrom NeuralEmulator.Configurators.PulseSynapseConfigurator import PulseSynapseConfigurator\nfrom NeuralEmulator.NormalLeakSource import NormalLeakSource\nfrom NeuralEmulator.OZNeuron import OZNeuron\nfrom NeuralEmulator.Preprocessing.NegPreprocessingBlock import NegPreprocessingBlock\nfrom NeuralEmulator.Preprocessing.PosPreprocessingBlock import PosPreprocessingBlock\nfrom NeuralEmulator.Preprocessing.PreprocessingBlock import PreprocessingBlock\nfrom NeuralEmulator.PulseSynapse import PulseSynapse\nfrom NeuralEmulator.Test.SimpleVoltageSource import SimpleVoltageSource\nimport random\n\n\nclass NeuronsGenerator:\n\n def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * 10 ** -3,\n upperBound=800.0 * 10 ** -3, randomVals=False):\n noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()\n ozNeuronConfigurator = OZNeuronConfigurator()\n neurons = []\n leaks = []\n if randomVals is False:\n start = upperBound\n delta = (upperBound - lowerBound) / neuronsNumber\n for x in range(neuronsNumber):\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(\n start), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n start -= delta\n else:\n lowerBound = int(lowerBound * 10 ** 3)\n uppderBound = int(upperBound * 10 ** 3)\n vals = set()\n while len(vals) != neuronsNumber:\n vlk = random.randint(lowerBound, uppderBound)\n vals.add(vlk)\n for x in range(neuronsNumber):\n vlk = vals.pop()\n vlk = vlk * 10 ** -3\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk\n ), noramalLeakSourceConfigurator)\n ozNeuron = OZNeuron(synapse, normalLeakSource,\n ozNeuronConfigurator)\n leaks.append(normalLeakSource)\n neurons.append(ozNeuron)\n self.neurons = neurons\n self.leaks = leaks\n\n def getNeurons(self):\n return self.neurons\n\n def getLeaks(self):\n return self.leaks\n\n\nif __name__ == '__main__':\n os.environ['NERUSIM_CONF'\n ] = 'C:\\\\Users\\\\Avi\\\\Desktop\\\\IntelliSpikesLab\\\\Emulator\\\\config'\n vin = SimpleVoltageSource()\n preProcessBlock = PreprocessingBlock(vin)\n vposPort = PosPreprocessingBlock(preProcessBlock)\n g = NeuronsGenerator(50, vposPort, randomVals=True)\n neurons = g.getNeurons()\n print('sf')\n",
"step-5": "import os\r\n\r\nfrom NeuralEmulator.Configurators.NormalLeakSourceConfigurator import NormalLeakSourceConfigurator\r\nfrom NeuralEmulator.Configurators.OZNeuronConfigurator import OZNeuronConfigurator\r\nfrom NeuralEmulator.Configurators.PulseSynapseConfigurator import PulseSynapseConfigurator\r\nfrom NeuralEmulator.NormalLeakSource import NormalLeakSource\r\nfrom NeuralEmulator.OZNeuron import OZNeuron\r\nfrom NeuralEmulator.Preprocessing.NegPreprocessingBlock import NegPreprocessingBlock\r\nfrom NeuralEmulator.Preprocessing.PosPreprocessingBlock import PosPreprocessingBlock\r\nfrom NeuralEmulator.Preprocessing.PreprocessingBlock import PreprocessingBlock\r\nfrom NeuralEmulator.PulseSynapse import PulseSynapse\r\nfrom NeuralEmulator.Test.SimpleVoltageSource import SimpleVoltageSource\r\nimport random\r\n\r\n\r\nclass NeuronsGenerator:\r\n def __init__(self, neuronsNumber, synapse, lowerBound=100.0 * (10 ** -3), upperBound=800.0 * (10 ** -3), randomVals=False):\r\n\r\n noramalLeakSourceConfigurator = NormalLeakSourceConfigurator()\r\n ozNeuronConfigurator = OZNeuronConfigurator()\r\n\r\n neurons = []\r\n leaks = []\r\n\r\n if randomVals is False:\r\n start = upperBound\r\n delta = (upperBound - lowerBound) / neuronsNumber\r\n for x in range(neuronsNumber):\r\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(start), noramalLeakSourceConfigurator)\r\n ozNeuron = OZNeuron(synapse, normalLeakSource, ozNeuronConfigurator)\r\n leaks.append(normalLeakSource)\r\n neurons.append(ozNeuron)\r\n start -= delta\r\n else:\r\n lowerBound = int(lowerBound * (10 ** 3))\r\n uppderBound = int(upperBound * (10 ** 3))\r\n vals = set()\r\n\r\n while len(vals) != neuronsNumber:\r\n vlk = random.randint(lowerBound, uppderBound)\r\n vals.add(vlk)\r\n\r\n for x in range(neuronsNumber):\r\n vlk = vals.pop()\r\n vlk = vlk * (10 ** -3)\r\n normalLeakSource = NormalLeakSource(SimpleVoltageSource(vlk), noramalLeakSourceConfigurator)\r\n ozNeuron = OZNeuron(synapse, normalLeakSource, ozNeuronConfigurator)\r\n leaks.append(normalLeakSource)\r\n neurons.append(ozNeuron)\r\n\r\n self.neurons = neurons\r\n self.leaks = leaks\r\n\r\n def getNeurons(self):\r\n return self.neurons\r\n\r\n def getLeaks(self):\r\n return self.leaks\r\n\r\n\r\nif __name__ == \"__main__\":\r\n os.environ[\"NERUSIM_CONF\"] = r\"C:\\Users\\Avi\\Desktop\\IntelliSpikesLab\\Emulator\\config\"\r\n\r\n vin = SimpleVoltageSource()\r\n preProcessBlock = PreprocessingBlock(vin)\r\n vposPort = PosPreprocessingBlock(preProcessBlock)\r\n\r\n g = NeuronsGenerator(50, vposPort, randomVals=True)\r\n neurons = g.getNeurons()\r\n print(\"sf\")\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
'''
Note: a TimeoutException may appear when the measured distance is 0.
'''
import smbus
import time
#slave arduino address
address_arduino = 0x04
bus = smbus.SMBus(1)
#get a measure by i2c
def getUSMeasure():
bus.write_byte(address_arduino, 1)
distance = bus.read_byte(address_arduino)
return distance
#request rotate motor in a sens
def forward():
bus.write_byte(address_arduino, 2)
#request rotate motor in the other sens
def backward():
bus.write_byte(address_arduino, 3)
if __name__ == '__main__':
while True:
distance = getUSMeasure()
print(distance)
if distance < 20:
backward()
print('I move back')
else:
forward()
print('I advance')
time.sleep(0.1)
|
normal
|
{
"blob_id": "6fa7aef7c2b91de409a0e8574e362efefa642ee7",
"index": 1715,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef getUSMeasure():\n bus.write_byte(address_arduino, 1)\n distance = bus.read_byte(address_arduino)\n return distance\n\n\ndef forward():\n bus.write_byte(address_arduino, 2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef getUSMeasure():\n bus.write_byte(address_arduino, 1)\n distance = bus.read_byte(address_arduino)\n return distance\n\n\ndef forward():\n bus.write_byte(address_arduino, 2)\n\n\ndef backward():\n bus.write_byte(address_arduino, 3)\n\n\n<mask token>\n",
"step-4": "<mask token>\nimport smbus\nimport time\naddress_arduino = 4\nbus = smbus.SMBus(1)\n\n\ndef getUSMeasure():\n bus.write_byte(address_arduino, 1)\n distance = bus.read_byte(address_arduino)\n return distance\n\n\ndef forward():\n bus.write_byte(address_arduino, 2)\n\n\ndef backward():\n bus.write_byte(address_arduino, 3)\n\n\nif __name__ == '__main__':\n while True:\n distance = getUSMeasure()\n print(distance)\n if distance < 20:\n backward()\n print('I move back')\n else:\n forward()\n print('I advance')\n time.sleep(0.1)\n",
"step-5": "'''\nNote: a TimeOutException appear when distance even 0.\n'''\n\nimport smbus\nimport time\n\n#slave arduino address\naddress_arduino = 0x04\nbus = smbus.SMBus(1)\n\n#get a measure by i2c\ndef getUSMeasure():\n bus.write_byte(address_arduino, 1)\n distance = bus.read_byte(address_arduino)\n return distance\n\n#request rotate motor in a sens\ndef forward():\n bus.write_byte(address_arduino, 2)\n\n#request rotate motor in the other sens\ndef backward():\n bus.write_byte(address_arduino, 3)\n\nif __name__ == '__main__':\n while True:\n distance = getUSMeasure()\n print(distance)\n\n if distance < 20:\n backward()\n print('I move back')\n\n else:\n forward()\n print('I advance')\n\n time.sleep(0.1)\n",
"step-ids": [
0,
2,
3,
6,
7
]
}
|
[
0,
2,
3,
6,
7
] |
<|reserved_special_token_0|>
def write_to_file(file, line):
file.write(line + '\n')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
<|reserved_special_token_0|>
def write_to_file(file, line):
file.write(line + '\n')
def cat_map():
catmap = {}
id = 1
f = open('cat')
cat = set([s.strip() for s in list(f.readlines())])
for i in cat:
catmap[i] = id
id = id + 1
return catmap
<|reserved_special_token_0|>
for vespaadd in root:
document = vespaadd.find('document')
if document != None:
subject = document.find('subject')
content = document.find('content')
maincat = document.find('maincat')
if subject == None:
continue
if content == None:
content = subject
if maincat == None:
continue
write_to_file(cnn, data_helpers.clean_str(subject.text))
write_to_file(lstm, data_helpers.clean_str(content.text))
write_to_file(cat, data_helpers.clean_str(maincat.text))
cnn.close()
lstm.close()
cat.close()
<|reserved_special_token_1|>
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
<|reserved_special_token_0|>
def write_to_file(file, line):
file.write(line + '\n')
def cat_map():
catmap = {}
id = 1
f = open('cat')
cat = set([s.strip() for s in list(f.readlines())])
for i in cat:
catmap[i] = id
id = id + 1
return catmap
tree = ET.ElementTree(file='test.xml')
root = tree.getroot()
cnn = open('cnn', 'a')
lstm = open('lstm', 'a')
cat = open('cat', 'a')
for vespaadd in root:
document = vespaadd.find('document')
if document != None:
subject = document.find('subject')
content = document.find('content')
maincat = document.find('maincat')
if subject == None:
continue
if content == None:
content = subject
if maincat == None:
continue
write_to_file(cnn, data_helpers.clean_str(subject.text))
write_to_file(lstm, data_helpers.clean_str(content.text))
write_to_file(cat, data_helpers.clean_str(maincat.text))
cnn.close()
lstm.close()
cat.close()
<|reserved_special_token_1|>
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import data_helpers
def write_to_file(file, line):
file.write(line + '\n')
def cat_map():
catmap = {}
id = 1
f = open('cat')
cat = set([s.strip() for s in list(f.readlines())])
for i in cat:
catmap[i] = id
id = id + 1
return catmap
tree = ET.ElementTree(file='test.xml')
root = tree.getroot()
cnn = open('cnn', 'a')
lstm = open('lstm', 'a')
cat = open('cat', 'a')
for vespaadd in root:
document = vespaadd.find('document')
if document != None:
subject = document.find('subject')
content = document.find('content')
maincat = document.find('maincat')
if subject == None:
continue
if content == None:
content = subject
if maincat == None:
continue
write_to_file(cnn, data_helpers.clean_str(subject.text))
write_to_file(lstm, data_helpers.clean_str(content.text))
write_to_file(cat, data_helpers.clean_str(maincat.text))
cnn.close()
lstm.close()
cat.close()
<|reserved_special_token_1|>
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
import data_helpers
def write_to_file(file,line):
file.write(line+"\n")
def cat_map():
catmap={}
id=1
f=open("cat")
cat=set([s.strip() for s in list(f.readlines())])
for i in cat:
catmap[i]=id
id=id+1
return catmap
tree = ET.ElementTree(file="test.xml")
root = tree.getroot()
cnn=open("cnn","a")
lstm=open("lstm","a")
cat=open("cat","a")
for vespaadd in root:
document = vespaadd.find("document")
if(document!=None):
subject = document.find("subject")
content = document.find("content")
maincat = document.find("maincat")
if(subject==None):
continue
if(content==None):
content=subject
if(maincat==None):
continue
write_to_file(cnn,data_helpers.clean_str(subject.text))
write_to_file(lstm,data_helpers.clean_str(content.text))
write_to_file(cat,data_helpers.clean_str(maincat.text))
cnn.close()
lstm.close()
cat.close()
|
flexible
|
{
"blob_id": "04538cc5c9c68582cc9aa2959faae2d7547ab2ee",
"index": 302,
"step-1": "<mask token>\n\n\ndef write_to_file(file, line):\n file.write(line + '\\n')\n\n\n<mask token>\n",
"step-2": "try:\n import xml.etree.cElementTree as ET\nexcept ImportError:\n import xml.etree.ElementTree as ET\n<mask token>\n\n\ndef write_to_file(file, line):\n file.write(line + '\\n')\n\n\ndef cat_map():\n catmap = {}\n id = 1\n f = open('cat')\n cat = set([s.strip() for s in list(f.readlines())])\n for i in cat:\n catmap[i] = id\n id = id + 1\n return catmap\n\n\n<mask token>\nfor vespaadd in root:\n document = vespaadd.find('document')\n if document != None:\n subject = document.find('subject')\n content = document.find('content')\n maincat = document.find('maincat')\n if subject == None:\n continue\n if content == None:\n content = subject\n if maincat == None:\n continue\n write_to_file(cnn, data_helpers.clean_str(subject.text))\n write_to_file(lstm, data_helpers.clean_str(content.text))\n write_to_file(cat, data_helpers.clean_str(maincat.text))\ncnn.close()\nlstm.close()\ncat.close()\n",
"step-3": "try:\n import xml.etree.cElementTree as ET\nexcept ImportError:\n import xml.etree.ElementTree as ET\n<mask token>\n\n\ndef write_to_file(file, line):\n file.write(line + '\\n')\n\n\ndef cat_map():\n catmap = {}\n id = 1\n f = open('cat')\n cat = set([s.strip() for s in list(f.readlines())])\n for i in cat:\n catmap[i] = id\n id = id + 1\n return catmap\n\n\ntree = ET.ElementTree(file='test.xml')\nroot = tree.getroot()\ncnn = open('cnn', 'a')\nlstm = open('lstm', 'a')\ncat = open('cat', 'a')\nfor vespaadd in root:\n document = vespaadd.find('document')\n if document != None:\n subject = document.find('subject')\n content = document.find('content')\n maincat = document.find('maincat')\n if subject == None:\n continue\n if content == None:\n content = subject\n if maincat == None:\n continue\n write_to_file(cnn, data_helpers.clean_str(subject.text))\n write_to_file(lstm, data_helpers.clean_str(content.text))\n write_to_file(cat, data_helpers.clean_str(maincat.text))\ncnn.close()\nlstm.close()\ncat.close()\n",
"step-4": "try:\n import xml.etree.cElementTree as ET\nexcept ImportError:\n import xml.etree.ElementTree as ET\nimport data_helpers\n\n\ndef write_to_file(file, line):\n file.write(line + '\\n')\n\n\ndef cat_map():\n catmap = {}\n id = 1\n f = open('cat')\n cat = set([s.strip() for s in list(f.readlines())])\n for i in cat:\n catmap[i] = id\n id = id + 1\n return catmap\n\n\ntree = ET.ElementTree(file='test.xml')\nroot = tree.getroot()\ncnn = open('cnn', 'a')\nlstm = open('lstm', 'a')\ncat = open('cat', 'a')\nfor vespaadd in root:\n document = vespaadd.find('document')\n if document != None:\n subject = document.find('subject')\n content = document.find('content')\n maincat = document.find('maincat')\n if subject == None:\n continue\n if content == None:\n content = subject\n if maincat == None:\n continue\n write_to_file(cnn, data_helpers.clean_str(subject.text))\n write_to_file(lstm, data_helpers.clean_str(content.text))\n write_to_file(cat, data_helpers.clean_str(maincat.text))\ncnn.close()\nlstm.close()\ncat.close()\n",
"step-5": "try:\n\timport xml.etree.cElementTree as ET\nexcept ImportError:\n\timport xml.etree.ElementTree as ET\nimport data_helpers\n\ndef write_to_file(file,line):\n\tfile.write(line+\"\\n\")\n\ndef cat_map():\n\tcatmap={}\n\tid=1\n\tf=open(\"cat\")\n\tcat=set([s.strip() for s in list(f.readlines())])\n\tfor i in cat:\n\t\tcatmap[i]=id\n\t\tid=id+1\n\treturn catmap\n\ntree = ET.ElementTree(file=\"test.xml\")\nroot = tree.getroot()\ncnn=open(\"cnn\",\"a\")\nlstm=open(\"lstm\",\"a\")\ncat=open(\"cat\",\"a\")\nfor vespaadd in root:\n\tdocument = vespaadd.find(\"document\")\n\tif(document!=None):\n\t\tsubject = document.find(\"subject\")\n\t\tcontent = document.find(\"content\")\n\t\tmaincat = document.find(\"maincat\")\n\t\tif(subject==None):\n\t\t\tcontinue\n\t\tif(content==None):\n\t\t\tcontent=subject\n\t\tif(maincat==None):\n\t\t\tcontinue\n\t\twrite_to_file(cnn,data_helpers.clean_str(subject.text))\n\t\twrite_to_file(lstm,data_helpers.clean_str(content.text))\n\t\twrite_to_file(cat,data_helpers.clean_str(maincat.text))\ncnn.close()\nlstm.close()\ncat.close()",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# mathematical operators
'''
* multiply
/ divide (normal)
// divide (integer)
% modulus (remainder)
+ add
- subtract
** exponent (raise to)
'''
print(2 * 3)
# comparison operators
'''
== equal to
!= not equal to
> greater than
< less than
>= greater or equal to
<= less or equal to
'''
a = int(input("Enter your age: "))
b = 18
if a >= b:
print("You can drive the car, you are ", a)
else:
print("Sorry, you are too small")
|
normal
|
{
"blob_id": "911257bad3baab89e29db3facb08ec41269b41e3",
"index": 9953,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(2 * 3)\n<mask token>\nif a >= b:\n print('You can drive the car, you are ', a)\nelse:\n print('Sorry, you are too small')\n",
"step-3": "<mask token>\nprint(2 * 3)\n<mask token>\na = int(input('Enter your age: '))\nb = 18\nif a >= b:\n print('You can drive the car, you are ', a)\nelse:\n print('Sorry, you are too small')\n",
"step-4": "# mathematical operators\n'''\n* multiply\n/ divide (normal)\n// divide (integer)\n% modulus (remainder)\n+ add\n- subtract\n** exponent (raise to)\n'''\nprint(2 * 3)\n# comparison operators\n'''\n== equal to\n!= not equal to\n> greater than\n< less than\n>= greater or equal to\n<= less or equal to\n'''\na = int(input(\"Enter your age: \"))\nb = 18\nif a >= b:\n print(\"You can drive the car, you are \", a)\nelse:\n print(\"Sorry, you are too small\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),
threshold=0.65):
"""Filter noisy frames out
Args:
frames (list<numpy.ndarray[H, W, 3]>): video frames
method (int, optional): histogram comparison method
target_size (tuple<int, int>, optional): frame size used for histogram comparison
threshold (float, optional): minimum correlation between histograms to keep frame
Returns:
list<numpy.ndarray[H, W, 3]>: video frames
"""
resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]
histograms = []
for f in resized_frames:
hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0,
256, 0, 256])
histograms.append(cv2.normalize(hist, hist).flatten())
med_hist = np.median(histograms, axis=0)
filtered_frames = []
for idx, hist in enumerate(histograms):
if cv2.compareHist(med_hist, hist, method) > threshold:
filtered_frames.append(frames[idx])
return filtered_frames
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
import cv2
def filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),
threshold=0.65):
"""Filter noisy frames out
Args:
frames (list<numpy.ndarray[H, W, 3]>): video frames
method (int, optional): histogram comparison method
target_size (tuple<int, int>, optional): frame size used for histogram comparison
threshold (float, optional): minimum correlation between histograms to keep frame
Returns:
list<numpy.ndarray[H, W, 3]>: video frames
"""
resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]
histograms = []
for f in resized_frames:
hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0,
256, 0, 256])
histograms.append(cv2.normalize(hist, hist).flatten())
med_hist = np.median(histograms, axis=0)
filtered_frames = []
for idx, hist in enumerate(histograms):
if cv2.compareHist(med_hist, hist, method) > threshold:
filtered_frames.append(frames[idx])
return filtered_frames
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Frame filtering
'''
import numpy as np
import cv2
def filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64), threshold=0.65):
"""Filter noisy frames out
Args:
frames (list<numpy.ndarray[H, W, 3]>): video frames
method (int, optional): histogram comparison method
target_size (tuple<int, int>, optional): frame size used for histogram comparison
threshold (float, optional): minimum correlation between histograms to keep frame
Returns:
list<numpy.ndarray[H, W, 3]>: video frames
"""
resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]
histograms = []
for f in resized_frames:
hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])
histograms.append(cv2.normalize(hist, hist).flatten())
# Find a reference histogram (median less sensitive to noise)
med_hist = np.median(histograms, axis=0)
filtered_frames = []
# Compare all histograms to the median one
for idx, hist in enumerate(histograms):
# Only keep frames with relatively high correlation
if cv2.compareHist(med_hist, hist, method) > threshold:
filtered_frames.append(frames[idx])
return filtered_frames
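
# Minimal usage sketch (an assumption for illustration -- the video path and
# frame-reading loop below are not part of the original module):
if __name__ == '__main__':
    cap = cv2.VideoCapture('input.mp4')  # hypothetical input file
    frames = []
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        frames.append(frame)
    cap.release()
    if frames:
        kept = filter_frames(frames)
        print('kept %d of %d frames' % (len(kept), len(frames)))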
|
flexible
|
{
"blob_id": "1da93e9113089f1a2881d4094180ba524d0d4a86",
"index": 8531,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),\n threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, \n 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n med_hist = np.median(histograms, axis=0)\n filtered_frames = []\n for idx, hist in enumerate(histograms):\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n return filtered_frames\n",
"step-3": "<mask token>\nimport numpy as np\nimport cv2\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),\n threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, \n 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n med_hist = np.median(histograms, axis=0)\n filtered_frames = []\n for idx, hist in enumerate(histograms):\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n return filtered_frames\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n'''\nFrame filtering\n'''\n\nimport numpy as np\nimport cv2\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64), threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n\n # Find a reference histogram (median less sensitive to noise)\n med_hist = np.median(histograms, axis=0)\n\n filtered_frames = []\n # Compare all histograms to the median one\n for idx, hist in enumerate(histograms):\n # Only keep frames with relatively high correlation\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n\n return filtered_frames\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class HBNBCommand(cmd.Cmd):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def emptyline(self):
"""Do nothing"""
pass
def do_create(self, line):
"""Creates a new instance of BaseModel"""
arg_line = line.split()
if line == '':
print('** class name missing **')
return False
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
else:
new_instance = self.__DCT_CLS[arg_line[0]]()
print(new_instance.id)
new_instance.save()
def do_show(self, line):
if type(line) == str:
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
print(dict_classes[get_inst])
else:
print('** no instance found **')
else:
srch_id = line[0] + '.' + line[1]
dict_classes = models.storage.all()
if srch_id in dict_classes.keys():
print(dict_classes[srch_id])
else:
print('** no instance found **')
def do_destroy(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
del dict_classes[get_inst]
models.storage.save()
else:
print('** no instance found **')
def do_all(self, line):
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
print(list_classes)
else:
print("** class doesn't exist **")
def do_update(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) == 1:
pass
elif len_args == 2:
print('** attribute name missing **')
elif len_args == 3:
print('** value missing **')
else:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
if arg_line[3]:
arg_line[3] = arg_line[3].replace('"', '')
try:
arg_line[3] = int(arg_line[3])
except ValueError:
try:
arg_line[3] = float(arg_line[3])
except ValueError:
arg_line[3] = arg_line[3]
dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]
dict_classes[get_inst].save()
else:
print('** no instance found **')
<|reserved_special_token_0|>
def check_if_created(self, arg_line, len_args):
"""Verifies if class exists"""
if len_args == 0:
print('** class name missing **')
return 1
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
return 1
elif len_args == 1:
print('** instance id missing **')
return 1
def do_count(self, line):
"""Counts the number of existing instances"""
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
count = 0
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
count += 1
print(count)
else:
print("** class doesn't exist **")
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HBNBCommand(cmd.Cmd):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def do_quit(self, line):
"""Exit the CMD program"""
return True
<|reserved_special_token_0|>
def emptyline(self):
"""Do nothing"""
pass
def do_create(self, line):
"""Creates a new instance of BaseModel"""
arg_line = line.split()
if line == '':
print('** class name missing **')
return False
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
else:
new_instance = self.__DCT_CLS[arg_line[0]]()
print(new_instance.id)
new_instance.save()
def do_show(self, line):
if type(line) == str:
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
print(dict_classes[get_inst])
else:
print('** no instance found **')
else:
srch_id = line[0] + '.' + line[1]
dict_classes = models.storage.all()
if srch_id in dict_classes.keys():
print(dict_classes[srch_id])
else:
print('** no instance found **')
def do_destroy(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
del dict_classes[get_inst]
models.storage.save()
else:
print('** no instance found **')
def do_all(self, line):
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
print(list_classes)
else:
print("** class doesn't exist **")
def do_update(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) == 1:
pass
elif len_args == 2:
print('** attribute name missing **')
elif len_args == 3:
print('** value missing **')
else:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
if arg_line[3]:
arg_line[3] = arg_line[3].replace('"', '')
try:
arg_line[3] = int(arg_line[3])
except ValueError:
try:
arg_line[3] = float(arg_line[3])
except ValueError:
arg_line[3] = arg_line[3]
dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]
dict_classes[get_inst].save()
else:
print('** no instance found **')
def default(self, line):
"""all method names that aren't defined"""
args_line = line.split('.')
if len(args_line) > 1:
if args_line[1] == 'all()':
self.do_all(args_line[0])
if args_line[1] == 'count()':
self.do_count(args_line[0])
my_count = args_line[1].split('"')
res = re.findall('\\(.*?\\)', args_line[1])
my_count[0] = my_count[0] + line[-1]
if my_count[0] == 'show()':
myNewList = [args_line[0], my_count[1]]
self.do_show(myNewList)
else:
cmd.Cmd.default(self, line)
def check_if_created(self, arg_line, len_args):
"""Verifies if class exists"""
if len_args == 0:
print('** class name missing **')
return 1
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
return 1
elif len_args == 1:
print('** instance id missing **')
return 1
def do_count(self, line):
"""Counts the number of existing instances"""
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
count = 0
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
count += 1
print(count)
else:
print("** class doesn't exist **")
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HBNBCommand(cmd.Cmd):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def do_quit(self, line):
"""Exit the CMD program"""
return True
def do_EOF(self, line):
"""Exit the CMD program"""
return True
def emptyline(self):
"""Do nothing"""
pass
def do_create(self, line):
"""Creates a new instance of BaseModel"""
arg_line = line.split()
if line == '':
print('** class name missing **')
return False
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
else:
new_instance = self.__DCT_CLS[arg_line[0]]()
print(new_instance.id)
new_instance.save()
def do_show(self, line):
if type(line) == str:
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
print(dict_classes[get_inst])
else:
print('** no instance found **')
else:
srch_id = line[0] + '.' + line[1]
dict_classes = models.storage.all()
if srch_id in dict_classes.keys():
print(dict_classes[srch_id])
else:
print('** no instance found **')
def do_destroy(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
del dict_classes[get_inst]
models.storage.save()
else:
print('** no instance found **')
def do_all(self, line):
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
print(list_classes)
else:
print("** class doesn't exist **")
def do_update(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) == 1:
pass
elif len_args == 2:
print('** attribute name missing **')
elif len_args == 3:
print('** value missing **')
else:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
if arg_line[3]:
arg_line[3] = arg_line[3].replace('"', '')
try:
arg_line[3] = int(arg_line[3])
except ValueError:
try:
arg_line[3] = float(arg_line[3])
except ValueError:
arg_line[3] = arg_line[3]
dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]
dict_classes[get_inst].save()
else:
print('** no instance found **')
def default(self, line):
"""all method names that aren't defined"""
args_line = line.split('.')
if len(args_line) > 1:
if args_line[1] == 'all()':
self.do_all(args_line[0])
if args_line[1] == 'count()':
self.do_count(args_line[0])
my_count = args_line[1].split('"')
res = re.findall('\\(.*?\\)', args_line[1])
my_count[0] = my_count[0] + line[-1]
if my_count[0] == 'show()':
myNewList = [args_line[0], my_count[1]]
self.do_show(myNewList)
else:
cmd.Cmd.default(self, line)
def check_if_created(self, arg_line, len_args):
"""Verifies if class exists"""
if len_args == 0:
print('** class name missing **')
return 1
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
return 1
elif len_args == 1:
print('** instance id missing **')
return 1
def do_count(self, line):
"""Counts the number of existing instances"""
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
count = 0
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
count += 1
print(count)
else:
print("** class doesn't exist **")
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import cmd
import models
import re
from models.base_model import BaseModel
from models import storage
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class HBNBCommand(cmd.Cmd):
""" This class to setup the command interpreter """
__DCT_CLS = {'BaseModel': BaseModel, 'User': User, 'State': State,
'City': City, 'Amenity': Amenity, 'Place': Place, 'Review': Review}
prompt = '(hbnb) '
def do_quit(self, line):
"""Exit the CMD program"""
return True
def do_EOF(self, line):
"""Exit the CMD program"""
return True
def emptyline(self):
"""Do nothing"""
pass
def do_create(self, line):
"""Creates a new instance of BaseModel"""
arg_line = line.split()
if line == '':
print('** class name missing **')
return False
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
else:
new_instance = self.__DCT_CLS[arg_line[0]]()
print(new_instance.id)
new_instance.save()
def do_show(self, line):
if type(line) == str:
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
print(dict_classes[get_inst])
else:
print('** no instance found **')
else:
srch_id = line[0] + '.' + line[1]
dict_classes = models.storage.all()
if srch_id in dict_classes.keys():
print(dict_classes[srch_id])
else:
print('** no instance found **')
def do_destroy(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) != 1:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
del dict_classes[get_inst]
models.storage.save()
else:
print('** no instance found **')
def do_all(self, line):
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
print(list_classes)
else:
print("** class doesn't exist **")
def do_update(self, line):
arg_line = line.split()
len_args = len(arg_line)
if self.check_if_created(arg_line, len_args) == 1:
pass
elif len_args == 2:
print('** attribute name missing **')
elif len_args == 3:
print('** value missing **')
else:
get_inst = arg_line[0] + '.' + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
if arg_line[3]:
arg_line[3] = arg_line[3].replace('"', '')
try:
arg_line[3] = int(arg_line[3])
except ValueError:
try:
arg_line[3] = float(arg_line[3])
except ValueError:
arg_line[3] = arg_line[3]
dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]
dict_classes[get_inst].save()
else:
print('** no instance found **')
def default(self, line):
"""all method names that aren't defined"""
args_line = line.split('.')
if len(args_line) > 1:
if args_line[1] == 'all()':
self.do_all(args_line[0])
if args_line[1] == 'count()':
self.do_count(args_line[0])
my_count = args_line[1].split('"')
res = re.findall('\\(.*?\\)', args_line[1])
my_count[0] = my_count[0] + line[-1]
if my_count[0] == 'show()':
myNewList = [args_line[0], my_count[1]]
self.do_show(myNewList)
else:
cmd.Cmd.default(self, line)
def check_if_created(self, arg_line, len_args):
"""Verifies if class exists"""
if len_args == 0:
print('** class name missing **')
return 1
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
return 1
elif len_args == 1:
print('** instance id missing **')
return 1
def do_count(self, line):
"""Counts the number of existing instances"""
arg_line = line.split()
if line == '' or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
count = 0
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
count += 1
print(count)
else:
print("** class doesn't exist **")
if __name__ == '__main__':
HBNBCommand().cmdloop()
<|reserved_special_token_1|>
#!/usr/bin/python3
"""
program of the command interpreter
"""
import cmd
import models
import re
from models.base_model import BaseModel
from models import storage
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class HBNBCommand(cmd.Cmd):
""" This class to setup the command interpreter """
__DCT_CLS = {
"BaseModel": BaseModel,
"User": User,
"State": State,
"City": City,
"Amenity": Amenity,
"Place": Place,
"Review": Review
}
prompt = "(hbnb) "
def do_quit(self, line):
'''Exit the CMD program'''
return True
def do_EOF(self, line):
'''Exit the CMD program'''
return True
def emptyline(self):
'''Do nothing'''
pass
def do_create(self, line):
'''Creates a new instance of BaseModel'''
arg_line = line.split()
if line == "":
print("** class name missing **")
return False
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
else:
new_instance = self.__DCT_CLS[arg_line[0]]()
print(new_instance.id)
new_instance.save()
def do_show(self, line):
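        '''Prints the string representation of an instance, given a class name and id'''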
        if isinstance(line, str):
arg_line = line.split()
len_args = len(arg_line)
if (self.check_if_created(arg_line, len_args) != 1):
get_inst = arg_line[0] + "." + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
print(dict_classes[get_inst])
else:
print("** no instance found **")
else:
srch_id = line[0] + "." + line[1]
dict_classes = models.storage.all()
if srch_id in dict_classes.keys():
print(dict_classes[srch_id])
else:
print("** no instance found **")
def do_destroy(self, line):
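        '''Deletes an instance based on the class name and id'''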
arg_line = line.split()
len_args = len(arg_line)
if (self.check_if_created(arg_line, len_args) != 1):
get_inst = arg_line[0] + "." + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
del dict_classes[get_inst]
models.storage.save()
else:
print("** no instance found **")
def do_all(self, line):
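        '''Prints all string representations of instances, optionally filtered by class'''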
arg_line = line.split()
if line == "" or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
print(list_classes)
else:
print("** class doesn't exist **")
def do_update(self, line):
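        '''Updates an instance attribute: update <class name> <id> <attribute> <value>'''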
arg_line = line.split()
len_args = len(arg_line)
if (self.check_if_created(arg_line, len_args) == 1):
pass
elif (len_args == 2):
print("** attribute name missing **")
elif (len_args == 3):
print("** value missing **")
else:
get_inst = arg_line[0] + "." + arg_line[1]
dict_classes = models.storage.all()
if get_inst in dict_classes.keys():
if arg_line[3]:
arg_line[3] = arg_line[3].replace('"', "")
try:
arg_line[3] = int(arg_line[3])
except ValueError:
try:
arg_line[3] = float(arg_line[3])
except ValueError:
arg_line[3] = arg_line[3]
dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]
dict_classes[get_inst].save()
else:
print("** no instance found **")
def default(self, line):
'''all method names that aren't defined'''
args_line = line.split('.')
if len(args_line) > 1:
if args_line[1] == "all()":
self.do_all(args_line[0])
if args_line[1] == "count()":
self.do_count(args_line[0])
my_count = args_line[1].split('"')
res = re.findall(r'\(.*?\)', args_line[1])
my_count[0] = my_count[0] + line[-1]
if my_count[0] == "show()":
myNewList = [args_line[0], my_count[1]]
self.do_show(myNewList)
else:
cmd.Cmd.default(self, line)
def check_if_created(self, arg_line, len_args):
'''Verifies if class exists'''
if len_args == 0:
print("** class name missing **")
return 1
elif arg_line[0] not in self.__DCT_CLS:
print("** class doesn't exist **")
return 1
elif (len_args == 1):
print("** instance id missing **")
return 1
def do_count(self, line):
'''Counts the number of existing instances'''
arg_line = line.split()
if line == "" or arg_line[0] in self.__DCT_CLS:
dir_classes = models.storage.all()
list_classes = []
count = 0
for key, value in dir_classes.items():
if line in key:
list_classes.append(value.__str__())
count += 1
print(count)
else:
print("** class doesn't exist **")
if __name__ == "__main__":
HBNBCommand().cmdloop()
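
# Illustrative session (hedged: the uuid below is a placeholder, and the
# models package from this AirBnB-clone project must be importable):
#   $ ./console.py
#   (hbnb) create User
#   49faff9a-6318-451f-87b6-910505c55907
#   (hbnb) User.count()
#   1
#   (hbnb) User.show("49faff9a-6318-451f-87b6-910505c55907")
#   [User] (49faff9a-6318-451f-87b6-910505c55907) {...}
#   (hbnb) quit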
|
flexible
|
{
"blob_id": "7cbf2082d530c315fdcfdb94f5c6ac4755ea2081",
"index": 1267,
"step-1": "<mask token>\n\n\nclass HBNBCommand(cmd.Cmd):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def emptyline(self):\n \"\"\"Do nothing\"\"\"\n pass\n\n def do_create(self, line):\n \"\"\"Creates a new instance of BaseModel\"\"\"\n arg_line = line.split()\n if line == '':\n print('** class name missing **')\n return False\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n else:\n new_instance = self.__DCT_CLS[arg_line[0]]()\n print(new_instance.id)\n new_instance.save()\n\n def do_show(self, line):\n if type(line) == str:\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n print(dict_classes[get_inst])\n else:\n print('** no instance found **')\n else:\n srch_id = line[0] + '.' + line[1]\n dict_classes = models.storage.all()\n if srch_id in dict_classes.keys():\n print(dict_classes[srch_id])\n else:\n print('** no instance found **')\n\n def do_destroy(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n del dict_classes[get_inst]\n models.storage.save()\n else:\n print('** no instance found **')\n\n def do_all(self, line):\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n print(list_classes)\n else:\n print(\"** class doesn't exist **\")\n\n def do_update(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) == 1:\n pass\n elif len_args == 2:\n print('** attribute name missing **')\n elif len_args == 3:\n print('** value missing **')\n else:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n if arg_line[3]:\n arg_line[3] = arg_line[3].replace('\"', '')\n try:\n arg_line[3] = int(arg_line[3])\n except ValueError:\n try:\n arg_line[3] = float(arg_line[3])\n except ValueError:\n arg_line[3] = arg_line[3]\n dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]\n dict_classes[get_inst].save()\n else:\n print('** no instance found **')\n <mask token>\n\n def check_if_created(self, arg_line, len_args):\n \"\"\"Verifies if class exists\"\"\"\n if len_args == 0:\n print('** class name missing **')\n return 1\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n return 1\n elif len_args == 1:\n print('** instance id missing **')\n return 1\n\n def do_count(self, line):\n \"\"\"Counts the number of existing instances\"\"\"\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n count = 0\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n count += 1\n print(count)\n else:\n print(\"** class doesn't exist **\")\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass HBNBCommand(cmd.Cmd):\n <mask token>\n <mask token>\n <mask token>\n\n def do_quit(self, line):\n \"\"\"Exit the CMD program\"\"\"\n return True\n <mask token>\n\n def emptyline(self):\n \"\"\"Do nothing\"\"\"\n pass\n\n def do_create(self, line):\n \"\"\"Creates a new instance of BaseModel\"\"\"\n arg_line = line.split()\n if line == '':\n print('** class name missing **')\n return False\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n else:\n new_instance = self.__DCT_CLS[arg_line[0]]()\n print(new_instance.id)\n new_instance.save()\n\n def do_show(self, line):\n if type(line) == str:\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n print(dict_classes[get_inst])\n else:\n print('** no instance found **')\n else:\n srch_id = line[0] + '.' + line[1]\n dict_classes = models.storage.all()\n if srch_id in dict_classes.keys():\n print(dict_classes[srch_id])\n else:\n print('** no instance found **')\n\n def do_destroy(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n del dict_classes[get_inst]\n models.storage.save()\n else:\n print('** no instance found **')\n\n def do_all(self, line):\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n print(list_classes)\n else:\n print(\"** class doesn't exist **\")\n\n def do_update(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) == 1:\n pass\n elif len_args == 2:\n print('** attribute name missing **')\n elif len_args == 3:\n print('** value missing **')\n else:\n get_inst = arg_line[0] + '.' 
+ arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n if arg_line[3]:\n arg_line[3] = arg_line[3].replace('\"', '')\n try:\n arg_line[3] = int(arg_line[3])\n except ValueError:\n try:\n arg_line[3] = float(arg_line[3])\n except ValueError:\n arg_line[3] = arg_line[3]\n dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]\n dict_classes[get_inst].save()\n else:\n print('** no instance found **')\n\n def default(self, line):\n \"\"\"all method names that aren't defined\"\"\"\n args_line = line.split('.')\n if len(args_line) > 1:\n if args_line[1] == 'all()':\n self.do_all(args_line[0])\n if args_line[1] == 'count()':\n self.do_count(args_line[0])\n my_count = args_line[1].split('\"')\n res = re.findall('\\\\(.*?\\\\)', args_line[1])\n my_count[0] = my_count[0] + line[-1]\n if my_count[0] == 'show()':\n myNewList = [args_line[0], my_count[1]]\n self.do_show(myNewList)\n else:\n cmd.Cmd.default(self, line)\n\n def check_if_created(self, arg_line, len_args):\n \"\"\"Verifies if class exists\"\"\"\n if len_args == 0:\n print('** class name missing **')\n return 1\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n return 1\n elif len_args == 1:\n print('** instance id missing **')\n return 1\n\n def do_count(self, line):\n \"\"\"Counts the number of existing instances\"\"\"\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n count = 0\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n count += 1\n print(count)\n else:\n print(\"** class doesn't exist **\")\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass HBNBCommand(cmd.Cmd):\n <mask token>\n <mask token>\n <mask token>\n\n def do_quit(self, line):\n \"\"\"Exit the CMD program\"\"\"\n return True\n\n def do_EOF(self, line):\n \"\"\"Exit the CMD program\"\"\"\n return True\n\n def emptyline(self):\n \"\"\"Do nothing\"\"\"\n pass\n\n def do_create(self, line):\n \"\"\"Creates a new instance of BaseModel\"\"\"\n arg_line = line.split()\n if line == '':\n print('** class name missing **')\n return False\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n else:\n new_instance = self.__DCT_CLS[arg_line[0]]()\n print(new_instance.id)\n new_instance.save()\n\n def do_show(self, line):\n if type(line) == str:\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n print(dict_classes[get_inst])\n else:\n print('** no instance found **')\n else:\n srch_id = line[0] + '.' + line[1]\n dict_classes = models.storage.all()\n if srch_id in dict_classes.keys():\n print(dict_classes[srch_id])\n else:\n print('** no instance found **')\n\n def do_destroy(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n del dict_classes[get_inst]\n models.storage.save()\n else:\n print('** no instance found **')\n\n def do_all(self, line):\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n print(list_classes)\n else:\n print(\"** class doesn't exist **\")\n\n def do_update(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) == 1:\n pass\n elif len_args == 2:\n print('** attribute name missing **')\n elif len_args == 3:\n print('** value missing **')\n else:\n get_inst = arg_line[0] + '.' 
+ arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n if arg_line[3]:\n arg_line[3] = arg_line[3].replace('\"', '')\n try:\n arg_line[3] = int(arg_line[3])\n except ValueError:\n try:\n arg_line[3] = float(arg_line[3])\n except ValueError:\n arg_line[3] = arg_line[3]\n dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]\n dict_classes[get_inst].save()\n else:\n print('** no instance found **')\n\n def default(self, line):\n \"\"\"all method names that aren't defined\"\"\"\n args_line = line.split('.')\n if len(args_line) > 1:\n if args_line[1] == 'all()':\n self.do_all(args_line[0])\n if args_line[1] == 'count()':\n self.do_count(args_line[0])\n my_count = args_line[1].split('\"')\n res = re.findall('\\\\(.*?\\\\)', args_line[1])\n my_count[0] = my_count[0] + line[-1]\n if my_count[0] == 'show()':\n myNewList = [args_line[0], my_count[1]]\n self.do_show(myNewList)\n else:\n cmd.Cmd.default(self, line)\n\n def check_if_created(self, arg_line, len_args):\n \"\"\"Verifies if class exists\"\"\"\n if len_args == 0:\n print('** class name missing **')\n return 1\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n return 1\n elif len_args == 1:\n print('** instance id missing **')\n return 1\n\n def do_count(self, line):\n \"\"\"Counts the number of existing instances\"\"\"\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n count = 0\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n count += 1\n print(count)\n else:\n print(\"** class doesn't exist **\")\n\n\n<mask token>\n",
"step-4": "<mask token>\nimport cmd\nimport models\nimport re\nfrom models.base_model import BaseModel\nfrom models import storage\nfrom models.user import User\nfrom models.state import State\nfrom models.city import City\nfrom models.amenity import Amenity\nfrom models.place import Place\nfrom models.review import Review\n\n\nclass HBNBCommand(cmd.Cmd):\n \"\"\" This class to setup the command interpreter \"\"\"\n __DCT_CLS = {'BaseModel': BaseModel, 'User': User, 'State': State,\n 'City': City, 'Amenity': Amenity, 'Place': Place, 'Review': Review}\n prompt = '(hbnb) '\n\n def do_quit(self, line):\n \"\"\"Exit the CMD program\"\"\"\n return True\n\n def do_EOF(self, line):\n \"\"\"Exit the CMD program\"\"\"\n return True\n\n def emptyline(self):\n \"\"\"Do nothing\"\"\"\n pass\n\n def do_create(self, line):\n \"\"\"Creates a new instance of BaseModel\"\"\"\n arg_line = line.split()\n if line == '':\n print('** class name missing **')\n return False\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n else:\n new_instance = self.__DCT_CLS[arg_line[0]]()\n print(new_instance.id)\n new_instance.save()\n\n def do_show(self, line):\n if type(line) == str:\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n print(dict_classes[get_inst])\n else:\n print('** no instance found **')\n else:\n srch_id = line[0] + '.' + line[1]\n dict_classes = models.storage.all()\n if srch_id in dict_classes.keys():\n print(dict_classes[srch_id])\n else:\n print('** no instance found **')\n\n def do_destroy(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) != 1:\n get_inst = arg_line[0] + '.' + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n del dict_classes[get_inst]\n models.storage.save()\n else:\n print('** no instance found **')\n\n def do_all(self, line):\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n print(list_classes)\n else:\n print(\"** class doesn't exist **\")\n\n def do_update(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if self.check_if_created(arg_line, len_args) == 1:\n pass\n elif len_args == 2:\n print('** attribute name missing **')\n elif len_args == 3:\n print('** value missing **')\n else:\n get_inst = arg_line[0] + '.' 
+ arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n if arg_line[3]:\n arg_line[3] = arg_line[3].replace('\"', '')\n try:\n arg_line[3] = int(arg_line[3])\n except ValueError:\n try:\n arg_line[3] = float(arg_line[3])\n except ValueError:\n arg_line[3] = arg_line[3]\n dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]\n dict_classes[get_inst].save()\n else:\n print('** no instance found **')\n\n def default(self, line):\n \"\"\"all method names that aren't defined\"\"\"\n args_line = line.split('.')\n if len(args_line) > 1:\n if args_line[1] == 'all()':\n self.do_all(args_line[0])\n if args_line[1] == 'count()':\n self.do_count(args_line[0])\n my_count = args_line[1].split('\"')\n res = re.findall('\\\\(.*?\\\\)', args_line[1])\n my_count[0] = my_count[0] + line[-1]\n if my_count[0] == 'show()':\n myNewList = [args_line[0], my_count[1]]\n self.do_show(myNewList)\n else:\n cmd.Cmd.default(self, line)\n\n def check_if_created(self, arg_line, len_args):\n \"\"\"Verifies if class exists\"\"\"\n if len_args == 0:\n print('** class name missing **')\n return 1\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n return 1\n elif len_args == 1:\n print('** instance id missing **')\n return 1\n\n def do_count(self, line):\n \"\"\"Counts the number of existing instances\"\"\"\n arg_line = line.split()\n if line == '' or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n count = 0\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n count += 1\n print(count)\n else:\n print(\"** class doesn't exist **\")\n\n\nif __name__ == '__main__':\n HBNBCommand().cmdloop()\n",
"step-5": "#!/usr/bin/python3\n\"\"\"\nprogram of the command interpreter\n\"\"\"\n\nimport cmd\nimport models\nimport re\nfrom models.base_model import BaseModel\nfrom models import storage\nfrom models.user import User\nfrom models.state import State\nfrom models.city import City\nfrom models.amenity import Amenity\nfrom models.place import Place\nfrom models.review import Review\n\n\nclass HBNBCommand(cmd.Cmd):\n \"\"\" This class to setup the command interpreter \"\"\"\n __DCT_CLS = {\n \"BaseModel\": BaseModel,\n \"User\": User,\n \"State\": State,\n \"City\": City,\n \"Amenity\": Amenity,\n \"Place\": Place,\n \"Review\": Review\n }\n prompt = \"(hbnb) \"\n\n def do_quit(self, line):\n '''Exit the CMD program'''\n return True\n\n def do_EOF(self, line):\n '''Exit the CMD program'''\n return True\n\n def emptyline(self):\n '''Do nothing'''\n pass\n\n def do_create(self, line):\n '''Creates a new instance of BaseModel'''\n arg_line = line.split()\n\n if line == \"\":\n print(\"** class name missing **\")\n return False\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n else:\n new_instance = self.__DCT_CLS[arg_line[0]]()\n print(new_instance.id)\n new_instance.save()\n\n def do_show(self, line):\n if (type(line) == str):\n arg_line = line.split()\n len_args = len(arg_line)\n\n if (self.check_if_created(arg_line, len_args) != 1):\n\n get_inst = arg_line[0] + \".\" + arg_line[1]\n dict_classes = models.storage.all()\n\n if get_inst in dict_classes.keys():\n print(dict_classes[get_inst])\n else:\n print(\"** no instance found **\")\n else:\n srch_id = line[0] + \".\" + line[1]\n dict_classes = models.storage.all()\n if srch_id in dict_classes.keys():\n print(dict_classes[srch_id])\n else:\n print(\"** no instance found **\")\n\n def do_destroy(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n if (self.check_if_created(arg_line, len_args) != 1):\n\n get_inst = arg_line[0] + \".\" + arg_line[1]\n dict_classes = models.storage.all()\n\n if get_inst in dict_classes.keys():\n del dict_classes[get_inst]\n models.storage.save()\n else:\n print(\"** no instance found **\")\n\n def do_all(self, line):\n arg_line = line.split()\n if line == \"\" or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n print(list_classes)\n else:\n print(\"** class doesn't exist **\")\n\n def do_update(self, line):\n arg_line = line.split()\n len_args = len(arg_line)\n\n if (self.check_if_created(arg_line, len_args) == 1):\n pass\n elif (len_args == 2):\n print(\"** attribute name missing **\")\n elif (len_args == 3):\n print(\"** value missing **\")\n else:\n get_inst = arg_line[0] + \".\" + arg_line[1]\n dict_classes = models.storage.all()\n if get_inst in dict_classes.keys():\n if arg_line[3]:\n arg_line[3] = arg_line[3].replace('\"', \"\")\n try:\n arg_line[3] = int(arg_line[3])\n except ValueError:\n try:\n arg_line[3] = float(arg_line[3])\n except ValueError:\n arg_line[3] = arg_line[3]\n dict_classes[get_inst].__dict__[arg_line[2]] = arg_line[3]\n dict_classes[get_inst].save()\n else:\n print(\"** no instance found **\")\n\n def default(self, line):\n '''all method names that aren't defined'''\n args_line = line.split('.')\n if len(args_line) > 1:\n if args_line[1] == \"all()\":\n self.do_all(args_line[0])\n if args_line[1] == \"count()\":\n self.do_count(args_line[0])\n\n my_count = args_line[1].split('\"')\n res = 
re.findall(r'\\(.*?\\)', args_line[1])\n my_count[0] = my_count[0] + line[-1]\n if my_count[0] == \"show()\":\n myNewList = [args_line[0], my_count[1]]\n self.do_show(myNewList)\n else:\n cmd.Cmd.default(self, line)\n\n def check_if_created(self, arg_line, len_args):\n '''Verifies if class exists'''\n if len_args == 0:\n print(\"** class name missing **\")\n return 1\n elif arg_line[0] not in self.__DCT_CLS:\n print(\"** class doesn't exist **\")\n return 1\n elif (len_args == 1):\n print(\"** instance id missing **\")\n return 1\n\n def do_count(self, line):\n '''Counts the number of existing instances'''\n arg_line = line.split()\n if line == \"\" or arg_line[0] in self.__DCT_CLS:\n dir_classes = models.storage.all()\n list_classes = []\n count = 0\n for key, value in dir_classes.items():\n if line in key:\n list_classes.append(value.__str__())\n count += 1\n print(count)\n else:\n print(\"** class doesn't exist **\")\n\n\nif __name__ == \"__main__\":\n HBNBCommand().cmdloop()\n",
"step-ids": [
9,
11,
12,
16,
17
]
}
|
[
9,
11,
12,
16,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(
'^follow/me/$', views.follow_me, name='follow_me'), url(
'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),
url('^execute/', views.execute, name='executed'), url('^output/', views
.update_output, name='output'), url('^lead/', views.lead_nodes, name=
'lead')]
<|reserved_special_token_1|>
from django.conf.urls import url
from . import views
urlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(
'^follow/me/$', views.follow_me, name='follow_me'), url(
'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),
url('^execute/', views.execute, name='executed'), url('^output/', views
.update_output, name='output'), url('^lead/', views.lead_nodes, name=
'lead')]
<|reserved_special_token_1|>
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^stats/$', views.get_stats, name='stats'),
url(r'^follow/me/$', views.follow_me, name='follow_me'),
url(r'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),
url(r'^execute/', views.execute, name='executed'),
url(r'^output/', views.update_output, name='output'),
url(r'^lead/', views.lead_nodes, name='lead'),
]
|
flexible
|
{
"blob_id": "33b68246dd3da9561c1d4adb5a3403cba656dcee",
"index": 9175,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(\n '^follow/me/$', views.follow_me, name='follow_me'), url(\n '^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url('^execute/', views.execute, name='executed'), url('^output/', views\n .update_output, name='output'), url('^lead/', views.lead_nodes, name=\n 'lead')]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\nurlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(\n '^follow/me/$', views.follow_me, name='follow_me'), url(\n '^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url('^execute/', views.execute, name='executed'), url('^output/', views\n .update_output, name='output'), url('^lead/', views.lead_nodes, name=\n 'lead')]\n",
"step-4": "from django.conf.urls import url\n\nfrom . import views\n\nurlpatterns = [\n url(r'^stats/$', views.get_stats, name='stats'),\n url(r'^follow/me/$', views.follow_me, name='follow_me'),\n url(r'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url(r'^execute/', views.execute, name='executed'),\n url(r'^output/', views.update_output, name='output'),\n url(r'^lead/', views.lead_nodes, name='lead'),\n\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 2.1.4 on 2019-04-17 03:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('historiasClinicas', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='actualizacion',
name='valoracion_medica',
field=models.CharField(choices=[('Apto para desempeñar el cargo sin patologia aparente', 'Apto para desempeñar el cargo sin patologia aparente'), ('Apto para desempañar el cargo con patologia que no limita la labor', 'Apto para desempañar el cargo con patologia que no limita la labor'), ('Apto con restricciones o adaptaciones para la labor', 'Apto con restricciones o adaptaciones para la labor'), ('Aplazado', 'Aplazado'), ('Apto para labor el alturas', 'Apto para labor el alturas'), ('Apto para continuar desempeñando su labor', 'Apto para continuar desempeñando su labor'), ('Examen de retiro', 'Examen de retiro'), ('Apto para manipulación de alimentos', 'Apto para manipulación de alimentos')], max_length=50, verbose_name='Concepto de valoracion medica'),
),
]
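
# To apply this migration (hedged: assumes a standard Django project layout
# with manage.py at the project root):
#   python manage.py migrate historiasClinicas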
|
normal
|
{
"blob_id": "4aefabf064cdef963f9c62bd5c93892207c301d3",
"index": 3076,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('historiasClinicas', '0001_initial')]\n operations = [migrations.AlterField(model_name='actualizacion', name=\n 'valoracion_medica', field=models.CharField(choices=[(\n 'Apto para desempeñar el cargo sin patologia aparente',\n 'Apto para desempeñar el cargo sin patologia aparente'), (\n 'Apto para desempañar el cargo con patologia que no limita la labor',\n 'Apto para desempañar el cargo con patologia que no limita la labor'\n ), ('Apto con restricciones o adaptaciones para la labor',\n 'Apto con restricciones o adaptaciones para la labor'), ('Aplazado',\n 'Aplazado'), ('Apto para labor el alturas',\n 'Apto para labor el alturas'), (\n 'Apto para continuar desempeñando su labor',\n 'Apto para continuar desempeñando su labor'), ('Examen de retiro',\n 'Examen de retiro'), ('Apto para manipulación de alimentos',\n 'Apto para manipulación de alimentos')], max_length=50,\n verbose_name='Concepto de valoracion medica'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('historiasClinicas', '0001_initial')]\n operations = [migrations.AlterField(model_name='actualizacion', name=\n 'valoracion_medica', field=models.CharField(choices=[(\n 'Apto para desempeñar el cargo sin patologia aparente',\n 'Apto para desempeñar el cargo sin patologia aparente'), (\n 'Apto para desempañar el cargo con patologia que no limita la labor',\n 'Apto para desempañar el cargo con patologia que no limita la labor'\n ), ('Apto con restricciones o adaptaciones para la labor',\n 'Apto con restricciones o adaptaciones para la labor'), ('Aplazado',\n 'Aplazado'), ('Apto para labor el alturas',\n 'Apto para labor el alturas'), (\n 'Apto para continuar desempeñando su labor',\n 'Apto para continuar desempeñando su labor'), ('Examen de retiro',\n 'Examen de retiro'), ('Apto para manipulación de alimentos',\n 'Apto para manipulación de alimentos')], max_length=50,\n verbose_name='Concepto de valoracion medica'))]\n",
"step-5": "# Generated by Django 2.1.4 on 2019-04-17 03:56\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('historiasClinicas', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='actualizacion',\n name='valoracion_medica',\n field=models.CharField(choices=[('Apto para desempeñar el cargo sin patologia aparente', 'Apto para desempeñar el cargo sin patologia aparente'), ('Apto para desempañar el cargo con patologia que no limita la labor', 'Apto para desempañar el cargo con patologia que no limita la labor'), ('Apto con restricciones o adaptaciones para la labor', 'Apto con restricciones o adaptaciones para la labor'), ('Aplazado', 'Aplazado'), ('Apto para labor el alturas', 'Apto para labor el alturas'), ('Apto para continuar desempeñando su labor', 'Apto para continuar desempeñando su labor'), ('Examen de retiro', 'Examen de retiro'), ('Apto para manipulación de alimentos', 'Apto para manipulación de alimentos')], max_length=50, verbose_name='Concepto de valoracion medica'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#coding:utf-8
x = '上'
res = x.encode('gbk')
print(res, type(res))
print(res.decode('gbk'))
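# Expected output (GBK/GB2312 encodes '上' as the two bytes 0xC9 0xCF):
# b'\xc9\xcf' <class 'bytes'>
# 上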
|
normal
|
{
"blob_id": "3c053bf1b572759eddcd310d185f7e44d82171a5",
"index": 9153,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(res, type(res))\nprint(res.decode('gbk'))\n",
"step-3": "x = '上'\nres = x.encode('gbk')\nprint(res, type(res))\nprint(res.decode('gbk'))\n",
"step-4": "#coding:utf-8\n\nx = '上'\nres = x.encode('gbk')\nprint(res, type(res))\nprint(res.decode('gbk'))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- encoding: utf-8 -*-
#----------------------------------------------------------------------------
#
# Copyright (C) 2014 .
# Coded by: Borni DHIFI (dhifi.borni@gmail.com)
#
#----------------------------------------------------------------------------
import models
import wizard
import parser
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
normal
|
{
"blob_id": "a3216aa41cd28b91653b99017e21a03e43372e9b",
"index": 4137,
"step-1": "<mask token>\n",
"step-2": "import models\nimport wizard\nimport parser\n",
"step-3": "# -*- encoding: utf-8 -*-\n#----------------------------------------------------------------------------\n#\n# Copyright (C) 2014 .\n# Coded by: Borni DHIFI (dhifi.borni@gmail.com)\n#\n#----------------------------------------------------------------------------\n\nimport models\nimport wizard\nimport parser\n\n# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
try:
import Image
except ImportError:
from PIL import Image
import sys
sys.path.append(os.path.abspath(os.path.join(__file__, os.pardir, os.pardir, 'DropPy.Common')))
from file_tools import get_file_paths_from_directory
class Task(object):
"""
Documentation: https://docs.droppyapp.com/tasks/image-rotate
"""
def __init__(self, input_dir, output_dir, **kwargs):
# Get keyword arguments.
degrees = kwargs.get(str('degrees'), 90.0)
expand_arg = kwargs.get(str('expand'), True)
# Check arguments.
if expand_arg is True:
expand = 1
elif expand_arg is False:
expand = 0
else:
sys.exit('Argument expand invalid')
# Process files and directories.
for item_name in os.listdir(input_dir):
item_path = os.path.join(input_dir, item_name)
if os.path.isfile(item_path):
self.rotate_file(item_path, output_dir, degrees, expand)
elif os.path.isdir(item_path):
output_sub_dir = os.path.join(output_dir, item_name)
os.makedirs(output_sub_dir)
contained_files = get_file_paths_from_directory(item_path)
for contained_file in contained_files:
self.rotate_file(contained_file, output_sub_dir, degrees, expand)
@staticmethod
def rotate_file(input_file, output_dir, degrees, expand):
output_file_name = os.path.basename(input_file)
output_file = os.path.join(output_dir, output_file_name)
input_image = Image.open(input_file)
output_image = input_image.rotate(degrees, expand=expand)
output_image.save(output_file)
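
# Hedged usage sketch: DropPy normally instantiates Task itself with the
# drop's directories; the paths and keyword values below are illustrative
# assumptions only, not part of the documented task API.
# if __name__ == '__main__':
#     Task('/tmp/droppy_in', '/tmp/droppy_out', degrees=45.0, expand=True)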
|
normal
|
{
"blob_id": "df3208a00f7a5dd1ddd76542ac0de85762cc45ab",
"index": 7236,
"step-1": "<mask token>\n\n\nclass Task(object):\n <mask token>\n <mask token>\n\n @staticmethod\n def rotate_file(input_file, output_dir, degrees, expand):\n output_file_name = os.path.basename(input_file)\n output_file = os.path.join(output_dir, output_file_name)\n input_image = Image.open(input_file)\n output_image = input_image.rotate(degrees, expand=expand)\n output_image.save(output_file)\n",
"step-2": "<mask token>\n\n\nclass Task(object):\n <mask token>\n\n def __init__(self, input_dir, output_dir, **kwargs):\n degrees = kwargs.get(str('degrees'), 90.0)\n expand_arg = kwargs.get(str('expand'), True)\n if expand_arg is True:\n expand = 1\n elif expand_arg is False:\n expand = 0\n else:\n sys.exit('Argument expand invalid')\n for item_name in os.listdir(input_dir):\n item_path = os.path.join(input_dir, item_name)\n if os.path.isfile(item_path):\n self.rotate_file(item_path, output_dir, degrees, expand)\n elif os.path.isdir(item_path):\n output_sub_dir = os.path.join(output_dir, item_name)\n os.makedirs(output_sub_dir)\n contained_files = get_file_paths_from_directory(item_path)\n for contained_file in contained_files:\n self.rotate_file(contained_file, output_sub_dir,\n degrees, expand)\n\n @staticmethod\n def rotate_file(input_file, output_dir, degrees, expand):\n output_file_name = os.path.basename(input_file)\n output_file = os.path.join(output_dir, output_file_name)\n input_image = Image.open(input_file)\n output_image = input_image.rotate(degrees, expand=expand)\n output_image.save(output_file)\n",
"step-3": "<mask token>\n\n\nclass Task(object):\n \"\"\"\n Documentation: https://docs.droppyapp.com/tasks/image-rotate\n \"\"\"\n\n def __init__(self, input_dir, output_dir, **kwargs):\n degrees = kwargs.get(str('degrees'), 90.0)\n expand_arg = kwargs.get(str('expand'), True)\n if expand_arg is True:\n expand = 1\n elif expand_arg is False:\n expand = 0\n else:\n sys.exit('Argument expand invalid')\n for item_name in os.listdir(input_dir):\n item_path = os.path.join(input_dir, item_name)\n if os.path.isfile(item_path):\n self.rotate_file(item_path, output_dir, degrees, expand)\n elif os.path.isdir(item_path):\n output_sub_dir = os.path.join(output_dir, item_name)\n os.makedirs(output_sub_dir)\n contained_files = get_file_paths_from_directory(item_path)\n for contained_file in contained_files:\n self.rotate_file(contained_file, output_sub_dir,\n degrees, expand)\n\n @staticmethod\n def rotate_file(input_file, output_dir, degrees, expand):\n output_file_name = os.path.basename(input_file)\n output_file = os.path.join(output_dir, output_file_name)\n input_image = Image.open(input_file)\n output_image = input_image.rotate(degrees, expand=expand)\n output_image.save(output_file)\n",
"step-4": "from __future__ import unicode_literals\nimport os\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\nimport sys\nsys.path.append(os.path.abspath(os.path.join(__file__, os.pardir, os.pardir,\n 'DropPy.Common')))\nfrom file_tools import get_file_paths_from_directory\n\n\nclass Task(object):\n \"\"\"\n Documentation: https://docs.droppyapp.com/tasks/image-rotate\n \"\"\"\n\n def __init__(self, input_dir, output_dir, **kwargs):\n degrees = kwargs.get(str('degrees'), 90.0)\n expand_arg = kwargs.get(str('expand'), True)\n if expand_arg is True:\n expand = 1\n elif expand_arg is False:\n expand = 0\n else:\n sys.exit('Argument expand invalid')\n for item_name in os.listdir(input_dir):\n item_path = os.path.join(input_dir, item_name)\n if os.path.isfile(item_path):\n self.rotate_file(item_path, output_dir, degrees, expand)\n elif os.path.isdir(item_path):\n output_sub_dir = os.path.join(output_dir, item_name)\n os.makedirs(output_sub_dir)\n contained_files = get_file_paths_from_directory(item_path)\n for contained_file in contained_files:\n self.rotate_file(contained_file, output_sub_dir,\n degrees, expand)\n\n @staticmethod\n def rotate_file(input_file, output_dir, degrees, expand):\n output_file_name = os.path.basename(input_file)\n output_file = os.path.join(output_dir, output_file_name)\n input_image = Image.open(input_file)\n output_image = input_image.rotate(degrees, expand=expand)\n output_image.save(output_file)\n",
"step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import unicode_literals\nimport os\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\nimport sys\n\nsys.path.append(os.path.abspath(os.path.join(__file__, os.pardir, os.pardir, 'DropPy.Common')))\nfrom file_tools import get_file_paths_from_directory\n\n\nclass Task(object):\n \"\"\"\n Documentation: https://docs.droppyapp.com/tasks/image-rotate\n \"\"\"\n def __init__(self, input_dir, output_dir, **kwargs):\n # Get keyword arguments.\n degrees = kwargs.get(str('degrees'), 90.0)\n expand_arg = kwargs.get(str('expand'), True)\n\n # Check arguments.\n if expand_arg is True:\n expand = 1\n elif expand_arg is False:\n expand = 0\n else:\n sys.exit('Argument expand invalid')\n\n # Process files and directories.\n for item_name in os.listdir(input_dir):\n item_path = os.path.join(input_dir, item_name)\n\n if os.path.isfile(item_path):\n self.rotate_file(item_path, output_dir, degrees, expand)\n\n elif os.path.isdir(item_path):\n output_sub_dir = os.path.join(output_dir, item_name)\n os.makedirs(output_sub_dir)\n\n contained_files = get_file_paths_from_directory(item_path)\n for contained_file in contained_files:\n self.rotate_file(contained_file, output_sub_dir, degrees, expand)\n\n @staticmethod\n def rotate_file(input_file, output_dir, degrees, expand):\n output_file_name = os.path.basename(input_file)\n output_file = os.path.join(output_dir, output_file_name)\n\n input_image = Image.open(input_file)\n output_image = input_image.rotate(degrees, expand=expand)\n output_image.save(output_file)\n",
"step-ids": [
2,
3,
4,
6,
7
]
}
|
[
2,
3,
4,
6,
7
] |
from flask import Flask, json, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
import warnings
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:1234@localhost/escuela'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS']=False
db = SQLAlchemy(app)
ma = Marshmallow(app)
class curso(db.Model):
idcurso = db.Column(db.Integer, primary_key=True)
nombre_curso = db.Column(db.String(45))
precio = db.Column(db.Integer)
def __init__(self, nombre, precio):
self.nombre_curso = nombre
self.precio = precio
db.create_all()
class CursoSchema(ma.Schema):
class Meta:
fields = ('idcurso','nombre_curso','precio')
curso_Schema = CursoSchema()
cursos_Schema = CursoSchema(many=True)
@app.route('/',methods=['GET'])
def index():
return jsonify({'message': 'Academia'})
@app.route('/cursos', methods=['POST'])
def create_curso():
nombre_curso = request.json['nombre_curso']
precio = request.json['precio']
new_Curso = curso(nombre_curso,precio)
db.session.add(new_Curso)
db.session.commit()
return curso_Schema.jsonify(new_Curso)
@app.route('/cursos',methods=['GET'])
def get_cursos():
all_cursos = curso.query.all()
result =cursos_Schema.dump(all_cursos)
return jsonify(result)
@app.route('/cursos/<id>', methods=['GET'])
def get_task(id):
cursoGet = curso.query.get(id)
return curso_Schema.jsonify(cursoGet)
@app.route('/cursos/<id>', methods=['PUT'])
def update_curso(id):
cursoUpdate=curso.query.get(id)
nombre = request.json['nombre_curso']
precio = request.json['precio']
cursoUpdate.nombre_curso = nombre
cursoUpdate.precio = precio
db.session.commit()
return curso_Schema.jsonify(cursoUpdate)
@app.route('/cursos/<id>',methods=['DELETE'])
def delete_item(id):
cursoDelete = curso.query.get(id)
db.session.delete(cursoDelete)
db.session.commit()
return curso_Schema.jsonify(cursoDelete)
if __name__ == "__main__":
app.run(debug=True)
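
# Hedged usage sketch (assumes the MySQL server and 'escuela' database from
# the DSN above are reachable); exercise the REST API with curl:
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"nombre_curso": "Algebra", "precio": 1500}' http://127.0.0.1:5000/cursos
#   curl http://127.0.0.1:5000/cursos
#   curl -X DELETE http://127.0.0.1:5000/cursos/1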
|
normal
|
{
"blob_id": "5c1d1eafb913822be9b6e46b15c6886f8bf3e2e1",
"index": 3622,
"step-1": "<mask token>\n\n\nclass curso(db.Model):\n idcurso = db.Column(db.Integer, primary_key=True)\n nombre_curso = db.Column(db.String(45))\n precio = db.Column(db.Integer)\n\n def __init__(self, nombre, precio):\n self.nombre_curso = nombre\n self.precio = precio\n\n\n<mask token>\n\n\nclass CursoSchema(ma.Schema):\n\n\n class Meta:\n fields = 'idcurso', 'nombre_curso', 'precio'\n\n\n<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return jsonify({'message': 'Academia'})\n\n\n@app.route('/cursos', methods=['POST'])\ndef create_curso():\n nombre_curso = request.json['nombre_curso']\n precio = request.json['precio']\n new_Curso = curso(nombre_curso, precio)\n db.session.add(new_Curso)\n db.session.commit()\n return curso_Schema.jsonify(new_Curso)\n\n\n@app.route('/cursos', methods=['GET'])\ndef get_cursos():\n all_cursos = curso.query.all()\n result = cursos_Schema.dump(all_cursos)\n return jsonify(result)\n\n\n@app.route('/cursos/<id>', methods=['GET'])\ndef get_task(id):\n cursoGet = curso.query.get(id)\n return curso_Schema.jsonify(cursoGet)\n\n\n<mask token>\n\n\n@app.route('/cursos/<id>', methods=['DELETE'])\ndef delete_item(id):\n cursoDelete = curso.query.get(id)\n db.session.delete(cursoDelete)\n db.session.commit()\n return curso_Schema.jsonify(cursoDelete)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass curso(db.Model):\n idcurso = db.Column(db.Integer, primary_key=True)\n nombre_curso = db.Column(db.String(45))\n precio = db.Column(db.Integer)\n\n def __init__(self, nombre, precio):\n self.nombre_curso = nombre\n self.precio = precio\n\n\n<mask token>\n\n\nclass CursoSchema(ma.Schema):\n\n\n class Meta:\n fields = 'idcurso', 'nombre_curso', 'precio'\n\n\n<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return jsonify({'message': 'Academia'})\n\n\n@app.route('/cursos', methods=['POST'])\ndef create_curso():\n nombre_curso = request.json['nombre_curso']\n precio = request.json['precio']\n new_Curso = curso(nombre_curso, precio)\n db.session.add(new_Curso)\n db.session.commit()\n return curso_Schema.jsonify(new_Curso)\n\n\n@app.route('/cursos', methods=['GET'])\ndef get_cursos():\n all_cursos = curso.query.all()\n result = cursos_Schema.dump(all_cursos)\n return jsonify(result)\n\n\n@app.route('/cursos/<id>', methods=['GET'])\ndef get_task(id):\n cursoGet = curso.query.get(id)\n return curso_Schema.jsonify(cursoGet)\n\n\n@app.route('/cursos/<id>', methods=['PUT'])\ndef update_curso(id):\n cursoUpdate = curso.query.get(id)\n nombre = request.json['nombre_curso']\n precio = request.json['precio']\n cursoUpdate.nombre_curso = nombre\n cursoUpdate.precio = precio\n db.session.commit()\n return curso_Schema.jsonify(cursoUpdate)\n\n\n@app.route('/cursos/<id>', methods=['DELETE'])\ndef delete_item(id):\n cursoDelete = curso.query.get(id)\n db.session.delete(cursoDelete)\n db.session.commit()\n return curso_Schema.jsonify(cursoDelete)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass curso(db.Model):\n idcurso = db.Column(db.Integer, primary_key=True)\n nombre_curso = db.Column(db.String(45))\n precio = db.Column(db.Integer)\n\n def __init__(self, nombre, precio):\n self.nombre_curso = nombre\n self.precio = precio\n\n\ndb.create_all()\n\n\nclass CursoSchema(ma.Schema):\n\n\n class Meta:\n fields = 'idcurso', 'nombre_curso', 'precio'\n\n\n<mask token>\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return jsonify({'message': 'Academia'})\n\n\n@app.route('/cursos', methods=['POST'])\ndef create_curso():\n nombre_curso = request.json['nombre_curso']\n precio = request.json['precio']\n new_Curso = curso(nombre_curso, precio)\n db.session.add(new_Curso)\n db.session.commit()\n return curso_Schema.jsonify(new_Curso)\n\n\n@app.route('/cursos', methods=['GET'])\ndef get_cursos():\n all_cursos = curso.query.all()\n result = cursos_Schema.dump(all_cursos)\n return jsonify(result)\n\n\n@app.route('/cursos/<id>', methods=['GET'])\ndef get_task(id):\n cursoGet = curso.query.get(id)\n return curso_Schema.jsonify(cursoGet)\n\n\n@app.route('/cursos/<id>', methods=['PUT'])\ndef update_curso(id):\n cursoUpdate = curso.query.get(id)\n nombre = request.json['nombre_curso']\n precio = request.json['precio']\n cursoUpdate.nombre_curso = nombre\n cursoUpdate.precio = precio\n db.session.commit()\n return curso_Schema.jsonify(cursoUpdate)\n\n\n@app.route('/cursos/<id>', methods=['DELETE'])\ndef delete_item(id):\n cursoDelete = curso.query.get(id)\n db.session.delete(cursoDelete)\n db.session.commit()\n return curso_Schema.jsonify(cursoDelete)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, json, request, jsonify\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_marshmallow import Marshmallow\nimport warnings\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'\n ] = 'mysql+pymysql://root:1234@localhost/escuela'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\nma = Marshmallow(app)\n\n\nclass curso(db.Model):\n idcurso = db.Column(db.Integer, primary_key=True)\n nombre_curso = db.Column(db.String(45))\n precio = db.Column(db.Integer)\n\n def __init__(self, nombre, precio):\n self.nombre_curso = nombre\n self.precio = precio\n\n\ndb.create_all()\n\n\nclass CursoSchema(ma.Schema):\n\n\n class Meta:\n fields = 'idcurso', 'nombre_curso', 'precio'\n\n\ncurso_Schema = CursoSchema()\ncursos_Schema = CursoSchema(many=True)\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return jsonify({'message': 'Academia'})\n\n\n@app.route('/cursos', methods=['POST'])\ndef create_curso():\n nombre_curso = request.json['nombre_curso']\n precio = request.json['precio']\n new_Curso = curso(nombre_curso, precio)\n db.session.add(new_Curso)\n db.session.commit()\n return curso_Schema.jsonify(new_Curso)\n\n\n@app.route('/cursos', methods=['GET'])\ndef get_cursos():\n all_cursos = curso.query.all()\n result = cursos_Schema.dump(all_cursos)\n return jsonify(result)\n\n\n@app.route('/cursos/<id>', methods=['GET'])\ndef get_task(id):\n cursoGet = curso.query.get(id)\n return curso_Schema.jsonify(cursoGet)\n\n\n@app.route('/cursos/<id>', methods=['PUT'])\ndef update_curso(id):\n cursoUpdate = curso.query.get(id)\n nombre = request.json['nombre_curso']\n precio = request.json['precio']\n cursoUpdate.nombre_curso = nombre\n cursoUpdate.precio = precio\n db.session.commit()\n return curso_Schema.jsonify(cursoUpdate)\n\n\n@app.route('/cursos/<id>', methods=['DELETE'])\ndef delete_item(id):\n cursoDelete = curso.query.get(id)\n db.session.delete(cursoDelete)\n db.session.commit()\n return curso_Schema.jsonify(cursoDelete)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, json, request, jsonify\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_marshmallow import Marshmallow\nimport warnings\n\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:1234@localhost/escuela'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS']=False\n\ndb = SQLAlchemy(app)\nma = Marshmallow(app)\n\nclass curso(db.Model):\n idcurso = db.Column(db.Integer, primary_key=True)\n nombre_curso = db.Column(db.String(45))\n precio = db.Column(db.Integer)\n\n def __init__(self, nombre, precio):\n self.nombre_curso = nombre\n self.precio = precio\n\ndb.create_all()\n\nclass CursoSchema(ma.Schema):\n class Meta:\n fields = ('idcurso','nombre_curso','precio')\n\ncurso_Schema = CursoSchema()\ncursos_Schema = CursoSchema(many=True)\n\n@app.route('/',methods=['GET'])\ndef index():\n return jsonify({'message': 'Academia'})\n\n@app.route('/cursos', methods=['POST'])\ndef create_curso():\n nombre_curso = request.json['nombre_curso']\n precio = request.json['precio']\n\n new_Curso = curso(nombre_curso,precio)\n db.session.add(new_Curso)\n db.session.commit()\n\n return curso_Schema.jsonify(new_Curso)\n\n@app.route('/cursos',methods=['GET'])\ndef get_cursos():\n all_cursos = curso.query.all()\n result =cursos_Schema.dump(all_cursos)\n return jsonify(result)\n\n@app.route('/cursos/<id>', methods=['GET'])\ndef get_task(id):\n cursoGet = curso.query.get(id)\n return curso_Schema.jsonify(cursoGet)\n\n@app.route('/cursos/<id>', methods=['PUT'])\ndef update_curso(id):\n cursoUpdate=curso.query.get(id)\n nombre = request.json['nombre_curso']\n precio = request.json['precio']\n\n cursoUpdate.nombre_curso = nombre\n cursoUpdate.precio = precio\n\n db.session.commit()\n return curso_Schema.jsonify(cursoUpdate)\n\n@app.route('/cursos/<id>',methods=['DELETE'])\ndef delete_item(id):\n cursoDelete = curso.query.get(id)\n db.session.delete(cursoDelete)\n db.session.commit()\n\n return curso_Schema.jsonify(cursoDelete)\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)",
"step-ids": [
9,
10,
11,
13,
14
]
}
|
[
9,
10,
11,
13,
14
] |
class Odwroc():
def __init__(self,dane):
self.dane = dane
self.indeks = len(dane)
def __iter__(self):
return self
def __next__(self):
if self.indeks == 0:
raise StopIteration
self.indeks -= 1
return self.dane[self.indeks]
for i in Odwroc('Martusia'):
print(i,end = '')
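# Prints the characters of 'Martusia' in reverse order: aisutraM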
|
normal
|
{
"blob_id": "763c0baf919b48ff135f7aa18974da5b85ee40f5",
"index": 1133,
"step-1": "class Odwroc:\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Odwroc:\n\n def __init__(self, dane):\n self.dane = dane\n self.indeks = len(dane)\n <mask token>\n\n def __next__(self):\n if self.indeks == 0:\n raise StopIteration\n self.indeks -= 1\n return self.dane[self.indeks]\n\n\n<mask token>\n",
"step-3": "class Odwroc:\n\n def __init__(self, dane):\n self.dane = dane\n self.indeks = len(dane)\n\n def __iter__(self):\n return self\n\n def __next__(self):\n if self.indeks == 0:\n raise StopIteration\n self.indeks -= 1\n return self.dane[self.indeks]\n\n\n<mask token>\n",
"step-4": "class Odwroc:\n\n def __init__(self, dane):\n self.dane = dane\n self.indeks = len(dane)\n\n def __iter__(self):\n return self\n\n def __next__(self):\n if self.indeks == 0:\n raise StopIteration\n self.indeks -= 1\n return self.dane[self.indeks]\n\n\nfor i in Odwroc('Martusia'):\n print(i, end='')\n",
"step-5": "class Odwroc():\n def __init__(self,dane):\n self.dane = dane\n self.indeks = len(dane)\n\n def __iter__(self):\n return self\n\n def __next__(self):\n if self.indeks == 0:\n raise StopIteration\n\n self.indeks -= 1\n return self.dane[self.indeks]\n\nfor i in Odwroc('Martusia'):\n print(i,end = '')\n\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for letter in 'zYxWvUtSrQpOnMlKjIhGfEdCbA':
print('{:s}'.format(letter), end='')
<|reserved_special_token_1|>
#!/usr/bin/python3
"""
list = list(range(97, 123)
for (i in list):
if (i % 2 == 0):
i = (i - 32)
"""
for letter in "zYxWvUtSrQpOnMlKjIhGfEdCbA":
print('{:s}'.format(letter), end = "")
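# Prints the hard-coded string as-is, one character per iteration:
# zYxWvUtSrQpOnMlKjIhGfEdCbA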
|
flexible
|
{
"blob_id": "55a061a1c0cd20e5ab7413c671bc03573de1bbdf",
"index": 7754,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor letter in 'zYxWvUtSrQpOnMlKjIhGfEdCbA':\n print('{:s}'.format(letter), end='')\n",
"step-3": "#!/usr/bin/python3\n\"\"\"\nlist = list(range(97, 123)\nfor (i in list):\n if (i % 2 == 0):\n i = (i - 32)\n\"\"\"\nfor letter in \"zYxWvUtSrQpOnMlKjIhGfEdCbA\":\n print('{:s}'.format(letter), end = \"\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class Cliente:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def BD(self):
conectar = Base_de_datos.BaseDeDatos()
comando = ("INSERT INTO public.cliente(id, nombre) VALUES('" + self
.id.get() + "','" + self.nombre.get() + "')")
print(comando)
conectar.cursor.execute(comando)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cliente:
def __init__(self, id=None, nombre=None):
self.id = id
self.nombre = nombre
def ingresar(self):
self.ventanaIngresar = Toplevel()
self.ventanaIngresar.geometry('570x400')
self.ventanaIngresar.title('Cliente')
img = PhotoImage(file=
'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')
imagen = Label(self.ventanaIngresar, image=img)
imagen.pack()
Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)
).place(x=5, y=0)
Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x
=0, y=30)
Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)
).place(x=0, y=60)
self.id = StringVar()
Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)
self.nombre = StringVar()
Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)
Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),
width=15, command=self.BD).place(x=420, y=5)
Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),
width=15, command=self.Mostrar).place(x=0, y=365)
Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),
width=15, command=self.ordenamiento).place(x=220, y=365)
self.ventanaIngresar.mainloop()
def BD(self):
conectar = Base_de_datos.BaseDeDatos()
comando = ("INSERT INTO public.cliente(id, nombre) VALUES('" + self
.id.get() + "','" + self.nombre.get() + "')")
print(comando)
conectar.cursor.execute(comando)
<|reserved_special_token_0|>
def ordenamiento(self):
comando = 'SELECT id FROM cliente;'
conectar = Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
rows = conectar.cursor.fetchall()
ordenar = MergeSort.merge_sort(rows)
print(ordenar)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cliente:
def __init__(self, id=None, nombre=None):
self.id = id
self.nombre = nombre
def ingresar(self):
self.ventanaIngresar = Toplevel()
self.ventanaIngresar.geometry('570x400')
self.ventanaIngresar.title('Cliente')
img = PhotoImage(file=
'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')
imagen = Label(self.ventanaIngresar, image=img)
imagen.pack()
Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)
).place(x=5, y=0)
Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x
=0, y=30)
Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)
).place(x=0, y=60)
self.id = StringVar()
Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)
self.nombre = StringVar()
Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)
Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),
width=15, command=self.BD).place(x=420, y=5)
Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),
width=15, command=self.Mostrar).place(x=0, y=365)
Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),
width=15, command=self.ordenamiento).place(x=220, y=365)
self.ventanaIngresar.mainloop()
def BD(self):
conectar = Base_de_datos.BaseDeDatos()
comando = ("INSERT INTO public.cliente(id, nombre) VALUES('" + self
.id.get() + "','" + self.nombre.get() + "')")
print(comando)
conectar.cursor.execute(comando)
def Mostrar(self):
comando = 'SELECT * FROM cliente;'
conectar = Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)
self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),
borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=
'sunken', width=60)
self.listbox.place(x=5, y=90)
Scroll.config(command=self.listbox.yview)
Scroll.pack(side=RIGHT, fill=Y)
for dato1, dato2 in enumerate(conectar.cursor.fetchall()):
self.listbox.insert(0, 'Id: {}'.format(dato2[0]))
self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))
self.listbox.insert(2, ' ')
def ordenamiento(self):
comando = 'SELECT id FROM cliente;'
conectar = Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
rows = conectar.cursor.fetchall()
ordenar = MergeSort.merge_sort(rows)
print(ordenar)
<|reserved_special_token_1|>
from tkinter import *
import psycopg2
import sys
import pprint
import Base_de_datos
import MergeSort
class Cliente:
def __init__(self, id=None, nombre=None):
self.id = id
self.nombre = nombre
def ingresar(self):
self.ventanaIngresar = Toplevel()
self.ventanaIngresar.geometry('570x400')
self.ventanaIngresar.title('Cliente')
img = PhotoImage(file=
'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')
imagen = Label(self.ventanaIngresar, image=img)
imagen.pack()
Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)
).place(x=5, y=0)
Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x
=0, y=30)
Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)
).place(x=0, y=60)
self.id = StringVar()
Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)
self.nombre = StringVar()
Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)
Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),
width=15, command=self.BD).place(x=420, y=5)
Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),
width=15, command=self.Mostrar).place(x=0, y=365)
Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),
width=15, command=self.ordenamiento).place(x=220, y=365)
self.ventanaIngresar.mainloop()
def BD(self):
conectar = Base_de_datos.BaseDeDatos()
comando = ("INSERT INTO public.cliente(id, nombre) VALUES('" + self
.id.get() + "','" + self.nombre.get() + "')")
print(comando)
conectar.cursor.execute(comando)
def Mostrar(self):
comando = 'SELECT * FROM cliente;'
conectar = Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)
self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),
borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=
'sunken', width=60)
self.listbox.place(x=5, y=90)
Scroll.config(command=self.listbox.yview)
Scroll.pack(side=RIGHT, fill=Y)
for dato1, dato2 in enumerate(conectar.cursor.fetchall()):
self.listbox.insert(0, 'Id: {}'.format(dato2[0]))
self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))
self.listbox.insert(2, ' ')
def ordenamiento(self):
comando = 'SELECT id FROM cliente;'
conectar = Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
rows = conectar.cursor.fetchall()
ordenar = MergeSort.merge_sort(rows)
print(ordenar)
<|reserved_special_token_1|>
from tkinter import *
import psycopg2
import sys
import pprint
import Base_de_datos
import MergeSort
class Cliente:
def __init__(self,id=None,nombre=None):
self.id=id
self.nombre=nombre
def ingresar(self):
self.ventanaIngresar= Toplevel()
self.ventanaIngresar.geometry("570x400")
self.ventanaIngresar.title("Cliente")
img = PhotoImage(file="C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png")
imagen= Label(self.ventanaIngresar, image=img)
imagen.pack()
Label(self.ventanaIngresar, text="Cliente",font=("Cambria",14)).place(x=5,y=0)
Label(self.ventanaIngresar, text="Id: ",font=("Cambria",11)).place(x=0,y=30)
Label(self.ventanaIngresar, text="Nombre: ",font=("Cambria",11)).place(x=0,y=60)
self.id=StringVar()
Entry(self.ventanaIngresar, textvariable=self.id).place(x=30,y=30)
self.nombre=StringVar()
Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65,y=60)
Button(self.ventanaIngresar,text="Guardar",font=("Cambria",11),
width=15,command=self.BD).place(x=420,y=5)
#Button(self.ventanaIngresar,text="Modificar",font=("Cambria",11),
# width=15).place(x=420,y=365)
Button(self.ventanaIngresar,text="Mostrar",font=("Cambria",11),
width=15,command=self.Mostrar).place(x=0,y=365)
Button(self.ventanaIngresar,text="Ordenar",font=("Cambria",11),
width=15, command=self.ordenamiento).place(x=220,y=365)
self.ventanaIngresar.mainloop()
def BD(self):
conectar=Base_de_datos.BaseDeDatos()
comando="INSERT INTO public.cliente(id, nombre) VALUES('"+self.id.get()+"','"+self.nombre.get()+"')"
print(comando)
conectar.cursor.execute(comando)
def Mostrar(self):
comando="SELECT * FROM cliente;"
conectar=Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
Scroll=Scrollbar(self.ventanaIngresar, orient=VERTICAL)
self.listbox=Listbox(self.ventanaIngresar, font=("Cambria",9), borderwidth=0, yscrollcommand=Scroll.set,height=15,relief="sunken",width=60)
self.listbox.place(x=5, y=90)
Scroll.config(command=self.listbox.yview)
Scroll.pack(side=RIGHT, fill=Y)
for dato1, dato2 in enumerate(conectar.cursor.fetchall()):
self.listbox.insert(0, "Id: {}".format(dato2[0]))
self.listbox.insert(1, "Nombre: {}".format(dato2[1]))
self.listbox.insert(2, " ")
def ordenamiento(self):
comando="SELECT id FROM cliente;"
conectar=Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
rows= conectar.cursor.fetchall()
ordenar=MergeSort.merge_sort(rows)
print(ordenar)
|
flexible
|
{
"blob_id": "63d9aa55463123f32fd608ada83e555be4b5fe2c",
"index": 6946,
"step-1": "<mask token>\n\n\nclass Cliente:\n <mask token>\n <mask token>\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n <mask token>\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-3": "<mask token>\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n\n def Mostrar(self):\n comando = 'SELECT * FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)\n self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),\n borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=\n 'sunken', width=60)\n self.listbox.place(x=5, y=90)\n Scroll.config(command=self.listbox.yview)\n Scroll.pack(side=RIGHT, fill=Y)\n for dato1, dato2 in enumerate(conectar.cursor.fetchall()):\n self.listbox.insert(0, 'Id: {}'.format(dato2[0]))\n self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))\n self.listbox.insert(2, ' ')\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-4": "from tkinter import *\nimport psycopg2\nimport sys\nimport pprint\nimport Base_de_datos\nimport MergeSort\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n\n def Mostrar(self):\n comando = 'SELECT * FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)\n self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),\n borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=\n 'sunken', width=60)\n self.listbox.place(x=5, y=90)\n Scroll.config(command=self.listbox.yview)\n Scroll.pack(side=RIGHT, fill=Y)\n for dato1, dato2 in enumerate(conectar.cursor.fetchall()):\n self.listbox.insert(0, 'Id: {}'.format(dato2[0]))\n self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))\n self.listbox.insert(2, ' ')\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-5": "from tkinter import *\r\nimport psycopg2\r\nimport sys\r\nimport pprint\r\nimport Base_de_datos\r\nimport MergeSort\r\n\r\nclass Cliente:\r\n def __init__(self,id=None,nombre=None):\r\n self.id=id\r\n self.nombre=nombre\r\n def ingresar(self):\r\n self.ventanaIngresar= Toplevel()\r\n self.ventanaIngresar.geometry(\"570x400\")\r\n self.ventanaIngresar.title(\"Cliente\")\r\n img = PhotoImage(file=\"C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png\")\r\n imagen= Label(self.ventanaIngresar, image=img)\r\n imagen.pack()\r\n Label(self.ventanaIngresar, text=\"Cliente\",font=(\"Cambria\",14)).place(x=5,y=0)\r\n Label(self.ventanaIngresar, text=\"Id: \",font=(\"Cambria\",11)).place(x=0,y=30)\r\n Label(self.ventanaIngresar, text=\"Nombre: \",font=(\"Cambria\",11)).place(x=0,y=60)\r\n\r\n self.id=StringVar()\r\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30,y=30)\r\n self.nombre=StringVar()\r\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65,y=60) \r\n \r\n Button(self.ventanaIngresar,text=\"Guardar\",font=(\"Cambria\",11),\r\n width=15,command=self.BD).place(x=420,y=5)\r\n \r\n #Button(self.ventanaIngresar,text=\"Modificar\",font=(\"Cambria\",11),\r\n # width=15).place(x=420,y=365)\r\n \r\n Button(self.ventanaIngresar,text=\"Mostrar\",font=(\"Cambria\",11),\r\n width=15,command=self.Mostrar).place(x=0,y=365)\r\n \r\n Button(self.ventanaIngresar,text=\"Ordenar\",font=(\"Cambria\",11),\r\n width=15, command=self.ordenamiento).place(x=220,y=365)\r\n \r\n self.ventanaIngresar.mainloop()\r\n \r\n def BD(self):\r\n conectar=Base_de_datos.BaseDeDatos()\r\n comando=\"INSERT INTO public.cliente(id, nombre) VALUES('\"+self.id.get()+\"','\"+self.nombre.get()+\"')\"\r\n print(comando)\r\n conectar.cursor.execute(comando)\r\n def Mostrar(self):\r\n comando=\"SELECT * FROM cliente;\"\r\n conectar=Base_de_datos.BaseDeDatos()\r\n conectar.cursor.execute(comando)\r\n Scroll=Scrollbar(self.ventanaIngresar, orient=VERTICAL)\r\n self.listbox=Listbox(self.ventanaIngresar, font=(\"Cambria\",9), borderwidth=0, yscrollcommand=Scroll.set,height=15,relief=\"sunken\",width=60)\r\n self.listbox.place(x=5, y=90)\r\n Scroll.config(command=self.listbox.yview)\r\n Scroll.pack(side=RIGHT, fill=Y)\r\n for dato1, dato2 in enumerate(conectar.cursor.fetchall()):\r\n self.listbox.insert(0, \"Id: {}\".format(dato2[0]))\r\n self.listbox.insert(1, \"Nombre: {}\".format(dato2[1]))\r\n self.listbox.insert(2, \" \")\r\n def ordenamiento(self):\r\n comando=\"SELECT id FROM cliente;\"\r\n conectar=Base_de_datos.BaseDeDatos()\r\n conectar.cursor.execute(comando)\r\n rows= conectar.cursor.fetchall()\r\n ordenar=MergeSort.merge_sort(rows)\r\n print(ordenar)\r\n\r\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
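The record above imports a MergeSort module that is not included, so the sort applied to the fetched id rows is left implicit. A minimal sketch of a compatible merge_sort, assuming it takes the list of row tuples returned by fetchall() and returns them in ascending order (tuples compare element-wise, so (id,) rows sort by id):

def merge_sort(items):
    # Base case: zero or one rows are already sorted.
    if len(items) <= 1:
        return items
    mid = len(items) // 2
    left = merge_sort(items[:mid])
    right = merge_sort(items[mid:])
    # Merge the two sorted halves in a single pass.
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged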
from django.urls import path
from rest_framework.routers import DefaultRouter
from . import views
app_name = "rooms"
router = DefaultRouter()
router.register("", views.RoomViewSet)
urlpatterns = router.urls
#
# urlpatterns = [
# # path("list/", views.ListRoomsView.as_view()),
# # path("list/", views.rooms_view),
# path("list/",views.RoomsView.as_view()),
# path('<int:pk>/',views.RoomView.as_view()),
# path('search/',views.room_search)
# ]
|
normal
|
{
"blob_id": "96708216c5ffa56a60475b295c21b18225e6eed9",
"index": 6056,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrouter.register('', views.RoomViewSet)\n<mask token>\n",
"step-3": "<mask token>\napp_name = 'rooms'\nrouter = DefaultRouter()\nrouter.register('', views.RoomViewSet)\nurlpatterns = router.urls\n",
"step-4": "from django.urls import path\nfrom rest_framework.routers import DefaultRouter\nfrom . import views\napp_name = 'rooms'\nrouter = DefaultRouter()\nrouter.register('', views.RoomViewSet)\nurlpatterns = router.urls\n",
"step-5": "from django.urls import path\nfrom rest_framework.routers import DefaultRouter\nfrom . import views\n\napp_name = \"rooms\"\nrouter = DefaultRouter()\nrouter.register(\"\", views.RoomViewSet)\n\nurlpatterns = router.urls\n#\n# urlpatterns = [\n# # path(\"list/\", views.ListRoomsView.as_view()),\n# # path(\"list/\", views.rooms_view),\n# path(\"list/\",views.RoomsView.as_view()),\n# path('<int:pk>/',views.RoomView.as_view()),\n# path('search/',views.room_search)\n# ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
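router.register('', views.RoomViewSet) expects a ViewSet class from the views module, which is not shown in this record. A minimal sketch of what it might look like, with Room and RoomSerializer as assumed names:

from rest_framework import viewsets
from .models import Room
from .serializers import RoomSerializer

class RoomViewSet(viewsets.ModelViewSet):
    # DefaultRouter derives the URL basename from this queryset,
    # so no explicit basename is needed in router.register().
    queryset = Room.objects.all()
    serializer_class = RoomSerializer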
import ttk
import Tkinter as tk
from rwb.runner.log import RobotLogTree, RobotLogMessages
from rwb.lib import AbstractRwbGui
from rwb.widgets import Statusbar
from rwb.runner.listener import RemoteRobotListener
NAME = "monitor"
HELP_URL="https://github.com/boakley/robotframework-workbench/wiki/rwb.monitor-User-Guide"
DEFAULT_SETTINGS = {
NAME: {
"port": 8910,
"host": "localhost",
}
}
class MonitorApp(AbstractRwbGui):
def __init__(self):
AbstractRwbGui.__init__(self, NAME, DEFAULT_SETTINGS)
self.wm_geometry("900x500")
port = self.get_setting("monitor.port")
print "using port", port
self.listener = RemoteRobotListener(self, port=port, callback=self._listen)
self.wm_title("rwb.monitor port: %s" % self.listener.port)
self._create_menubar()
self._create_statusbar()
self._create_notebook()
self.stack = []
self.event_id = 0
# self.status_label.configure(text="port: %s" % self.listener.port)
def _create_menubar(self):
self.menubar = tk.Menu(self)
self.configure(menu=self.menubar)
self.file_menu = tk.Menu(self.menubar, tearoff=False)
self.file_menu.add_command(label="Exit", command=self._on_exit)
self.help_menu = tk.Menu(self, tearoff=False)
self.help_menu.add_command(label="View help on the web", command=self._on_view_help)
self.help_menu.add_separator()
self.help_menu.add_command(label="About the robotframework workbench", command=self._on_about)
self.menubar.add_cascade(menu=self.file_menu, label="File", underline=0)
self.menubar.add_cascade(menu=self.help_menu, label="Help", underline=0)
def _on_view_help(self):
import webbrowser
webbrowser.open(HELP_URL)
def _on_exit(self):
self.destroy()
def _create_statusbar(self):
self.statusbar = Statusbar(self)
self.statusbar.pack(side="bottom", fill="x")
self.statusbar.add_section("port",12, "port %s" % self.listener.port)
self.statusbar.add_progress(mode="indeterminate")
# grip = ttk.Sizegrip(self.statusbar)
# grip.pack(side="right")
# self.status_label = ttk.Label(self.statusbar, text="", anchor="w")
# self.status_label.pack(side="left", fill="both", expand="true", padx=8)
# self.statusbar.pack(side="bottom", fill="x")
def _create_notebook(self):
self.notebook = ttk.Notebook(self)
self.notebook.pack(side="top", fill="both", expand=True)
self.log_tree = RobotLogTree(self.notebook, auto_open=("failed","suite","test","keyword"))
self.log_messages = RobotLogMessages(self.notebook)
self.notebook.add(self.log_tree, text="Details")
self.notebook.add(self.log_messages, text="Messages")
self.notebook.pack(side="top", fill="both", expand=True)
self.listeners = (self.log_tree, self.log_messages)
def _listen(self, cmd, *args):
self.event_id += 1
for listener in self.listeners:
listener.listen(self.event_id, cmd, args)
if cmd in ("start_test", "start_suite", "start_keyword"):
name = args[0]
cmd_type = cmd.split("_")[1]
self.stack.append((cmd_type, name))
self.update_display()
elif cmd in ("end_test", "end_suite", "end_keyword"):
cmd_type = cmd.split("_")[1]
self.stack.pop()
self.update_display()
def update_display(self):
if len(self.stack) == 1:
self.statusbar.progress_start()
elif len(self.stack) == 0:
self.statusbar.progress_stop()
s = ".".join([x[1] for x in self.stack]).strip()
self.statusbar.message(s, clear=True, lifespan=0)
if __name__ == "__main__":
app = MonitorApp()
app.mainloop()
|
normal
|
{
"blob_id": "572d58eec652207e6ec5a5e1d4c2f4310f2a70f3",
"index": 1665,
"step-1": "import ttk\nimport Tkinter as tk\nfrom rwb.runner.log import RobotLogTree, RobotLogMessages\nfrom rwb.lib import AbstractRwbGui\nfrom rwb.widgets import Statusbar\n\nfrom rwb.runner.listener import RemoteRobotListener\n\nNAME = \"monitor\"\nHELP_URL=\"https://github.com/boakley/robotframework-workbench/wiki/rwb.monitor-User-Guide\"\nDEFAULT_SETTINGS = {\n NAME: {\n \"port\": 8910,\n \"host\": \"localhost\",\n }\n }\n\nclass MonitorApp(AbstractRwbGui):\n def __init__(self):\n AbstractRwbGui.__init__(self, NAME, DEFAULT_SETTINGS)\n self.wm_geometry(\"900x500\")\n port = self.get_setting(\"monitor.port\")\n print \"using port\", port\n self.listener = RemoteRobotListener(self, port=port, callback=self._listen)\n self.wm_title(\"rwb.monitor port: %s\" % self.listener.port)\n self._create_menubar()\n self._create_statusbar()\n self._create_notebook()\n self.stack = []\n self.event_id = 0\n# self.status_label.configure(text=\"port: %s\" % self.listener.port)\n\n def _create_menubar(self):\n self.menubar = tk.Menu(self)\n self.configure(menu=self.menubar)\n\n self.file_menu = tk.Menu(self.menubar, tearoff=False)\n self.file_menu.add_command(label=\"Exit\", command=self._on_exit)\n\n self.help_menu = tk.Menu(self, tearoff=False)\n self.help_menu.add_command(label=\"View help on the web\", command=self._on_view_help)\n self.help_menu.add_separator()\n self.help_menu.add_command(label=\"About the robotframework workbench\", command=self._on_about)\n\n self.menubar.add_cascade(menu=self.file_menu, label=\"File\", underline=0)\n self.menubar.add_cascade(menu=self.help_menu, label=\"Help\", underline=0)\n\n def _on_view_help(self):\n import webbrowser\n webbrowser.open(HELP_URL)\n\n def _on_exit(self):\n self.destroy()\n\n def _create_statusbar(self):\n self.statusbar = Statusbar(self)\n self.statusbar.pack(side=\"bottom\", fill=\"x\")\n self.statusbar.add_section(\"port\",12, \"port %s\" % self.listener.port)\n self.statusbar.add_progress(mode=\"indeterminate\")\n # grip = ttk.Sizegrip(self.statusbar)\n # grip.pack(side=\"right\")\n # self.status_label = ttk.Label(self.statusbar, text=\"\", anchor=\"w\")\n # self.status_label.pack(side=\"left\", fill=\"both\", expand=\"true\", padx=8)\n # self.statusbar.pack(side=\"bottom\", fill=\"x\")\n\n def _create_notebook(self):\n self.notebook = ttk.Notebook(self)\n self.notebook.pack(side=\"top\", fill=\"both\", expand=True)\n self.log_tree = RobotLogTree(self.notebook, auto_open=(\"failed\",\"suite\",\"test\",\"keyword\"))\n self.log_messages = RobotLogMessages(self.notebook)\n self.notebook.add(self.log_tree, text=\"Details\")\n self.notebook.add(self.log_messages, text=\"Messages\")\n self.notebook.pack(side=\"top\", fill=\"both\", expand=True)\n self.listeners = (self.log_tree, self.log_messages)\n\n def _listen(self, cmd, *args):\n self.event_id += 1\n for listener in self.listeners:\n listener.listen(self.event_id, cmd, args)\n\n if cmd in (\"start_test\", \"start_suite\", \"start_keyword\"):\n name = args[0]\n cmd_type = cmd.split(\"_\")[1]\n self.stack.append((cmd_type, name))\n self.update_display()\n elif cmd in (\"end_test\", \"end_suite\", \"end_keyword\"):\n cmd_type = cmd.split(\"_\")[1]\n self.stack.pop()\n self.update_display()\n\n def update_display(self):\n if len(self.stack) == 1:\n self.statusbar.progress_start()\n elif len(self.stack) == 0:\n self.statusbar.progress_stop()\n\n s = \".\".join([x[1] for x in self.stack]).strip()\n self.statusbar.message(s, clear=True, lifespan=0)\n\nif __name__ == \"__main__\":\n app = 
MonitorApp()\n app.mainloop()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
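The start_*/end_* stack bookkeeping in _listen above can be exercised without the rwb widgets. A standalone sketch (plain Python 3) that feeds synthetic listener events through the same logic:

stack = []

def listen(cmd, *args):
    # Same bookkeeping as MonitorApp._listen, minus the GUI updates.
    if cmd in ("start_test", "start_suite", "start_keyword"):
        stack.append((cmd.split("_")[1], args[0]))
    elif cmd in ("end_test", "end_suite", "end_keyword"):
        stack.pop()
    print(".".join(name for _, name in stack))

listen("start_suite", "Smoke")  # prints: Smoke
listen("start_test", "Login")   # prints: Smoke.Login
listen("end_test")              # prints: Smoke
listen("end_suite")             # prints an empty line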
<|reserved_special_token_0|>
def addOptions(parser):
parser.add_option('--NNfile', default='', help=
'Config json file for the data to pass to the model')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def addOptions(parser):
parser.add_option('--NNfile', default='', help=
'Config json file for the data to pass to the model')
<|reserved_special_token_0|>
addOptions(parser)
<|reserved_special_token_0|>
if not options.NNfile:
print(sys.stderr, 'No configuration file specified\n')
sys.exit(1)
with open(options.NNfile, 'r') as cfg_file:
cfg_data = json.load(cfg_file)
<|reserved_special_token_0|>
print('Loading images...\n')
<|reserved_special_token_0|>
print(x_original.shape)
print(len(x_original))
print('Loading tags...\n')
<|reserved_special_token_0|>
print(
'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\n'
.format(load_min, load_sec))
for hp in hor_pred:
if hp.endswith('min'):
hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)
if hp.endswith('s'):
hor_pred_indices = int(int(hp.replace('s', '')) / tg)
forecast_prediction.append(hp)
y_t = y_original
y_t_index = y_t.index
y_t_index_valid = y_t_index[y_t_index % day_length < day_length -
hor_pred_indices]
y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)
print('Indices computed. {} indices lost \n.'.format(y_t_indices_lost))
print('Building randomized y matrix with valid indices...\n')
y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])
print(
'Building y matrix removing invalid indices for persistence model...\n'
)
y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])
print('Building X matrix...Same thing as before...\n')
x_t = x_original[y_t_index_valid]
x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)
cut = int(cut_1 * len(x_t))
x_train, x_test = x_t[:cut, :], x_t[cut:, :]
y_train, y_test = y_t[:cut], y_t[cut:]
name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
np.save(name, x_train)
np.save(name2, y_train)
print('Generated {} images array \n.'.format(x_train.shape))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def addOptions(parser):
parser.add_option('--NNfile', default='', help=
'Config json file for the data to pass to the model')
parser = optparse.OptionParser()
addOptions(parser)
options, args = parser.parse_args()
if not options.NNfile:
print(sys.stderr, 'No configuration file specified\n')
sys.exit(1)
with open(options.NNfile, 'r') as cfg_file:
cfg_data = json.load(cfg_file)
days_info_file = cfg_data['days_info']
days_info = pd.read_csv(days_info_file)
day_length = days_info['length_day'][0]
days = days_info['number_train_days'][0]
tg = cfg_data['time_granularity']
hor_pred = cfg_data['hor_pred']
forecast_prediction = []
cut_1 = cfg_data['cut']
img_rows = cfg_data['img_rows']
img_cols = cfg_data['img_cols']
orig_folder = cfg_data['orig_folder']
dest_folder = cfg_data['dest_folder']
print('Loading images...\n')
load_start = time.time()
x_original = np.load('x_train.npy')
print(x_original.shape)
print(len(x_original))
print('Loading tags...\n')
y_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')
load_end = time.time()
load_time = load_end - load_start
load_min = int(load_time / 60)
load_sec = load_time % 60
print(
'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\n'
.format(load_min, load_sec))
for hp in hor_pred:
if hp.endswith('min'):
hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)
if hp.endswith('s'):
hor_pred_indices = int(int(hp.replace('s', '')) / tg)
forecast_prediction.append(hp)
y_t = y_original
y_t_index = y_t.index
y_t_index_valid = y_t_index[y_t_index % day_length < day_length -
hor_pred_indices]
y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)
print('Indices computed. {} indices lost \n.'.format(y_t_indices_lost))
print('Building randomized y matrix with valid indices...\n')
y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])
print(
'Building y matrix removing invalid indices for persistence model...\n'
)
y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])
print('Building X matrix...Same thing as before...\n')
x_t = x_original[y_t_index_valid]
x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)
cut = int(cut_1 * len(x_t))
x_train, x_test = x_t[:cut, :], x_t[cut:, :]
y_train, y_test = y_t[:cut], y_t[cut:]
name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
np.save(name, x_train)
np.save(name2, y_train)
print('Generated {} images array \n.'.format(x_train.shape))
<|reserved_special_token_1|>
import numpy as np
import sys
import os
import os.path
import json
import optparse
import time
import pandas as pd
def addOptions(parser):
parser.add_option('--NNfile', default='', help=
'Config json file for the data to pass to the model')
parser = optparse.OptionParser()
addOptions(parser)
options, args = parser.parse_args()
if not options.NNfile:
print(sys.stderr, 'No configuration file specified\n')
sys.exit(1)
with open(options.NNfile, 'r') as cfg_file:
cfg_data = json.load(cfg_file)
days_info_file = cfg_data['days_info']
days_info = pd.read_csv(days_info_file)
day_length = days_info['length_day'][0]
days = days_info['number_train_days'][0]
tg = cfg_data['time_granularity']
hor_pred = cfg_data['hor_pred']
forecast_prediction = []
cut_1 = cfg_data['cut']
img_rows = cfg_data['img_rows']
img_cols = cfg_data['img_cols']
orig_folder = cfg_data['orig_folder']
dest_folder = cfg_data['dest_folder']
print('Loading images...\n')
load_start = time.time()
x_original = np.load('x_train.npy')
print(x_original.shape)
print(len(x_original))
print('Loading tags...\n')
y_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')
load_end = time.time()
load_time = load_end - load_start
load_min = int(load_time / 60)
load_sec = load_time % 60
print(
'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\n'
.format(load_min, load_sec))
for hp in hor_pred:
if hp.endswith('min'):
hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)
if hp.endswith('s'):
hor_pred_indices = int(int(hp.replace('s', '')) / tg)
forecast_prediction.append(hp)
y_t = y_original
y_t_index = y_t.index
y_t_index_valid = y_t_index[y_t_index % day_length < day_length -
hor_pred_indices]
y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)
print('Indices computed. {} indices lost \n.'.format(y_t_indices_lost))
print('Building randomized y matrix with valid indices...\n')
y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])
print(
'Building y matrix removing invalid indices for persistence model...\n'
)
y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])
print('Building X matrix...Same thing as before...\n')
x_t = x_original[y_t_index_valid]
x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)
cut = int(cut_1 * len(x_t))
x_train, x_test = x_t[:cut, :], x_t[cut:, :]
y_train, y_test = y_t[:cut], y_t[cut:]
name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'
np.save(name, x_train)
np.save(name2, y_train)
print('Generated {} images array \n.'.format(x_train.shape))
<|reserved_special_token_1|>
import numpy as np
import sys
import os
import os.path
import json
import optparse
import time
import pandas as pd
#Randomize and split the inference set according to hor_pred
#Generate .npy file for each hp selected
    #Takes random values from the labels column depending on the prediction horizon.
    #Takes the indices of the selected samples and uses them to select the images
    ##they have associated.
    #We need pandas for the initial selection of the labels; afterwards we just generate a
    ##.npy with that hor_pred and with whatever amount we want depending on the split value
    ####PARSE WITH THE JSON
###################
# PARSE CONFIG #####
##################
def addOptions(parser):
parser.add_option("--NNfile", default="",
help="Config json file for the data to pass to the model")
parser = optparse.OptionParser()
addOptions(parser)
(options, args) = parser.parse_args()
if not options.NNfile:
print(sys.stderr, "No configuration file specified\n")
sys.exit(1)
with open(options.NNfile, 'r') as cfg_file:
cfg_data = json.load(cfg_file)
days_info_file = cfg_data['days_info']
days_info = pd.read_csv(days_info_file)
day_length = days_info['length_day'][0]
days = days_info['number_train_days'][0]
tg = cfg_data['time_granularity']
hor_pred = cfg_data['hor_pred']
forecast_prediction = []
cut_1 = cfg_data['cut']
img_rows = cfg_data['img_rows']
img_cols = cfg_data['img_cols']
orig_folder = cfg_data['orig_folder']
dest_folder = cfg_data['dest_folder']
##################
# DATA LOAD ######
###################
print('Loading images...\n')
load_start = time.time()
x_original = np.load("x_train.npy")
print(x_original.shape)
print(len(x_original))
print('Loading tags...\n')
y_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')
load_end = time.time()
load_time = load_end - load_start
load_min = int(load_time / 60)
load_sec = load_time % 60
print('Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\n'.format(load_min, load_sec))
#################
# RANDOMIZATION##
#################
# Since we configured our matrices with an offset we have to adjust to "jump" to the sample we want to actually predict
for hp in hor_pred:
if hp.endswith("min"):
hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)
if hp.endswith("s"):
hor_pred_indices = int(int(hp.replace('s', '')) / tg)
forecast_prediction.append(hp)
    y_t = y_original # y_train and y are equal
    y_t_index = y_t.index # returns an array of indices
# Don't get values for the previous or next day:
y_t_index_valid = y_t_index[(y_t_index % day_length) < (day_length - hor_pred_indices)]
y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)
print('Indices computed. {} indices lost \n.'.format(y_t_indices_lost))
print('Building randomized y matrix with valid indices...\n')
y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])
print('Building y matrix removing invalid indices for persistence model...\n')
    y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid]) # a DataFrame row converted into a numpy array
print('Building X matrix...Same thing as before...\n')
# like our randomization, just picking the same indices
x_t = x_original[y_t_index_valid]
x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)
#Split:
cut = int(cut_1*len(x_t))
x_train, x_test = x_t[:cut,:], x_t[cut:,:]
y_train, y_test = y_t[:cut], y_t[cut:]
#print(x_train.shape, x_test.shape)
    #print(y_train.shape, y_test.shape) #Labels (the actual values that should be predicted for each sample)
name = "set_hp_" + str(hp) + "_" + str (cut_1) + "total" + ".npy"
name2 = "tags_hp_" + str(hp) + "_" + str (cut_1) + "total" + ".npy"
    #For each prediction horizon generate an array for inference
np.save(name, x_train)
np.save(name2, y_train)
print('Generated {} images array \n.'.format(x_train.shape))
|
flexible
|
{
"blob_id": "83a92c0b645b9a2a483a01c19a47ab5c296ccbd9",
"index": 6907,
"step-1": "<mask token>\n\n\ndef addOptions(parser):\n parser.add_option('--NNfile', default='', help=\n 'Config json file for the data to pass to the model')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef addOptions(parser):\n parser.add_option('--NNfile', default='', help=\n 'Config json file for the data to pass to the model')\n\n\n<mask token>\naddOptions(parser)\n<mask token>\nif not options.NNfile:\n print(sys.stderr, 'No configuration file specified\\n')\n sys.exit(1)\nwith open(options.NNfile, 'r') as cfg_file:\n cfg_data = json.load(cfg_file)\n<mask token>\nprint('Loading images...\\n')\n<mask token>\nprint(x_original.shape)\nprint(len(x_original))\nprint('Loading tags...\\n')\n<mask token>\nprint(\n 'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\\n'\n .format(load_min, load_sec))\nfor hp in hor_pred:\n if hp.endswith('min'):\n hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)\n if hp.endswith('s'):\n hor_pred_indices = int(int(hp.replace('s', '')) / tg)\n forecast_prediction.append(hp)\n y_t = y_original\n y_t_index = y_t.index\n y_t_index_valid = y_t_index[y_t_index % day_length < day_length -\n hor_pred_indices]\n y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)\n print('Indices computed. {} indices lost \\n.'.format(y_t_indices_lost))\n print('Building randomized y matrix with valid indices...\\n')\n y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])\n print(\n 'Building y matrix removing invalid indices for persistence model...\\n'\n )\n y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])\n print('Building X matrix...Same thing as before...\\n')\n x_t = x_original[y_t_index_valid]\n x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)\n cut = int(cut_1 * len(x_t))\n x_train, x_test = x_t[:cut, :], x_t[cut:, :]\n y_train, y_test = y_t[:cut], y_t[cut:]\n name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n np.save(name, x_train)\n np.save(name2, y_train)\n print('Generated {} images array \\n.'.format(x_train.shape))\n",
"step-3": "<mask token>\n\n\ndef addOptions(parser):\n parser.add_option('--NNfile', default='', help=\n 'Config json file for the data to pass to the model')\n\n\nparser = optparse.OptionParser()\naddOptions(parser)\noptions, args = parser.parse_args()\nif not options.NNfile:\n print(sys.stderr, 'No configuration file specified\\n')\n sys.exit(1)\nwith open(options.NNfile, 'r') as cfg_file:\n cfg_data = json.load(cfg_file)\ndays_info_file = cfg_data['days_info']\ndays_info = pd.read_csv(days_info_file)\nday_length = days_info['length_day'][0]\ndays = days_info['number_train_days'][0]\ntg = cfg_data['time_granularity']\nhor_pred = cfg_data['hor_pred']\nforecast_prediction = []\ncut_1 = cfg_data['cut']\nimg_rows = cfg_data['img_rows']\nimg_cols = cfg_data['img_cols']\norig_folder = cfg_data['orig_folder']\ndest_folder = cfg_data['dest_folder']\nprint('Loading images...\\n')\nload_start = time.time()\nx_original = np.load('x_train.npy')\nprint(x_original.shape)\nprint(len(x_original))\nprint('Loading tags...\\n')\ny_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')\nload_end = time.time()\nload_time = load_end - load_start\nload_min = int(load_time / 60)\nload_sec = load_time % 60\nprint(\n 'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\\n'\n .format(load_min, load_sec))\nfor hp in hor_pred:\n if hp.endswith('min'):\n hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)\n if hp.endswith('s'):\n hor_pred_indices = int(int(hp.replace('s', '')) / tg)\n forecast_prediction.append(hp)\n y_t = y_original\n y_t_index = y_t.index\n y_t_index_valid = y_t_index[y_t_index % day_length < day_length -\n hor_pred_indices]\n y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)\n print('Indices computed. {} indices lost \\n.'.format(y_t_indices_lost))\n print('Building randomized y matrix with valid indices...\\n')\n y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])\n print(\n 'Building y matrix removing invalid indices for persistence model...\\n'\n )\n y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])\n print('Building X matrix...Same thing as before...\\n')\n x_t = x_original[y_t_index_valid]\n x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)\n cut = int(cut_1 * len(x_t))\n x_train, x_test = x_t[:cut, :], x_t[cut:, :]\n y_train, y_test = y_t[:cut], y_t[cut:]\n name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n np.save(name, x_train)\n np.save(name2, y_train)\n print('Generated {} images array \\n.'.format(x_train.shape))\n",
"step-4": "import numpy as np\nimport sys\nimport os\nimport os.path\nimport json\nimport optparse\nimport time\nimport pandas as pd\n\n\ndef addOptions(parser):\n parser.add_option('--NNfile', default='', help=\n 'Config json file for the data to pass to the model')\n\n\nparser = optparse.OptionParser()\naddOptions(parser)\noptions, args = parser.parse_args()\nif not options.NNfile:\n print(sys.stderr, 'No configuration file specified\\n')\n sys.exit(1)\nwith open(options.NNfile, 'r') as cfg_file:\n cfg_data = json.load(cfg_file)\ndays_info_file = cfg_data['days_info']\ndays_info = pd.read_csv(days_info_file)\nday_length = days_info['length_day'][0]\ndays = days_info['number_train_days'][0]\ntg = cfg_data['time_granularity']\nhor_pred = cfg_data['hor_pred']\nforecast_prediction = []\ncut_1 = cfg_data['cut']\nimg_rows = cfg_data['img_rows']\nimg_cols = cfg_data['img_cols']\norig_folder = cfg_data['orig_folder']\ndest_folder = cfg_data['dest_folder']\nprint('Loading images...\\n')\nload_start = time.time()\nx_original = np.load('x_train.npy')\nprint(x_original.shape)\nprint(len(x_original))\nprint('Loading tags...\\n')\ny_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')\nload_end = time.time()\nload_time = load_end - load_start\nload_min = int(load_time / 60)\nload_sec = load_time % 60\nprint(\n 'Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\\n'\n .format(load_min, load_sec))\nfor hp in hor_pred:\n if hp.endswith('min'):\n hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)\n if hp.endswith('s'):\n hor_pred_indices = int(int(hp.replace('s', '')) / tg)\n forecast_prediction.append(hp)\n y_t = y_original\n y_t_index = y_t.index\n y_t_index_valid = y_t_index[y_t_index % day_length < day_length -\n hor_pred_indices]\n y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)\n print('Indices computed. {} indices lost \\n.'.format(y_t_indices_lost))\n print('Building randomized y matrix with valid indices...\\n')\n y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])\n print(\n 'Building y matrix removing invalid indices for persistence model...\\n'\n )\n y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid])\n print('Building X matrix...Same thing as before...\\n')\n x_t = x_original[y_t_index_valid]\n x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)\n cut = int(cut_1 * len(x_t))\n x_train, x_test = x_t[:cut, :], x_t[cut:, :]\n y_train, y_test = y_t[:cut], y_t[cut:]\n name = 'set_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n name2 = 'tags_hp_' + str(hp) + '_' + str(cut_1) + 'total' + '.npy'\n np.save(name, x_train)\n np.save(name2, y_train)\n print('Generated {} images array \\n.'.format(x_train.shape))\n",
"step-5": "import numpy as np\nimport sys\nimport os\nimport os.path\nimport json\nimport optparse\nimport time\nimport pandas as pd\n\n #Randomize and split the inference set according to hor_pred\n #Generate .npy file for each hp selected\n\n #Coge valores aleatorios de la columna de etiquetas en función del horizonte de predicción. \n #Coge los índices de las muestras seleccionadas y los usa para seleccionar las imágenes que \n ##tienen asociadas.\n #Tenemos que tener pandas para la seleccion primera de las etiquetas, luego solo generamos un \n ##.npy con ese hor_pred y con la cantidad que queramos en función del valor del split\n ####PARSEAR CON EL JSON\n\n\n###################\n# PARSE CONNFIG #####\n##################\ndef addOptions(parser):\n parser.add_option(\"--NNfile\", default=\"\",\n help=\"Config json file for the data to pass to the model\")\nparser = optparse.OptionParser()\naddOptions(parser)\n(options, args) = parser.parse_args()\nif not options.NNfile:\n print(sys.stderr, \"No configuration file specified\\n\")\n sys.exit(1)\n\nwith open(options.NNfile, 'r') as cfg_file:\n cfg_data = json.load(cfg_file)\ndays_info_file = cfg_data['days_info']\ndays_info = pd.read_csv(days_info_file)\nday_length = days_info['length_day'][0]\ndays = days_info['number_train_days'][0]\ntg = cfg_data['time_granularity']\nhor_pred = cfg_data['hor_pred']\nforecast_prediction = []\ncut_1 = cfg_data['cut']\nimg_rows = cfg_data['img_rows']\nimg_cols = cfg_data['img_cols']\norig_folder = cfg_data['orig_folder']\ndest_folder = cfg_data['dest_folder']\n\n\n##################\n# DATA LOAD ######\n###################\nprint('Loading images...\\n')\nload_start = time.time()\nx_original = np.load(\"x_train.npy\")\nprint(x_original.shape)\nprint(len(x_original))\nprint('Loading tags...\\n')\ny_original = pd.read_csv(orig_folder + '/Y_tr_val.csv')\nload_end = time.time()\nload_time = load_end - load_start\nload_min = int(load_time / 60)\nload_sec = load_time % 60\nprint('Dataframes loaded in {} minutes {} seconds! Splitting for train and validation...\\n'.format(load_min, load_sec))\n\n#################\n# RANDOMIZATION##\n#################\n# Since we configured our matrices with an offset we have to adjust to \"jump\" to the sample we want to actually predict\n\nfor hp in hor_pred:\n if hp.endswith(\"min\"):\n hor_pred_indices = int(int(hp.replace('min', '')) * 60 / tg)\n if hp.endswith(\"s\"):\n hor_pred_indices = int(int(hp.replace('s', '')) / tg)\n forecast_prediction.append(hp)\n \n y_t = y_original # y_train y son iquals\n y_t_index = y_t.index # devulve una array de index\n # Don't get values for the previous or next day:\n y_t_index_valid = y_t_index[(y_t_index % day_length) < (day_length - hor_pred_indices)] \n y_t_indices_lost = len(y_t_index) - len(y_t_index_valid)\n print('Indices computed. 
{} indices lost \\n.'.format(y_t_indices_lost))\n print('Building randomized y matrix with valid indices...\\n')\n y_t = np.ravel(y_original.iloc[y_t_index_valid + hor_pred_indices])\n print('Building y matrix removing invalid indices for persistence model...\\n')\n y_pred_persistence = np.ravel(y_original.iloc[y_t_index_valid]) # una row de dataFram combia por numpy array\n print('Building X matrix...Same thing as before...\\n')\n # like our randomization, just picking the same indices\n x_t = x_original[y_t_index_valid] \n x_t = x_t.reshape(x_t.shape[0], img_rows, img_cols, 1)\n \n #Split: \n cut = int(cut_1*len(x_t))\n x_train, x_test = x_t[:cut,:], x_t[cut:,:]\n y_train, y_test = y_t[:cut], y_t[cut:]\n #print(x_train.shape, x_test.shape) \n #print(y_train.shape, y_test.shape) #Etiquetas (valores reales que debería predecir con cada muestra)\n \n name = \"set_hp_\" + str(hp) + \"_\" + str (cut_1) + \"total\" + \".npy\"\n name2 = \"tags_hp_\" + str(hp) + \"_\" + str (cut_1) + \"total\" + \".npy\"\n\n #Para cada horizonte de predicción genero un array para inferencia\n np.save(name, x_train)\n np.save(name2, y_train)\n\n print('Generated {} images array \\n.'.format(x_train.shape))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
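The valid-index filter in the record above is easy to check by hand. A small numeric illustration, with tg = 60 s and day_length = 100 samples as assumed example values, so hp = '30min' gives hor_pred_indices = 30 * 60 / 60 = 30:

import numpy as np

tg = 60           # seconds per sample (assumed)
day_length = 100  # samples per day (assumed)
hp = "30min"
hor_pred_indices = int(int(hp.replace("min", "")) * 60 / tg)  # -> 30

index = np.arange(3 * day_length)  # three synthetic days
# Keep only positions whose 30-sample look-ahead stays inside the same day.
valid = index[(index % day_length) < (day_length - hor_pred_indices)]
print(len(index) - len(valid))  # 90 indices lost: 30 per day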
def solution(n, money):
save = [0] * (n+1)
save[0] = 1
for i in range(len(money)):
for j in range(1, n+1):
if j - money[i] >= 0:
save[j] += (save[j - money[i]] % 1000000007)
return save[n]
|
normal
|
{
"blob_id": "deeba82536d0366b3793bcbe78f78e4cfeabb612",
"index": 6241,
"step-1": "<mask token>\n",
"step-2": "def solution(n, money):\n save = [0] * (n + 1)\n save[0] = 1\n for i in range(len(money)):\n for j in range(1, n + 1):\n if j - money[i] >= 0:\n save[j] += save[j - money[i]] % 1000000007\n return save[n]\n",
"step-3": "def solution(n, money):\r\n save = [0] * (n+1)\r\n save[0] = 1\r\n for i in range(len(money)):\r\n for j in range(1, n+1):\r\n if j - money[i] >= 0:\r\n save[j] += (save[j - money[i]] % 1000000007)\r\n return save[n]",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
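This is the classic coin-change counting DP: save[j] accumulates the number of ways to form amount j, and processing one denomination at a time ensures each multiset of coins is counted exactly once. A quick check of the returned count:

print(solution(5, [1, 2, 5]))  # 4: {5}, {1,2,2}, {1,1,1,2}, {1,1,1,1,1}
print(solution(4, [1, 2, 3]))  # 4: {1,1,1,1}, {1,1,2}, {2,2}, {1,3}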
<|reserved_special_token_0|>
class FtpDownloaderPostProcess:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def logger(self):
return logging.getLogger(__name__)
def iterate(self, *args, **kwargs):
"""
Uses worker queues to perform the postprocessing
:param args:
:param kwargs:
"""
q = Queue()
max_workers = self.num_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers
) as executor:
futures = []
for i in range(max_workers):
futures.append(executor.submit(self._worker, q))
try:
for f in self.ftp_downloader.iterate(*args, **kwargs):
q.put(f)
yield f
finally:
for i in range(max_workers):
q.put(None)
for future in futures:
future.result()
def _worker(self, read_queue):
while True:
item = read_queue.get()
if item is None:
return
try:
self.post_processor(item)
except Exception as e:
self.logger.warning('The task has failed with error ..{}'.
format(e))
raise e
read_queue.task_done()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FtpDownloaderPostProcess:
<|reserved_special_token_0|>
@staticmethod
def _get_from_config(config_dict, key, default_value):
value = default_value
if config_dict is not None:
cls_name = 'FtpDownloaderPostProcess'
if config_dict.get(cls_name, None) is not None:
value = config_dict[cls_name].get(key, 5)
return value
@property
def logger(self):
return logging.getLogger(__name__)
def iterate(self, *args, **kwargs):
"""
Uses worker queues to perform the postprocessing
:param args:
:param kwargs:
"""
q = Queue()
max_workers = self.num_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers
) as executor:
futures = []
for i in range(max_workers):
futures.append(executor.submit(self._worker, q))
try:
for f in self.ftp_downloader.iterate(*args, **kwargs):
q.put(f)
yield f
finally:
for i in range(max_workers):
q.put(None)
for future in futures:
future.result()
def _worker(self, read_queue):
while True:
item = read_queue.get()
if item is None:
return
try:
self.post_processor(item)
except Exception as e:
self.logger.warning('The task has failed with error ..{}'.
format(e))
raise e
read_queue.task_done()
def __call__(self, *args, **kwargs):
items = self.ftp_downloader(*args, **kwargs)
for item in items:
self.post_processor(item)
return items
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FtpDownloaderPostProcess:
def __init__(self, ftp_downloader, post_processor, num_workers=None,
config_dict=None):
self.post_processor = post_processor
self.ftp_downloader = ftp_downloader
self.num_workers = num_workers or self._get_from_config(config_dict,
'num_workers', 5)
@staticmethod
def _get_from_config(config_dict, key, default_value):
value = default_value
if config_dict is not None:
cls_name = 'FtpDownloaderPostProcess'
if config_dict.get(cls_name, None) is not None:
value = config_dict[cls_name].get(key, 5)
return value
@property
def logger(self):
return logging.getLogger(__name__)
def iterate(self, *args, **kwargs):
"""
Uses worker queues to perform the postprocessing
:param args:
:param kwargs:
"""
q = Queue()
max_workers = self.num_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers
) as executor:
futures = []
for i in range(max_workers):
futures.append(executor.submit(self._worker, q))
try:
for f in self.ftp_downloader.iterate(*args, **kwargs):
q.put(f)
yield f
finally:
for i in range(max_workers):
q.put(None)
for future in futures:
future.result()
def _worker(self, read_queue):
while True:
item = read_queue.get()
if item is None:
return
try:
self.post_processor(item)
except Exception as e:
self.logger.warning('The task has failed with error ..{}'.
format(e))
raise e
read_queue.task_done()
def __call__(self, *args, **kwargs):
items = self.ftp_downloader(*args, **kwargs)
for item in items:
self.post_processor(item)
return items
<|reserved_special_token_1|>
import logging
from queue import Queue
import concurrent.futures
<|reserved_special_token_0|>
class FtpDownloaderPostProcess:
def __init__(self, ftp_downloader, post_processor, num_workers=None,
config_dict=None):
self.post_processor = post_processor
self.ftp_downloader = ftp_downloader
self.num_workers = num_workers or self._get_from_config(config_dict,
'num_workers', 5)
@staticmethod
def _get_from_config(config_dict, key, default_value):
value = default_value
if config_dict is not None:
cls_name = 'FtpDownloaderPostProcess'
if config_dict.get(cls_name, None) is not None:
value = config_dict[cls_name].get(key, 5)
return value
@property
def logger(self):
return logging.getLogger(__name__)
def iterate(self, *args, **kwargs):
"""
Uses worker queues to perform the postprocessing
:param args:
:param kwargs:
"""
q = Queue()
max_workers = self.num_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers
) as executor:
futures = []
for i in range(max_workers):
futures.append(executor.submit(self._worker, q))
try:
for f in self.ftp_downloader.iterate(*args, **kwargs):
q.put(f)
yield f
finally:
for i in range(max_workers):
q.put(None)
for future in futures:
future.result()
def _worker(self, read_queue):
while True:
item = read_queue.get()
if item is None:
return
try:
self.post_processor(item)
except Exception as e:
self.logger.warning('The task has failed with error ..{}'.
format(e))
raise e
read_queue.task_done()
def __call__(self, *args, **kwargs):
items = self.ftp_downloader(*args, **kwargs)
for item in items:
self.post_processor(item)
return items
<|reserved_special_token_1|>
import logging
from queue import Queue
import concurrent.futures
"""
Post processing decorator logic for FtpDownloader
"""
class FtpDownloaderPostProcess:
def __init__(self, ftp_downloader, post_processor, num_workers=None, config_dict=None):
self.post_processor = post_processor
self.ftp_downloader = ftp_downloader
self.num_workers = num_workers or self._get_from_config(config_dict, "num_workers", 5)
@staticmethod
def _get_from_config(config_dict, key, default_value):
value = default_value
if config_dict is not None:
cls_name = "FtpDownloaderPostProcess"
if config_dict.get(cls_name, None) is not None:
value = config_dict[cls_name].get(key, 5)
return value
@property
def logger(self):
return logging.getLogger(__name__)
def iterate(self, *args, **kwargs):
"""
Uses worker queues to perform the postprocessing
:param args:
:param kwargs:
"""
# use thread pool to parallel process
q = Queue()
max_workers = self.num_workers
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
# Set up workers
futures = []
for i in range(max_workers):
futures.append(executor.submit(self._worker, q))
# Submit worker jobs
# Wrap the main task in a try block so that the queue completes regardless of success/failure of main job
try:
for f in self.ftp_downloader.iterate(*args, **kwargs):
q.put(f)
yield f
finally:
# Stop processing
# Not doing a queue to join, because if all workers fail this will hang with items still left in q...
# q.join()
# poison pill
for i in range(max_workers):
q.put(None)
for future in futures:
future.result()
def _worker(self, read_queue):
while True:
item = read_queue.get()
if item is None:
return
try:
self.post_processor(item)
except Exception as e:
self.logger.warning("The task has failed with error ..{}".format(e))
raise e
read_queue.task_done()
def __call__(self, *args, **kwargs):
items = self.ftp_downloader(*args, **kwargs)
for item in items:
self.post_processor(item)
return items
|
flexible
|
{
"blob_id": "56a41f432d332aaebbde15c52e133eee51b22ce1",
"index": 2833,
"step-1": "<mask token>\n\n\nclass FtpDownloaderPostProcess:\n <mask token>\n <mask token>\n\n @property\n def logger(self):\n return logging.getLogger(__name__)\n\n def iterate(self, *args, **kwargs):\n \"\"\"\nUses worker queues to perform the postprocessing\n :param args:\n :param kwargs:\n \"\"\"\n q = Queue()\n max_workers = self.num_workers\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = []\n for i in range(max_workers):\n futures.append(executor.submit(self._worker, q))\n try:\n for f in self.ftp_downloader.iterate(*args, **kwargs):\n q.put(f)\n yield f\n finally:\n for i in range(max_workers):\n q.put(None)\n for future in futures:\n future.result()\n\n def _worker(self, read_queue):\n while True:\n item = read_queue.get()\n if item is None:\n return\n try:\n self.post_processor(item)\n except Exception as e:\n self.logger.warning('The task has failed with error ..{}'.\n format(e))\n raise e\n read_queue.task_done()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass FtpDownloaderPostProcess:\n <mask token>\n\n @staticmethod\n def _get_from_config(config_dict, key, default_value):\n value = default_value\n if config_dict is not None:\n cls_name = 'FtpDownloaderPostProcess'\n if config_dict.get(cls_name, None) is not None:\n value = config_dict[cls_name].get(key, 5)\n return value\n\n @property\n def logger(self):\n return logging.getLogger(__name__)\n\n def iterate(self, *args, **kwargs):\n \"\"\"\nUses worker queues to perform the postprocessing\n :param args:\n :param kwargs:\n \"\"\"\n q = Queue()\n max_workers = self.num_workers\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = []\n for i in range(max_workers):\n futures.append(executor.submit(self._worker, q))\n try:\n for f in self.ftp_downloader.iterate(*args, **kwargs):\n q.put(f)\n yield f\n finally:\n for i in range(max_workers):\n q.put(None)\n for future in futures:\n future.result()\n\n def _worker(self, read_queue):\n while True:\n item = read_queue.get()\n if item is None:\n return\n try:\n self.post_processor(item)\n except Exception as e:\n self.logger.warning('The task has failed with error ..{}'.\n format(e))\n raise e\n read_queue.task_done()\n\n def __call__(self, *args, **kwargs):\n items = self.ftp_downloader(*args, **kwargs)\n for item in items:\n self.post_processor(item)\n return items\n",
"step-3": "<mask token>\n\n\nclass FtpDownloaderPostProcess:\n\n def __init__(self, ftp_downloader, post_processor, num_workers=None,\n config_dict=None):\n self.post_processor = post_processor\n self.ftp_downloader = ftp_downloader\n self.num_workers = num_workers or self._get_from_config(config_dict,\n 'num_workers', 5)\n\n @staticmethod\n def _get_from_config(config_dict, key, default_value):\n value = default_value\n if config_dict is not None:\n cls_name = 'FtpDownloaderPostProcess'\n if config_dict.get(cls_name, None) is not None:\n value = config_dict[cls_name].get(key, 5)\n return value\n\n @property\n def logger(self):\n return logging.getLogger(__name__)\n\n def iterate(self, *args, **kwargs):\n \"\"\"\nUses worker queues to perform the postprocessing\n :param args:\n :param kwargs:\n \"\"\"\n q = Queue()\n max_workers = self.num_workers\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = []\n for i in range(max_workers):\n futures.append(executor.submit(self._worker, q))\n try:\n for f in self.ftp_downloader.iterate(*args, **kwargs):\n q.put(f)\n yield f\n finally:\n for i in range(max_workers):\n q.put(None)\n for future in futures:\n future.result()\n\n def _worker(self, read_queue):\n while True:\n item = read_queue.get()\n if item is None:\n return\n try:\n self.post_processor(item)\n except Exception as e:\n self.logger.warning('The task has failed with error ..{}'.\n format(e))\n raise e\n read_queue.task_done()\n\n def __call__(self, *args, **kwargs):\n items = self.ftp_downloader(*args, **kwargs)\n for item in items:\n self.post_processor(item)\n return items\n",
"step-4": "import logging\nfrom queue import Queue\nimport concurrent.futures\n<mask token>\n\n\nclass FtpDownloaderPostProcess:\n\n def __init__(self, ftp_downloader, post_processor, num_workers=None,\n config_dict=None):\n self.post_processor = post_processor\n self.ftp_downloader = ftp_downloader\n self.num_workers = num_workers or self._get_from_config(config_dict,\n 'num_workers', 5)\n\n @staticmethod\n def _get_from_config(config_dict, key, default_value):\n value = default_value\n if config_dict is not None:\n cls_name = 'FtpDownloaderPostProcess'\n if config_dict.get(cls_name, None) is not None:\n value = config_dict[cls_name].get(key, 5)\n return value\n\n @property\n def logger(self):\n return logging.getLogger(__name__)\n\n def iterate(self, *args, **kwargs):\n \"\"\"\nUses worker queues to perform the postprocessing\n :param args:\n :param kwargs:\n \"\"\"\n q = Queue()\n max_workers = self.num_workers\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers\n ) as executor:\n futures = []\n for i in range(max_workers):\n futures.append(executor.submit(self._worker, q))\n try:\n for f in self.ftp_downloader.iterate(*args, **kwargs):\n q.put(f)\n yield f\n finally:\n for i in range(max_workers):\n q.put(None)\n for future in futures:\n future.result()\n\n def _worker(self, read_queue):\n while True:\n item = read_queue.get()\n if item is None:\n return\n try:\n self.post_processor(item)\n except Exception as e:\n self.logger.warning('The task has failed with error ..{}'.\n format(e))\n raise e\n read_queue.task_done()\n\n def __call__(self, *args, **kwargs):\n items = self.ftp_downloader(*args, **kwargs)\n for item in items:\n self.post_processor(item)\n return items\n",
"step-5": "import logging\n\nfrom queue import Queue\nimport concurrent.futures\n\n\"\"\"\nPost processing decorater logic for FtpDownloader\n\"\"\"\n\n\nclass FtpDownloaderPostProcess:\n\n def __init__(self, ftp_downloader, post_processor, num_workers=None, config_dict=None):\n self.post_processor = post_processor\n self.ftp_downloader = ftp_downloader\n self.num_workers = num_workers or self._get_from_config(config_dict, \"num_workers\", 5)\n\n @staticmethod\n def _get_from_config(config_dict, key, default_value):\n value = default_value\n if config_dict is not None:\n cls_name = \"FtpDownloaderPostProcess\"\n if config_dict.get(cls_name, None) is not None:\n value = config_dict[cls_name].get(key, 5)\n return value\n\n @property\n def logger(self):\n return logging.getLogger(__name__)\n\n def iterate(self, *args, **kwargs):\n \"\"\"\nUses worker queues to perform the postprocessing\n :param args:\n :param kwargs:\n \"\"\"\n # use thread pool to parallel process\n q = Queue()\n\n max_workers = self.num_workers\n with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:\n # Set up workers\n futures = []\n for i in range(max_workers):\n futures.append(executor.submit(self._worker, q))\n\n # Submit worker jobs\n # Wrap the main task in a try block so that the queue completes regardless of success/failure of main job\n try:\n for f in self.ftp_downloader.iterate(*args, **kwargs):\n q.put(f)\n yield f\n finally:\n # Stop processing\n # Not doing a queue to join, because if all workers fail this will hang with items still left in q...\n # q.join()\n\n # poison pill\n for i in range(max_workers):\n q.put(None)\n for future in futures:\n future.result()\n\n def _worker(self, read_queue):\n while True:\n item = read_queue.get()\n if item is None:\n return\n try:\n self.post_processor(item)\n except Exception as e:\n self.logger.warning(\"The task has failed with error ..{}\".format(e))\n\n raise e\n read_queue.task_done()\n\n def __call__(self, *args, **kwargs):\n items = self.ftp_downloader(*args, **kwargs)\n for item in items:\n self.post_processor(item)\n return items\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .auth import Auth
from .banDetection import BanDetectionThread
from .botLogging import BotLoggingThread
from .clientLauncher import ClientLauncher
from .log import LogThread, Log
from .mainThread import MainThread
from .nexonServer import NexonServer
from .tmLogging import TMLoggingThread
from .worldCheckboxStatus import WorldCheckBoxThread
from .setStartup import setStartupThread
|
flexible
|
{
"blob_id": "b7038ad73bf0e284474f0d89d6c34967d39541c0",
"index": 6566,
"step-1": "<mask token>\n",
"step-2": "from .auth import Auth\nfrom .banDetection import BanDetectionThread\nfrom .botLogging import BotLoggingThread\nfrom .clientLauncher import ClientLauncher\nfrom .log import LogThread, Log\nfrom .mainThread import MainThread\nfrom .nexonServer import NexonServer\nfrom .tmLogging import TMLoggingThread\nfrom .worldCheckboxStatus import WorldCheckBoxThread\nfrom .setStartup import setStartupThread\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
def getScale(NumFrame, t_gt, seq_num):
txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.
format(seq_num))
x_prev = float(t_gt[0])
y_prev = float(t_gt[1])
z_prev = float(t_gt[2])
line = txt_file.readlines()
line_sp = line[NumFrame].split(' ')
x = float(line_sp[3])
y = float(line_sp[7])
z = float(line_sp[11])
t_gt[0] = x
t_gt[1] = y
t_gt[2] = z
txt_file.close()
scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2
)
return scale, t_gt
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getScale(NumFrame, t_gt, seq_num):
txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.
format(seq_num))
x_prev = float(t_gt[0])
y_prev = float(t_gt[1])
z_prev = float(t_gt[2])
line = txt_file.readlines()
line_sp = line[NumFrame].split(' ')
x = float(line_sp[3])
y = float(line_sp[7])
z = float(line_sp[11])
t_gt[0] = x
t_gt[1] = y
t_gt[2] = z
txt_file.close()
scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2
)
return scale, t_gt
if __name__ == '__main__':
MAX_FRAME = 1000
SEQ_NUM = 2
focal = 718.856
pp = 607.1928, 185.2157
textOrg1 = 10, 30
textOrg2 = 10, 80
textOrg3 = 10, 130
img_1_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'
.format(SEQ_NUM))
img_2_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'
.format(SEQ_NUM))
img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)
img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)
kp1, des1 = orb.detectAndCompute(img_1, None)
kp2, des2 = orb.detectAndCompute(img_2, None)
matches = bf.match(des1, des2)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp1[i.queryIdx].pt)
pts2.append(kp2[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=
cv2.RANSAC, prob=0.999, threshold=1.0)
_, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)
R_f_seg = R_f
t_f_seg = t_f
t_gt = np.zeros((3, 1), dtype=np.float64)
prevImage = img_2
kp_prev = kp2
des_prev = des2
traj = np.zeros((1000, 2000), dtype=np.uint8)
traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)
rmse_total = 0
for numFrame in range(2, MAX_FRAME):
filename = (
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'
.format(SEQ_NUM, numFrame))
currImage_c = cv2.imread(filename)
currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)
kp_curr, des_curr = orb.detectAndCompute(currImage, None)
matches = bf.match(des_prev, des_curr)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp_prev[i.queryIdx].pt)
pts2.append(kp_curr[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,
method=cv2.RANSAC, prob=0.999, threshold=1.0)
_, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)
abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)
t_f = t_f + abs_scale * R_f.dot(t)
R_f = R.dot(R_f)
error = map(operator.sub, t_gt, t_f)
error_sum_square = sum(map(lambda x: x * x, error))
rmse = math.sqrt(error_sum_square / 3)
rmse_total = rmse_total + rmse
print('rmse = ', rmse_total / numFrame)
prevImage = currImage
kp_prev = kp_curr
des_prev = des_curr
x_gt = int(t_gt[0]) + 1000
y_gt = int(t_gt[2]) + 100
x = int(t_f[0]) + 1000
y = int(t_f[2]) + 100
cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)
cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)
cv2.rectangle(traj, (10, 10), (700, 150), (0, 0, 0), -1)
text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_f[0]), float(t_f[1]), float(t_f[2])))
cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))
cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)
cv2.imshow('trajectory', traj)
cv2.imshow('feat_img', feature_img)
cv2.waitKey(1)
cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
orb = cv2.cv2.ORB_create(nfeatures=5000, scaleFactor=1.2, nlevels=8,
edgeThreshold=31, firstLevel=0, WTA_K=2, scoreType=cv2.ORB_FAST_SCORE,
patchSize=31, fastThreshold=25)
bf = cv2.BFMatcher(cv2.NORM_HAMMING)
def getScale(NumFrame, t_gt, seq_num):
txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.
format(seq_num))
x_prev = float(t_gt[0])
y_prev = float(t_gt[1])
z_prev = float(t_gt[2])
line = txt_file.readlines()
line_sp = line[NumFrame].split(' ')
x = float(line_sp[3])
y = float(line_sp[7])
z = float(line_sp[11])
t_gt[0] = x
t_gt[1] = y
t_gt[2] = z
txt_file.close()
scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2
)
return scale, t_gt
if __name__ == '__main__':
MAX_FRAME = 1000
SEQ_NUM = 2
focal = 718.856
pp = 607.1928, 185.2157
textOrg1 = 10, 30
textOrg2 = 10, 80
textOrg3 = 10, 130
img_1_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'
.format(SEQ_NUM))
img_2_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'
.format(SEQ_NUM))
img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)
img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)
kp1, des1 = orb.detectAndCompute(img_1, None)
kp2, des2 = orb.detectAndCompute(img_2, None)
matches = bf.match(des1, des2)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp1[i.queryIdx].pt)
pts2.append(kp2[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=
cv2.RANSAC, prob=0.999, threshold=1.0)
_, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)
R_f_seg = R_f
t_f_seg = t_f
t_gt = np.zeros((3, 1), dtype=np.float64)
prevImage = img_2
kp_prev = kp2
des_prev = des2
traj = np.zeros((1000, 2000), dtype=np.uint8)
traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)
rmse_total = 0
for numFrame in range(2, MAX_FRAME):
filename = (
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'
.format(SEQ_NUM, numFrame))
currImage_c = cv2.imread(filename)
currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)
kp_curr, des_curr = orb.detectAndCompute(currImage, None)
matches = bf.match(des_prev, des_curr)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp_prev[i.queryIdx].pt)
pts2.append(kp_curr[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,
method=cv2.RANSAC, prob=0.999, threshold=1.0)
_, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)
abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)
t_f = t_f + abs_scale * R_f.dot(t)
R_f = R.dot(R_f)
error = map(operator.sub, t_gt, t_f)
error_sum_square = sum(map(lambda x: x * x, error))
rmse = math.sqrt(error_sum_square / 3)
rmse_total = rmse_total + rmse
print('rmse = ', rmse_total / numFrame)
prevImage = currImage
kp_prev = kp_curr
des_prev = des_curr
x_gt = int(t_gt[0]) + 1000
y_gt = int(t_gt[2]) + 100
x = int(t_f[0]) + 1000
y = int(t_f[2]) + 100
cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)
cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)
cv2.rectangle(traj, (10, 10), (700, 150), (0, 0, 0), -1)
text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_f[0]), float(t_f[1]), float(t_f[2])))
cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))
cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)
cv2.imshow('trajectory', traj)
cv2.imshow('feat_img', feature_img)
cv2.waitKey(1)
cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)
<|reserved_special_token_1|>
from os import wait
import cv2
import numpy as np
import math
import sys
import types
import operator
orb = cv2.cv2.ORB_create(nfeatures=5000, scaleFactor=1.2, nlevels=8,
edgeThreshold=31, firstLevel=0, WTA_K=2, scoreType=cv2.ORB_FAST_SCORE,
patchSize=31, fastThreshold=25)
bf = cv2.BFMatcher(cv2.NORM_HAMMING)
def getScale(NumFrame, t_gt, seq_num):
txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.
format(seq_num))
x_prev = float(t_gt[0])
y_prev = float(t_gt[1])
z_prev = float(t_gt[2])
line = txt_file.readlines()
line_sp = line[NumFrame].split(' ')
x = float(line_sp[3])
y = float(line_sp[7])
z = float(line_sp[11])
t_gt[0] = x
t_gt[1] = y
t_gt[2] = z
txt_file.close()
scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2
)
return scale, t_gt
if __name__ == '__main__':
MAX_FRAME = 1000
SEQ_NUM = 2
focal = 718.856
pp = 607.1928, 185.2157
textOrg1 = 10, 30
textOrg2 = 10, 80
textOrg3 = 10, 130
img_1_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'
.format(SEQ_NUM))
img_2_c = cv2.imread(
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'
.format(SEQ_NUM))
img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)
img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)
kp1, des1 = orb.detectAndCompute(img_1, None)
kp2, des2 = orb.detectAndCompute(img_2, None)
matches = bf.match(des1, des2)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp1[i.queryIdx].pt)
pts2.append(kp2[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=
cv2.RANSAC, prob=0.999, threshold=1.0)
_, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)
R_f_seg = R_f
t_f_seg = t_f
t_gt = np.zeros((3, 1), dtype=np.float64)
prevImage = img_2
kp_prev = kp2
des_prev = des2
traj = np.zeros((1000, 2000), dtype=np.uint8)
traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)
rmse_total = 0
for numFrame in range(2, MAX_FRAME):
filename = (
'/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'
.format(SEQ_NUM, numFrame))
currImage_c = cv2.imread(filename)
currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)
kp_curr, des_curr = orb.detectAndCompute(currImage, None)
matches = bf.match(des_prev, des_curr)
matches = sorted(matches, key=lambda x: x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp_prev[i.queryIdx].pt)
pts2.append(kp_curr[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,
method=cv2.RANSAC, prob=0.999, threshold=1.0)
_, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)
abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)
t_f = t_f + abs_scale * R_f.dot(t)
R_f = R.dot(R_f)
error = map(operator.sub, t_gt, t_f)
error_sum_square = sum(map(lambda x: x * x, error))
rmse = math.sqrt(error_sum_square / 3)
rmse_total = rmse_total + rmse
print('rmse = ', rmse_total / numFrame)
prevImage = currImage
kp_prev = kp_curr
des_prev = des_curr
x_gt = int(t_gt[0]) + 1000
y_gt = int(t_gt[2]) + 100
x = int(t_f[0]) + 1000
y = int(t_f[2]) + 100
cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)
cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)
cv2.rectangle(traj, (10, 10), (700, 150), (0, 0, 0), -1)
text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_f[0]), float(t_f[1]), float(t_f[2])))
cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.
format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))
cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,
255, 255), 1, 8)
feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)
cv2.imshow('trajectory', traj)
cv2.imshow('feat_img', feature_img)
cv2.waitKey(1)
cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)
<|reserved_special_token_1|>
from os import wait
import cv2
import numpy as np
import math
import sys
import types
import operator
## Declare the ORB detector and BF matcher
orb = cv2.cv2.ORB_create(
nfeatures=5000,
scaleFactor=1.2,
nlevels=8,
edgeThreshold=31,
firstLevel=0,
WTA_K=2,
scoreType=cv2.ORB_FAST_SCORE,
patchSize=31,
fastThreshold=25,
)
bf = cv2.BFMatcher(cv2.NORM_HAMMING)
def getScale(NumFrame, t_gt, seq_num):
txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.format(seq_num))
x_prev = float(t_gt[0])
y_prev = float(t_gt[1])
z_prev = float(t_gt[2])
line = txt_file.readlines()
line_sp = line[NumFrame].split(' ')
x = float(line_sp[3])
y = float(line_sp[7])
z = float(line_sp[11])
t_gt[0] = x
t_gt[1] = y
t_gt[2] = z
txt_file.close()
scale = math.sqrt((x-x_prev)**2 + (y-y_prev)**2 + (z-z_prev)**2)
return scale, t_gt
if __name__ == "__main__":
MAX_FRAME = 1000
SEQ_NUM = 2
    # Camera intrinsic parameters
focal = 718.8560
pp = (607.1928, 185.2157)
textOrg1 = (10,30)
textOrg2 = (10,80)
textOrg3 = (10,130)
img_1_c = cv2.imread("/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png".format(SEQ_NUM))
img_2_c = cv2.imread("/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png".format(SEQ_NUM))
img_1 = cv2.cvtColor(img_1_c,cv2.COLOR_BGR2GRAY)
img_2 = cv2.cvtColor(img_2_c,cv2.COLOR_BGR2GRAY)
kp1, des1 = orb.detectAndCompute(img_1,None)
kp2, des2 = orb.detectAndCompute(img_2,None)
matches = bf.match(des1,des2)
matches = sorted(matches, key = lambda x:x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp1[i.queryIdx].pt)
pts2.append(kp2[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
E, mask = cv2.findEssentialMat(pts1,pts2,focal = focal, pp = pp, method=cv2.RANSAC, prob = 0.999, threshold=1.0)
_, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal = focal, pp = pp)
R_f_seg = R_f
t_f_seg = t_f
t_gt = np.zeros((3,1),dtype=np.float64)
prevImage = img_2
kp_prev = kp2
des_prev = des2
traj = np.zeros((1000,2000),dtype=np.uint8)
traj = cv2.cvtColor(traj,cv2.COLOR_GRAY2BGR)
rmse_total = 0
for numFrame in range(2, MAX_FRAME):
filename = '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'.format(SEQ_NUM,numFrame)
currImage_c = cv2.imread(filename)
currImage = cv2.cvtColor(currImage_c,cv2.COLOR_BGR2GRAY)
# feature extraction
kp_curr, des_curr = orb.detectAndCompute(currImage,None)
# feature matching
matches = bf.match(des_prev,des_curr)
matches = sorted(matches, key = lambda x:x.distance)
idx = matches[0:1500]
pts1 = []
pts2 = []
for i in idx:
pts1.append(kp_prev[i.queryIdx].pt)
pts2.append(kp_curr[i.trainIdx].pt)
pts1 = np.array(pts1)
pts2 = np.array(pts2)
        # calculate R, t
E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal = focal, pp = pp, method=cv2.RANSAC, prob = 0.999, threshold=1.0)
_, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal = focal, pp = pp)
# get scale
abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)
# update trajectory
t_f = t_f + abs_scale*R_f.dot(t)
R_f = R.dot(R_f)
        # calculate Error
error = map(operator.sub,t_gt,t_f)
error_sum_square = sum(map(lambda x:x*x,error))
rmse = math.sqrt(error_sum_square/3)
rmse_total = rmse_total + rmse
print("rmse = ",rmse_total/numFrame)
prevImage = currImage
kp_prev = kp_curr
des_prev = des_curr
# visualization
x_gt = int(t_gt[0]) + 1000
y_gt = int(t_gt[2]) + 100
x = int(t_f[0]) + 1000
y = int(t_f[2]) + 100
cv2.circle(traj, (x,y), 1 , (0,0,255), 2)
cv2.circle(traj, (x_gt,y_gt), 1 , (0,255,0), 2)
cv2.rectangle(traj, (10,10), (700,150), (0,0,0), -1)
text1 = 'orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.format(float(t_f[0]),float(t_f[1]),float(t_f[2]))
cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN,1,(255,255,255),1,8)
text3 = 'gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.format(float(t_gt[0]),float(t_gt[1]),float(t_gt[2]))
cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN,1,(255,255,255),1,8)
feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)
cv2.imshow("trajectory", traj)
cv2.imshow("feat_img", feature_img)
cv2.waitKey(1)
cv2.imwrite("result_{0:02d}.png".format(SEQ_NUM),traj)
|
flexible
|
{
"blob_id": "73e7e43e9cfb3c0884480809bc03ade687d641d6",
"index": 733,
"step-1": "<mask token>\n\n\ndef getScale(NumFrame, t_gt, seq_num):\n txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.\n format(seq_num))\n x_prev = float(t_gt[0])\n y_prev = float(t_gt[1])\n z_prev = float(t_gt[2])\n line = txt_file.readlines()\n line_sp = line[NumFrame].split(' ')\n x = float(line_sp[3])\n y = float(line_sp[7])\n z = float(line_sp[11])\n t_gt[0] = x\n t_gt[1] = y\n t_gt[2] = z\n txt_file.close()\n scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2\n )\n return scale, t_gt\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getScale(NumFrame, t_gt, seq_num):\n txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.\n format(seq_num))\n x_prev = float(t_gt[0])\n y_prev = float(t_gt[1])\n z_prev = float(t_gt[2])\n line = txt_file.readlines()\n line_sp = line[NumFrame].split(' ')\n x = float(line_sp[3])\n y = float(line_sp[7])\n z = float(line_sp[11])\n t_gt[0] = x\n t_gt[1] = y\n t_gt[2] = z\n txt_file.close()\n scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2\n )\n return scale, t_gt\n\n\nif __name__ == '__main__':\n MAX_FRAME = 1000\n SEQ_NUM = 2\n focal = 718.856\n pp = 607.1928, 185.2157\n textOrg1 = 10, 30\n textOrg2 = 10, 80\n textOrg3 = 10, 130\n img_1_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'\n .format(SEQ_NUM))\n img_2_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'\n .format(SEQ_NUM))\n img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)\n img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)\n kp1, des1 = orb.detectAndCompute(img_1, None)\n kp2, des2 = orb.detectAndCompute(img_2, None)\n matches = bf.match(des1, des2)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp1[i.queryIdx].pt)\n pts2.append(kp2[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=\n cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)\n R_f_seg = R_f\n t_f_seg = t_f\n t_gt = np.zeros((3, 1), dtype=np.float64)\n prevImage = img_2\n kp_prev = kp2\n des_prev = des2\n traj = np.zeros((1000, 2000), dtype=np.uint8)\n traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)\n rmse_total = 0\n for numFrame in range(2, MAX_FRAME):\n filename = (\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'\n .format(SEQ_NUM, numFrame))\n currImage_c = cv2.imread(filename)\n currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)\n kp_curr, des_curr = orb.detectAndCompute(currImage, None)\n matches = bf.match(des_prev, des_curr)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp_prev[i.queryIdx].pt)\n pts2.append(kp_curr[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,\n method=cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)\n abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)\n t_f = t_f + abs_scale * R_f.dot(t)\n R_f = R.dot(R_f)\n error = map(operator.sub, t_gt, t_f)\n error_sum_square = sum(map(lambda x: x * x, error))\n rmse = math.sqrt(error_sum_square / 3)\n rmse_total = rmse_total + rmse\n print('rmse = ', rmse_total / numFrame)\n prevImage = currImage\n kp_prev = kp_curr\n des_prev = des_curr\n x_gt = int(t_gt[0]) + 1000\n y_gt = int(t_gt[2]) + 100\n x = int(t_f[0]) + 1000\n y = int(t_f[2]) + 100\n cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)\n cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)\n cv2.rectangle(traj, (10, 10), (700, 150), (0, 0, 0), -1)\n text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.\n format(float(t_f[0]), float(t_f[1]), float(t_f[2])))\n cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = 
{2:02f}m'.\n format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))\n cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)\n cv2.imshow('trajectory', traj)\n cv2.imshow('feat_img', feature_img)\n cv2.waitKey(1)\n cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)\n",
"step-3": "<mask token>\norb = cv2.cv2.ORB_create(nfeatures=5000, scaleFactor=1.2, nlevels=8,\n edgeThreshold=31, firstLevel=0, WTA_K=2, scoreType=cv2.ORB_FAST_SCORE,\n patchSize=31, fastThreshold=25)\nbf = cv2.BFMatcher(cv2.NORM_HAMMING)\n\n\ndef getScale(NumFrame, t_gt, seq_num):\n txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.\n format(seq_num))\n x_prev = float(t_gt[0])\n y_prev = float(t_gt[1])\n z_prev = float(t_gt[2])\n line = txt_file.readlines()\n line_sp = line[NumFrame].split(' ')\n x = float(line_sp[3])\n y = float(line_sp[7])\n z = float(line_sp[11])\n t_gt[0] = x\n t_gt[1] = y\n t_gt[2] = z\n txt_file.close()\n scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2\n )\n return scale, t_gt\n\n\nif __name__ == '__main__':\n MAX_FRAME = 1000\n SEQ_NUM = 2\n focal = 718.856\n pp = 607.1928, 185.2157\n textOrg1 = 10, 30\n textOrg2 = 10, 80\n textOrg3 = 10, 130\n img_1_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'\n .format(SEQ_NUM))\n img_2_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'\n .format(SEQ_NUM))\n img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)\n img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)\n kp1, des1 = orb.detectAndCompute(img_1, None)\n kp2, des2 = orb.detectAndCompute(img_2, None)\n matches = bf.match(des1, des2)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp1[i.queryIdx].pt)\n pts2.append(kp2[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=\n cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)\n R_f_seg = R_f\n t_f_seg = t_f\n t_gt = np.zeros((3, 1), dtype=np.float64)\n prevImage = img_2\n kp_prev = kp2\n des_prev = des2\n traj = np.zeros((1000, 2000), dtype=np.uint8)\n traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)\n rmse_total = 0\n for numFrame in range(2, MAX_FRAME):\n filename = (\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'\n .format(SEQ_NUM, numFrame))\n currImage_c = cv2.imread(filename)\n currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)\n kp_curr, des_curr = orb.detectAndCompute(currImage, None)\n matches = bf.match(des_prev, des_curr)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp_prev[i.queryIdx].pt)\n pts2.append(kp_curr[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,\n method=cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)\n abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)\n t_f = t_f + abs_scale * R_f.dot(t)\n R_f = R.dot(R_f)\n error = map(operator.sub, t_gt, t_f)\n error_sum_square = sum(map(lambda x: x * x, error))\n rmse = math.sqrt(error_sum_square / 3)\n rmse_total = rmse_total + rmse\n print('rmse = ', rmse_total / numFrame)\n prevImage = currImage\n kp_prev = kp_curr\n des_prev = des_curr\n x_gt = int(t_gt[0]) + 1000\n y_gt = int(t_gt[2]) + 100\n x = int(t_f[0]) + 1000\n y = int(t_f[2]) + 100\n cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)\n cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)\n cv2.rectangle(traj, (10, 10), (700, 150), (0, 0, 0), -1)\n text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z 
= {2:02f}m'.\n format(float(t_f[0]), float(t_f[1]), float(t_f[2])))\n cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.\n format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))\n cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)\n cv2.imshow('trajectory', traj)\n cv2.imshow('feat_img', feature_img)\n cv2.waitKey(1)\n cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)\n",
"step-4": "from os import wait\nimport cv2\nimport numpy as np\nimport math\nimport sys\nimport types\nimport operator\norb = cv2.cv2.ORB_create(nfeatures=5000, scaleFactor=1.2, nlevels=8,\n edgeThreshold=31, firstLevel=0, WTA_K=2, scoreType=cv2.ORB_FAST_SCORE,\n patchSize=31, fastThreshold=25)\nbf = cv2.BFMatcher(cv2.NORM_HAMMING)\n\n\ndef getScale(NumFrame, t_gt, seq_num):\n txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.\n format(seq_num))\n x_prev = float(t_gt[0])\n y_prev = float(t_gt[1])\n z_prev = float(t_gt[2])\n line = txt_file.readlines()\n line_sp = line[NumFrame].split(' ')\n x = float(line_sp[3])\n y = float(line_sp[7])\n z = float(line_sp[11])\n t_gt[0] = x\n t_gt[1] = y\n t_gt[2] = z\n txt_file.close()\n scale = math.sqrt((x - x_prev) ** 2 + (y - y_prev) ** 2 + (z - z_prev) ** 2\n )\n return scale, t_gt\n\n\nif __name__ == '__main__':\n MAX_FRAME = 1000\n SEQ_NUM = 2\n focal = 718.856\n pp = 607.1928, 185.2157\n textOrg1 = 10, 30\n textOrg2 = 10, 80\n textOrg3 = 10, 130\n img_1_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png'\n .format(SEQ_NUM))\n img_2_c = cv2.imread(\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png'\n .format(SEQ_NUM))\n img_1 = cv2.cvtColor(img_1_c, cv2.COLOR_BGR2GRAY)\n img_2 = cv2.cvtColor(img_2_c, cv2.COLOR_BGR2GRAY)\n kp1, des1 = orb.detectAndCompute(img_1, None)\n kp2, des2 = orb.detectAndCompute(img_2, None)\n matches = bf.match(des1, des2)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp1[i.queryIdx].pt)\n pts2.append(kp2[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E, mask = cv2.findEssentialMat(pts1, pts2, focal=focal, pp=pp, method=\n cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal=focal, pp=pp)\n R_f_seg = R_f\n t_f_seg = t_f\n t_gt = np.zeros((3, 1), dtype=np.float64)\n prevImage = img_2\n kp_prev = kp2\n des_prev = des2\n traj = np.zeros((1000, 2000), dtype=np.uint8)\n traj = cv2.cvtColor(traj, cv2.COLOR_GRAY2BGR)\n rmse_total = 0\n for numFrame in range(2, MAX_FRAME):\n filename = (\n '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'\n .format(SEQ_NUM, numFrame))\n currImage_c = cv2.imread(filename)\n currImage = cv2.cvtColor(currImage_c, cv2.COLOR_BGR2GRAY)\n kp_curr, des_curr = orb.detectAndCompute(currImage, None)\n matches = bf.match(des_prev, des_curr)\n matches = sorted(matches, key=lambda x: x.distance)\n idx = matches[0:1500]\n pts1 = []\n pts2 = []\n for i in idx:\n pts1.append(kp_prev[i.queryIdx].pt)\n pts2.append(kp_curr[i.trainIdx].pt)\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal=focal, pp=pp,\n method=cv2.RANSAC, prob=0.999, threshold=1.0)\n _, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal=focal, pp=pp)\n abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)\n t_f = t_f + abs_scale * R_f.dot(t)\n R_f = R.dot(R_f)\n error = map(operator.sub, t_gt, t_f)\n error_sum_square = sum(map(lambda x: x * x, error))\n rmse = math.sqrt(error_sum_square / 3)\n rmse_total = rmse_total + rmse\n print('rmse = ', rmse_total / numFrame)\n prevImage = currImage\n kp_prev = kp_curr\n des_prev = des_curr\n x_gt = int(t_gt[0]) + 1000\n y_gt = int(t_gt[2]) + 100\n x = int(t_f[0]) + 1000\n y = int(t_f[2]) + 100\n cv2.circle(traj, (x, y), 1, (0, 0, 255), 2)\n cv2.circle(traj, (x_gt, y_gt), 1, (0, 255, 0), 2)\n cv2.rectangle(traj, 
(10, 10), (700, 150), (0, 0, 0), -1)\n text1 = ('orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.\n format(float(t_f[0]), float(t_f[1]), float(t_f[2])))\n cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n text3 = ('gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.\n format(float(t_gt[0]), float(t_gt[1]), float(t_gt[2])))\n cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN, 1, (255,\n 255, 255), 1, 8)\n feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)\n cv2.imshow('trajectory', traj)\n cv2.imshow('feat_img', feature_img)\n cv2.waitKey(1)\n cv2.imwrite('result_{0:02d}.png'.format(SEQ_NUM), traj)\n",
"step-5": "from os import wait\nimport cv2\nimport numpy as np\nimport math\nimport sys\nimport types\nimport operator\n\n## orb 및 bf matcher 선언\norb = cv2.cv2.ORB_create(\n nfeatures=5000,\n scaleFactor=1.2,\n nlevels=8,\n edgeThreshold=31,\n firstLevel=0,\n WTA_K=2,\n scoreType=cv2.ORB_FAST_SCORE,\n patchSize=31,\n fastThreshold=25,\n )\n\nbf = cv2.BFMatcher(cv2.NORM_HAMMING)\n\n\ndef getScale(NumFrame, t_gt, seq_num):\n\n txt_file = open('/media/cordin/새 볼륨/rosbag/dataset/poses/{0:02d}.txt'.format(seq_num))\n \n x_prev = float(t_gt[0])\n y_prev = float(t_gt[1])\n z_prev = float(t_gt[2])\n\n line = txt_file.readlines()\n line_sp = line[NumFrame].split(' ')\n\n x = float(line_sp[3])\n y = float(line_sp[7])\n z = float(line_sp[11])\n\n t_gt[0] = x\n t_gt[1] = y\n t_gt[2] = z\n\n txt_file.close()\n\n scale = math.sqrt((x-x_prev)**2 + (y-y_prev)**2 + (z-z_prev)**2)\n return scale, t_gt\n\n\nif __name__ == \"__main__\":\n MAX_FRAME = 1000\n SEQ_NUM = 2\n\n #Camera intrinsic parameter\n focal = 718.8560\n pp = (607.1928, 185.2157)\n\n textOrg1 = (10,30)\n textOrg2 = (10,80)\n textOrg3 = (10,130)\n\n img_1_c = cv2.imread(\"/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000000.png\".format(SEQ_NUM))\n img_2_c = cv2.imread(\"/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/000001.png\".format(SEQ_NUM))\n img_1 = cv2.cvtColor(img_1_c,cv2.COLOR_BGR2GRAY)\n img_2 = cv2.cvtColor(img_2_c,cv2.COLOR_BGR2GRAY)\n\n kp1, des1 = orb.detectAndCompute(img_1,None)\n kp2, des2 = orb.detectAndCompute(img_2,None)\n\n matches = bf.match(des1,des2)\n matches = sorted(matches, key = lambda x:x.distance)\n\n idx = matches[0:1500]\n\n pts1 = []\n pts2 = []\n\n for i in idx:\n pts1.append(kp1[i.queryIdx].pt)\n pts2.append(kp2[i.trainIdx].pt)\n\n\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n\n E, mask = cv2.findEssentialMat(pts1,pts2,focal = focal, pp = pp, method=cv2.RANSAC, prob = 0.999, threshold=1.0)\n _, R_f, t_f, _ = cv2.recoverPose(E, pts1, pts2, focal = focal, pp = pp)\n\n R_f_seg = R_f\n t_f_seg = t_f\n\n t_gt = np.zeros((3,1),dtype=np.float64)\n\n prevImage = img_2\n kp_prev = kp2\n des_prev = des2\n\n traj = np.zeros((1000,2000),dtype=np.uint8)\n traj = cv2.cvtColor(traj,cv2.COLOR_GRAY2BGR)\n\n rmse_total = 0\n \n for numFrame in range(2, MAX_FRAME):\n filename = '/media/cordin/새 볼륨/rosbag/dataset/sequences/{0:02d}/image_0/{1:06d}.png'.format(SEQ_NUM,numFrame)\n \n currImage_c = cv2.imread(filename)\n currImage = cv2.cvtColor(currImage_c,cv2.COLOR_BGR2GRAY)\n\n # feature extraction\n kp_curr, des_curr = orb.detectAndCompute(currImage,None)\n\n # feature matching\n matches = bf.match(des_prev,des_curr)\n matches = sorted(matches, key = lambda x:x.distance)\n idx = matches[0:1500]\n\n pts1 = []\n pts2 = []\n\n for i in idx:\n pts1.append(kp_prev[i.queryIdx].pt)\n pts2.append(kp_curr[i.trainIdx].pt)\n\n pts1 = np.array(pts1)\n pts2 = np.array(pts2)\n\n # caculate R, t\n E_mat, mask_n = cv2.findEssentialMat(pts2, pts1, focal = focal, pp = pp, method=cv2.RANSAC, prob = 0.999, threshold=1.0)\n _, R, t, _ = cv2.recoverPose(E_mat, pts2, pts1, focal = focal, pp = pp)\n\n # get scale\n abs_scale, t_gt = getScale(numFrame, t_gt, SEQ_NUM)\n \n # update trajectory\n t_f = t_f + abs_scale*R_f.dot(t)\n R_f = R.dot(R_f)\n\n # caculate Error\n error = map(operator.sub,t_gt,t_f)\n error_sum_square = sum(map(lambda x:x*x,error))\n rmse = math.sqrt(error_sum_square/3)\n rmse_total = rmse_total + rmse\n\n print(\"rmse = \",rmse_total/numFrame)\n\n prevImage = currImage\n kp_prev = kp_curr\n des_prev = 
des_curr\n\n # visualization\n x_gt = int(t_gt[0]) + 1000\n y_gt = int(t_gt[2]) + 100\n\n x = int(t_f[0]) + 1000\n y = int(t_f[2]) + 100\n\n cv2.circle(traj, (x,y), 1 , (0,0,255), 2)\n cv2.circle(traj, (x_gt,y_gt), 1 , (0,255,0), 2)\n \n\n cv2.rectangle(traj, (10,10), (700,150), (0,0,0), -1)\n text1 = 'orb Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.format(float(t_f[0]),float(t_f[1]),float(t_f[2]))\n cv2.putText(traj, text1, textOrg1, cv2.FONT_HERSHEY_PLAIN,1,(255,255,255),1,8)\n\n text3 = 'gt Coordinates: x = {0:02f}m y = {1:02f}m z = {2:02f}m'.format(float(t_gt[0]),float(t_gt[1]),float(t_gt[2]))\n cv2.putText(traj, text3, textOrg3, cv2.FONT_HERSHEY_PLAIN,1,(255,255,255),1,8)\n\n feature_img = cv2.drawKeypoints(currImage_c, kp_curr, None)\n\n cv2.imshow(\"trajectory\", traj)\n cv2.imshow(\"feat_img\", feature_img)\n\n cv2.waitKey(1)\n \n cv2.imwrite(\"result_{0:02d}.png\".format(SEQ_NUM),traj)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
import socket
import datetime as dt
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.animation import FuncAnimation
from matplotlib import style
import pickle
# Create figure for plotting
time_list = []
gain_list = []
HOST = '127.0.0.1' # Standard loopback interface address (localhost)
PORT = 65432 # Port to listen on (non-privileged ports are > 1023)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, PORT))
s.listen(5)
conn, addr = s.accept()
fig, ax1 = plt.subplots()
ax1.set_ylim(-.1, 1.1)
ax1.set_xlim(0, 2)
def recieve_data():
while True:
data = conn.recv(1024)
if not data:
break
conn.sendall(data)
msg = pickle.loads(data)
time = float(msg[0])
gain = float(msg[1])
yield time , gain
conn.close()
def animate(i):
xs = []
ys = []
for line in recieve_data():
if len(xs) < 50:
x, y = line
#print(x,y)
xs.append(float(x))
ys.append(float(y))
else:break
print(xs,ys)
ax1.clear()
ax1.plot(xs, ys)
ani = animation.FuncAnimation(fig, animate, interval=10)
plt.show()
|
normal
|
{
"blob_id": "a4d5064decdc9963dae1712c7c6918b3e5902bf2",
"index": 9825,
"step-1": "<mask token>\n\n\ndef recieve_data():\n while True:\n data = conn.recv(1024)\n if not data:\n break\n conn.sendall(data)\n msg = pickle.loads(data)\n time = float(msg[0])\n gain = float(msg[1])\n yield time, gain\n conn.close()\n\n\ndef animate(i):\n xs = []\n ys = []\n for line in recieve_data():\n if len(xs) < 50:\n x, y = line\n xs.append(float(x))\n ys.append(float(y))\n else:\n break\n print(xs, ys)\n ax1.clear()\n ax1.plot(xs, ys)\n\n\n<mask token>\n",
"step-2": "<mask token>\ns.bind((HOST, PORT))\ns.listen(5)\n<mask token>\nax1.set_ylim(-0.1, 1.1)\nax1.set_xlim(0, 2)\n\n\ndef recieve_data():\n while True:\n data = conn.recv(1024)\n if not data:\n break\n conn.sendall(data)\n msg = pickle.loads(data)\n time = float(msg[0])\n gain = float(msg[1])\n yield time, gain\n conn.close()\n\n\ndef animate(i):\n xs = []\n ys = []\n for line in recieve_data():\n if len(xs) < 50:\n x, y = line\n xs.append(float(x))\n ys.append(float(y))\n else:\n break\n print(xs, ys)\n ax1.clear()\n ax1.plot(xs, ys)\n\n\n<mask token>\nplt.show()\n",
"step-3": "<mask token>\ntime_list = []\ngain_list = []\nHOST = '127.0.0.1'\nPORT = 65432\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.bind((HOST, PORT))\ns.listen(5)\nconn, addr = s.accept()\nfig, ax1 = plt.subplots()\nax1.set_ylim(-0.1, 1.1)\nax1.set_xlim(0, 2)\n\n\ndef recieve_data():\n while True:\n data = conn.recv(1024)\n if not data:\n break\n conn.sendall(data)\n msg = pickle.loads(data)\n time = float(msg[0])\n gain = float(msg[1])\n yield time, gain\n conn.close()\n\n\ndef animate(i):\n xs = []\n ys = []\n for line in recieve_data():\n if len(xs) < 50:\n x, y = line\n xs.append(float(x))\n ys.append(float(y))\n else:\n break\n print(xs, ys)\n ax1.clear()\n ax1.plot(xs, ys)\n\n\nani = animation.FuncAnimation(fig, animate, interval=10)\nplt.show()\n",
"step-4": "import socket\nimport datetime as dt\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib import style\nimport pickle\ntime_list = []\ngain_list = []\nHOST = '127.0.0.1'\nPORT = 65432\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.bind((HOST, PORT))\ns.listen(5)\nconn, addr = s.accept()\nfig, ax1 = plt.subplots()\nax1.set_ylim(-0.1, 1.1)\nax1.set_xlim(0, 2)\n\n\ndef recieve_data():\n while True:\n data = conn.recv(1024)\n if not data:\n break\n conn.sendall(data)\n msg = pickle.loads(data)\n time = float(msg[0])\n gain = float(msg[1])\n yield time, gain\n conn.close()\n\n\ndef animate(i):\n xs = []\n ys = []\n for line in recieve_data():\n if len(xs) < 50:\n x, y = line\n xs.append(float(x))\n ys.append(float(y))\n else:\n break\n print(xs, ys)\n ax1.clear()\n ax1.plot(xs, ys)\n\n\nani = animation.FuncAnimation(fig, animate, interval=10)\nplt.show()\n",
"step-5": "#!/usr/bin/env python\n\nimport socket\nimport datetime as dt\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib import style\nimport pickle\n# Create figure for plotting\n\ntime_list = []\ngain_list = []\n\nHOST = '127.0.0.1' # Standard loopback interface address (localhost)\nPORT = 65432 # Port to listen on (non-privileged ports are > 1023)\n\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.bind((HOST, PORT))\ns.listen(5)\nconn, addr = s.accept()\n\n\nfig, ax1 = plt.subplots()\nax1.set_ylim(-.1, 1.1)\nax1.set_xlim(0, 2)\n\ndef recieve_data():\n\twhile True:\n\t\t data = conn.recv(1024)\n\t\t if not data:\n\t\t\t break\n\t\t conn.sendall(data)\n\t\t msg = pickle.loads(data)\n\t\t time = float(msg[0])\n\t\t gain = float(msg[1])\n\t\t yield time , gain\n\tconn.close()\n\n\n\ndef animate(i):\n xs = []\n ys = []\n for line in recieve_data():\n if len(xs) < 50:\n x, y = line\n #print(x,y)\n xs.append(float(x))\n ys.append(float(y))\n else:break\n print(xs,ys)\n ax1.clear()\n ax1.plot(xs, ys)\n\nani = animation.FuncAnimation(fig, animate, interval=10)\nplt.show()\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def test_version():
assert __version__ == '0.1.0'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_version():
assert __version__ == '0.1.0'
@pytest.mark.vcr()
def test_asteroid_closest_approach():
asteroid_json = asteroid_closest_approach(25)
asteroids = json.loads(asteroid_json)
for i in range(0, len(asteroids) - 1):
assert asteroids[i]['close_approach_date']
assert asteroids[i]['close_approach_date_full']
assert asteroids[i]['epoch_date_close_approach']
assert asteroids[i]['miss_distance']
assert asteroids[i]['orbiting_body']
assert asteroids[i]
assert type(asteroids[i]) is dict
<|reserved_special_token_1|>
<|reserved_special_token_0|>
load_dotenv()
<|reserved_special_token_0|>
def test_version():
assert __version__ == '0.1.0'
@pytest.mark.vcr()
def test_asteroid_closest_approach():
asteroid_json = asteroid_closest_approach(25)
asteroids = json.loads(asteroid_json)
for i in range(0, len(asteroids) - 1):
assert asteroids[i]['close_approach_date']
assert asteroids[i]['close_approach_date_full']
assert asteroids[i]['epoch_date_close_approach']
assert asteroids[i]['miss_distance']
assert asteroids[i]['orbiting_body']
assert asteroids[i]
assert type(asteroids[i]) is dict
<|reserved_special_token_1|>
from asteroidhunter import __version__
import unittest, requests, json, os, pytest
from dotenv import load_dotenv
load_dotenv()
from asteroidhunter.asteroid_closest_approach import asteroid_closest_approach
def test_version():
assert __version__ == '0.1.0'
@pytest.mark.vcr()
def test_asteroid_closest_approach():
asteroid_json = asteroid_closest_approach(25)
asteroids = json.loads(asteroid_json)
for i in range(0, len(asteroids) - 1):
assert asteroids[i]['close_approach_date']
assert asteroids[i]['close_approach_date_full']
assert asteroids[i]['epoch_date_close_approach']
assert asteroids[i]['miss_distance']
assert asteroids[i]['orbiting_body']
assert asteroids[i]
assert type(asteroids[i]) is dict
|
flexible
|
{
"blob_id": "7dd4dc60b23c72ba450025bececb0e6d89df69c3",
"index": 8263,
"step-1": "<mask token>\n\n\ndef test_version():\n assert __version__ == '0.1.0'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_version():\n assert __version__ == '0.1.0'\n\n\n@pytest.mark.vcr()\ndef test_asteroid_closest_approach():\n asteroid_json = asteroid_closest_approach(25)\n asteroids = json.loads(asteroid_json)\n for i in range(0, len(asteroids) - 1):\n assert asteroids[i]['close_approach_date']\n assert asteroids[i]['close_approach_date_full']\n assert asteroids[i]['epoch_date_close_approach']\n assert asteroids[i]['miss_distance']\n assert asteroids[i]['orbiting_body']\n assert asteroids[i]\n assert type(asteroids[i]) is dict\n",
"step-3": "<mask token>\nload_dotenv()\n<mask token>\n\n\ndef test_version():\n assert __version__ == '0.1.0'\n\n\n@pytest.mark.vcr()\ndef test_asteroid_closest_approach():\n asteroid_json = asteroid_closest_approach(25)\n asteroids = json.loads(asteroid_json)\n for i in range(0, len(asteroids) - 1):\n assert asteroids[i]['close_approach_date']\n assert asteroids[i]['close_approach_date_full']\n assert asteroids[i]['epoch_date_close_approach']\n assert asteroids[i]['miss_distance']\n assert asteroids[i]['orbiting_body']\n assert asteroids[i]\n assert type(asteroids[i]) is dict\n",
"step-4": "from asteroidhunter import __version__\nimport unittest, requests, json, os, pytest\nfrom dotenv import load_dotenv\nload_dotenv()\nfrom asteroidhunter.asteroid_closest_approach import asteroid_closest_approach\n\n\ndef test_version():\n assert __version__ == '0.1.0'\n\n\n@pytest.mark.vcr()\ndef test_asteroid_closest_approach():\n asteroid_json = asteroid_closest_approach(25)\n asteroids = json.loads(asteroid_json)\n for i in range(0, len(asteroids) - 1):\n assert asteroids[i]['close_approach_date']\n assert asteroids[i]['close_approach_date_full']\n assert asteroids[i]['epoch_date_close_approach']\n assert asteroids[i]['miss_distance']\n assert asteroids[i]['orbiting_body']\n assert asteroids[i]\n assert type(asteroids[i]) is dict\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import IECoreScene
class TestMotionPrimitive( unittest.TestCase ) :
def test( self ) :
m = IECoreScene.MotionPrimitive()
self.assertTrue( m.isInstanceOf( "MotionPrimitive" ) )
self.assertTrue( m.isInstanceOf( "VisibleRenderable" ) )
self.assertEqual( m.keys(), [] )
self.assertEqual( m.values(), [] )
self.assertEqual( len( m ), 0 )
self.assertRaises( Exception, m.__setitem__, "notAFloat", IECoreScene.PointsPrimitive( 1 ) )
m[0] = IECoreScene.PointsPrimitive( 1 )
self.assertEqual( len( m ), 1 )
self.assertEqual( m.keys(), [ 0 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )
m[1] = IECoreScene.PointsPrimitive( 1 )
self.assertEqual( len( m ), 2 )
self.assertEqual( m.keys(), [ 0, 1 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ), IECoreScene.PointsPrimitive( 1 ) ] )
iface = IECore.IndexedIO.create( os.path.join( "test", "motionPrimitive.fio" ), IECore.IndexedIO.OpenMode.Write )
m.save( iface, "test" )
mm = IECore.Object.load( iface, "test" )
self.assertEqual( m, mm )
mmm = m.copy()
self.assertEqual( m, mmm )
del m[0]
self.assertEqual( len( m ), 1 )
self.assertEqual( m.keys(), [ 1 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )
del m[1]
self.assertEqual( m.keys(), [] )
self.assertEqual( m.values(), [] )
self.assertEqual( len( m ), 0 )
def testItems( self ) :
m = IECoreScene.MotionPrimitive()
m[0] = IECoreScene.PointsPrimitive( 1 )
m[1] = IECoreScene.PointsPrimitive( 2 )
self.assertEqual( m.items(), [ ( 0, IECoreScene.PointsPrimitive( 1 ) ), ( 1, IECoreScene.PointsPrimitive( 2 ) ) ] )
def testHash( self ) :
m = IECoreScene.MotionPrimitive()
m2 = IECoreScene.MotionPrimitive()
self.assertEqual( m.hash(), m2.hash() )
m[0] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
m2[0] = IECoreScene.SpherePrimitive()
self.assertEqual( m.hash(), m2.hash() )
m[1] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
m2[2] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
def tearDown( self ) :
if os.path.isfile( os.path.join( "test", "motionPrimitive.fio" ) ):
os.remove( os.path.join( "test", "motionPrimitive.fio" ) )
if __name__ == "__main__":
unittest.main()
|
normal
|
{
"blob_id": "d4c297af395581c6d955eb31a842ab86e599d23c",
"index": 4576,
"step-1": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n <mask token>\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import os\nimport unittest\nimport IECore\nimport IECoreScene\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "##########################################################################\n#\n# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright\n# notice, this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n#\n# * Neither the name of Image Engine Design nor the names of any\n# other contributors to this software may be used to endorse or\n# promote products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\n# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\n# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n#\n##########################################################################\n\nimport os\nimport unittest\n\nimport IECore\nimport IECoreScene\n\nclass TestMotionPrimitive( unittest.TestCase ) :\n\n\tdef test( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tself.assertTrue( m.isInstanceOf( \"MotionPrimitive\" ) )\n\t\tself.assertTrue( m.isInstanceOf( \"VisibleRenderable\" ) )\n\n\t\tself.assertEqual( m.keys(), [] )\n\t\tself.assertEqual( m.values(), [] )\n\t\tself.assertEqual( len( m ), 0 )\n\n\t\tself.assertRaises( Exception, m.__setitem__, \"notAFloat\", IECoreScene.PointsPrimitive( 1 ) )\n\n\t\tm[0] = IECoreScene.PointsPrimitive( 1 )\n\t\tself.assertEqual( len( m ), 1 )\n\t\tself.assertEqual( m.keys(), [ 0 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tm[1] = IECoreScene.PointsPrimitive( 1 )\n\t\tself.assertEqual( len( m ), 2 )\n\t\tself.assertEqual( m.keys(), [ 0, 1 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ), IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tiface = IECore.IndexedIO.create( os.path.join( \"test\", \"motionPrimitive.fio\" ), IECore.IndexedIO.OpenMode.Write )\n\t\tm.save( iface, \"test\" )\n\n\t\tmm = IECore.Object.load( iface, \"test\" )\n\t\tself.assertEqual( m, mm )\n\n\t\tmmm = m.copy()\n\t\tself.assertEqual( m, mmm )\n\n\t\tdel m[0]\n\t\tself.assertEqual( len( m ), 1 )\n\t\tself.assertEqual( m.keys(), [ 1 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tdel m[1]\n\t\tself.assertEqual( m.keys(), [] )\n\t\tself.assertEqual( m.values(), [] )\n\t\tself.assertEqual( len( m ), 0 )\n\n\tdef testItems( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tm[0] = IECoreScene.PointsPrimitive( 1 )\n\t\tm[1] = IECoreScene.PointsPrimitive( 2 )\n\t\tself.assertEqual( m.items(), [ ( 0, IECoreScene.PointsPrimitive( 
1 ) ), ( 1, IECoreScene.PointsPrimitive( 2 ) ) ] )\n\n\tdef testHash( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tm2 = IECoreScene.MotionPrimitive()\n\t\tself.assertEqual( m.hash(), m2.hash() )\n\n\t\tm[0] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\t\tm2[0] = IECoreScene.SpherePrimitive()\n\t\tself.assertEqual( m.hash(), m2.hash() )\n\n\t\tm[1] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\t\tm2[2] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\tdef tearDown( self ) :\n\n\t\tif os.path.isfile( os.path.join( \"test\", \"motionPrimitive.fio\" ) ):\n\t\t\tos.remove( os.path.join( \"test\", \"motionPrimitive.fio\" ) )\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import pygame
from math import sqrt, sin, cos
from numpy import arctan
from os import path
# try these colors or create your own!
# each valid color is a 3-tuple with values in the range [0, 255]
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
WHITEGRAY = (192, 192, 192)
RED = (255, 0, 0)
MIDRED = (192, 0, 0)
DARKRED = (128, 0, 0)
MAROON = (80, 0, 0)
GREEN = (0, 255, 0)
DARKGREEN = (0, 128, 0)
GREYGREEN = (0, 128, 128)
MINT = (51,153,102)
JADE = (0, 250, 154)
BLUE = (0, 0, 255)
NAVY = (0, 102, 204)
DARKBLUE = (0, 0, 128)
MIDBLUE = (0, 0, 192)
PINK = (255, 0, 255)
YELLOW = (255, 255, 0)
MIDYELLOW = (192, 192, 0)
MODESTYELLOW = (128, 128, 0)
CYAN = (0, 255, 255)
ORANGE = (255, 102, 0)
MIDORANGE = (192, 79, 0)
PURPLE = (128, 0, 128)
MIDPURPLE = (192, 0, 192)
sunset = [MIDORANGE, MIDRED, DARKRED, DARKBLUE]
ocean = [GREEN, BLUE, CYAN, PINK]
carousel = [RED, YELLOW, GREEN, YELLOW] # try running this with rotate enabled
summer = [GREEN, YELLOW, GREEN, BLUE]
#--------------CONFIGURATIONS----------------
# various configurations change the way the image is displayed
# feel free to play around and see how the image changes
aspect_ratio = 3840 / 2160 # set this to the aspect ratio of your screen
x = 1540 # width of the window
y = int(x / aspect_ratio) # height of the window
size = [x, y]
# Try out preset colorschemes or try out new ones
colors = ocean
background = BLACK
squares = 800 # number of squares drawn in the window
shade = True # creates a fading effect on the colors as the spiral moves outward
gradient = 1.5 # recommend 1.05 for dark colors (128-192) and 1.4 for light colors (255)
rotate = False # rotates colors around the spiral
same_colors = False # use the same color for all sides of each square in the spiral
curr_length = 4 # starting side length of the first square in the spiral
# determines how tightly the spiral is wound - the rate at which side lengths grow linearly
# use carefully, may cause divide by zero error for certain increments
adder = 4
#--------------HELPER FUNCTIONS---------------
distance = lambda p1, p2: sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) # distance between two points (currently unused)
bound = lambda x: 0 if x < 0 else min(255, x) # keep color values within [0,255]
def next_point(p1, p2):
diff_x = p1[0] - p2[0]
diff_y = p1[1] - p2[1]
#calculate next point using triangle geometry
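    # the new corner lies on the ray from p1 toward p2, at distance
    # curr_length from p1; curr_length grows by adder each square, so the
    # corners step steadily outward and trace a square spiral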
angle = arctan(abs(diff_x) / abs(diff_y))
new_diff_x = int(sin(angle) * curr_length)
new_diff_y = int(cos(angle) * curr_length)
new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x
new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y
return [new_x, new_y]
#--------------INITIALIZATION-----------------
pygame.init()
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Create a spiral drawing")
done = False
clock = pygame.time.Clock()
screen.fill(background)
#-----------------ARTWORK---------------------
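# corners of the innermost square, centered in the window
# (p1 top-left, p2 top-right, p3 bottom-right, p4 bottom-left)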
p1 = [x//2 - curr_length//2, y//2 - curr_length//2]
p2 = [x//2 + curr_length//2, y//2 - curr_length//2]
p3 = [x//2 + curr_length//2, y//2 + curr_length//2]
p4 = [x//2 - curr_length//2, y//2 + curr_length//2]
points = [p1, p2, p3, p4]
# endpoint of each corner's first segment (clockwise: right from top-left,
# down from top-right, left from bottom-right, up from bottom-left)
curr_length += adder
p12 = [p1[0] + curr_length, p1[1]]
p22 = [p2[0], p2[1] + curr_length]
p32 = [p3[0] - curr_length, p3[1]]
p42 = [p4[0], p4[1] - curr_length]
new_points = [p12, p22, p32, p42]
for p1, p2 in zip(points, new_points):
pygame.draw.line(screen, colors[1], p1, p2)
# every iteration draws a new square and updates the points
for j in range(squares):
curr_length += adder
points = new_points
new_points = [0, 0, 0, 0]
    if rotate and j % 40 == 0: # change colors every 40 squares
old = colors
colors = [old[3], old[0], old[1], old[2]] # shuffle colors to create rotating effect
# every iteration calculates a new point and draws line from points[i] to new point
for i in range(len(points)):
p1 = points[i]
p2 = points[(i+1)%4]
p1_new = next_point(p1,p2)
new_points[(i+1)%4] = p1_new
col = colors[0] if same_colors else colors[i]
fact = j * gradient if shade else 1 # with shade set, colors naturally fade to black
new_col = (bound(col[0] - fact), bound(col[1] - fact), bound(col[2] - fact))
pygame.draw.line(screen, new_col, p1, p1_new)
pygame.display.flip()
#----------------EVENT LOOP-------------------
while not done:
clock.tick(10)
for event in pygame.event.get():
if event.type == pygame.QUIT: # If user clicked close
done = True
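# window closed: minimize it and ask on the console whether to save the image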
pygame.display.iconify()
filename = input("Enter filename with no extension or 0 to quit: ")
if filename != "0":
pygame.image.save(screen, path.join("images", filename + ".jpeg"))
|
normal
|
{
"blob_id": "838279b4f8d9e656c2f90ff06eaff3bd9c12bbef",
"index": 3265,
"step-1": "<mask token>\n\n\ndef next_point(p1, p2):\n diff_x = p1[0] - p2[0]\n diff_y = p1[1] - p2[1]\n angle = arctan(abs(diff_x) / abs(diff_y))\n new_diff_x = int(sin(angle) * curr_length)\n new_diff_y = int(cos(angle) * curr_length)\n new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x\n new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y\n return [new_x, new_y]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef next_point(p1, p2):\n diff_x = p1[0] - p2[0]\n diff_y = p1[1] - p2[1]\n angle = arctan(abs(diff_x) / abs(diff_y))\n new_diff_x = int(sin(angle) * curr_length)\n new_diff_y = int(cos(angle) * curr_length)\n new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x\n new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y\n return [new_x, new_y]\n\n\npygame.init()\n<mask token>\npygame.display.set_caption('Create a spiral drawing')\n<mask token>\nscreen.fill(background)\n<mask token>\ncurr_length += adder\n<mask token>\nfor p1, p2 in zip(points, new_points):\n pygame.draw.line(screen, colors[1], p1, p2)\nfor j in range(squares):\n curr_length += adder\n points = new_points\n new_points = [0, 0, 0, 0]\n if rotate and j % 40 == 0:\n old = colors\n colors = [old[3], old[0], old[1], old[2]]\n for i in range(len(points)):\n p1 = points[i]\n p2 = points[(i + 1) % 4]\n p1_new = next_point(p1, p2)\n new_points[(i + 1) % 4] = p1_new\n col = colors[0] if same_colors else colors[i]\n fact = j * gradient if shade else 1\n new_col = bound(col[0] - fact), bound(col[1] - fact), bound(col[2] -\n fact)\n pygame.draw.line(screen, new_col, p1, p1_new)\npygame.display.flip()\nwhile not done:\n clock.tick(10)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n done = True\npygame.display.iconify()\n<mask token>\nif filename != '0':\n pygame.image.save(screen, path.join('images', filename + '.jpeg'))\n",
"step-3": "<mask token>\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nWHITEGRAY = 192, 192, 192\nRED = 255, 0, 0\nMIDRED = 192, 0, 0\nDARKRED = 128, 0, 0\nMAROON = 80, 0, 0\nGREEN = 0, 255, 0\nDARKGREEN = 0, 128, 0\nGREYGREEN = 0, 128, 128\nMINT = 51, 153, 102\nJADE = 0, 250, 154\nBLUE = 0, 0, 255\nNAVY = 0, 102, 204\nDARKBLUE = 0, 0, 128\nMIDBLUE = 0, 0, 192\nPINK = 255, 0, 255\nYELLOW = 255, 255, 0\nMIDYELLOW = 192, 192, 0\nMODESTYELLOW = 128, 128, 0\nCYAN = 0, 255, 255\nORANGE = 255, 102, 0\nMIDORANGE = 192, 79, 0\nPURPLE = 128, 0, 128\nMIDPURPLE = 192, 0, 192\nsunset = [MIDORANGE, MIDRED, DARKRED, DARKBLUE]\nocean = [GREEN, BLUE, CYAN, PINK]\ncarousel = [RED, YELLOW, GREEN, YELLOW]\nsummer = [GREEN, YELLOW, GREEN, BLUE]\naspect_ratio = 3840 / 2160\nx = 1540\ny = int(x / aspect_ratio)\nsize = [x, y]\ncolors = ocean\nbackground = BLACK\nsquares = 800\nshade = True\ngradient = 1.5\nrotate = False\nsame_colors = False\ncurr_length = 4\nadder = 4\ndistance = lambda p1, p2: sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)\nbound = lambda x: 0 if x < 0 else min(255, x)\n\n\ndef next_point(p1, p2):\n diff_x = p1[0] - p2[0]\n diff_y = p1[1] - p2[1]\n angle = arctan(abs(diff_x) / abs(diff_y))\n new_diff_x = int(sin(angle) * curr_length)\n new_diff_y = int(cos(angle) * curr_length)\n new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x\n new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y\n return [new_x, new_y]\n\n\npygame.init()\nscreen = pygame.display.set_mode(size)\npygame.display.set_caption('Create a spiral drawing')\ndone = False\nclock = pygame.time.Clock()\nscreen.fill(background)\np1 = [x // 2 - curr_length // 2, y // 2 - curr_length // 2]\np2 = [x // 2 + curr_length // 2, y // 2 - curr_length // 2]\np3 = [x // 2 + curr_length // 2, y // 2 + curr_length // 2]\np4 = [x // 2 - curr_length // 2, y // 2 + curr_length // 2]\npoints = [p1, p2, p3, p4]\ncurr_length += adder\np12 = [p1[0] + curr_length, p1[1]]\np22 = [p2[0], p2[1] + curr_length]\np32 = [p3[0] - curr_length, p3[1]]\np42 = [p4[0], p4[1] - curr_length]\nnew_points = [p12, p22, p32, p42]\nfor p1, p2 in zip(points, new_points):\n pygame.draw.line(screen, colors[1], p1, p2)\nfor j in range(squares):\n curr_length += adder\n points = new_points\n new_points = [0, 0, 0, 0]\n if rotate and j % 40 == 0:\n old = colors\n colors = [old[3], old[0], old[1], old[2]]\n for i in range(len(points)):\n p1 = points[i]\n p2 = points[(i + 1) % 4]\n p1_new = next_point(p1, p2)\n new_points[(i + 1) % 4] = p1_new\n col = colors[0] if same_colors else colors[i]\n fact = j * gradient if shade else 1\n new_col = bound(col[0] - fact), bound(col[1] - fact), bound(col[2] -\n fact)\n pygame.draw.line(screen, new_col, p1, p1_new)\npygame.display.flip()\nwhile not done:\n clock.tick(10)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n done = True\npygame.display.iconify()\nfilename = input('Enter filename with no extension or 0 to quit: ')\nif filename != '0':\n pygame.image.save(screen, path.join('images', filename + '.jpeg'))\n",
"step-4": "import pygame\nfrom math import sqrt, sin, cos\nfrom numpy import arctan\nfrom os import path\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nWHITEGRAY = 192, 192, 192\nRED = 255, 0, 0\nMIDRED = 192, 0, 0\nDARKRED = 128, 0, 0\nMAROON = 80, 0, 0\nGREEN = 0, 255, 0\nDARKGREEN = 0, 128, 0\nGREYGREEN = 0, 128, 128\nMINT = 51, 153, 102\nJADE = 0, 250, 154\nBLUE = 0, 0, 255\nNAVY = 0, 102, 204\nDARKBLUE = 0, 0, 128\nMIDBLUE = 0, 0, 192\nPINK = 255, 0, 255\nYELLOW = 255, 255, 0\nMIDYELLOW = 192, 192, 0\nMODESTYELLOW = 128, 128, 0\nCYAN = 0, 255, 255\nORANGE = 255, 102, 0\nMIDORANGE = 192, 79, 0\nPURPLE = 128, 0, 128\nMIDPURPLE = 192, 0, 192\nsunset = [MIDORANGE, MIDRED, DARKRED, DARKBLUE]\nocean = [GREEN, BLUE, CYAN, PINK]\ncarousel = [RED, YELLOW, GREEN, YELLOW]\nsummer = [GREEN, YELLOW, GREEN, BLUE]\naspect_ratio = 3840 / 2160\nx = 1540\ny = int(x / aspect_ratio)\nsize = [x, y]\ncolors = ocean\nbackground = BLACK\nsquares = 800\nshade = True\ngradient = 1.5\nrotate = False\nsame_colors = False\ncurr_length = 4\nadder = 4\ndistance = lambda p1, p2: sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)\nbound = lambda x: 0 if x < 0 else min(255, x)\n\n\ndef next_point(p1, p2):\n diff_x = p1[0] - p2[0]\n diff_y = p1[1] - p2[1]\n angle = arctan(abs(diff_x) / abs(diff_y))\n new_diff_x = int(sin(angle) * curr_length)\n new_diff_y = int(cos(angle) * curr_length)\n new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x\n new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y\n return [new_x, new_y]\n\n\npygame.init()\nscreen = pygame.display.set_mode(size)\npygame.display.set_caption('Create a spiral drawing')\ndone = False\nclock = pygame.time.Clock()\nscreen.fill(background)\np1 = [x // 2 - curr_length // 2, y // 2 - curr_length // 2]\np2 = [x // 2 + curr_length // 2, y // 2 - curr_length // 2]\np3 = [x // 2 + curr_length // 2, y // 2 + curr_length // 2]\np4 = [x // 2 - curr_length // 2, y // 2 + curr_length // 2]\npoints = [p1, p2, p3, p4]\ncurr_length += adder\np12 = [p1[0] + curr_length, p1[1]]\np22 = [p2[0], p2[1] + curr_length]\np32 = [p3[0] - curr_length, p3[1]]\np42 = [p4[0], p4[1] - curr_length]\nnew_points = [p12, p22, p32, p42]\nfor p1, p2 in zip(points, new_points):\n pygame.draw.line(screen, colors[1], p1, p2)\nfor j in range(squares):\n curr_length += adder\n points = new_points\n new_points = [0, 0, 0, 0]\n if rotate and j % 40 == 0:\n old = colors\n colors = [old[3], old[0], old[1], old[2]]\n for i in range(len(points)):\n p1 = points[i]\n p2 = points[(i + 1) % 4]\n p1_new = next_point(p1, p2)\n new_points[(i + 1) % 4] = p1_new\n col = colors[0] if same_colors else colors[i]\n fact = j * gradient if shade else 1\n new_col = bound(col[0] - fact), bound(col[1] - fact), bound(col[2] -\n fact)\n pygame.draw.line(screen, new_col, p1, p1_new)\npygame.display.flip()\nwhile not done:\n clock.tick(10)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n done = True\npygame.display.iconify()\nfilename = input('Enter filename with no extension or 0 to quit: ')\nif filename != '0':\n pygame.image.save(screen, path.join('images', filename + '.jpeg'))\n",
"step-5": "import pygame\nfrom math import sqrt, sin, cos\nfrom numpy import arctan\nfrom os import path\n\n# try these colors or create your own!\n# each valid color is 3-tuple with values in range [0, 255]\nBLACK = (0, 0, 0)\nWHITE = (255, 255, 255)\nWHITEGRAY = (192, 192, 192)\nRED = (255, 0, 0)\nMIDRED = (192, 0, 0)\nDARKRED = (128, 0, 0)\nMAROON = (80, 0, 0)\nGREEN = (0, 255, 0)\nDARKGREEN = (0, 128, 0)\nGREYGREEN = (0, 128, 128)\nMINT = (51,153,102)\nJADE = (0, 250, 154)\nBLUE = (0, 0, 255)\nNAVY = (0, 102, 204)\nDARKBLUE = (0, 0, 128)\nMIDBLUE = (0, 0, 192)\nPINK = (255, 0, 255)\nYELLOW = (255, 255, 0)\nMIDYELLOW = (192, 192, 0)\nMODESTYELLOW = (128, 128, 0)\nCYAN = (0, 255, 255)\nORANGE = (255, 102, 0)\nMIDORANGE = (192, 79, 0)\nPURPLE = (128, 0, 128)\nMIDPURPLE = (192, 0, 192)\nsunset = [MIDORANGE, MIDRED, DARKRED, DARKBLUE]\nocean = [GREEN, BLUE, CYAN, PINK]\ncarousel = [RED, YELLOW, GREEN, YELLOW] # trying running this with rotate\nsummer = [GREEN, YELLOW, GREEN, BLUE]\n\n#--------------CONFIGURATIONS----------------\n# various configurations change the way image is displayed\n# feel free to play around and see how the image changes\naspect_ratio = 3840 / 2160 # set this to the aspect ratio of your screen\nx = 1540 # width of the window\ny = int(x / aspect_ratio) # height of your screen\nsize = [x, y]\n# Try out preset colorschemes or try out new ones\ncolors = ocean\nbackground = BLACK\nsquares = 800 # number of squares drawn in the window\nshade = True # creates fading effect on the colors as spiral moves outward\ngradient = 1.5 # recommend 1.05 for dark colors (128-192) and 1.4 for light colors (255)\nrotate = False # rotates colors around the spiral\nsame_colors = False # use the same color for all sides of a each square in the spiral\ncurr_length = 4 # starting side length of the first square in the spiral\n# determines how tightly spiral is wound - rate at which the side lengths grow linearly\n# use carefully, may cause divide by zero error for certain increments\nadder = 4\n\n#--------------HELPER FUNCTIONS---------------\ndistance = lambda p1, p2: sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) # get distance b/t two points\nbound = lambda x: 0 if x < 0 else min(255, x) # keep color values within [0,255]\n\ndef next_point(p1, p2):\n diff_x = p1[0] - p2[0]\n diff_y = p1[1] - p2[1] \n\n #calculate next point using triangle geometry\n angle = arctan(abs(diff_x) / abs(diff_y))\n new_diff_x = int(sin(angle) * curr_length)\n new_diff_y = int(cos(angle) * curr_length)\n new_x = p1[0] + new_diff_x if diff_x < 0 else p1[0] - new_diff_x \n new_y = p1[1] + new_diff_y if diff_y < 0 else p1[1] - new_diff_y \n return [new_x, new_y]\n\n#--------------INITIALIZATION-----------------\npygame.init()\nscreen = pygame.display.set_mode(size)\npygame.display.set_caption(\"Create a spiral drawing\")\ndone = False\nclock = pygame.time.Clock()\nscreen.fill(background)\n\n#-----------------ARTWORK---------------------\np1 = [x//2 - curr_length//2, y//2 - curr_length//2]\np2 = [x//2 + curr_length//2, y//2 - curr_length//2]\np3 = [x//2 + curr_length//2, y//2 + curr_length//2]\np4 = [x//2 - curr_length//2, y//2 + curr_length//2]\npoints = [p1, p2, p3, p4]\n\n#end of each line\ncurr_length += adder\np12 = [p1[0] + curr_length, p1[1]]\np22 = [p2[0], p2[1] + curr_length]\np32 = [p3[0] - curr_length, p3[1]]\np42 = [p4[0], p4[1] - curr_length]\nnew_points = [p12, p22, p32, p42]\n\nfor p1, p2 in zip(points, new_points):\n pygame.draw.line(screen, colors[1], p1, p2)\n\n# every iteration draws a new square 
and updates the points\nfor j in range(squares):\n curr_length += adder\n points = new_points\n new_points = [0, 0, 0, 0]\n if rotate and j % 40 == 0: # change colors every 10 squares\n old = colors\n colors = [old[3], old[0], old[1], old[2]] # shuffle colors to create rotating effect\n \n # every iteration calculates a new point and draws line from points[i] to new point\n for i in range(len(points)):\n p1 = points[i]\n p2 = points[(i+1)%4]\n p1_new = next_point(p1,p2)\n \n new_points[(i+1)%4] = p1_new\n col = colors[0] if same_colors else colors[i]\n fact = j * gradient if shade else 1 # with shade set, colors naturally fade to black\n new_col = (bound(col[0] - fact), bound(col[1] - fact), bound(col[2] - fact))\n pygame.draw.line(screen, new_col, p1, p1_new)\npygame.display.flip()\n\n#----------------EVENT LOOP-------------------\nwhile not done:\n clock.tick(10)\n for event in pygame.event.get(): \n if event.type == pygame.QUIT: # If user clicked close\n done = True\npygame.display.iconify()\n\nfilename = input(\"Enter filename with no extension or 0 to quit: \")\nif filename != \"0\":\n pygame.image.save(screen, path.join(\"images\", filename + \".jpeg\")) ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
greeting.pack()
<|reserved_special_token_0|>
guess.pack()
<|reserved_special_token_0|>
submit.pack()
window.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
secret = random.randint(1, 100)
window = Tkinter.Tk()
greeting = Tkinter.Label(window, text='Guess the secret number!')
greeting.pack()
guess = Tkinter.Entry(window)
guess.pack()
submit = Tkinter.Button(window, text='Submit')
submit.pack()
window.mainloop()
<|reserved_special_token_1|>
import Tkinter
import random
secret = random.randint(1, 100)
window = Tkinter.Tk()
greeting = Tkinter.Label(window, text='Guess the secret number!')
greeting.pack()
guess = Tkinter.Entry(window)
guess.pack()
submit = Tkinter.Button(window, text='Submit')
submit.pack()
window.mainloop()
<|reserved_special_token_1|>
import Tkinter
import random
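# note: 'Tkinter' is the Python 2 module name; on Python 3 it is 'tkinter'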
secret = random.randint(1, 100)
### TKINTER ELEMENTS ###
window = Tkinter.Tk()
# greeting text
greeting = Tkinter.Label(window, text="Guess the secret number!")
greeting.pack()
# guess entry field
guess = Tkinter.Entry(window)
guess.pack()
# submit button
submit = Tkinter.Button(window, text="Submit") # add a submit button; no command callback is attached yet, so clicking it does nothing
submit.pack()
window.mainloop()
|
flexible
|
{
"blob_id": "59eb705d6d388de9afbcc0df3003f4d4f45f1fbd",
"index": 3989,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ngreeting.pack()\n<mask token>\nguess.pack()\n<mask token>\nsubmit.pack()\nwindow.mainloop()\n",
"step-3": "<mask token>\nsecret = random.randint(1, 100)\nwindow = Tkinter.Tk()\ngreeting = Tkinter.Label(window, text='Guess the secret number!')\ngreeting.pack()\nguess = Tkinter.Entry(window)\nguess.pack()\nsubmit = Tkinter.Button(window, text='Submit')\nsubmit.pack()\nwindow.mainloop()\n",
"step-4": "import Tkinter\nimport random\nsecret = random.randint(1, 100)\nwindow = Tkinter.Tk()\ngreeting = Tkinter.Label(window, text='Guess the secret number!')\ngreeting.pack()\nguess = Tkinter.Entry(window)\nguess.pack()\nsubmit = Tkinter.Button(window, text='Submit')\nsubmit.pack()\nwindow.mainloop()\n",
"step-5": "import Tkinter\nimport random\n\nsecret = random.randint(1, 100)\n\n### TKINTER ELEMENTS ###\n\nwindow = Tkinter.Tk()\n\n# greeting text\ngreeting = Tkinter.Label(window, text=\"Guess the secret number!\")\ngreeting.pack()\n\n# guess entry field\nguess = Tkinter.Entry(window)\nguess.pack()\n\n# submit button\nsubmit = Tkinter.Button(window, text=\"Submit\") # add a button, but this button is doing nothing\nsubmit.pack()\n\nwindow.mainloop()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ClusterMonitor:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ClusterMonitor:
def __init__(self, cluster):
self.cluster = cluster
self.token = self.cluster.get_cluster_token()
self.cluster.change_to()
master = self.cluster.group_set.get(name='master').hosts.first()
configuration = kubernetes.client.Configuration()
configuration.api_key_prefix['authorization'] = 'Bearer'
configuration.api_key['authorization'] = self.token
print('---token----')
print(self.token)
configuration.debug = True
configuration.host = 'https://' + master.ip + ':6443'
configuration.verify_ssl = False
print('https://' + master.ip + ':6443')
self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.
ApiClient(configuration))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ClusterMonitor:
def __init__(self, cluster):
self.cluster = cluster
self.token = self.cluster.get_cluster_token()
self.cluster.change_to()
master = self.cluster.group_set.get(name='master').hosts.first()
configuration = kubernetes.client.Configuration()
configuration.api_key_prefix['authorization'] = 'Bearer'
configuration.api_key['authorization'] = self.token
print('---token----')
print(self.token)
configuration.debug = True
configuration.host = 'https://' + master.ip + ':6443'
configuration.verify_ssl = False
print('https://' + master.ip + ':6443')
self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.
ApiClient(configuration))
def list_pods(self):
pods = self.api_instance.list_pod_for_all_namespaces()
return pods
<|reserved_special_token_1|>
import kubernetes.client
from kubernetes.client.rest import ApiException
from pprint import pprint
from kubeops_api.models.cluster import Cluster
class ClusterMonitor:
def __init__(self, cluster):
self.cluster = cluster
self.token = self.cluster.get_cluster_token()
self.cluster.change_to()
master = self.cluster.group_set.get(name='master').hosts.first()
configuration = kubernetes.client.Configuration()
configuration.api_key_prefix['authorization'] = 'Bearer'
configuration.api_key['authorization'] = self.token
print('---token----')
print(self.token)
configuration.debug = True
configuration.host = 'https://' + master.ip + ':6443'
configuration.verify_ssl = False
print('https://' + master.ip + ':6443')
self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.
ApiClient(configuration))
def list_pods(self):
pods = self.api_instance.list_pod_for_all_namespaces()
return pods
<|reserved_special_token_1|>
import kubernetes.client
from kubernetes.client.rest import ApiException
from pprint import pprint
from kubeops_api.models.cluster import Cluster
class ClusterMonitor():
def __init__(self,cluster):
self.cluster = cluster
self.token = self.cluster.get_cluster_token()
self.cluster.change_to()
master = self.cluster.group_set.get(name='master').hosts.first()
configuration = kubernetes.client.Configuration()
configuration.api_key_prefix['authorization'] = 'Bearer'
configuration.api_key['authorization'] = self.token
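        # api_key_prefix + api_key make the client send
        # "Authorization: Bearer <token>" with every request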
print('---token----')
print(self.token)
configuration.debug = True
configuration.host = 'https://'+master.ip+":6443"
configuration.verify_ssl = False
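        # TLS verification is disabled here; to verify instead, point
        # configuration.ssl_ca_cert at the cluster's CA certificate bundle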
print('https://'+master.ip+":6443")
self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))
def list_pods(self):
pods = self.api_instance.list_pod_for_all_namespaces()
return pods
|
flexible
|
{
"blob_id": "da41f26489c477e0df9735606457bd4ee4e5a396",
"index": 4465,
"step-1": "<mask token>\n\n\nclass ClusterMonitor:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ClusterMonitor:\n\n def __init__(self, cluster):\n self.cluster = cluster\n self.token = self.cluster.get_cluster_token()\n self.cluster.change_to()\n master = self.cluster.group_set.get(name='master').hosts.first()\n configuration = kubernetes.client.Configuration()\n configuration.api_key_prefix['authorization'] = 'Bearer'\n configuration.api_key['authorization'] = self.token\n print('---token----')\n print(self.token)\n configuration.debug = True\n configuration.host = 'https://' + master.ip + ':6443'\n configuration.verify_ssl = False\n print('https://' + master.ip + ':6443')\n self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.\n ApiClient(configuration))\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ClusterMonitor:\n\n def __init__(self, cluster):\n self.cluster = cluster\n self.token = self.cluster.get_cluster_token()\n self.cluster.change_to()\n master = self.cluster.group_set.get(name='master').hosts.first()\n configuration = kubernetes.client.Configuration()\n configuration.api_key_prefix['authorization'] = 'Bearer'\n configuration.api_key['authorization'] = self.token\n print('---token----')\n print(self.token)\n configuration.debug = True\n configuration.host = 'https://' + master.ip + ':6443'\n configuration.verify_ssl = False\n print('https://' + master.ip + ':6443')\n self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.\n ApiClient(configuration))\n\n def list_pods(self):\n pods = self.api_instance.list_pod_for_all_namespaces()\n return pods\n",
"step-4": "import kubernetes.client\nfrom kubernetes.client.rest import ApiException\nfrom pprint import pprint\nfrom kubeops_api.models.cluster import Cluster\n\n\nclass ClusterMonitor:\n\n def __init__(self, cluster):\n self.cluster = cluster\n self.token = self.cluster.get_cluster_token()\n self.cluster.change_to()\n master = self.cluster.group_set.get(name='master').hosts.first()\n configuration = kubernetes.client.Configuration()\n configuration.api_key_prefix['authorization'] = 'Bearer'\n configuration.api_key['authorization'] = self.token\n print('---token----')\n print(self.token)\n configuration.debug = True\n configuration.host = 'https://' + master.ip + ':6443'\n configuration.verify_ssl = False\n print('https://' + master.ip + ':6443')\n self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.\n ApiClient(configuration))\n\n def list_pods(self):\n pods = self.api_instance.list_pod_for_all_namespaces()\n return pods\n",
"step-5": "import kubernetes.client\nfrom kubernetes.client.rest import ApiException\nfrom pprint import pprint\nfrom kubeops_api.models.cluster import Cluster\n\nclass ClusterMonitor():\n\n def __init__(self,cluster):\n self.cluster = cluster\n self.token = self.cluster.get_cluster_token()\n self.cluster.change_to()\n master = self.cluster.group_set.get(name='master').hosts.first()\n configuration = kubernetes.client.Configuration()\n configuration.api_key_prefix['authorization'] = 'Bearer'\n configuration.api_key['authorization'] = self.token\n print('---token----')\n print(self.token)\n configuration.debug = True\n configuration.host = 'https://'+master.ip+\":6443\"\n configuration.verify_ssl = False\n print('https://'+master.ip+\":6443\")\n self.api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))\n\n def list_pods(self):\n pods = self.api_instance.list_pod_for_all_namespaces()\n return pods\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class TestForms(TestCase):
<|reserved_special_token_0|>
def test_wrong_data_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() + timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() + timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() + timedelta(days=
2000 * 5), 'contract_date': date.today() + timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() + timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_wrong_data_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_wrong_file_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.aboba')
print(form.errors)
print('test_wrong_file_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_main_researcher_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_main_researcher_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_char_fields_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': None,
'description': None, 'main_researcher': 1, 'ver_bio': None,
'version': None, 'cast_researcher_date': date.today() -
timedelta(days=2000 * 5), 'accept_research_version': None,
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': None,
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': None, 'another_doc_version':
None, 'another_doc_date': date.today() - timedelta(days=
2000 * 5)}, files={'another_doc': SimpleUploadedFile(
'another_doc', testfile), 'contract': SimpleUploadedFile(
'contract', testfile), 'advertising': SimpleUploadedFile(
'advertising', testfile), 'write_objects':
SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research',
testfile), 'accept_research': SimpleUploadedFile(
'accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_char_fields_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestForms(TestCase):
<|reserved_special_token_0|>
def test_wrong_data_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() + timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() + timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() + timedelta(days=
2000 * 5), 'contract_date': date.today() + timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() + timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_wrong_data_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_wrong_file_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.aboba')
print(form.errors)
print('test_wrong_file_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_main_researcher_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_main_researcher_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_char_fields_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': None,
'description': None, 'main_researcher': 1, 'ver_bio': None,
'version': None, 'cast_researcher_date': date.today() -
timedelta(days=2000 * 5), 'accept_research_version': None,
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': None,
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': None, 'another_doc_version':
None, 'another_doc_date': date.today() - timedelta(days=
2000 * 5)}, files={'another_doc': SimpleUploadedFile(
'another_doc', testfile), 'contract': SimpleUploadedFile(
'contract', testfile), 'advertising': SimpleUploadedFile(
'advertising', testfile), 'write_objects':
SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research',
testfile), 'accept_research': SimpleUploadedFile(
'accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_char_fields_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_date_fields_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': None, 'accept_research_version':
'Тестовая версия', 'accept_research_date': None,
'protocol_research_version': 'Тестовая версия',
'protocol_research_date': None, 'contract_date': None,
'name_another_doc': 'Тест', 'another_doc_version':
'Тестовая', 'another_doc_date': None}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_date_fields_ResearchFormMKI_form')
self.assertTrue(form.is_valid())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestForms(TestCase):
def test_valid_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_valid_ResearchFormMKI_form')
self.assertTrue(form.is_valid())
def test_wrong_data_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() + timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() + timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() + timedelta(days=
2000 * 5), 'contract_date': date.today() + timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() + timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_wrong_data_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_wrong_file_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.aboba')
print(form.errors)
print('test_wrong_file_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_main_researcher_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_main_researcher_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_char_fields_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': None,
'description': None, 'main_researcher': 1, 'ver_bio': None,
'version': None, 'cast_researcher_date': date.today() -
timedelta(days=2000 * 5), 'accept_research_version': None,
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': None,
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': None, 'another_doc_version':
None, 'another_doc_date': date.today() - timedelta(days=
2000 * 5)}, files={'another_doc': SimpleUploadedFile(
'another_doc', testfile), 'contract': SimpleUploadedFile(
'contract', testfile), 'advertising': SimpleUploadedFile(
'advertising', testfile), 'write_objects':
SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research',
testfile), 'accept_research': SimpleUploadedFile(
'accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_char_fields_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_date_fields_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': None, 'accept_research_version':
'Тестовая версия', 'accept_research_date': None,
'protocol_research_version': 'Тестовая версия',
'protocol_research_date': None, 'contract_date': None,
'name_another_doc': 'Тест', 'another_doc_version':
'Тестовая', 'another_doc_date': None}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_date_fields_ResearchFormMKI_form')
self.assertTrue(form.is_valid())
<|reserved_special_token_1|>
from django.test import TestCase
from django.core.files import File
from ResearchManage.forms import ResearchFormMKI
from django.test import Client
from unittest import TestCase, mock
from datetime import date, timedelta
from django.core.files.uploadedfile import SimpleUploadedFile
import os
class TestForms(TestCase):
def test_valid_ResearchFormMKI_form(self):
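        # note: os.path.abspath(os.curdir) + 'Test.txt' has no path separator,
        # so the scratch file lands beside the working directory
        # (e.g. '/parent/cwdTest.txt'), not inside it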
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_valid_ResearchFormMKI_form')
self.assertTrue(form.is_valid())
def test_wrong_data_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() + timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() + timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() + timedelta(days=
2000 * 5), 'contract_date': date.today() + timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() + timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_wrong_data_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_wrong_file_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.aboba')
print(form.errors)
print('test_wrong_file_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_main_researcher_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': date.today() - timedelta(days=2000 *
5), 'accept_research_version': 'Тестовая версия',
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': 'Тестовая версия',
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': 'Тест',
'another_doc_version': 'Тестовая', 'another_doc_date': date
.today() - timedelta(days=2000 * 5)}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_main_researcher_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_char_fields_format_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': None,
'description': None, 'main_researcher': 1, 'ver_bio': None,
'version': None, 'cast_researcher_date': date.today() -
timedelta(days=2000 * 5), 'accept_research_version': None,
'accept_research_date': date.today() - timedelta(days=2000 *
5), 'protocol_research_version': None,
'protocol_research_date': date.today() - timedelta(days=
2000 * 5), 'contract_date': date.today() - timedelta(days=
2000 * 5), 'name_another_doc': None, 'another_doc_version':
None, 'another_doc_date': date.today() - timedelta(days=
2000 * 5)}, files={'another_doc': SimpleUploadedFile(
'another_doc', testfile), 'contract': SimpleUploadedFile(
'contract', testfile), 'advertising': SimpleUploadedFile(
'advertising', testfile), 'write_objects':
SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research',
testfile), 'accept_research': SimpleUploadedFile(
'accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_char_fields_format_ResearchFormMKI_form')
self.assertFalse(form.is_valid())
def test_empty_date_fields_ResearchFormMKI_form(self):
with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:
f.write(b'ABOBA')
with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:
testfile = f.read()
form = ResearchFormMKI(data={'protocol_number': '224',
'description': 'Ну мы тут тестим тесты', 'main_researcher':
1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',
'cast_researcher_date': None, 'accept_research_version':
'Тестовая версия', 'accept_research_date': None,
'protocol_research_version': 'Тестовая версия',
'protocol_research_date': None, 'contract_date': None,
'name_another_doc': 'Тест', 'another_doc_version':
'Тестовая', 'another_doc_date': None}, files={'another_doc':
SimpleUploadedFile('another_doc', testfile), 'contract':
SimpleUploadedFile('contract', testfile), 'advertising':
SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects',
testfile), 'protocol_research': SimpleUploadedFile(
'protocol_research', testfile), 'accept_research':
SimpleUploadedFile('accept_research', testfile), 'form_inf':
SimpleUploadedFile('form_inf', testfile), 'cast_researcher':
SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile
), 'document': SimpleUploadedFile('document', testfile)})
os.remove(os.path.abspath(os.curdir) + 'Test.txt')
print(form.errors)
print('test_empty_date_fields_ResearchFormMKI_form')
self.assertTrue(form.is_valid())
<|reserved_special_token_1|>
from django.test import TestCase
from django.core.files import File
from ResearchManage.forms import ResearchFormMKI
from django.test import Client
from unittest import TestCase, mock
from datetime import date, timedelta
from django.core.files.uploadedfile import SimpleUploadedFile
import os
# Create your tests here.
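# All tests below share the same fixture pattern: write a throwaway
# 'Test.txt' (or 'Test.aboba' for the file-format test), read its bytes
# back, wrap those bytes in a SimpleUploadedFile for every file field of
# ResearchFormMKI, validate the form, and finally delete the file.
# Editorial note: os.path.abspath(os.curdir) + 'Test.txt' concatenates
# without a path separator, so the file lands next to the working
# directory (e.g. /home/userTest.txt when cwd is /home/user);
# os.path.join would be the conventional, safer choice.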
class TestForms(TestCase):
    def test_valid_ResearchFormMKI_form(self): # Test of a valid initial application submission form
with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':'224',
'description':'Ну мы тут тестим тесты',
'main_researcher':1,
'ver_bio':'Тесты тестов',
'version':'Тестовая',
'cast_researcher_date':date.today()-timedelta(days=2000*5),
'accept_research_version':'Тестовая версия',
'accept_research_date':date.today()-timedelta(days=2000*5),
'protocol_research_version':'Тестовая версия',
'protocol_research_date':date.today()-timedelta(days=2000*5),
'contract_date':date.today()-timedelta(days=2000*5),
'name_another_doc':'Тест',
'another_doc_version':'Тестовая',
'another_doc_date':date.today()-timedelta(days=2000*5)
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+"Test.txt")
print(form.errors)
print("test_valid_ResearchFormMKI_form")
self.assertTrue(form.is_valid())
    def test_wrong_data_ResearchFormMKI_form(self): # Test of the initial application form with document dates > today. Failing test case as of this writing!
with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':'224',
'description':'Ну мы тут тестим тесты',
'main_researcher':1,
'ver_bio':'Тесты тестов',
'version':'Тестовая',
'cast_researcher_date':date.today()+timedelta(days=2000*5),
'accept_research_version':'Тестовая версия',
'accept_research_date':date.today()+timedelta(days=2000*5),
'protocol_research_version':'Тестовая версия',
'protocol_research_date':date.today()+timedelta(days=2000*5),
'contract_date':date.today()+timedelta(days=2000*5),
'name_another_doc':'Тест',
'another_doc_version':'Тестовая',
'another_doc_date':date.today()+timedelta(days=2000*5)
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+'Test.txt')
print(form.errors)
print("test_wrong_data_ResearchFormMKI_form")
self.assertFalse(form.is_valid())
    def test_wrong_file_format_ResearchFormMKI_form(self): # Test of the initial application form with a nonexistent file type. Failing test case as of this writing!
        # TODO: extend to each individual field (see the sketch after this test)
with open(os.path.abspath(os.curdir)+'Test.aboba', 'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.aboba','rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':'224',
'description':'Ну мы тут тестим тесты',
'main_researcher':1,
'ver_bio':'Тесты тестов',
'version':'Тестовая',
'cast_researcher_date':date.today()-timedelta(days=2000*5),
'accept_research_version':'Тестовая версия',
'accept_research_date':date.today()-timedelta(days=2000*5),
'protocol_research_version':'Тестовая версия',
'protocol_research_date':date.today()-timedelta(days=2000*5),
'contract_date':date.today()-timedelta(days=2000*5),
'name_another_doc':'Тест',
'another_doc_version':'Тестовая',
'another_doc_date':date.today()-timedelta(days=2000*5)
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+'Test.aboba')
print(form.errors)
print("test_wrong_file_format_ResearchFormMKI_form")
self.assertFalse(form.is_valid())
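    # The TODO above asks to extend the format check to each individual
    # field. A minimal sketch of that idea using unittest's subTest, kept
    # as a comment because `valid_data`/`valid_files` are hypothetical
    # helpers that do not exist in this module:
    #
    #   def test_each_file_field_wrong_format(self):
    #       for field in ('contract', 'document', 'form_inf'):
    #           with self.subTest(field=field):
    #               files = valid_files()
    #               files[field] = SimpleUploadedFile(field + '.aboba', b'ABOBA')
    #               form = ResearchFormMKI(data=valid_data(), files=files)
    #               self.assertFalse(form.is_valid())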
    def test_empty_main_researcher_format_ResearchFormMKI_form(self): # Test of the initial application form with no main researcher selected
with open(os.path.abspath(os.curdir)+'Test.txt', 'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':'224',
'description':'Ну мы тут тестим тесты',
'main_researcher':None,
'ver_bio':'Тесты тестов',
'version':'Тестовая',
'cast_researcher_date':date.today()-timedelta(days=2000*5),
'accept_research_version':'Тестовая версия',
'accept_research_date':date.today()-timedelta(days=2000*5),
'protocol_research_version':'Тестовая версия',
'protocol_research_date':date.today()-timedelta(days=2000*5),
'contract_date':date.today()-timedelta(days=2000*5),
'name_another_doc':'Тест',
'another_doc_version':'Тестовая',
'another_doc_date':date.today()-timedelta(days=2000*5)
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+'Test.txt')
print(form.errors)
print("test_empty_main_researcher_format_ResearchFormMKI_form")
self.assertFalse(form.is_valid())
    def test_empty_char_fields_format_ResearchFormMKI_form(self): # Test of the initial application form with the character-input fields left empty
        # TODO: extend to each individual field (see the sketch after test_wrong_file_format above)
with open(os.path.abspath(os.curdir)+'Test.txt', 'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':None,
'description':None,
'main_researcher':1,
'ver_bio':None,
'version':None,
'cast_researcher_date':date.today()-timedelta(days=2000*5),
'accept_research_version':None,
'accept_research_date':date.today()-timedelta(days=2000*5),
'protocol_research_version':None,
'protocol_research_date':date.today()-timedelta(days=2000*5),
'contract_date':date.today()-timedelta(days=2000*5),
'name_another_doc':None,
'another_doc_version':None,
'another_doc_date':date.today()-timedelta(days=2000*5)
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+'Test.txt')
print(form.errors)
print("test_empty_char_fields_format_ResearchFormMKI_form")
self.assertFalse(form.is_valid())
    def test_empty_date_fields_ResearchFormMKI_form(self): # Test of the initial application form with empty values in the date fields. Failing test case as of this writing!
        # TODO: extend to each individual field (see the sketch after test_wrong_file_format above)
with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:
f.write(b"ABOBA")
with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:
testfile=f.read()
form=ResearchFormMKI(data={
'protocol_number':'224',
'description':'Ну мы тут тестим тесты',
'main_researcher':1,
'ver_bio':'Тесты тестов',
'version':'Тестовая',
'cast_researcher_date':None,
'accept_research_version':'Тестовая версия',
'accept_research_date':None,
'protocol_research_version':'Тестовая версия',
'protocol_research_date':None,
'contract_date':None,
'name_another_doc':'Тест',
'another_doc_version':'Тестовая',
'another_doc_date':None
},
files={'another_doc': SimpleUploadedFile('another_doc', testfile),
'contract': SimpleUploadedFile('contract', testfile),
'advertising': SimpleUploadedFile('advertising', testfile),
'write_objects': SimpleUploadedFile('write_objects', testfile),
'protocol_research': SimpleUploadedFile('protocol_research', testfile),
'accept_research': SimpleUploadedFile('accept_research', testfile),
'form_inf': SimpleUploadedFile('form_inf', testfile),
'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),
'list_members': SimpleUploadedFile('list_members', testfile),
'document': SimpleUploadedFile('document', testfile)
})
os.remove(os.path.abspath(os.curdir)+'Test.txt')
print(form.errors)
print("test_empty_date_fields_ResearchFormMKI_form")
self.assertTrue(form.is_valid())
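# A self-contained sketch (not used by the tests above) of how the repeated
# SimpleUploadedFile boilerplate could be factored out; the helper name
# make_upload_files is an assumption, not part of the application under test.
def make_upload_files(content=b'ABOBA'):
    # One SimpleUploadedFile per file field expected by ResearchFormMKI.
    field_names = ('another_doc', 'contract', 'advertising', 'write_objects',
                   'protocol_research', 'accept_research', 'form_inf',
                   'cast_researcher', 'list_members', 'document')
    return {name: SimpleUploadedFile(name, content) for name in field_names}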
|
flexible
|
{
"blob_id": "c5d0b23396e084ad6ffade15b3aa3c59b6be3cc0",
"index": 2706,
"step-1": "<mask token>\n\n\nclass TestForms(TestCase):\n <mask token>\n\n def test_wrong_data_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() + timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() + timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() + timedelta(days=\n 2000 * 5), 'contract_date': date.today() + timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() + timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_wrong_data_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_wrong_file_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.aboba')\n print(form.errors)\n print('test_wrong_file_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def 
test_empty_main_researcher_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_main_researcher_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_char_fields_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': None,\n 'description': None, 'main_researcher': 1, 'ver_bio': None,\n 'version': None, 'cast_researcher_date': date.today() -\n timedelta(days=2000 * 5), 'accept_research_version': None,\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': None,\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': None, 'another_doc_version':\n None, 'another_doc_date': date.today() - timedelta(days=\n 2000 * 5)}, files={'another_doc': SimpleUploadedFile(\n 'another_doc', testfile), 'contract': SimpleUploadedFile(\n 'contract', testfile), 'advertising': SimpleUploadedFile(\n 'advertising', testfile), 'write_objects':\n SimpleUploadedFile('write_objects', testfile),\n 'protocol_research': SimpleUploadedFile('protocol_research',\n testfile), 'accept_research': SimpleUploadedFile(\n 'accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_char_fields_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestForms(TestCase):\n <mask token>\n\n def test_wrong_data_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() + timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() + timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() + timedelta(days=\n 2000 * 5), 'contract_date': date.today() + timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() + timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_wrong_data_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_wrong_file_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.aboba')\n print(form.errors)\n print('test_wrong_file_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def 
test_empty_main_researcher_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_main_researcher_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_char_fields_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': None,\n 'description': None, 'main_researcher': 1, 'ver_bio': None,\n 'version': None, 'cast_researcher_date': date.today() -\n timedelta(days=2000 * 5), 'accept_research_version': None,\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': None,\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': None, 'another_doc_version':\n None, 'another_doc_date': date.today() - timedelta(days=\n 2000 * 5)}, files={'another_doc': SimpleUploadedFile(\n 'another_doc', testfile), 'contract': SimpleUploadedFile(\n 'contract', testfile), 'advertising': SimpleUploadedFile(\n 'advertising', testfile), 'write_objects':\n SimpleUploadedFile('write_objects', testfile),\n 'protocol_research': SimpleUploadedFile('protocol_research',\n testfile), 'accept_research': SimpleUploadedFile(\n 'accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_char_fields_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_date_fields_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with 
open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': None, 'accept_research_version':\n 'Тестовая версия', 'accept_research_date': None,\n 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': None, 'contract_date': None,\n 'name_another_doc': 'Тест', 'another_doc_version':\n 'Тестовая', 'another_doc_date': None}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_date_fields_ResearchFormMKI_form')\n self.assertTrue(form.is_valid())\n",
"step-3": "<mask token>\n\n\nclass TestForms(TestCase):\n\n def test_valid_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_valid_ResearchFormMKI_form')\n self.assertTrue(form.is_valid())\n\n def test_wrong_data_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() + timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() + timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() + timedelta(days=\n 2000 * 5), 'contract_date': date.today() + timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() + timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_wrong_data_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_wrong_file_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 
'Test.aboba', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.aboba')\n print(form.errors)\n print('test_wrong_file_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_main_researcher_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_main_researcher_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_char_fields_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 
'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': None,\n 'description': None, 'main_researcher': 1, 'ver_bio': None,\n 'version': None, 'cast_researcher_date': date.today() -\n timedelta(days=2000 * 5), 'accept_research_version': None,\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': None,\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': None, 'another_doc_version':\n None, 'another_doc_date': date.today() - timedelta(days=\n 2000 * 5)}, files={'another_doc': SimpleUploadedFile(\n 'another_doc', testfile), 'contract': SimpleUploadedFile(\n 'contract', testfile), 'advertising': SimpleUploadedFile(\n 'advertising', testfile), 'write_objects':\n SimpleUploadedFile('write_objects', testfile),\n 'protocol_research': SimpleUploadedFile('protocol_research',\n testfile), 'accept_research': SimpleUploadedFile(\n 'accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_char_fields_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_date_fields_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': None, 'accept_research_version':\n 'Тестовая версия', 'accept_research_date': None,\n 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': None, 'contract_date': None,\n 'name_another_doc': 'Тест', 'another_doc_version':\n 'Тестовая', 'another_doc_date': None}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_date_fields_ResearchFormMKI_form')\n self.assertTrue(form.is_valid())\n",
"step-4": "from django.test import TestCase\nfrom django.core.files import File\nfrom ResearchManage.forms import ResearchFormMKI\nfrom django.test import Client\nfrom unittest import TestCase, mock\nfrom datetime import date, timedelta\nfrom django.core.files.uploadedfile import SimpleUploadedFile\nimport os\n\n\nclass TestForms(TestCase):\n\n def test_valid_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_valid_ResearchFormMKI_form')\n self.assertTrue(form.is_valid())\n\n def test_wrong_data_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() + timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() + timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() + timedelta(days=\n 2000 * 5), 'contract_date': date.today() + timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() + timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': 
SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_wrong_data_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_wrong_file_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.aboba', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.aboba')\n print(form.errors)\n print('test_wrong_file_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_main_researcher_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n None, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': date.today() - timedelta(days=2000 *\n 5), 'accept_research_version': 'Тестовая версия',\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': 'Тест',\n 'another_doc_version': 'Тестовая', 'another_doc_date': date\n .today() - timedelta(days=2000 * 5)}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n 
print(form.errors)\n print('test_empty_main_researcher_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_char_fields_format_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': None,\n 'description': None, 'main_researcher': 1, 'ver_bio': None,\n 'version': None, 'cast_researcher_date': date.today() -\n timedelta(days=2000 * 5), 'accept_research_version': None,\n 'accept_research_date': date.today() - timedelta(days=2000 *\n 5), 'protocol_research_version': None,\n 'protocol_research_date': date.today() - timedelta(days=\n 2000 * 5), 'contract_date': date.today() - timedelta(days=\n 2000 * 5), 'name_another_doc': None, 'another_doc_version':\n None, 'another_doc_date': date.today() - timedelta(days=\n 2000 * 5)}, files={'another_doc': SimpleUploadedFile(\n 'another_doc', testfile), 'contract': SimpleUploadedFile(\n 'contract', testfile), 'advertising': SimpleUploadedFile(\n 'advertising', testfile), 'write_objects':\n SimpleUploadedFile('write_objects', testfile),\n 'protocol_research': SimpleUploadedFile('protocol_research',\n testfile), 'accept_research': SimpleUploadedFile(\n 'accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_char_fields_format_ResearchFormMKI_form')\n self.assertFalse(form.is_valid())\n\n def test_empty_date_fields_ResearchFormMKI_form(self):\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'wb') as f:\n f.write(b'ABOBA')\n with open(os.path.abspath(os.curdir) + 'Test.txt', 'rb') as f:\n testfile = f.read()\n form = ResearchFormMKI(data={'protocol_number': '224',\n 'description': 'Ну мы тут тестим тесты', 'main_researcher':\n 1, 'ver_bio': 'Тесты тестов', 'version': 'Тестовая',\n 'cast_researcher_date': None, 'accept_research_version':\n 'Тестовая версия', 'accept_research_date': None,\n 'protocol_research_version': 'Тестовая версия',\n 'protocol_research_date': None, 'contract_date': None,\n 'name_another_doc': 'Тест', 'another_doc_version':\n 'Тестовая', 'another_doc_date': None}, files={'another_doc':\n SimpleUploadedFile('another_doc', testfile), 'contract':\n SimpleUploadedFile('contract', testfile), 'advertising':\n SimpleUploadedFile('advertising', testfile),\n 'write_objects': SimpleUploadedFile('write_objects',\n testfile), 'protocol_research': SimpleUploadedFile(\n 'protocol_research', testfile), 'accept_research':\n SimpleUploadedFile('accept_research', testfile), 'form_inf':\n SimpleUploadedFile('form_inf', testfile), 'cast_researcher':\n SimpleUploadedFile('cast_researcher', testfile),\n 'list_members': SimpleUploadedFile('list_members', testfile\n ), 'document': SimpleUploadedFile('document', testfile)})\n os.remove(os.path.abspath(os.curdir) + 'Test.txt')\n print(form.errors)\n print('test_empty_date_fields_ResearchFormMKI_form')\n self.assertTrue(form.is_valid())\n",
"step-5": "from django.test import TestCase\nfrom django.core.files import File\nfrom ResearchManage.forms import ResearchFormMKI\nfrom django.test import Client\nfrom unittest import TestCase, mock\nfrom datetime import date, timedelta\nfrom django.core.files.uploadedfile import SimpleUploadedFile\nimport os\n# Create your tests here.\n\nclass TestForms(TestCase):\n def test_valid_ResearchFormMKI_form(self): #Тест валидной формы первичной подачи заявки\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':'224',\n \t'description':'Ну мы тут тестим тесты',\n \t'main_researcher':1,\n \t'ver_bio':'Тесты тестов',\n \t'version':'Тестовая',\n \t'cast_researcher_date':date.today()-timedelta(days=2000*5),\n \t'accept_research_version':'Тестовая версия',\n \t'accept_research_date':date.today()-timedelta(days=2000*5),\n \t'protocol_research_version':'Тестовая версия',\n \t'protocol_research_date':date.today()-timedelta(days=2000*5),\n \t'contract_date':date.today()-timedelta(days=2000*5),\n \t'name_another_doc':'Тест',\n \t'another_doc_version':'Тестовая',\n \t'another_doc_date':date.today()-timedelta(days=2000*5)\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', testfile),\n \t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+\"Test.txt\")\n print(form.errors)\n print(\"test_valid_ResearchFormMKI_form\")\n self.assertTrue(form.is_valid())\n\n def test_wrong_data_ResearchFormMKI_form(self): #Тест формы первичной подачи заявки с датой доков>сегодня.На момент написания тест кейс провальный!\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':'224',\n \t'description':'Ну мы тут тестим тесты',\n \t'main_researcher':1,\n \t'ver_bio':'Тесты тестов',\n \t'version':'Тестовая',\n \t'cast_researcher_date':date.today()+timedelta(days=2000*5),\n \t'accept_research_version':'Тестовая версия',\n \t'accept_research_date':date.today()+timedelta(days=2000*5),\n \t'protocol_research_version':'Тестовая версия',\n \t'protocol_research_date':date.today()+timedelta(days=2000*5),\n \t'contract_date':date.today()+timedelta(days=2000*5),\n \t'name_another_doc':'Тест',\n \t'another_doc_version':'Тестовая',\n \t'another_doc_date':date.today()+timedelta(days=2000*5)\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', testfile),\n \t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': 
SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+'Test.txt')\n print(form.errors)\n print(\"test_wrong_data_ResearchFormMKI_form\")\n self.assertFalse(form.is_valid())\n def test_wrong_file_format_ResearchFormMKI_form(self): #Тест формы первичной подачи заявки с несуществующим типом файла.На момент написания тест кейс провальный!\n #TODO:расширить до каждого отдельного поля\n with open(os.path.abspath(os.curdir)+'Test.aboba', 'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.aboba','rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':'224',\n \t'description':'Ну мы тут тестим тесты',\n \t'main_researcher':1,\n \t'ver_bio':'Тесты тестов',\n \t'version':'Тестовая',\n \t'cast_researcher_date':date.today()-timedelta(days=2000*5),\n \t'accept_research_version':'Тестовая версия',\n \t'accept_research_date':date.today()-timedelta(days=2000*5),\n \t'protocol_research_version':'Тестовая версия',\n \t'protocol_research_date':date.today()-timedelta(days=2000*5),\n \t'contract_date':date.today()-timedelta(days=2000*5),\n \t'name_another_doc':'Тест',\n \t'another_doc_version':'Тестовая',\n \t'another_doc_date':date.today()-timedelta(days=2000*5)\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', testfile),\n \t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+'Test.aboba')\n print(form.errors)\n print(\"test_wrong_file_format_ResearchFormMKI_form\")\n self.assertFalse(form.is_valid())\n\n def test_empty_main_researcher_format_ResearchFormMKI_form(self): #Тест формы первичной подачи заявки с невыбранным главным исследователем\n with open(os.path.abspath(os.curdir)+'Test.txt', 'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':'224',\n \t'description':'Ну мы тут тестим тесты',\n \t'main_researcher':None,\n \t'ver_bio':'Тесты тестов',\n \t'version':'Тестовая',\n \t'cast_researcher_date':date.today()-timedelta(days=2000*5),\n \t'accept_research_version':'Тестовая версия',\n \t'accept_research_date':date.today()-timedelta(days=2000*5),\n \t'protocol_research_version':'Тестовая версия',\n \t'protocol_research_date':date.today()-timedelta(days=2000*5),\n \t'contract_date':date.today()-timedelta(days=2000*5),\n \t'name_another_doc':'Тест',\n \t'another_doc_version':'Тестовая',\n \t'another_doc_date':date.today()-timedelta(days=2000*5)\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', testfile),\n 
\t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+'Test.txt')\n print(form.errors)\n print(\"test_empty_main_researcher_format_ResearchFormMKI_form\")\n self.assertFalse(form.is_valid())\n\n def test_empty_char_fields_format_ResearchFormMKI_form(self): #Тест формы первичной подачи заявки с незаполненными полями для символьного ввода\n #TODO:расширить до каждого отдельного поля\n with open(os.path.abspath(os.curdir)+'Test.txt', 'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':None,\n \t'description':None,\n \t'main_researcher':1,\n \t'ver_bio':None,\n \t'version':None,\n \t'cast_researcher_date':date.today()-timedelta(days=2000*5),\n \t'accept_research_version':None,\n \t'accept_research_date':date.today()-timedelta(days=2000*5),\n \t'protocol_research_version':None,\n \t'protocol_research_date':date.today()-timedelta(days=2000*5),\n \t'contract_date':date.today()-timedelta(days=2000*5),\n \t'name_another_doc':None,\n \t'another_doc_version':None,\n \t'another_doc_date':date.today()-timedelta(days=2000*5)\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', testfile),\n \t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+'Test.txt')\n print(form.errors)\n print(\"test_empty_char_fields_format_ResearchFormMKI_form\")\n self.assertFalse(form.is_valid())\n\n def test_empty_date_fields_ResearchFormMKI_form(self): #Тест формы первичной подачи заявки с пустыми значениями полей даты На момент написания тест кейс провальный!\n \t#TODO:расширить до каждого отдельного поля\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'wb') as f:\n \tf.write(b\"ABOBA\")\n with open(os.path.abspath(os.curdir)+'Test.txt' ,'rb') as f:\n \ttestfile=f.read()\n \tform=ResearchFormMKI(data={\n \t'protocol_number':'224',\n \t'description':'Ну мы тут тестим тесты',\n \t'main_researcher':1,\n \t'ver_bio':'Тесты тестов',\n \t'version':'Тестовая',\n \t'cast_researcher_date':None,\n \t'accept_research_version':'Тестовая версия',\n \t'accept_research_date':None,\n \t'protocol_research_version':'Тестовая версия',\n \t'protocol_research_date':None,\n \t'contract_date':None,\n \t'name_another_doc':'Тест',\n \t'another_doc_version':'Тестовая',\n \t'another_doc_date':None\n \t},\n \tfiles={'another_doc': SimpleUploadedFile('another_doc', testfile),\n \t'contract': SimpleUploadedFile('contract', testfile),\n \t'advertising': SimpleUploadedFile('advertising', testfile),\n \t'write_objects': SimpleUploadedFile('write_objects', 
testfile),\n \t'protocol_research': SimpleUploadedFile('protocol_research', testfile),\n \t'accept_research': SimpleUploadedFile('accept_research', testfile),\n \t'form_inf': SimpleUploadedFile('form_inf', testfile),\n \t'cast_researcher': SimpleUploadedFile('cast_researcher', testfile),\n \t'list_members': SimpleUploadedFile('list_members', testfile),\n \t'document': SimpleUploadedFile('document', testfile)\n \t})\n os.remove(os.path.abspath(os.curdir)+'Test.txt')\n print(form.errors)\n print(\"test_empty_date_fields_ResearchFormMKI_form\")\n self.assertTrue(form.is_valid())",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
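The Russian comments in the ResearchFormMKI record above mark the future-date and empty-date cases as failing at the time of writing, i.e. the form still accepted document dates it should reject. A minimal sketch of the missing validation, assuming ResearchFormMKI is an ordinary Django form; the date field names come from the test data above, everything else is illustrative:

from datetime import date

DATE_FIELDS = ['cast_researcher_date', 'accept_research_date',
               'protocol_research_date', 'contract_date', 'another_doc_date']

class DocumentDateValidationMixin:
    # Mixed into the form class, e.g.:
    #   class ResearchFormMKI(DocumentDateValidationMixin, forms.ModelForm): ...
    # Rejects missing dates and dates in the future.
    def clean(self):
        cleaned = super().clean()
        for name in DATE_FIELDS:
            value = cleaned.get(name)
            if value is None:
                self.add_error(name, 'This date is required.')
            elif value > date.today():
                self.add_error(name, 'Document dates may not be in the future.')
        return cleaned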
from __future__ import absolute_import
import itertools
from django.contrib import messages
from django.core.context_processors import csrf
from django.db import transaction
from django.http import HttpResponseRedirect
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from sudo.decorators import sudo_required
from sentry.models import (Project, ProjectStatus, Organization, OrganizationStatus)
from sentry.plugins import plugins
from sentry.web.forms.accounts import (
ProjectEmailOptionsForm, NotificationSettingsForm, NotificationReportSettingsForm,
NotificationDeploySettingsForm
)
from sentry.web.decorators import login_required
from sentry.web.frontend.base import BaseView
from sentry.web.helpers import render_to_response
from sentry.utils.auth import get_auth_providers
from sentry.utils.safe import safe_execute
class AccountNotificationView(BaseView):
notification_settings_form = NotificationSettingsForm
@method_decorator(never_cache)
@method_decorator(login_required)
@method_decorator(sudo_required)
@method_decorator(transaction.atomic)
def handle(self, request):
settings_form = self.notification_settings_form(request.user, request.POST or None)
reports_form = NotificationReportSettingsForm(
request.user, request.POST or None, prefix='reports'
)
org_list = list(
Organization.objects.filter(
status=OrganizationStatus.VISIBLE,
member_set__user=request.user,
).distinct()
)
org_forms = [
(
org, NotificationDeploySettingsForm(
request.user, org, request.POST or None, prefix='deploys-org-%s' % (org.id, )
)
) for org in sorted(org_list, key=lambda o: o.name)
]
project_list = list(
Project.objects.filter(
team__organizationmemberteam__organizationmember__user=request.user,
team__organizationmemberteam__is_active=True,
status=ProjectStatus.VISIBLE,
).distinct()
)
project_forms = [
(
project, ProjectEmailOptionsForm(
project,
request.user,
request.POST or None,
prefix='project-%s' % (project.id, )
)
) for project in sorted(project_list, key=lambda x: (x.organization.name, x.name))
]
ext_forms = []
for plugin in plugins.all():
for form in safe_execute(plugin.get_notification_forms, _with_transaction=False) or ():
form = safe_execute(
form,
plugin,
request.user,
request.POST or None,
prefix=plugin.slug,
_with_transaction=False
)
if not form:
continue
ext_forms.append(form)
if request.POST:
all_forms = list(
itertools.chain(
[settings_form, reports_form], ext_forms, (f for _, f in project_forms),
(f for _, f in org_forms)
)
)
if all(f.is_valid() for f in all_forms):
for form in all_forms:
form.save()
messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
return HttpResponseRedirect(request.path)
context = csrf(request)
context.update(
{
'settings_form': settings_form,
'project_forms': project_forms,
'org_forms': org_forms,
'reports_form': reports_form,
'ext_forms': ext_forms,
'page': 'notifications',
'AUTH_PROVIDERS': get_auth_providers(),
}
)
return render_to_response('sentry/account/notifications.html', context, request)
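For orientation, a hypothetical way to mount the view defined above; the URL pattern, name, and wiring are assumptions for illustration, not taken from Sentry's actual urls.py:

from django.conf.urls import url

# AccountNotificationView is the class defined above; BaseView is a
# Django class-based view, so as_view() applies.
urlpatterns = [
    url(r'^account/settings/notifications/$',
        AccountNotificationView.as_view(),
        name='sentry-account-settings-notifications'),
]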
|
normal
|
{
"blob_id": "46f218829e1bf324d4c50ea0ff7003bc48b64e2a",
"index": 4258,
"step-1": "<mask token>\n\n\nclass AccountNotificationView(BaseView):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass AccountNotificationView(BaseView):\n <mask token>\n\n @method_decorator(never_cache)\n @method_decorator(login_required)\n @method_decorator(sudo_required)\n @method_decorator(transaction.atomic)\n def handle(self, request):\n settings_form = self.notification_settings_form(request.user, \n request.POST or None)\n reports_form = NotificationReportSettingsForm(request.user, request\n .POST or None, prefix='reports')\n org_list = list(Organization.objects.filter(status=\n OrganizationStatus.VISIBLE, member_set__user=request.user).\n distinct())\n org_forms = [(org, NotificationDeploySettingsForm(request.user, org,\n request.POST or None, prefix='deploys-org-%s' % (org.id,))) for\n org in sorted(org_list, key=lambda o: o.name)]\n project_list = list(Project.objects.filter(\n team__organizationmemberteam__organizationmember__user=request.\n user, team__organizationmemberteam__is_active=True, status=\n ProjectStatus.VISIBLE).distinct())\n project_forms = [(project, ProjectEmailOptionsForm(project, request\n .user, request.POST or None, prefix='project-%s' % (project.id,\n ))) for project in sorted(project_list, key=lambda x: (x.\n organization.name, x.name))]\n ext_forms = []\n for plugin in plugins.all():\n for form in (safe_execute(plugin.get_notification_forms,\n _with_transaction=False) or ()):\n form = safe_execute(form, plugin, request.user, request.\n POST or None, prefix=plugin.slug, _with_transaction=False)\n if not form:\n continue\n ext_forms.append(form)\n if request.POST:\n all_forms = list(itertools.chain([settings_form, reports_form],\n ext_forms, (f for _, f in project_forms), (f for _, f in\n org_forms)))\n if all(f.is_valid() for f in all_forms):\n for form in all_forms:\n form.save()\n messages.add_message(request, messages.SUCCESS,\n 'Your settings were saved.')\n return HttpResponseRedirect(request.path)\n context = csrf(request)\n context.update({'settings_form': settings_form, 'project_forms':\n project_forms, 'org_forms': org_forms, 'reports_form':\n reports_form, 'ext_forms': ext_forms, 'page': 'notifications',\n 'AUTH_PROVIDERS': get_auth_providers()})\n return render_to_response('sentry/account/notifications.html',\n context, request)\n",
"step-3": "<mask token>\n\n\nclass AccountNotificationView(BaseView):\n notification_settings_form = NotificationSettingsForm\n\n @method_decorator(never_cache)\n @method_decorator(login_required)\n @method_decorator(sudo_required)\n @method_decorator(transaction.atomic)\n def handle(self, request):\n settings_form = self.notification_settings_form(request.user, \n request.POST or None)\n reports_form = NotificationReportSettingsForm(request.user, request\n .POST or None, prefix='reports')\n org_list = list(Organization.objects.filter(status=\n OrganizationStatus.VISIBLE, member_set__user=request.user).\n distinct())\n org_forms = [(org, NotificationDeploySettingsForm(request.user, org,\n request.POST or None, prefix='deploys-org-%s' % (org.id,))) for\n org in sorted(org_list, key=lambda o: o.name)]\n project_list = list(Project.objects.filter(\n team__organizationmemberteam__organizationmember__user=request.\n user, team__organizationmemberteam__is_active=True, status=\n ProjectStatus.VISIBLE).distinct())\n project_forms = [(project, ProjectEmailOptionsForm(project, request\n .user, request.POST or None, prefix='project-%s' % (project.id,\n ))) for project in sorted(project_list, key=lambda x: (x.\n organization.name, x.name))]\n ext_forms = []\n for plugin in plugins.all():\n for form in (safe_execute(plugin.get_notification_forms,\n _with_transaction=False) or ()):\n form = safe_execute(form, plugin, request.user, request.\n POST or None, prefix=plugin.slug, _with_transaction=False)\n if not form:\n continue\n ext_forms.append(form)\n if request.POST:\n all_forms = list(itertools.chain([settings_form, reports_form],\n ext_forms, (f for _, f in project_forms), (f for _, f in\n org_forms)))\n if all(f.is_valid() for f in all_forms):\n for form in all_forms:\n form.save()\n messages.add_message(request, messages.SUCCESS,\n 'Your settings were saved.')\n return HttpResponseRedirect(request.path)\n context = csrf(request)\n context.update({'settings_form': settings_form, 'project_forms':\n project_forms, 'org_forms': org_forms, 'reports_form':\n reports_form, 'ext_forms': ext_forms, 'page': 'notifications',\n 'AUTH_PROVIDERS': get_auth_providers()})\n return render_to_response('sentry/account/notifications.html',\n context, request)\n",
"step-4": "from __future__ import absolute_import\nimport itertools\nfrom django.contrib import messages\nfrom django.core.context_processors import csrf\nfrom django.db import transaction\nfrom django.http import HttpResponseRedirect\nfrom django.views.decorators.cache import never_cache\nfrom django.utils.decorators import method_decorator\nfrom sudo.decorators import sudo_required\nfrom sentry.models import Project, ProjectStatus, Organization, OrganizationStatus\nfrom sentry.plugins import plugins\nfrom sentry.web.forms.accounts import ProjectEmailOptionsForm, NotificationSettingsForm, NotificationReportSettingsForm, NotificationDeploySettingsForm\nfrom sentry.web.decorators import login_required\nfrom sentry.web.frontend.base import BaseView\nfrom sentry.web.helpers import render_to_response\nfrom sentry.utils.auth import get_auth_providers\nfrom sentry.utils.safe import safe_execute\n\n\nclass AccountNotificationView(BaseView):\n notification_settings_form = NotificationSettingsForm\n\n @method_decorator(never_cache)\n @method_decorator(login_required)\n @method_decorator(sudo_required)\n @method_decorator(transaction.atomic)\n def handle(self, request):\n settings_form = self.notification_settings_form(request.user, \n request.POST or None)\n reports_form = NotificationReportSettingsForm(request.user, request\n .POST or None, prefix='reports')\n org_list = list(Organization.objects.filter(status=\n OrganizationStatus.VISIBLE, member_set__user=request.user).\n distinct())\n org_forms = [(org, NotificationDeploySettingsForm(request.user, org,\n request.POST or None, prefix='deploys-org-%s' % (org.id,))) for\n org in sorted(org_list, key=lambda o: o.name)]\n project_list = list(Project.objects.filter(\n team__organizationmemberteam__organizationmember__user=request.\n user, team__organizationmemberteam__is_active=True, status=\n ProjectStatus.VISIBLE).distinct())\n project_forms = [(project, ProjectEmailOptionsForm(project, request\n .user, request.POST or None, prefix='project-%s' % (project.id,\n ))) for project in sorted(project_list, key=lambda x: (x.\n organization.name, x.name))]\n ext_forms = []\n for plugin in plugins.all():\n for form in (safe_execute(plugin.get_notification_forms,\n _with_transaction=False) or ()):\n form = safe_execute(form, plugin, request.user, request.\n POST or None, prefix=plugin.slug, _with_transaction=False)\n if not form:\n continue\n ext_forms.append(form)\n if request.POST:\n all_forms = list(itertools.chain([settings_form, reports_form],\n ext_forms, (f for _, f in project_forms), (f for _, f in\n org_forms)))\n if all(f.is_valid() for f in all_forms):\n for form in all_forms:\n form.save()\n messages.add_message(request, messages.SUCCESS,\n 'Your settings were saved.')\n return HttpResponseRedirect(request.path)\n context = csrf(request)\n context.update({'settings_form': settings_form, 'project_forms':\n project_forms, 'org_forms': org_forms, 'reports_form':\n reports_form, 'ext_forms': ext_forms, 'page': 'notifications',\n 'AUTH_PROVIDERS': get_auth_providers()})\n return render_to_response('sentry/account/notifications.html',\n context, request)\n",
"step-5": "from __future__ import absolute_import\n\nimport itertools\n\nfrom django.contrib import messages\nfrom django.core.context_processors import csrf\nfrom django.db import transaction\nfrom django.http import HttpResponseRedirect\nfrom django.views.decorators.cache import never_cache\nfrom django.utils.decorators import method_decorator\n\nfrom sudo.decorators import sudo_required\n\nfrom sentry.models import (Project, ProjectStatus, Organization, OrganizationStatus)\nfrom sentry.plugins import plugins\nfrom sentry.web.forms.accounts import (\n ProjectEmailOptionsForm, NotificationSettingsForm, NotificationReportSettingsForm,\n NotificationDeploySettingsForm\n)\nfrom sentry.web.decorators import login_required\nfrom sentry.web.frontend.base import BaseView\nfrom sentry.web.helpers import render_to_response\nfrom sentry.utils.auth import get_auth_providers\nfrom sentry.utils.safe import safe_execute\n\n\nclass AccountNotificationView(BaseView):\n notification_settings_form = NotificationSettingsForm\n\n @method_decorator(never_cache)\n @method_decorator(login_required)\n @method_decorator(sudo_required)\n @method_decorator(transaction.atomic)\n def handle(self, request):\n settings_form = self.notification_settings_form(request.user, request.POST or None)\n reports_form = NotificationReportSettingsForm(\n request.user, request.POST or None, prefix='reports'\n )\n\n org_list = list(\n Organization.objects.filter(\n status=OrganizationStatus.VISIBLE,\n member_set__user=request.user,\n ).distinct()\n )\n\n org_forms = [\n (\n org, NotificationDeploySettingsForm(\n request.user, org, request.POST or None, prefix='deploys-org-%s' % (org.id, )\n )\n ) for org in sorted(org_list, key=lambda o: o.name)\n ]\n\n project_list = list(\n Project.objects.filter(\n team__organizationmemberteam__organizationmember__user=request.user,\n team__organizationmemberteam__is_active=True,\n status=ProjectStatus.VISIBLE,\n ).distinct()\n )\n\n project_forms = [\n (\n project, ProjectEmailOptionsForm(\n project,\n request.user,\n request.POST or None,\n prefix='project-%s' % (project.id, )\n )\n ) for project in sorted(project_list, key=lambda x: (x.organization.name, x.name))\n ]\n\n ext_forms = []\n for plugin in plugins.all():\n for form in safe_execute(plugin.get_notification_forms, _with_transaction=False) or ():\n form = safe_execute(\n form,\n plugin,\n request.user,\n request.POST or None,\n prefix=plugin.slug,\n _with_transaction=False\n )\n if not form:\n continue\n ext_forms.append(form)\n\n if request.POST:\n all_forms = list(\n itertools.chain(\n [settings_form, reports_form], ext_forms, (f for _, f in project_forms),\n (f for _, f in org_forms)\n )\n )\n if all(f.is_valid() for f in all_forms):\n for form in all_forms:\n form.save()\n messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')\n return HttpResponseRedirect(request.path)\n\n context = csrf(request)\n context.update(\n {\n 'settings_form': settings_form,\n 'project_forms': project_forms,\n 'org_forms': org_forms,\n 'reports_form': reports_form,\n 'ext_forms': ext_forms,\n 'page': 'notifications',\n 'AUTH_PROVIDERS': get_auth_providers(),\n }\n )\n return render_to_response('sentry/account/notifications.html', context, request)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def index() ->dict:
return {}
<|reserved_special_token_0|>
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
routes = [Route('/', method='GET', handler=index), Route('/fail', method=
'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]
event_hooks = [RequestIdHooks()]
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
<|reserved_special_token_1|>
import pytest
from apistar import App, Route, TestClient, exceptions
from apistar_request_id import RequestId, RequestIdHooks
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
routes = [Route('/', method='GET', handler=index), Route('/fail', method=
'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]
event_hooks = [RequestIdHooks()]
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
<|reserved_special_token_1|>
import pytest
from apistar import App, Route, TestClient, exceptions
from apistar_request_id import RequestId, RequestIdHooks
def index() -> dict:
return {}
def fail() -> dict:
raise exceptions.BadRequest("fail")
def fail_2() -> dict:
raise RuntimeError("fail")
routes = [
Route("/", method="GET", handler=index),
Route("/fail", method="GET", handler=fail),
Route("/fail-2", method="GET", handler=fail_2),
]
event_hooks = [
RequestIdHooks()
]
@pytest.fixture(scope="session")
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
# Given that I don't have an existing request id
# When I make a request to the app
response = client.get("/")
# Then my response should contain an autogenerated request id
assert response.headers["x-request-id"]
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
# Given that I have an existing request id
# When I make a request to the app
response = client.get("/", headers={"x-request-id": "a-request-id"})
# Then that same request id should appear in the response headers
assert response.headers["x-request-id"] == "a-request-id"
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
# Given that I have an existing request id
# When I make a request to the app
response = client.get("/fail", headers={"x-request-id": "a-request-id"})
# Then that same request id should appear in the response headers
assert response.headers["x-request-id"] == "a-request-id"
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
# Given that I have an existing request id
# When I make a request to the app
with pytest.raises(RuntimeError):
client.get("/fail-2", headers={"x-request-id": "a-request-id"})
# Then the request id should be set and subsequently cleared
assert RequestId.get_request_id() is None
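A small usage sketch to accompany the tests above: the post-response assertions imply the hooks set the id for the duration of a request and clear it afterwards, so inside a handler it can be read back for log correlation. The handler and logger here are illustrative, not part of the package:

import logging

from apistar_request_id import RequestId

log = logging.getLogger(__name__)

def create_order() -> dict:
    # Assumed to be populated by RequestIdHooks while the request is in flight.
    request_id = RequestId.get_request_id()
    log.info('creating order', extra={'request_id': request_id})
    return {'request_id': request_id}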
|
flexible
|
{
"blob_id": "f41ab6813fb7067089abe223b9006adde40630cd",
"index": 1941,
"step-1": "<mask token>\n\n\ndef index() ->dict:\n return {}\n\n\n<mask token>\n\n\n@pytest.fixture\ndef client(app):\n return TestClient(app)\n\n\ndef test_request_id_can_be_autogenerated(client):\n response = client.get('/')\n assert response.headers['x-request-id']\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_from_request_headers(client):\n response = client.get('/', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_error(client):\n response = client.get('/fail', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_internal_error(client):\n with pytest.raises(RuntimeError):\n client.get('/fail-2', headers={'x-request-id': 'a-request-id'})\n assert RequestId.get_request_id() is None\n",
"step-2": "<mask token>\n\n\ndef index() ->dict:\n return {}\n\n\ndef fail() ->dict:\n raise exceptions.BadRequest('fail')\n\n\ndef fail_2() ->dict:\n raise RuntimeError('fail')\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef app():\n return App(routes=routes, event_hooks=event_hooks)\n\n\n@pytest.fixture\ndef client(app):\n return TestClient(app)\n\n\ndef test_request_id_can_be_autogenerated(client):\n response = client.get('/')\n assert response.headers['x-request-id']\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_from_request_headers(client):\n response = client.get('/', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_error(client):\n response = client.get('/fail', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_internal_error(client):\n with pytest.raises(RuntimeError):\n client.get('/fail-2', headers={'x-request-id': 'a-request-id'})\n assert RequestId.get_request_id() is None\n",
"step-3": "<mask token>\n\n\ndef index() ->dict:\n return {}\n\n\ndef fail() ->dict:\n raise exceptions.BadRequest('fail')\n\n\ndef fail_2() ->dict:\n raise RuntimeError('fail')\n\n\nroutes = [Route('/', method='GET', handler=index), Route('/fail', method=\n 'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]\nevent_hooks = [RequestIdHooks()]\n\n\n@pytest.fixture(scope='session')\ndef app():\n return App(routes=routes, event_hooks=event_hooks)\n\n\n@pytest.fixture\ndef client(app):\n return TestClient(app)\n\n\ndef test_request_id_can_be_autogenerated(client):\n response = client.get('/')\n assert response.headers['x-request-id']\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_from_request_headers(client):\n response = client.get('/', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_error(client):\n response = client.get('/fail', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_internal_error(client):\n with pytest.raises(RuntimeError):\n client.get('/fail-2', headers={'x-request-id': 'a-request-id'})\n assert RequestId.get_request_id() is None\n",
"step-4": "import pytest\nfrom apistar import App, Route, TestClient, exceptions\nfrom apistar_request_id import RequestId, RequestIdHooks\n\n\ndef index() ->dict:\n return {}\n\n\ndef fail() ->dict:\n raise exceptions.BadRequest('fail')\n\n\ndef fail_2() ->dict:\n raise RuntimeError('fail')\n\n\nroutes = [Route('/', method='GET', handler=index), Route('/fail', method=\n 'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]\nevent_hooks = [RequestIdHooks()]\n\n\n@pytest.fixture(scope='session')\ndef app():\n return App(routes=routes, event_hooks=event_hooks)\n\n\n@pytest.fixture\ndef client(app):\n return TestClient(app)\n\n\ndef test_request_id_can_be_autogenerated(client):\n response = client.get('/')\n assert response.headers['x-request-id']\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_from_request_headers(client):\n response = client.get('/', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_error(client):\n response = client.get('/fail', headers={'x-request-id': 'a-request-id'})\n assert response.headers['x-request-id'] == 'a-request-id'\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_internal_error(client):\n with pytest.raises(RuntimeError):\n client.get('/fail-2', headers={'x-request-id': 'a-request-id'})\n assert RequestId.get_request_id() is None\n",
"step-5": "import pytest\n\nfrom apistar import App, Route, TestClient, exceptions\nfrom apistar_request_id import RequestId, RequestIdHooks\n\n\ndef index() -> dict:\n return {}\n\n\ndef fail() -> dict:\n raise exceptions.BadRequest(\"fail\")\n\n\ndef fail_2() -> dict:\n raise RuntimeError(\"fail\")\n\n\nroutes = [\n Route(\"/\", method=\"GET\", handler=index),\n Route(\"/fail\", method=\"GET\", handler=fail),\n Route(\"/fail-2\", method=\"GET\", handler=fail_2),\n]\n\nevent_hooks = [\n RequestIdHooks()\n]\n\n\n@pytest.fixture(scope=\"session\")\ndef app():\n return App(routes=routes, event_hooks=event_hooks)\n\n\n@pytest.fixture\ndef client(app):\n return TestClient(app)\n\n\ndef test_request_id_can_be_autogenerated(client):\n # Given that I don't have an existing request id\n # When I make a request to the app\n response = client.get(\"/\")\n\n # Then my response should contain an autogenerated request id\n assert response.headers[\"x-request-id\"]\n\n # And the request id for the current thread should be cleared\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_from_request_headers(client):\n # Given that I have an existing request id\n # When I make a request to the app\n response = client.get(\"/\", headers={\"x-request-id\": \"a-request-id\"})\n\n # Then that same request id should appear in the response headers\n assert response.headers[\"x-request-id\"] == \"a-request-id\"\n\n # And the request id for the current thread should be cleared\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_error(client):\n # Given that I have an existing request id\n # When I make a request to the app\n response = client.get(\"/fail\", headers={\"x-request-id\": \"a-request-id\"})\n\n # Then that same request id should appear in the response headers\n assert response.headers[\"x-request-id\"] == \"a-request-id\"\n\n # And the request id for the current thread should be cleared\n assert RequestId.get_request_id() is None\n\n\ndef test_request_id_can_be_set_on_internal_error(client):\n # Given that I have an existing request id\n # When I make a request to the app\n with pytest.raises(RuntimeError):\n client.get(\"/fail-2\", headers={\"x-request-id\": \"a-request-id\"})\n\n # Then the request id should be set and subsequently cleared\n assert RequestId.get_request_id() is None\n",
"step-ids": [
6,
9,
10,
11,
12
]
}
|
[
6,
9,
10,
11,
12
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Paul
from fabric.api import settings,run,cd,env,hosts
from fabric.colors import *
env.hosts=['192.168.75.130:22']
env.password='hello123'
env.user='root'
def test():
	with cd('/home'):
		print(yellow(run('ls -l')))
test()
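Usage note: because the module calls test() at import time, running it directly with python executes the command immediately; under Fabric 1.x the same task can also be invoked through the fab CLI. The file name below is a placeholder:

    python fabfile.py
    fab -f fabfile.py -H root@192.168.75.130 -p hello123 test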
|
normal
|
{
"blob_id": "6b45541c54f1a4ce94d6bd457701ecd1b90a4c4c",
"index": 1129,
"step-1": "#!/usr/bin/env python\n#_*_coding:utf-8_*_\n#作者:Paul哥\n\n\n\nfrom fabric.api import settings,run,cd,env,hosts\nfrom fabric.colors import *\n\nenv.hosts=['192.168.75.130:22']\nenv.password='hello123'\nenv.user='root'\ndef test():\n\twith cd('/home'):\n\t\tprint yellow(run('ls -l'))\n\ntest()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import pytest
import torch
from ignite.contrib.metrics.regression import MeanNormalizedBias
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
def test_zero_sample():
m = MeanNormalizedBias()
with pytest.raises(
NotComputableError, match=r"MeanNormalizedBias must have at least one example before it can be computed"
):
m.compute()
def test_zero_gt():
a = np.random.randn(4)
ground_truth = np.zeros(4)
m = MeanNormalizedBias()
with pytest.raises(NotComputableError, match=r"The ground truth has 0."):
m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))
def test_wrong_input_shapes():
m = MeanNormalizedBias()
with pytest.raises(ValueError, match=r"Input data shapes should be the same, but given"):
m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))
with pytest.raises(ValueError, match=r"Input data shapes should be the same, but given"):
m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))
with pytest.raises(ValueError, match=r"Input data shapes should be the same, but given"):
m.update((torch.rand(4, 1, 2), torch.rand(4,),))
with pytest.raises(ValueError, match=r"Input data shapes should be the same, but given"):
m.update((torch.rand(4,), torch.rand(4, 1, 2),))
def test_mean_error():
a = np.random.randn(4)
b = np.random.randn(4)
c = np.random.randn(4)
d = np.random.randn(4)
ground_truth = np.random.randn(4)
m = MeanNormalizedBias()
m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))
np_sum = ((ground_truth - a) / ground_truth).sum()
np_len = len(a)
np_ans = np_sum / np_len
assert m.compute() == pytest.approx(np_ans)
m.update((torch.from_numpy(b), torch.from_numpy(ground_truth)))
np_sum += ((ground_truth - b) / ground_truth).sum()
np_len += len(b)
np_ans = np_sum / np_len
assert m.compute() == pytest.approx(np_ans)
m.update((torch.from_numpy(c), torch.from_numpy(ground_truth)))
np_sum += ((ground_truth - c) / ground_truth).sum()
np_len += len(c)
np_ans = np_sum / np_len
assert m.compute() == pytest.approx(np_ans)
m.update((torch.from_numpy(d), torch.from_numpy(ground_truth)))
np_sum += ((ground_truth - d) / ground_truth).sum()
np_len += len(d)
np_ans = np_sum / np_len
assert m.compute() == pytest.approx(np_ans)
def test_integration():
def _test(y_pred, y, batch_size):
def update_fn(engine, batch):
idx = (engine.state.iteration - 1) * batch_size
y_true_batch = np_y[idx : idx + batch_size]
y_pred_batch = np_y_pred[idx : idx + batch_size]
return torch.from_numpy(y_pred_batch), torch.from_numpy(y_true_batch)
engine = Engine(update_fn)
m = MeanNormalizedBias()
m.attach(engine, "mnb")
np_y = y.numpy()
np_y_pred = y_pred.numpy()
data = list(range(y_pred.shape[0] // batch_size))
mnb = engine.run(data, max_epochs=1).metrics["mnb"]
np_sum = ((np_y - np_y_pred) / np_y).sum()
np_len = len(np_y_pred)
np_ans = np_sum / np_len
assert np_ans == pytest.approx(mnb)
def get_test_cases():
test_cases = [
(torch.rand(size=(100,)), torch.rand(size=(100,)), 10),
(torch.rand(size=(200,)), torch.rand(size=(200,)), 10),
(torch.rand(size=(100,)), torch.rand(size=(100,)), 20),
(torch.rand(size=(200,)), torch.rand(size=(200,)), 20),
]
return test_cases
for _ in range(10):
# check multiple random inputs as random exact occurencies are rare
test_cases = get_test_cases()
for y_pred, y, batch_size in test_cases:
_test(y_pred, y, batch_size)
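Outside the synthetic update_fn used in the tests, the same metric attaches to a standard ignite evaluator; model and val_loader below are placeholders for a real regression model and data loader:

from ignite.contrib.metrics.regression import MeanNormalizedBias
from ignite.engine import create_supervised_evaluator

# Attach the metric under the key 'mnb' and read it back after a run.
evaluator = create_supervised_evaluator(model, metrics={'mnb': MeanNormalizedBias()})
state = evaluator.run(val_loader)
print(state.metrics['mnb'])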
|
normal
|
{
"blob_id": "452f35fe2ae9609949a3f92ad7768fc37094a2f1",
"index": 3786,
"step-1": "<mask token>\n\n\ndef test_zero_sample():\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match=\n 'MeanNormalizedBias must have at least one example before it can be computed'\n ):\n m.compute()\n\n\ndef test_zero_gt():\n a = np.random.randn(4)\n ground_truth = np.zeros(4)\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match='The ground truth has 0.'):\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n\n\ndef test_wrong_input_shapes():\n m = MeanNormalizedBias()\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4), torch.rand(4, 1, 2)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_zero_sample():\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match=\n 'MeanNormalizedBias must have at least one example before it can be computed'\n ):\n m.compute()\n\n\ndef test_zero_gt():\n a = np.random.randn(4)\n ground_truth = np.zeros(4)\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match='The ground truth has 0.'):\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n\n\ndef test_wrong_input_shapes():\n m = MeanNormalizedBias()\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4), torch.rand(4, 1, 2)))\n\n\n<mask token>\n\n\ndef test_integration():\n\n def _test(y_pred, y, batch_size):\n\n def update_fn(engine, batch):\n idx = (engine.state.iteration - 1) * batch_size\n y_true_batch = np_y[idx:idx + batch_size]\n y_pred_batch = np_y_pred[idx:idx + batch_size]\n return torch.from_numpy(y_pred_batch), torch.from_numpy(\n y_true_batch)\n engine = Engine(update_fn)\n m = MeanNormalizedBias()\n m.attach(engine, 'mnb')\n np_y = y.numpy()\n np_y_pred = y_pred.numpy()\n data = list(range(y_pred.shape[0] // batch_size))\n mnb = engine.run(data, max_epochs=1).metrics['mnb']\n np_sum = ((np_y - np_y_pred) / np_y).sum()\n np_len = len(np_y_pred)\n np_ans = np_sum / np_len\n assert np_ans == pytest.approx(mnb)\n\n def get_test_cases():\n test_cases = [(torch.rand(size=(100,)), torch.rand(size=(100,)), 10\n ), (torch.rand(size=(200,)), torch.rand(size=(200,)), 10), (\n torch.rand(size=(100,)), torch.rand(size=(100,)), 20), (torch.\n rand(size=(200,)), torch.rand(size=(200,)), 20)]\n return test_cases\n for _ in range(10):\n test_cases = get_test_cases()\n for y_pred, y, batch_size in test_cases:\n _test(y_pred, y, batch_size)\n",
"step-3": "<mask token>\n\n\ndef test_zero_sample():\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match=\n 'MeanNormalizedBias must have at least one example before it can be computed'\n ):\n m.compute()\n\n\ndef test_zero_gt():\n a = np.random.randn(4)\n ground_truth = np.zeros(4)\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match='The ground truth has 0.'):\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n\n\ndef test_wrong_input_shapes():\n m = MeanNormalizedBias()\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4), torch.rand(4, 1, 2)))\n\n\ndef test_mean_error():\n a = np.random.randn(4)\n b = np.random.randn(4)\n c = np.random.randn(4)\n d = np.random.randn(4)\n ground_truth = np.random.randn(4)\n m = MeanNormalizedBias()\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n np_sum = ((ground_truth - a) / ground_truth).sum()\n np_len = len(a)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(b), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - b) / ground_truth).sum()\n np_len += len(b)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(c), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - c) / ground_truth).sum()\n np_len += len(c)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(d), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - d) / ground_truth).sum()\n np_len += len(d)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n\ndef test_integration():\n\n def _test(y_pred, y, batch_size):\n\n def update_fn(engine, batch):\n idx = (engine.state.iteration - 1) * batch_size\n y_true_batch = np_y[idx:idx + batch_size]\n y_pred_batch = np_y_pred[idx:idx + batch_size]\n return torch.from_numpy(y_pred_batch), torch.from_numpy(\n y_true_batch)\n engine = Engine(update_fn)\n m = MeanNormalizedBias()\n m.attach(engine, 'mnb')\n np_y = y.numpy()\n np_y_pred = y_pred.numpy()\n data = list(range(y_pred.shape[0] // batch_size))\n mnb = engine.run(data, max_epochs=1).metrics['mnb']\n np_sum = ((np_y - np_y_pred) / np_y).sum()\n np_len = len(np_y_pred)\n np_ans = np_sum / np_len\n assert np_ans == pytest.approx(mnb)\n\n def get_test_cases():\n test_cases = [(torch.rand(size=(100,)), torch.rand(size=(100,)), 10\n ), (torch.rand(size=(200,)), torch.rand(size=(200,)), 10), (\n torch.rand(size=(100,)), torch.rand(size=(100,)), 20), (torch.\n rand(size=(200,)), torch.rand(size=(200,)), 20)]\n return test_cases\n for _ in range(10):\n test_cases = get_test_cases()\n for y_pred, y, batch_size in test_cases:\n _test(y_pred, y, batch_size)\n",
"step-4": "import numpy as np\nimport pytest\nimport torch\nfrom ignite.contrib.metrics.regression import MeanNormalizedBias\nfrom ignite.engine import Engine\nfrom ignite.exceptions import NotComputableError\n\n\ndef test_zero_sample():\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match=\n 'MeanNormalizedBias must have at least one example before it can be computed'\n ):\n m.compute()\n\n\ndef test_zero_gt():\n a = np.random.randn(4)\n ground_truth = np.zeros(4)\n m = MeanNormalizedBias()\n with pytest.raises(NotComputableError, match='The ground truth has 0.'):\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n\n\ndef test_wrong_input_shapes():\n m = MeanNormalizedBias()\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4, 1, 2), torch.rand(4)))\n with pytest.raises(ValueError, match=\n 'Input data shapes should be the same, but given'):\n m.update((torch.rand(4), torch.rand(4, 1, 2)))\n\n\ndef test_mean_error():\n a = np.random.randn(4)\n b = np.random.randn(4)\n c = np.random.randn(4)\n d = np.random.randn(4)\n ground_truth = np.random.randn(4)\n m = MeanNormalizedBias()\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n np_sum = ((ground_truth - a) / ground_truth).sum()\n np_len = len(a)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(b), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - b) / ground_truth).sum()\n np_len += len(b)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(c), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - c) / ground_truth).sum()\n np_len += len(c)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n m.update((torch.from_numpy(d), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - d) / ground_truth).sum()\n np_len += len(d)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n\ndef test_integration():\n\n def _test(y_pred, y, batch_size):\n\n def update_fn(engine, batch):\n idx = (engine.state.iteration - 1) * batch_size\n y_true_batch = np_y[idx:idx + batch_size]\n y_pred_batch = np_y_pred[idx:idx + batch_size]\n return torch.from_numpy(y_pred_batch), torch.from_numpy(\n y_true_batch)\n engine = Engine(update_fn)\n m = MeanNormalizedBias()\n m.attach(engine, 'mnb')\n np_y = y.numpy()\n np_y_pred = y_pred.numpy()\n data = list(range(y_pred.shape[0] // batch_size))\n mnb = engine.run(data, max_epochs=1).metrics['mnb']\n np_sum = ((np_y - np_y_pred) / np_y).sum()\n np_len = len(np_y_pred)\n np_ans = np_sum / np_len\n assert np_ans == pytest.approx(mnb)\n\n def get_test_cases():\n test_cases = [(torch.rand(size=(100,)), torch.rand(size=(100,)), 10\n ), (torch.rand(size=(200,)), torch.rand(size=(200,)), 10), (\n torch.rand(size=(100,)), torch.rand(size=(100,)), 20), (torch.\n rand(size=(200,)), torch.rand(size=(200,)), 20)]\n return test_cases\n for _ in range(10):\n test_cases = get_test_cases()\n for y_pred, y, batch_size in test_cases:\n _test(y_pred, y, batch_size)\n",
"step-5": "import numpy as np\nimport pytest\nimport torch\n\nfrom ignite.contrib.metrics.regression import MeanNormalizedBias\nfrom ignite.engine import Engine\nfrom ignite.exceptions import NotComputableError\n\n\ndef test_zero_sample():\n m = MeanNormalizedBias()\n with pytest.raises(\n NotComputableError, match=r\"MeanNormalizedBias must have at least one example before it can be computed\"\n ):\n m.compute()\n\n\ndef test_zero_gt():\n a = np.random.randn(4)\n ground_truth = np.zeros(4)\n\n m = MeanNormalizedBias()\n\n with pytest.raises(NotComputableError, match=r\"The ground truth has 0.\"):\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n\n\ndef test_wrong_input_shapes():\n m = MeanNormalizedBias()\n\n with pytest.raises(ValueError, match=r\"Input data shapes should be the same, but given\"):\n m.update((torch.rand(4, 1, 2), torch.rand(4, 1)))\n\n with pytest.raises(ValueError, match=r\"Input data shapes should be the same, but given\"):\n m.update((torch.rand(4, 1), torch.rand(4, 1, 2)))\n\n with pytest.raises(ValueError, match=r\"Input data shapes should be the same, but given\"):\n m.update((torch.rand(4, 1, 2), torch.rand(4,),))\n\n with pytest.raises(ValueError, match=r\"Input data shapes should be the same, but given\"):\n m.update((torch.rand(4,), torch.rand(4, 1, 2),))\n\n\ndef test_mean_error():\n a = np.random.randn(4)\n b = np.random.randn(4)\n c = np.random.randn(4)\n d = np.random.randn(4)\n ground_truth = np.random.randn(4)\n\n m = MeanNormalizedBias()\n\n m.update((torch.from_numpy(a), torch.from_numpy(ground_truth)))\n np_sum = ((ground_truth - a) / ground_truth).sum()\n np_len = len(a)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n m.update((torch.from_numpy(b), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - b) / ground_truth).sum()\n np_len += len(b)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n m.update((torch.from_numpy(c), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - c) / ground_truth).sum()\n np_len += len(c)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n m.update((torch.from_numpy(d), torch.from_numpy(ground_truth)))\n np_sum += ((ground_truth - d) / ground_truth).sum()\n np_len += len(d)\n np_ans = np_sum / np_len\n assert m.compute() == pytest.approx(np_ans)\n\n\ndef test_integration():\n def _test(y_pred, y, batch_size):\n def update_fn(engine, batch):\n idx = (engine.state.iteration - 1) * batch_size\n y_true_batch = np_y[idx : idx + batch_size]\n y_pred_batch = np_y_pred[idx : idx + batch_size]\n return torch.from_numpy(y_pred_batch), torch.from_numpy(y_true_batch)\n\n engine = Engine(update_fn)\n\n m = MeanNormalizedBias()\n m.attach(engine, \"mnb\")\n\n np_y = y.numpy()\n np_y_pred = y_pred.numpy()\n\n data = list(range(y_pred.shape[0] // batch_size))\n mnb = engine.run(data, max_epochs=1).metrics[\"mnb\"]\n\n np_sum = ((np_y - np_y_pred) / np_y).sum()\n np_len = len(np_y_pred)\n np_ans = np_sum / np_len\n\n assert np_ans == pytest.approx(mnb)\n\n def get_test_cases():\n test_cases = [\n (torch.rand(size=(100,)), torch.rand(size=(100,)), 10),\n (torch.rand(size=(200,)), torch.rand(size=(200,)), 10),\n (torch.rand(size=(100,)), torch.rand(size=(100,)), 20),\n (torch.rand(size=(200,)), torch.rand(size=(200,)), 20),\n ]\n return test_cases\n\n for _ in range(10):\n # check multiple random inputs as random exact occurencies are rare\n test_cases = get_test_cases()\n for y_pred, y, batch_size in test_cases:\n 
_test(y_pred, y, batch_size)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def run():
rand_seed = None
stderr_filename = None
stdout_filename = None
if len(sys.argv) >= 4:
rand_seed = int(sys.argv[3])
if len(sys.argv) >= 3:
stderr_filename = sys.argv[2]
if len(sys.argv) >= 2:
stdout_filename = sys.argv[1]
stdout_file = None
stderr_file = None
if stdout_filename:
stdout_file = open(stdout_filename, 'r')
else:
stdout_file = StringIO()
if stderr_filename:
stderr_file = open(stderr_filename, 'r')
else:
stderr_file = StringIO()
if not rand_seed:
sys.stdout.write(stdout_file.read())
sys.stderr.write(stderr_file.read())
else:
random.seed(rand_seed)
stdout_len = file_len(stdout_file)
stdout_eof = False
stderr_eof = False
while not stdout_eof or not stderr_eof:
if not stdout_eof:
                r = random.randrange(stdout_len // 4)
                data = stdout_file.read(r)
                if len(data) < r:
                    stdout_eof = True
                sys.stdout.write(data)
            if not stderr_eof:
                r = random.randrange(stdout_len // 4)
data = stderr_file.read(r)
if len(data) < r:
stderr_eof = True
sys.stderr.write(data)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def file_len(file):
initial = file.tell()
file.seek(0, os.SEEK_END)
size = file.tell()
file.seek(initial)
return size
def run():
rand_seed = None
stderr_filename = None
stdout_filename = None
if len(sys.argv) >= 4:
rand_seed = int(sys.argv[3])
if len(sys.argv) >= 3:
stderr_filename = sys.argv[2]
if len(sys.argv) >= 2:
stdout_filename = sys.argv[1]
stdout_file = None
stderr_file = None
if stdout_filename:
stdout_file = open(stdout_filename, 'r')
else:
stdout_file = StringIO()
if stderr_filename:
stderr_file = open(stderr_filename, 'r')
else:
stderr_file = StringIO()
if not rand_seed:
sys.stdout.write(stdout_file.read())
sys.stderr.write(stderr_file.read())
else:
random.seed(rand_seed)
stdout_len = file_len(stdout_file)
stdout_eof = False
stderr_eof = False
while not stdout_eof or not stderr_eof:
if not stdout_eof:
                r = random.randrange(stdout_len // 4)
                data = stdout_file.read(r)
                if len(data) < r:
                    stdout_eof = True
                sys.stdout.write(data)
            if not stderr_eof:
                r = random.randrange(stdout_len // 4)
data = stderr_file.read(r)
if len(data) < r:
stderr_eof = True
sys.stderr.write(data)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if sys.version_info[0] < 3:
from StringIO import StringIO
else:
from io import StringIO
def file_len(file):
initial = file.tell()
file.seek(0, os.SEEK_END)
size = file.tell()
file.seek(initial)
return size
def run():
rand_seed = None
stderr_filename = None
stdout_filename = None
if len(sys.argv) >= 4:
rand_seed = int(sys.argv[3])
if len(sys.argv) >= 3:
stderr_filename = sys.argv[2]
if len(sys.argv) >= 2:
stdout_filename = sys.argv[1]
stdout_file = None
stderr_file = None
if stdout_filename:
stdout_file = open(stdout_filename, 'r')
else:
stdout_file = StringIO()
if stderr_filename:
stderr_file = open(stderr_filename, 'r')
else:
stderr_file = StringIO()
if not rand_seed:
sys.stdout.write(stdout_file.read())
sys.stderr.write(stderr_file.read())
else:
random.seed(rand_seed)
stdout_len = file_len(stdout_file)
stdout_eof = False
stderr_eof = False
while not stdout_eof or not stderr_eof:
if not stdout_eof:
r = random.randrange(stdout_len / 4)
data = stdout_file.read(r)
if len(data) < r:
stdout_eof = True
sys.stdout.write(data)
if not stderr_eof:
r = random.randrange(stdout_len / 4)
data = stderr_file.read(r)
if len(data) < r:
stderr_eof = True
sys.stderr.write(data)
if __name__ == '__main__':
run()
<|reserved_special_token_1|>
import sys
import os
import random
if sys.version_info[0] < 3:
from StringIO import StringIO
else:
from io import StringIO
def file_len(file):
initial = file.tell()
file.seek(0, os.SEEK_END)
size = file.tell()
file.seek(initial)
return size
def run():
rand_seed = None
stderr_filename = None
stdout_filename = None
if len(sys.argv) >= 4:
rand_seed = int(sys.argv[3])
if len(sys.argv) >= 3:
stderr_filename = sys.argv[2]
if len(sys.argv) >= 2:
stdout_filename = sys.argv[1]
stdout_file = None
stderr_file = None
if stdout_filename:
stdout_file = open(stdout_filename, 'r')
else:
stdout_file = StringIO()
if stderr_filename:
stderr_file = open(stderr_filename, 'r')
else:
stderr_file = StringIO()
if not rand_seed:
sys.stdout.write(stdout_file.read())
sys.stderr.write(stderr_file.read())
else:
random.seed(rand_seed)
stdout_len = file_len(stdout_file)
stdout_eof = False
stderr_eof = False
while not stdout_eof or not stderr_eof:
if not stdout_eof:
r = random.randrange(stdout_len / 4)
data = stdout_file.read(r)
if len(data) < r:
stdout_eof = True
sys.stdout.write(data)
if not stderr_eof:
r = random.randrange(stdout_len / 4)
data = stderr_file.read(r)
if len(data) < r:
stderr_eof = True
sys.stderr.write(data)
if __name__ == '__main__':
run()
|
flexible
|
{
"blob_id": "b7db0d2f4bbbc2c7763b9d2e6bede74979b65161",
"index": 4283,
"step-1": "<mask token>\n\n\ndef run():\n rand_seed = None\n stderr_filename = None\n stdout_filename = None\n if len(sys.argv) >= 4:\n rand_seed = int(sys.argv[3])\n if len(sys.argv) >= 3:\n stderr_filename = sys.argv[2]\n if len(sys.argv) >= 2:\n stdout_filename = sys.argv[1]\n stdout_file = None\n stderr_file = None\n if stdout_filename:\n stdout_file = open(stdout_filename, 'r')\n else:\n stdout_file = StringIO()\n if stderr_filename:\n stderr_file = open(stderr_filename, 'r')\n else:\n stderr_file = StringIO()\n if not rand_seed:\n sys.stdout.write(stdout_file.read())\n sys.stderr.write(stderr_file.read())\n else:\n random.seed(rand_seed)\n stdout_len = file_len(stdout_file)\n stdout_eof = False\n stderr_eof = False\n while not stdout_eof or not stderr_eof:\n if not stdout_eof:\n r = random.randrange(stdout_len / 4)\n data = stdout_file.read(r)\n if len(data) < r:\n stdout_eof = True\n sys.stdout.write(data)\n if not stderr_eof:\n r = random.randrange(stdout_len / 4)\n data = stderr_file.read(r)\n if len(data) < r:\n stderr_eof = True\n sys.stderr.write(data)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef file_len(file):\n initial = file.tell()\n file.seek(0, os.SEEK_END)\n size = file.tell()\n file.seek(initial)\n return size\n\n\ndef run():\n rand_seed = None\n stderr_filename = None\n stdout_filename = None\n if len(sys.argv) >= 4:\n rand_seed = int(sys.argv[3])\n if len(sys.argv) >= 3:\n stderr_filename = sys.argv[2]\n if len(sys.argv) >= 2:\n stdout_filename = sys.argv[1]\n stdout_file = None\n stderr_file = None\n if stdout_filename:\n stdout_file = open(stdout_filename, 'r')\n else:\n stdout_file = StringIO()\n if stderr_filename:\n stderr_file = open(stderr_filename, 'r')\n else:\n stderr_file = StringIO()\n if not rand_seed:\n sys.stdout.write(stdout_file.read())\n sys.stderr.write(stderr_file.read())\n else:\n random.seed(rand_seed)\n stdout_len = file_len(stdout_file)\n stdout_eof = False\n stderr_eof = False\n while not stdout_eof or not stderr_eof:\n if not stdout_eof:\n r = random.randrange(stdout_len / 4)\n data = stdout_file.read(r)\n if len(data) < r:\n stdout_eof = True\n sys.stdout.write(data)\n if not stderr_eof:\n r = random.randrange(stdout_len / 4)\n data = stderr_file.read(r)\n if len(data) < r:\n stderr_eof = True\n sys.stderr.write(data)\n\n\n<mask token>\n",
"step-3": "<mask token>\nif sys.version_info[0] < 3:\n from StringIO import StringIO\nelse:\n from io import StringIO\n\n\ndef file_len(file):\n initial = file.tell()\n file.seek(0, os.SEEK_END)\n size = file.tell()\n file.seek(initial)\n return size\n\n\ndef run():\n rand_seed = None\n stderr_filename = None\n stdout_filename = None\n if len(sys.argv) >= 4:\n rand_seed = int(sys.argv[3])\n if len(sys.argv) >= 3:\n stderr_filename = sys.argv[2]\n if len(sys.argv) >= 2:\n stdout_filename = sys.argv[1]\n stdout_file = None\n stderr_file = None\n if stdout_filename:\n stdout_file = open(stdout_filename, 'r')\n else:\n stdout_file = StringIO()\n if stderr_filename:\n stderr_file = open(stderr_filename, 'r')\n else:\n stderr_file = StringIO()\n if not rand_seed:\n sys.stdout.write(stdout_file.read())\n sys.stderr.write(stderr_file.read())\n else:\n random.seed(rand_seed)\n stdout_len = file_len(stdout_file)\n stdout_eof = False\n stderr_eof = False\n while not stdout_eof or not stderr_eof:\n if not stdout_eof:\n r = random.randrange(stdout_len / 4)\n data = stdout_file.read(r)\n if len(data) < r:\n stdout_eof = True\n sys.stdout.write(data)\n if not stderr_eof:\n r = random.randrange(stdout_len / 4)\n data = stderr_file.read(r)\n if len(data) < r:\n stderr_eof = True\n sys.stderr.write(data)\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "import sys\nimport os\nimport random\nif sys.version_info[0] < 3:\n from StringIO import StringIO\nelse:\n from io import StringIO\n\n\ndef file_len(file):\n initial = file.tell()\n file.seek(0, os.SEEK_END)\n size = file.tell()\n file.seek(initial)\n return size\n\n\ndef run():\n rand_seed = None\n stderr_filename = None\n stdout_filename = None\n if len(sys.argv) >= 4:\n rand_seed = int(sys.argv[3])\n if len(sys.argv) >= 3:\n stderr_filename = sys.argv[2]\n if len(sys.argv) >= 2:\n stdout_filename = sys.argv[1]\n stdout_file = None\n stderr_file = None\n if stdout_filename:\n stdout_file = open(stdout_filename, 'r')\n else:\n stdout_file = StringIO()\n if stderr_filename:\n stderr_file = open(stderr_filename, 'r')\n else:\n stderr_file = StringIO()\n if not rand_seed:\n sys.stdout.write(stdout_file.read())\n sys.stderr.write(stderr_file.read())\n else:\n random.seed(rand_seed)\n stdout_len = file_len(stdout_file)\n stdout_eof = False\n stderr_eof = False\n while not stdout_eof or not stderr_eof:\n if not stdout_eof:\n r = random.randrange(stdout_len / 4)\n data = stdout_file.read(r)\n if len(data) < r:\n stdout_eof = True\n sys.stdout.write(data)\n if not stderr_eof:\n r = random.randrange(stdout_len / 4)\n data = stderr_file.read(r)\n if len(data) < r:\n stderr_eof = True\n sys.stderr.write(data)\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class SuiteResultDTO:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_project(self, project):
if project != '':
self.__project = str(project)
def set_version(self, version):
if version != '':
self.__version = str(version)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_skipped(self, skipped):
self.__skipped = int(skipped)
def set_error(self, error):
self.__error = int(error)
def set_duration_sec(self, duration_sec):
self.__duration_sec = int(duration_sec)
def set_disabled(self, disabled):
self.__disabled = int(disabled)
<|reserved_special_token_0|>
def set_suite_result_dict(self, suite_result_dict):
SuiteResultDTO.__suite_result_dict = suite_result_dict
<|reserved_special_token_0|>
def set_tag_values(self, tags_dict):
suite_tags = SuiteResultDTO.__suite_result_dict
suite_tags['tags'].update(tags_dict)
def set_field_values(self, fields_dict):
suite_fields = SuiteResultDTO.__suite_result_dict
suite_fields['fields'].update(fields_dict)
def set_suite_custom_values(self, influxdb_values):
if influxdb_values and influxdb_values != '':
if isinstance(influxdb_values, str):
influxdb_values = json.loads(influxdb_values)
self.set_field_values(influxdb_values['fields']['suite_result'])
self.set_tag_values(influxdb_values['tags']['suite_result'])
def get_suite_result_dto(self, terminalreporter, global_values,
influxdb_components, db_measurement_name_for_suite):
execution_time = round(time.time() - terminalreporter._sessionstarttime
)
suite_results_dict = DataManager().get_results_dict(terminalreporter
.stats)
self.set_passed(suite_results_dict.get('passed'))
self.set_failed(suite_results_dict.get('failed'))
self.set_skipped(suite_results_dict.get('skipped'))
self.set_error(suite_results_dict.get('error'))
self.set_disabled(suite_results_dict.get('disabled'))
self.set_duration_sec(execution_time)
self.set_retries(suite_results_dict.get('reruns'))
self.set_run(global_values.get('run'))
self.set_project(global_values.get('project'))
self.set_version(global_values.get('version'))
self.set_suite_custom_values(global_values.get('influxdb_values'))
self.merge_suite_result(global_values.get('merged'),
influxdb_components, db_measurement_name_for_suite,
global_values.get('run'))
return self
def merge_suite_result(self, merged_enabled, influxdb_components,
db_measurement_name_for_suite, run_id_value):
existing_suite_result = influxdb_components.get_results_by_run(
db_measurement_name_for_suite, run_id_value)
old_suite_list = list(existing_suite_result.get_points(measurement=
f'{db_measurement_name_for_suite}'))
if len(old_suite_list) != 0 and merged_enabled:
old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[
0]['fail'] + old_suite_list[0]['skip']
old_disabled_tests_count = old_suite_list[0]['disabled']
self.set_passed(old_suite_total_count - self.__failed - self.
__skipped)
self.set_disabled(old_disabled_tests_count)
influxdb_components.delete_results_by_run(
db_measurement_name_for_suite, run_id_value)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SuiteResultDTO:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_project(self, project):
if project != '':
self.__project = str(project)
def set_version(self, version):
if version != '':
self.__version = str(version)
<|reserved_special_token_0|>
def set_failed(self, failed):
self.__failed = int(failed)
def set_skipped(self, skipped):
self.__skipped = int(skipped)
def set_error(self, error):
self.__error = int(error)
def set_duration_sec(self, duration_sec):
self.__duration_sec = int(duration_sec)
def set_disabled(self, disabled):
self.__disabled = int(disabled)
<|reserved_special_token_0|>
def set_suite_result_dict(self, suite_result_dict):
SuiteResultDTO.__suite_result_dict = suite_result_dict
<|reserved_special_token_0|>
def set_tag_values(self, tags_dict):
suite_tags = SuiteResultDTO.__suite_result_dict
suite_tags['tags'].update(tags_dict)
def set_field_values(self, fields_dict):
suite_fields = SuiteResultDTO.__suite_result_dict
suite_fields['fields'].update(fields_dict)
def set_suite_custom_values(self, influxdb_values):
if influxdb_values and influxdb_values != '':
if isinstance(influxdb_values, str):
influxdb_values = json.loads(influxdb_values)
self.set_field_values(influxdb_values['fields']['suite_result'])
self.set_tag_values(influxdb_values['tags']['suite_result'])
def get_suite_result_dto(self, terminalreporter, global_values,
influxdb_components, db_measurement_name_for_suite):
execution_time = round(time.time() - terminalreporter._sessionstarttime
)
suite_results_dict = DataManager().get_results_dict(terminalreporter
.stats)
self.set_passed(suite_results_dict.get('passed'))
self.set_failed(suite_results_dict.get('failed'))
self.set_skipped(suite_results_dict.get('skipped'))
self.set_error(suite_results_dict.get('error'))
self.set_disabled(suite_results_dict.get('disabled'))
self.set_duration_sec(execution_time)
self.set_retries(suite_results_dict.get('reruns'))
self.set_run(global_values.get('run'))
self.set_project(global_values.get('project'))
self.set_version(global_values.get('version'))
self.set_suite_custom_values(global_values.get('influxdb_values'))
self.merge_suite_result(global_values.get('merged'),
influxdb_components, db_measurement_name_for_suite,
global_values.get('run'))
return self
def merge_suite_result(self, merged_enabled, influxdb_components,
db_measurement_name_for_suite, run_id_value):
existing_suite_result = influxdb_components.get_results_by_run(
db_measurement_name_for_suite, run_id_value)
old_suite_list = list(existing_suite_result.get_points(measurement=
f'{db_measurement_name_for_suite}'))
if len(old_suite_list) != 0 and merged_enabled:
old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[
0]['fail'] + old_suite_list[0]['skip']
old_disabled_tests_count = old_suite_list[0]['disabled']
self.set_passed(old_suite_total_count - self.__failed - self.
__skipped)
self.set_disabled(old_disabled_tests_count)
influxdb_components.delete_results_by_run(
db_measurement_name_for_suite, run_id_value)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SuiteResultDTO:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_run(self, run):
if run != '':
self.__run = str(run)
def set_project(self, project):
if project != '':
self.__project = str(project)
def set_version(self, version):
if version != '':
self.__version = str(version)
<|reserved_special_token_0|>
def set_failed(self, failed):
self.__failed = int(failed)
def set_skipped(self, skipped):
self.__skipped = int(skipped)
def set_error(self, error):
self.__error = int(error)
def set_duration_sec(self, duration_sec):
self.__duration_sec = int(duration_sec)
def set_disabled(self, disabled):
self.__disabled = int(disabled)
<|reserved_special_token_0|>
def set_suite_result_dict(self, suite_result_dict):
SuiteResultDTO.__suite_result_dict = suite_result_dict
<|reserved_special_token_0|>
def set_tag_values(self, tags_dict):
suite_tags = SuiteResultDTO.__suite_result_dict
suite_tags['tags'].update(tags_dict)
def set_field_values(self, fields_dict):
suite_fields = SuiteResultDTO.__suite_result_dict
suite_fields['fields'].update(fields_dict)
def set_suite_custom_values(self, influxdb_values):
if influxdb_values and influxdb_values != '':
if isinstance(influxdb_values, str):
influxdb_values = json.loads(influxdb_values)
self.set_field_values(influxdb_values['fields']['suite_result'])
self.set_tag_values(influxdb_values['tags']['suite_result'])
def get_suite_result_dto(self, terminalreporter, global_values,
influxdb_components, db_measurement_name_for_suite):
execution_time = round(time.time() - terminalreporter._sessionstarttime
)
suite_results_dict = DataManager().get_results_dict(terminalreporter
.stats)
self.set_passed(suite_results_dict.get('passed'))
self.set_failed(suite_results_dict.get('failed'))
self.set_skipped(suite_results_dict.get('skipped'))
self.set_error(suite_results_dict.get('error'))
self.set_disabled(suite_results_dict.get('disabled'))
self.set_duration_sec(execution_time)
self.set_retries(suite_results_dict.get('reruns'))
self.set_run(global_values.get('run'))
self.set_project(global_values.get('project'))
self.set_version(global_values.get('version'))
self.set_suite_custom_values(global_values.get('influxdb_values'))
self.merge_suite_result(global_values.get('merged'),
influxdb_components, db_measurement_name_for_suite,
global_values.get('run'))
return self
def merge_suite_result(self, merged_enabled, influxdb_components,
db_measurement_name_for_suite, run_id_value):
existing_suite_result = influxdb_components.get_results_by_run(
db_measurement_name_for_suite, run_id_value)
old_suite_list = list(existing_suite_result.get_points(measurement=
f'{db_measurement_name_for_suite}'))
if len(old_suite_list) != 0 and merged_enabled:
old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[
0]['fail'] + old_suite_list[0]['skip']
old_disabled_tests_count = old_suite_list[0]['disabled']
self.set_passed(old_suite_total_count - self.__failed - self.
__skipped)
self.set_disabled(old_disabled_tests_count)
influxdb_components.delete_results_by_run(
db_measurement_name_for_suite, run_id_value)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SuiteResultDTO:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_run(self, run):
if run != '':
self.__run = str(run)
def set_project(self, project):
if project != '':
self.__project = str(project)
def set_version(self, version):
if version != '':
self.__version = str(version)
<|reserved_special_token_0|>
def set_failed(self, failed):
self.__failed = int(failed)
def set_skipped(self, skipped):
self.__skipped = int(skipped)
def set_error(self, error):
self.__error = int(error)
def set_duration_sec(self, duration_sec):
self.__duration_sec = int(duration_sec)
def set_disabled(self, disabled):
self.__disabled = int(disabled)
<|reserved_special_token_0|>
def set_suite_result_dict(self, suite_result_dict):
SuiteResultDTO.__suite_result_dict = suite_result_dict
def get_suite_json(self, measurement_name):
json_body = [{'measurement': measurement_name, 'tags': {'run': self
.__run, 'project': self.__project, 'version': self.__version},
'fields': {'pass': self.__passed, 'fail': self.__failed, 'skip':
self.__skipped, 'error': self.__error, 'disabled': self.
__disabled, 'duration_sec': self.__duration_sec, 'retries':
self.__retries}}]
tags_dict = SuiteResultDTO.__suite_result_dict['tags']
for key in tags_dict:
suite_tags = json_body[0]['tags']
suite_tags.update({key: tags_dict[key]})
fields_dict = SuiteResultDTO.__suite_result_dict['fields']
for key in fields_dict:
suite_fields = json_body[0]['fields']
suite_fields.update({key: fields_dict[key]})
return json_body
def set_tag_values(self, tags_dict):
suite_tags = SuiteResultDTO.__suite_result_dict
suite_tags['tags'].update(tags_dict)
def set_field_values(self, fields_dict):
suite_fields = SuiteResultDTO.__suite_result_dict
suite_fields['fields'].update(fields_dict)
def set_suite_custom_values(self, influxdb_values):
if influxdb_values and influxdb_values != '':
if isinstance(influxdb_values, str):
influxdb_values = json.loads(influxdb_values)
self.set_field_values(influxdb_values['fields']['suite_result'])
self.set_tag_values(influxdb_values['tags']['suite_result'])
def get_suite_result_dto(self, terminalreporter, global_values,
influxdb_components, db_measurement_name_for_suite):
execution_time = round(time.time() - terminalreporter._sessionstarttime
)
suite_results_dict = DataManager().get_results_dict(terminalreporter
.stats)
self.set_passed(suite_results_dict.get('passed'))
self.set_failed(suite_results_dict.get('failed'))
self.set_skipped(suite_results_dict.get('skipped'))
self.set_error(suite_results_dict.get('error'))
self.set_disabled(suite_results_dict.get('disabled'))
self.set_duration_sec(execution_time)
self.set_retries(suite_results_dict.get('reruns'))
self.set_run(global_values.get('run'))
self.set_project(global_values.get('project'))
self.set_version(global_values.get('version'))
self.set_suite_custom_values(global_values.get('influxdb_values'))
self.merge_suite_result(global_values.get('merged'),
influxdb_components, db_measurement_name_for_suite,
global_values.get('run'))
return self
def merge_suite_result(self, merged_enabled, influxdb_components,
db_measurement_name_for_suite, run_id_value):
existing_suite_result = influxdb_components.get_results_by_run(
db_measurement_name_for_suite, run_id_value)
old_suite_list = list(existing_suite_result.get_points(measurement=
f'{db_measurement_name_for_suite}'))
if len(old_suite_list) != 0 and merged_enabled:
old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[
0]['fail'] + old_suite_list[0]['skip']
old_disabled_tests_count = old_suite_list[0]['disabled']
self.set_passed(old_suite_total_count - self.__failed - self.
__skipped)
self.set_disabled(old_disabled_tests_count)
influxdb_components.delete_results_by_run(
db_measurement_name_for_suite, run_id_value)
<|reserved_special_token_1|>
import json
import time
from pytest_influxdb.data_manager import DataManager
class SuiteResultDTO:
__run = 'UNDEFINED'
__project = 'UNDEFINED'
__version = 'UNDEFINED'
__passed = None
__failed = None
__skipped = None
__error = None
__duration_sec = 0
__disabled = 0
__retries = 0
__suite_result_dict = {'tags': {}, 'fields': {}}
def set_run(self, run):
if run != '':
self.__run = str(run)
def set_project(self, project):
if project != '':
self.__project = str(project)
def set_version(self, version):
if version != '':
self.__version = str(version)
def set_passed(self, passed):
self.__passed = int(passed)
def set_failed(self, failed):
self.__failed = int(failed)
def set_skipped(self, skipped):
self.__skipped = int(skipped)
def set_error(self, error):
self.__error = int(error)
def set_duration_sec(self, duration_sec):
self.__duration_sec = int(duration_sec)
def set_disabled(self, disabled):
self.__disabled = int(disabled)
def set_retries(self, retries):
self.__retries = int(retries)
def set_suite_result_dict(self, suite_result_dict):
SuiteResultDTO.__suite_result_dict = suite_result_dict
def get_suite_json(self, measurement_name):
json_body = [
{
"measurement": measurement_name,
"tags": {
"run": self.__run,
"project": self.__project,
"version": self.__version
},
"fields": {
"pass": self.__passed,
"fail": self.__failed,
"skip": self.__skipped,
"error": self.__error,
"disabled": self.__disabled,
"duration_sec": self.__duration_sec,
"retries": self.__retries
}
}
]
# Appending custom values to json_body
tags_dict = SuiteResultDTO.__suite_result_dict['tags']
for key in tags_dict:
suite_tags = json_body[0]['tags']
suite_tags.update({key: tags_dict[key]})
fields_dict = SuiteResultDTO.__suite_result_dict['fields']
for key in fields_dict:
suite_fields = json_body[0]['fields']
suite_fields.update({key: fields_dict[key]})
return json_body
def set_tag_values(self, tags_dict):
suite_tags = SuiteResultDTO.__suite_result_dict
suite_tags['tags'].update(tags_dict)
def set_field_values(self, fields_dict):
suite_fields = SuiteResultDTO.__suite_result_dict
suite_fields['fields'].update(fields_dict)
def set_suite_custom_values(self, influxdb_values):
if influxdb_values and influxdb_values != '':
if isinstance(influxdb_values, str):
influxdb_values = json.loads(influxdb_values)
self.set_field_values(influxdb_values['fields']['suite_result'])
self.set_tag_values(influxdb_values['tags']['suite_result'])
def get_suite_result_dto(self, terminalreporter, global_values, influxdb_components, db_measurement_name_for_suite):
# Preparing execution time and suite results from the terminalreporter (where all the data collected)
execution_time = round(time.time() - terminalreporter._sessionstarttime)
suite_results_dict = DataManager().get_results_dict(terminalreporter.stats)
# Setting the values to the suite_result_dto instance
self.set_passed(suite_results_dict.get('passed'))
self.set_failed(suite_results_dict.get('failed'))
self.set_skipped(suite_results_dict.get('skipped'))
self.set_error(suite_results_dict.get('error'))
self.set_disabled(suite_results_dict.get('disabled'))
self.set_duration_sec(execution_time)
self.set_retries(suite_results_dict.get('reruns'))
self.set_run(global_values.get("run"))
self.set_project(global_values.get("project"))
self.set_version(global_values.get("version"))
self.set_suite_custom_values(global_values.get("influxdb_values"))
self.merge_suite_result(global_values.get('merged'), influxdb_components,
db_measurement_name_for_suite, global_values.get("run"))
return self
def merge_suite_result(self, merged_enabled, influxdb_components, db_measurement_name_for_suite, run_id_value):
# Merging the existing suite results with the suite_results from db for the same run
# if 'merged' config value is True
existing_suite_result = influxdb_components.get_results_by_run(db_measurement_name_for_suite, run_id_value)
old_suite_list = list(existing_suite_result.get_points(measurement=f'{db_measurement_name_for_suite}'))
if len(old_suite_list) != 0 and merged_enabled:
old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[0]['fail'] + old_suite_list[0][
'skip']
old_disabled_tests_count = old_suite_list[0]['disabled']
self.set_passed(
old_suite_total_count - self.__failed - self.__skipped)
self.set_disabled(old_disabled_tests_count)
influxdb_components.delete_results_by_run(db_measurement_name_for_suite, run_id_value)
|
flexible
|
{
"blob_id": "84c3427a994bd6c57d9fa8449e4fc7a3de801170",
"index": 9271,
"step-1": "<mask token>\n\n\nclass SuiteResultDTO:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_project(self, project):\n if project != '':\n self.__project = str(project)\n\n def set_version(self, version):\n if version != '':\n self.__version = str(version)\n <mask token>\n <mask token>\n\n def set_skipped(self, skipped):\n self.__skipped = int(skipped)\n\n def set_error(self, error):\n self.__error = int(error)\n\n def set_duration_sec(self, duration_sec):\n self.__duration_sec = int(duration_sec)\n\n def set_disabled(self, disabled):\n self.__disabled = int(disabled)\n <mask token>\n\n def set_suite_result_dict(self, suite_result_dict):\n SuiteResultDTO.__suite_result_dict = suite_result_dict\n <mask token>\n\n def set_tag_values(self, tags_dict):\n suite_tags = SuiteResultDTO.__suite_result_dict\n suite_tags['tags'].update(tags_dict)\n\n def set_field_values(self, fields_dict):\n suite_fields = SuiteResultDTO.__suite_result_dict\n suite_fields['fields'].update(fields_dict)\n\n def set_suite_custom_values(self, influxdb_values):\n if influxdb_values and influxdb_values != '':\n if isinstance(influxdb_values, str):\n influxdb_values = json.loads(influxdb_values)\n self.set_field_values(influxdb_values['fields']['suite_result'])\n self.set_tag_values(influxdb_values['tags']['suite_result'])\n\n def get_suite_result_dto(self, terminalreporter, global_values,\n influxdb_components, db_measurement_name_for_suite):\n execution_time = round(time.time() - terminalreporter._sessionstarttime\n )\n suite_results_dict = DataManager().get_results_dict(terminalreporter\n .stats)\n self.set_passed(suite_results_dict.get('passed'))\n self.set_failed(suite_results_dict.get('failed'))\n self.set_skipped(suite_results_dict.get('skipped'))\n self.set_error(suite_results_dict.get('error'))\n self.set_disabled(suite_results_dict.get('disabled'))\n self.set_duration_sec(execution_time)\n self.set_retries(suite_results_dict.get('reruns'))\n self.set_run(global_values.get('run'))\n self.set_project(global_values.get('project'))\n self.set_version(global_values.get('version'))\n self.set_suite_custom_values(global_values.get('influxdb_values'))\n self.merge_suite_result(global_values.get('merged'),\n influxdb_components, db_measurement_name_for_suite,\n global_values.get('run'))\n return self\n\n def merge_suite_result(self, merged_enabled, influxdb_components,\n db_measurement_name_for_suite, run_id_value):\n existing_suite_result = influxdb_components.get_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n old_suite_list = list(existing_suite_result.get_points(measurement=\n f'{db_measurement_name_for_suite}'))\n if len(old_suite_list) != 0 and merged_enabled:\n old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[\n 0]['fail'] + old_suite_list[0]['skip']\n old_disabled_tests_count = old_suite_list[0]['disabled']\n self.set_passed(old_suite_total_count - self.__failed - self.\n __skipped)\n self.set_disabled(old_disabled_tests_count)\n influxdb_components.delete_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n",
"step-2": "<mask token>\n\n\nclass SuiteResultDTO:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_project(self, project):\n if project != '':\n self.__project = str(project)\n\n def set_version(self, version):\n if version != '':\n self.__version = str(version)\n <mask token>\n\n def set_failed(self, failed):\n self.__failed = int(failed)\n\n def set_skipped(self, skipped):\n self.__skipped = int(skipped)\n\n def set_error(self, error):\n self.__error = int(error)\n\n def set_duration_sec(self, duration_sec):\n self.__duration_sec = int(duration_sec)\n\n def set_disabled(self, disabled):\n self.__disabled = int(disabled)\n <mask token>\n\n def set_suite_result_dict(self, suite_result_dict):\n SuiteResultDTO.__suite_result_dict = suite_result_dict\n <mask token>\n\n def set_tag_values(self, tags_dict):\n suite_tags = SuiteResultDTO.__suite_result_dict\n suite_tags['tags'].update(tags_dict)\n\n def set_field_values(self, fields_dict):\n suite_fields = SuiteResultDTO.__suite_result_dict\n suite_fields['fields'].update(fields_dict)\n\n def set_suite_custom_values(self, influxdb_values):\n if influxdb_values and influxdb_values != '':\n if isinstance(influxdb_values, str):\n influxdb_values = json.loads(influxdb_values)\n self.set_field_values(influxdb_values['fields']['suite_result'])\n self.set_tag_values(influxdb_values['tags']['suite_result'])\n\n def get_suite_result_dto(self, terminalreporter, global_values,\n influxdb_components, db_measurement_name_for_suite):\n execution_time = round(time.time() - terminalreporter._sessionstarttime\n )\n suite_results_dict = DataManager().get_results_dict(terminalreporter\n .stats)\n self.set_passed(suite_results_dict.get('passed'))\n self.set_failed(suite_results_dict.get('failed'))\n self.set_skipped(suite_results_dict.get('skipped'))\n self.set_error(suite_results_dict.get('error'))\n self.set_disabled(suite_results_dict.get('disabled'))\n self.set_duration_sec(execution_time)\n self.set_retries(suite_results_dict.get('reruns'))\n self.set_run(global_values.get('run'))\n self.set_project(global_values.get('project'))\n self.set_version(global_values.get('version'))\n self.set_suite_custom_values(global_values.get('influxdb_values'))\n self.merge_suite_result(global_values.get('merged'),\n influxdb_components, db_measurement_name_for_suite,\n global_values.get('run'))\n return self\n\n def merge_suite_result(self, merged_enabled, influxdb_components,\n db_measurement_name_for_suite, run_id_value):\n existing_suite_result = influxdb_components.get_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n old_suite_list = list(existing_suite_result.get_points(measurement=\n f'{db_measurement_name_for_suite}'))\n if len(old_suite_list) != 0 and merged_enabled:\n old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[\n 0]['fail'] + old_suite_list[0]['skip']\n old_disabled_tests_count = old_suite_list[0]['disabled']\n self.set_passed(old_suite_total_count - self.__failed - self.\n __skipped)\n self.set_disabled(old_disabled_tests_count)\n influxdb_components.delete_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n",
"step-3": "<mask token>\n\n\nclass SuiteResultDTO:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_run(self, run):\n if run != '':\n self.__run = str(run)\n\n def set_project(self, project):\n if project != '':\n self.__project = str(project)\n\n def set_version(self, version):\n if version != '':\n self.__version = str(version)\n <mask token>\n\n def set_failed(self, failed):\n self.__failed = int(failed)\n\n def set_skipped(self, skipped):\n self.__skipped = int(skipped)\n\n def set_error(self, error):\n self.__error = int(error)\n\n def set_duration_sec(self, duration_sec):\n self.__duration_sec = int(duration_sec)\n\n def set_disabled(self, disabled):\n self.__disabled = int(disabled)\n <mask token>\n\n def set_suite_result_dict(self, suite_result_dict):\n SuiteResultDTO.__suite_result_dict = suite_result_dict\n <mask token>\n\n def set_tag_values(self, tags_dict):\n suite_tags = SuiteResultDTO.__suite_result_dict\n suite_tags['tags'].update(tags_dict)\n\n def set_field_values(self, fields_dict):\n suite_fields = SuiteResultDTO.__suite_result_dict\n suite_fields['fields'].update(fields_dict)\n\n def set_suite_custom_values(self, influxdb_values):\n if influxdb_values and influxdb_values != '':\n if isinstance(influxdb_values, str):\n influxdb_values = json.loads(influxdb_values)\n self.set_field_values(influxdb_values['fields']['suite_result'])\n self.set_tag_values(influxdb_values['tags']['suite_result'])\n\n def get_suite_result_dto(self, terminalreporter, global_values,\n influxdb_components, db_measurement_name_for_suite):\n execution_time = round(time.time() - terminalreporter._sessionstarttime\n )\n suite_results_dict = DataManager().get_results_dict(terminalreporter\n .stats)\n self.set_passed(suite_results_dict.get('passed'))\n self.set_failed(suite_results_dict.get('failed'))\n self.set_skipped(suite_results_dict.get('skipped'))\n self.set_error(suite_results_dict.get('error'))\n self.set_disabled(suite_results_dict.get('disabled'))\n self.set_duration_sec(execution_time)\n self.set_retries(suite_results_dict.get('reruns'))\n self.set_run(global_values.get('run'))\n self.set_project(global_values.get('project'))\n self.set_version(global_values.get('version'))\n self.set_suite_custom_values(global_values.get('influxdb_values'))\n self.merge_suite_result(global_values.get('merged'),\n influxdb_components, db_measurement_name_for_suite,\n global_values.get('run'))\n return self\n\n def merge_suite_result(self, merged_enabled, influxdb_components,\n db_measurement_name_for_suite, run_id_value):\n existing_suite_result = influxdb_components.get_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n old_suite_list = list(existing_suite_result.get_points(measurement=\n f'{db_measurement_name_for_suite}'))\n if len(old_suite_list) != 0 and merged_enabled:\n old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[\n 0]['fail'] + old_suite_list[0]['skip']\n old_disabled_tests_count = old_suite_list[0]['disabled']\n self.set_passed(old_suite_total_count - self.__failed - self.\n __skipped)\n self.set_disabled(old_disabled_tests_count)\n influxdb_components.delete_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n",
"step-4": "<mask token>\n\n\nclass SuiteResultDTO:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_run(self, run):\n if run != '':\n self.__run = str(run)\n\n def set_project(self, project):\n if project != '':\n self.__project = str(project)\n\n def set_version(self, version):\n if version != '':\n self.__version = str(version)\n <mask token>\n\n def set_failed(self, failed):\n self.__failed = int(failed)\n\n def set_skipped(self, skipped):\n self.__skipped = int(skipped)\n\n def set_error(self, error):\n self.__error = int(error)\n\n def set_duration_sec(self, duration_sec):\n self.__duration_sec = int(duration_sec)\n\n def set_disabled(self, disabled):\n self.__disabled = int(disabled)\n <mask token>\n\n def set_suite_result_dict(self, suite_result_dict):\n SuiteResultDTO.__suite_result_dict = suite_result_dict\n\n def get_suite_json(self, measurement_name):\n json_body = [{'measurement': measurement_name, 'tags': {'run': self\n .__run, 'project': self.__project, 'version': self.__version},\n 'fields': {'pass': self.__passed, 'fail': self.__failed, 'skip':\n self.__skipped, 'error': self.__error, 'disabled': self.\n __disabled, 'duration_sec': self.__duration_sec, 'retries':\n self.__retries}}]\n tags_dict = SuiteResultDTO.__suite_result_dict['tags']\n for key in tags_dict:\n suite_tags = json_body[0]['tags']\n suite_tags.update({key: tags_dict[key]})\n fields_dict = SuiteResultDTO.__suite_result_dict['fields']\n for key in fields_dict:\n suite_fields = json_body[0]['fields']\n suite_fields.update({key: fields_dict[key]})\n return json_body\n\n def set_tag_values(self, tags_dict):\n suite_tags = SuiteResultDTO.__suite_result_dict\n suite_tags['tags'].update(tags_dict)\n\n def set_field_values(self, fields_dict):\n suite_fields = SuiteResultDTO.__suite_result_dict\n suite_fields['fields'].update(fields_dict)\n\n def set_suite_custom_values(self, influxdb_values):\n if influxdb_values and influxdb_values != '':\n if isinstance(influxdb_values, str):\n influxdb_values = json.loads(influxdb_values)\n self.set_field_values(influxdb_values['fields']['suite_result'])\n self.set_tag_values(influxdb_values['tags']['suite_result'])\n\n def get_suite_result_dto(self, terminalreporter, global_values,\n influxdb_components, db_measurement_name_for_suite):\n execution_time = round(time.time() - terminalreporter._sessionstarttime\n )\n suite_results_dict = DataManager().get_results_dict(terminalreporter\n .stats)\n self.set_passed(suite_results_dict.get('passed'))\n self.set_failed(suite_results_dict.get('failed'))\n self.set_skipped(suite_results_dict.get('skipped'))\n self.set_error(suite_results_dict.get('error'))\n self.set_disabled(suite_results_dict.get('disabled'))\n self.set_duration_sec(execution_time)\n self.set_retries(suite_results_dict.get('reruns'))\n self.set_run(global_values.get('run'))\n self.set_project(global_values.get('project'))\n self.set_version(global_values.get('version'))\n self.set_suite_custom_values(global_values.get('influxdb_values'))\n self.merge_suite_result(global_values.get('merged'),\n influxdb_components, db_measurement_name_for_suite,\n global_values.get('run'))\n return self\n\n def merge_suite_result(self, merged_enabled, influxdb_components,\n db_measurement_name_for_suite, run_id_value):\n existing_suite_result = influxdb_components.get_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n old_suite_list = 
list(existing_suite_result.get_points(measurement=\n f'{db_measurement_name_for_suite}'))\n if len(old_suite_list) != 0 and merged_enabled:\n old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[\n 0]['fail'] + old_suite_list[0]['skip']\n old_disabled_tests_count = old_suite_list[0]['disabled']\n self.set_passed(old_suite_total_count - self.__failed - self.\n __skipped)\n self.set_disabled(old_disabled_tests_count)\n influxdb_components.delete_results_by_run(\n db_measurement_name_for_suite, run_id_value)\n",
"step-5": "import json\nimport time\n\nfrom pytest_influxdb.data_manager import DataManager\n\n\nclass SuiteResultDTO:\n __run = 'UNDEFINED'\n __project = 'UNDEFINED'\n __version = 'UNDEFINED'\n __passed = None\n __failed = None\n __skipped = None\n __error = None\n __duration_sec = 0\n __disabled = 0\n __retries = 0\n __suite_result_dict = {'tags': {}, 'fields': {}}\n\n def set_run(self, run):\n if run != '':\n self.__run = str(run)\n\n def set_project(self, project):\n if project != '':\n self.__project = str(project)\n\n def set_version(self, version):\n if version != '':\n self.__version = str(version)\n\n def set_passed(self, passed):\n self.__passed = int(passed)\n\n def set_failed(self, failed):\n self.__failed = int(failed)\n\n def set_skipped(self, skipped):\n self.__skipped = int(skipped)\n\n def set_error(self, error):\n self.__error = int(error)\n\n def set_duration_sec(self, duration_sec):\n self.__duration_sec = int(duration_sec)\n\n def set_disabled(self, disabled):\n self.__disabled = int(disabled)\n\n def set_retries(self, retries):\n self.__retries = int(retries)\n\n def set_suite_result_dict(self, suite_result_dict):\n SuiteResultDTO.__suite_result_dict = suite_result_dict\n\n def get_suite_json(self, measurement_name):\n json_body = [\n {\n \"measurement\": measurement_name,\n \"tags\": {\n \"run\": self.__run,\n \"project\": self.__project,\n \"version\": self.__version\n },\n \"fields\": {\n \"pass\": self.__passed,\n \"fail\": self.__failed,\n \"skip\": self.__skipped,\n \"error\": self.__error,\n \"disabled\": self.__disabled,\n \"duration_sec\": self.__duration_sec,\n \"retries\": self.__retries\n }\n }\n ]\n\n # Appending custom values to json_body\n tags_dict = SuiteResultDTO.__suite_result_dict['tags']\n for key in tags_dict:\n suite_tags = json_body[0]['tags']\n suite_tags.update({key: tags_dict[key]})\n fields_dict = SuiteResultDTO.__suite_result_dict['fields']\n for key in fields_dict:\n suite_fields = json_body[0]['fields']\n suite_fields.update({key: fields_dict[key]})\n\n return json_body\n\n def set_tag_values(self, tags_dict):\n suite_tags = SuiteResultDTO.__suite_result_dict\n suite_tags['tags'].update(tags_dict)\n\n def set_field_values(self, fields_dict):\n suite_fields = SuiteResultDTO.__suite_result_dict\n suite_fields['fields'].update(fields_dict)\n\n def set_suite_custom_values(self, influxdb_values):\n if influxdb_values and influxdb_values != '':\n if isinstance(influxdb_values, str):\n influxdb_values = json.loads(influxdb_values)\n self.set_field_values(influxdb_values['fields']['suite_result'])\n self.set_tag_values(influxdb_values['tags']['suite_result'])\n\n def get_suite_result_dto(self, terminalreporter, global_values, influxdb_components, db_measurement_name_for_suite):\n # Preparing execution time and suite results from the terminalreporter (where all the data collected)\n execution_time = round(time.time() - terminalreporter._sessionstarttime)\n suite_results_dict = DataManager().get_results_dict(terminalreporter.stats)\n # Setting the values to the suite_result_dto instance\n self.set_passed(suite_results_dict.get('passed'))\n self.set_failed(suite_results_dict.get('failed'))\n self.set_skipped(suite_results_dict.get('skipped'))\n self.set_error(suite_results_dict.get('error'))\n self.set_disabled(suite_results_dict.get('disabled'))\n self.set_duration_sec(execution_time)\n self.set_retries(suite_results_dict.get('reruns'))\n self.set_run(global_values.get(\"run\"))\n self.set_project(global_values.get(\"project\"))\n 
self.set_version(global_values.get(\"version\"))\n self.set_suite_custom_values(global_values.get(\"influxdb_values\"))\n\n self.merge_suite_result(global_values.get('merged'), influxdb_components,\n db_measurement_name_for_suite, global_values.get(\"run\"))\n\n return self\n\n def merge_suite_result(self, merged_enabled, influxdb_components, db_measurement_name_for_suite, run_id_value):\n # Merging the existing suite results with the suite_results from db for the same run\n # if 'merged' config value is True\n existing_suite_result = influxdb_components.get_results_by_run(db_measurement_name_for_suite, run_id_value)\n old_suite_list = list(existing_suite_result.get_points(measurement=f'{db_measurement_name_for_suite}'))\n if len(old_suite_list) != 0 and merged_enabled:\n old_suite_total_count = old_suite_list[0]['pass'] + old_suite_list[0]['fail'] + old_suite_list[0][\n 'skip']\n old_disabled_tests_count = old_suite_list[0]['disabled']\n self.set_passed(\n old_suite_total_count - self.__failed - self.__skipped)\n self.set_disabled(old_disabled_tests_count)\n influxdb_components.delete_results_by_run(db_measurement_name_for_suite, run_id_value)\n",
"step-ids": [
13,
14,
15,
16,
21
]
}
|
[
13,
14,
15,
16,
21
] |
<|reserved_special_token_0|>
class UserSerializer(serializers.ModelSerializer):
pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=
PointEau.objects.all())
class Meta:
model = User
fields = 'id', 'username', 'pointseau'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PointEauSerializer(serializers.ModelSerializer):
class Meta:
model = PointEau
fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class UserSerializer(serializers.ModelSerializer):
pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=
PointEau.objects.all())
class Meta:
model = User
fields = 'id', 'username', 'pointseau'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PointEauSerializer(serializers.ModelSerializer):
class Meta:
model = PointEau
fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']
nom = serializers.CharField(max_length=100)
long = serializers.DecimalField(max_digits=10, decimal_places=8)
lat = serializers.DecimalField(max_digits=10, decimal_places=8)
desc = serializers.CharField(max_length=255)
owner = serializers.ReadOnlyField(source='owner.username')
class UserSerializer(serializers.ModelSerializer):
pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=
PointEau.objects.all())
class Meta:
model = User
fields = 'id', 'username', 'pointseau'
<|reserved_special_token_1|>
from pointsEau.models import PointEau
from django.contrib.auth.models import User
from rest_framework import serializers
class PointEauSerializer(serializers.ModelSerializer):
class Meta:
model = PointEau
fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']
nom = serializers.CharField(max_length=100)
long = serializers.DecimalField(max_digits=10, decimal_places=8)
lat = serializers.DecimalField(max_digits=10, decimal_places=8)
desc = serializers.CharField(max_length=255)
owner = serializers.ReadOnlyField(source='owner.username')
class UserSerializer(serializers.ModelSerializer):
pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=
PointEau.objects.all())
class Meta:
model = User
fields = 'id', 'username', 'pointseau'
<|reserved_special_token_1|>
from pointsEau.models import PointEau
from django.contrib.auth.models import User
from rest_framework import serializers
class PointEauSerializer(serializers.ModelSerializer):
class Meta:
model = PointEau
fields = [
'pk',
'nom',
'lat',
'long',
'desc',
'owner'
]
nom = serializers.CharField(max_length=100)
long = serializers.DecimalField(max_digits=10, decimal_places=8)
lat = serializers.DecimalField(max_digits=10, decimal_places=8)
desc = serializers.CharField(max_length=255)
owner = serializers.ReadOnlyField(source='owner.username')
class UserSerializer(serializers.ModelSerializer):
pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=PointEau.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'pointseau')
|
flexible
|
{
"blob_id": "51f171b3847b3dbf5657625fdf3b7fe771e0e004",
"index": 4743,
"step-1": "<mask token>\n\n\nclass UserSerializer(serializers.ModelSerializer):\n pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=\n PointEau.objects.all())\n\n\n class Meta:\n model = User\n fields = 'id', 'username', 'pointseau'\n",
"step-2": "<mask token>\n\n\nclass PointEauSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = PointEau\n fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass UserSerializer(serializers.ModelSerializer):\n pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=\n PointEau.objects.all())\n\n\n class Meta:\n model = User\n fields = 'id', 'username', 'pointseau'\n",
"step-3": "<mask token>\n\n\nclass PointEauSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = PointEau\n fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']\n nom = serializers.CharField(max_length=100)\n long = serializers.DecimalField(max_digits=10, decimal_places=8)\n lat = serializers.DecimalField(max_digits=10, decimal_places=8)\n desc = serializers.CharField(max_length=255)\n owner = serializers.ReadOnlyField(source='owner.username')\n\n\nclass UserSerializer(serializers.ModelSerializer):\n pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=\n PointEau.objects.all())\n\n\n class Meta:\n model = User\n fields = 'id', 'username', 'pointseau'\n",
"step-4": "from pointsEau.models import PointEau\nfrom django.contrib.auth.models import User\nfrom rest_framework import serializers\n\n\nclass PointEauSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = PointEau\n fields = ['pk', 'nom', 'lat', 'long', 'desc', 'owner']\n nom = serializers.CharField(max_length=100)\n long = serializers.DecimalField(max_digits=10, decimal_places=8)\n lat = serializers.DecimalField(max_digits=10, decimal_places=8)\n desc = serializers.CharField(max_length=255)\n owner = serializers.ReadOnlyField(source='owner.username')\n\n\nclass UserSerializer(serializers.ModelSerializer):\n pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=\n PointEau.objects.all())\n\n\n class Meta:\n model = User\n fields = 'id', 'username', 'pointseau'\n",
"step-5": "from pointsEau.models import PointEau\nfrom django.contrib.auth.models import User\nfrom rest_framework import serializers\n\n\nclass PointEauSerializer(serializers.ModelSerializer):\n class Meta:\n model = PointEau\n fields = [\n 'pk',\n 'nom',\n 'lat',\n 'long',\n 'desc',\n 'owner'\n ]\n nom = serializers.CharField(max_length=100)\n long = serializers.DecimalField(max_digits=10, decimal_places=8)\n lat = serializers.DecimalField(max_digits=10, decimal_places=8)\n desc = serializers.CharField(max_length=255)\n owner = serializers.ReadOnlyField(source='owner.username')\n\n\nclass UserSerializer(serializers.ModelSerializer):\n pointseau = serializers.PrimaryKeyRelatedField(many=True, queryset=PointEau.objects.all())\n\n class Meta:\n model = User\n fields = ('id', 'username', 'pointseau')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
if __name__ == '__main__':
print(euler_29(100, 100))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import itertools
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
if __name__ == '__main__':
print(euler_29(100, 100))
<|reserved_special_token_1|>
#!/usr/bin/env python
"""
Consider all integer combinations of a^b for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
2^2=4, 2^3=8, 2^4=16, 2^5=32
3^2=9, 3^3=27, 3^4=81, 3^5=243
4^2=16, 4^3=64, 4^4=256, 4^5=1024
5^2=25, 5^3=125, 5^4=625, 5^5=3125
If they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:
4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
How many distinct terms are in the sequence generated by a^b for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
"""
import itertools
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(2, max_b + 1)))
return len(set(gen))
if __name__ == "__main__":
print(euler_29(100, 100))
|
flexible
|
{
"blob_id": "c93bd042340a6e1d0124d8f6176bdf17ab56e405",
"index": 2229,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\nif __name__ == '__main__':\n print(euler_29(100, 100))\n",
"step-4": "<mask token>\nimport itertools\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\nif __name__ == '__main__':\n print(euler_29(100, 100))\n",
"step-5": "#!/usr/bin/env python\n\"\"\"\nConsider all integer combinations of ab for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:\n\n 22=4, 23=8, 24=16, 25=32\n 32=9, 33=27, 34=81, 35=243\n 42=16, 43=64, 44=256, 45=1024\n 52=25, 53=125, 54=625, 55=3125\n\nIf they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:\n\n4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125\n\nHow many distinct terms are in the sequence generated by ab for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?\n\"\"\"\n\nimport itertools\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(2, max_b + 1)))\n return len(set(gen))\n\nif __name__ == \"__main__\":\n print(euler_29(100, 100))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(0, 46):
cnt_number.append(0)
for i in range(0, len(df2)):
for j in range(0, 7):
cnt_index = df2[i][j]
cnt_number[int(cnt_index)] += 1
for k in range(1, 46):
print('%5d -> %3d times' % (k, cnt_number[k]))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
df = pd.read_csv('data/lottery.csv')
df1 = df[['1', '2', '3', '4', '5', '6', 'bonus']]
df2 = df1.values.tolist()
cnt_number = []
for i in range(0, 46):
cnt_number.append(0)
for i in range(0, len(df2)):
for j in range(0, 7):
cnt_index = df2[i][j]
cnt_number[int(cnt_index)] += 1
for k in range(1, 46):
print('%5d -> %3d times' % (k, cnt_number[k]))
<|reserved_special_token_1|>
import pandas as pd
df = pd.read_csv('data/lottery.csv')
df1 = df[['1', '2', '3', '4', '5', '6', 'bonus']]
df2 = df1.values.tolist()
cnt_number = []
for i in range(0, 46):
cnt_number.append(0)
for i in range(0, len(df2)):
for j in range(0, 7):
cnt_index = df2[i][j]
cnt_number[int(cnt_index)] += 1
for k in range(1, 46):
print('%5d -> %3d times' % (k, cnt_number[k]))
<|reserved_special_token_1|>
import pandas as pd
# read the data
df = pd.read_csv("data/lottery.csv")
# extract needed column
df1 = df[['1','2','3','4','5','6','bonus']]
# translate dataframe to list for convenience
df2 = df1.values.tolist()
# cnt_number is each number's apearance times
cnt_number = []
for i in range(0, 46):
cnt_number.append(0)
# count the appearnce times
for i in range(0, len(df2)):
for j in range(0, 7):
cnt_index = df2[i][j]
cnt_number[int(cnt_index)] += 1
# print the appearance times
for k in range(1, 46):
print('%5d -> %3d times'%(k, cnt_number[k]))
|
flexible
|
{
"blob_id": "b257e36b3cb4bda28cf18e192aa95598105f5ae9",
"index": 2705,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(0, 46):\n cnt_number.append(0)\nfor i in range(0, len(df2)):\n for j in range(0, 7):\n cnt_index = df2[i][j]\n cnt_number[int(cnt_index)] += 1\nfor k in range(1, 46):\n print('%5d -> %3d times' % (k, cnt_number[k]))\n",
"step-3": "<mask token>\ndf = pd.read_csv('data/lottery.csv')\ndf1 = df[['1', '2', '3', '4', '5', '6', 'bonus']]\ndf2 = df1.values.tolist()\ncnt_number = []\nfor i in range(0, 46):\n cnt_number.append(0)\nfor i in range(0, len(df2)):\n for j in range(0, 7):\n cnt_index = df2[i][j]\n cnt_number[int(cnt_index)] += 1\nfor k in range(1, 46):\n print('%5d -> %3d times' % (k, cnt_number[k]))\n",
"step-4": "import pandas as pd\ndf = pd.read_csv('data/lottery.csv')\ndf1 = df[['1', '2', '3', '4', '5', '6', 'bonus']]\ndf2 = df1.values.tolist()\ncnt_number = []\nfor i in range(0, 46):\n cnt_number.append(0)\nfor i in range(0, len(df2)):\n for j in range(0, 7):\n cnt_index = df2[i][j]\n cnt_number[int(cnt_index)] += 1\nfor k in range(1, 46):\n print('%5d -> %3d times' % (k, cnt_number[k]))\n",
"step-5": "import pandas as pd\n\n# read the data\ndf = pd.read_csv(\"data/lottery.csv\")\n\n# extract needed column\ndf1 = df[['1','2','3','4','5','6','bonus']]\n\n# translate dataframe to list for convenience\ndf2 = df1.values.tolist()\n\n# cnt_number is each number's apearance times\ncnt_number = []\nfor i in range(0, 46):\n cnt_number.append(0)\n\n# count the appearnce times\nfor i in range(0, len(df2)):\n for j in range(0, 7):\n cnt_index = df2[i][j]\n cnt_number[int(cnt_index)] += 1\n\n# print the appearance times\nfor k in range(1, 46):\n print('%5d -> %3d times'%(k, cnt_number[k]))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(1, N + 1, 1):
NUM = int(input('ingrese un numero entero '))
if NUM > 0:
SP += NUM
CP += 1
else:
SO += NUM
<|reserved_special_token_0|>
print(
f'hay {CP} numeros positivos, el promedio general es de {PG} y el promedio de los numeros positivos es de {PP}'
)
<|reserved_special_token_1|>
N = int(input('ingrese el numero de datos a ingresar '))
SP = 0
SO = 0
CP = 0
for i in range(1, N + 1, 1):
NUM = int(input('ingrese un numero entero '))
if NUM > 0:
SP += NUM
CP += 1
else:
SO += NUM
PG = (SP + SO) / N
PP = SP / CP
print(
f'hay {CP} numeros positivos, el promedio general es de {PG} y el promedio de los numeros positivos es de {PP}'
)
<|reserved_special_token_1|>
N = int(input("ingrese el numero de datos a ingresar "))
SP = 0
SO = 0
CP = 0
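# read N integers: SP sums the positives, SO sums the rest, CP counts the positives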
for i in range(1,N+1,1):
NUM = int(input("ingrese un numero entero "))
if NUM > 0:
SP += NUM
CP += 1
else:
SO += NUM
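# PG is the average of all N numbers; PP is the average of the positive ones only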
PG = (SP+SO)/N
PP = SP/CP
print(f"hay { CP } numeros positivos, el promedio general es de { PG } y el promedio de los numeros positivos es de { PP }")
|
flexible
|
{
"blob_id": "efc0b8f1c4887810a9c85e34957d664b01c1e92e",
"index": 1453,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, N + 1, 1):\n NUM = int(input('ingrese un numero entero '))\n if NUM > 0:\n SP += NUM\n CP += 1\n else:\n SO += NUM\n<mask token>\nprint(\n f'hay {CP} numeros positivos, el promedio general es de {PG} y el promedio de los numeros positivos es de {PP}'\n )\n",
"step-3": "N = int(input('ingrese el numero de datos a ingresar '))\nSP = 0\nSO = 0\nCP = 0\nfor i in range(1, N + 1, 1):\n NUM = int(input('ingrese un numero entero '))\n if NUM > 0:\n SP += NUM\n CP += 1\n else:\n SO += NUM\nPG = (SP + SO) / N\nPP = SP / CP\nprint(\n f'hay {CP} numeros positivos, el promedio general es de {PG} y el promedio de los numeros positivos es de {PP}'\n )\n",
"step-4": "N = int(input(\"ingrese el numero de datos a ingresar \"))\nSP = 0\nSO = 0\nCP = 0\nfor i in range(1,N+1,1):\n NUM = int(input(\"ingrese un numero entero \"))\n if NUM > 0:\n SP += NUM\n CP += 1\n else:\n SO += NUM\nPG = (SP+SO)/N\nPP = SP/CP\nprint(f\"hay { CP } numeros positivos, el promedio general es de { PG } y el promedio de los numeros positivos es de { PP }\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.views.generic import TemplateView, FormView, CreateView, ListView
from .models import Order
from .form import OrderForm
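# OrdersListView renders all orders with orders/index.html; OrderCreateView
# renders OrderForm via orders/form.html and redirects to '/' on success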
class OrdersListView(ListView):
template_name = 'orders/index.html'
queryset = Order.objects.all()
context_object_name = 'order_list'
class OrderCreateView(CreateView):
template_name = 'orders/form.html'
form_class = OrderForm
success_url = '/'
|
normal
|
{
"blob_id": "afd184962e8e69843ca518e140d5fdde3d7c9ed2",
"index": 7456,
"step-1": "<mask token>\n\n\nclass OrderCreateView(CreateView):\n template_name = 'orders/form.html'\n form_class = OrderForm\n success_url = '/'\n",
"step-2": "<mask token>\n\n\nclass OrdersListView(ListView):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass OrderCreateView(CreateView):\n template_name = 'orders/form.html'\n form_class = OrderForm\n success_url = '/'\n",
"step-3": "<mask token>\n\n\nclass OrdersListView(ListView):\n template_name = 'orders/index.html'\n queryset = Order.objects.all()\n context_object_name = 'order_list'\n\n\nclass OrderCreateView(CreateView):\n template_name = 'orders/form.html'\n form_class = OrderForm\n success_url = '/'\n",
"step-4": "from django.views.generic import TemplateView, FormView, CreateView, ListView\nfrom .models import Order\nfrom .form import OrderForm\n\n\nclass OrdersListView(ListView):\n template_name = 'orders/index.html'\n queryset = Order.objects.all()\n context_object_name = 'order_list'\n\n\nclass OrderCreateView(CreateView):\n template_name = 'orders/form.html'\n form_class = OrderForm\n success_url = '/'\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import threading
import serial
import time
bno = serial.Serial('/dev/ttyUSB0', 115200, timeout=.5)
compass_heading = -1.0
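# most recent heading in degrees; stays at -1.0 until the first successful read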
def readBNO():
global compass_heading
try:
bno.write(b'g')
response = bno.readline().decode()
if response != '':
compass_heading = float(response.split('\r')[0])
except:
pass
def readContinuous():
while True:
readBNO()
time.sleep(.1)
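# background thread keeps compass_heading updated at roughly 10 Hz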
bno_thread = threading.Thread(target=readContinuous)
bno_thread.start()
def get_heading():
return compass_heading
if __name__ == '__main__':
while True:
print(get_heading())
time.sleep(.1)
|
normal
|
{
"blob_id": "63a7225abc511b239a69f625b12c1458c75b4090",
"index": 8904,
"step-1": "<mask token>\n\n\ndef readContinuous():\n while True:\n readBNO()\n time.sleep(0.1)\n\n\n<mask token>\n\n\ndef get_heading():\n return compass_heading\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef readBNO():\n global compass_heading\n try:\n bno.write(b'g')\n response = bno.readline().decode()\n if response != '':\n compass_heading = float(response.split('\\r')[0])\n except:\n pass\n\n\ndef readContinuous():\n while True:\n readBNO()\n time.sleep(0.1)\n\n\n<mask token>\n\n\ndef get_heading():\n return compass_heading\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef readBNO():\n global compass_heading\n try:\n bno.write(b'g')\n response = bno.readline().decode()\n if response != '':\n compass_heading = float(response.split('\\r')[0])\n except:\n pass\n\n\ndef readContinuous():\n while True:\n readBNO()\n time.sleep(0.1)\n\n\n<mask token>\nbno_thread.start()\n\n\ndef get_heading():\n return compass_heading\n\n\nif __name__ == '__main__':\n while True:\n print(get_heading())\n time.sleep(0.1)\n",
"step-4": "<mask token>\nbno = serial.Serial('/dev/ttyUSB0', 115200, timeout=0.5)\ncompass_heading = -1.0\n\n\ndef readBNO():\n global compass_heading\n try:\n bno.write(b'g')\n response = bno.readline().decode()\n if response != '':\n compass_heading = float(response.split('\\r')[0])\n except:\n pass\n\n\ndef readContinuous():\n while True:\n readBNO()\n time.sleep(0.1)\n\n\nbno_thread = threading.Thread(target=readContinuous)\nbno_thread.start()\n\n\ndef get_heading():\n return compass_heading\n\n\nif __name__ == '__main__':\n while True:\n print(get_heading())\n time.sleep(0.1)\n",
"step-5": "import threading\nimport serial\nimport time\n\nbno = serial.Serial('/dev/ttyUSB0', 115200, timeout=.5)\ncompass_heading = -1.0\n\ndef readBNO():\n global compass_heading\n try:\n bno.write(b'g')\n response = bno.readline().decode()\n if response != '':\n compass_heading = float(response.split('\\r')[0])\n except:\n pass\n\ndef readContinuous():\n while True:\n readBNO()\n time.sleep(.1)\n\nbno_thread = threading.Thread(target=readContinuous)\nbno_thread.start()\n\ndef get_heading():\n return compass_heading\n\nif __name__ == '__main__':\n while True:\n print(get_heading())\n time.sleep(.1)",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
from scipy.stats import rv_discrete
import torch
import torch.nn.functional as F
import numpy as np
from utils import *
def greedy_max(doc_length,px,sentence_embed,sentences,device,sentence_lengths,length_limit=200,lamb=0.2):
'''
prob: sum should be 1
sentence embed: [doc_length, embed_dim]
'''
x = list(range(doc_length))
px = px.cpu().numpy()
score=px
prob = 1
summary_representation = []
bias = np.ones(px.shape)
selected = []
wc=0
lengths=[]
summary = []
while wc<=length_limit:
sample = np.argmax(score)
selected.append(sample)
wc+=sentence_lengths[sample]
lengths.append(sentence_lengths[sample])
summary.append(sentences[sample])
summary_representation.append(sentence_embed[sample])
s = torch.stack(summary_representation,1).unsqueeze(0)
all_sent = sentence_embed[:doc_length,:].unsqueeze(2)
redundancy_score =torch.max(F.cosine_similarity(all_sent,s,1),1)[0].cpu().numpy()
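        # MMR-style score: trade relevance (px) off against similarity to the
        # sentences already selected (redundancy_score)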
score = lamb*px - ((1-lamb)*redundancy_score) + (1-lamb)*bias
for i_sel in selected:
score[i_sel] = 0
# print(len(selected))
summary ='\n'.join(summary)
# summary_representation= summary_representation.to(device)
return summary, prob, selected
def greedy_nommr(doc_length,px,sentence_embed,sentences,device,sentence_lengths,length_limit=200,lamb=0.2):
'''
prob: sum should be 1
sentence embed: [doc_length, embed_dim]
'''
x = list(range(doc_length))
px = px.cpu().numpy()
score=px
prob = 1
bias = np.ones(px.shape)
summary_representation = []
selected = []
wc=0
lengths = []
summary=[]
while wc<=length_limit:
sample = np.argmax(score)
selected.append(sample)
wc+=sentence_lengths[sample]
lengths.append(sentence_lengths[sample])
summary.append(sentences[sample])
for i_sel in selected:
score[i_sel] = 0
summary = '\n'.join(summary)
return summary, prob, selected
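# self-critical reward: ROUGE of the MMR-based greedy summary minus ROUGE of
# the plain greedy baseline, so a positive reward means the MMR pass helped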
def compute_reward(score_batch,input_lengths,output,sentences_batch,reference_batch,device,sentence_lengths_batch,number_of_sample=5,lamb=0.1):
reward_batch = []
rl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)
for i_data in range(len(input_lengths)):
# summary_i = summary_embed[i_data]
doc_length = input_lengths[i_data]
scores = score_batch[i_data,:doc_length]
sentence_lengths = sentence_lengths_batch[i_data]
sentence_embed = output[:doc_length,i_data,:]
sentences = sentences_batch[i_data]
reference = reference_batch[i_data]
# final_choice = None
result,prob,selected = greedy_nommr(doc_length,scores,sentence_embed,sentences,device,sentence_lengths,lamb = lamb)
reward_greedy = get_rouge_single(result,reference)
result,prob,selected = greedy_max(doc_length,scores,sentence_embed,sentences,device,sentence_lengths,lamb = lamb)
reward_hi = get_rouge_single(result,reference)
final_choice = selected
# print(reward_hi-reward_greedy)
reward_batch.append(reward_hi-reward_greedy)
rl_label_batch[final_choice,i_data,:] = 1
reward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)
rl_label_batch = rl_label_batch.to(device)
reward_batch.requires_grad_(False)
return reward_batch,rl_label_batch
|
normal
|
{
"blob_id": "cc6e827eec5256ce0dbe13958b6178c59bcd94a7",
"index": 8802,
"step-1": "<mask token>\n\n\ndef compute_reward(score_batch, input_lengths, output, sentences_batch,\n reference_batch, device, sentence_lengths_batch, number_of_sample=5,\n lamb=0.1):\n reward_batch = []\n rl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)\n for i_data in range(len(input_lengths)):\n doc_length = input_lengths[i_data]\n scores = score_batch[i_data, :doc_length]\n sentence_lengths = sentence_lengths_batch[i_data]\n sentence_embed = output[:doc_length, i_data, :]\n sentences = sentences_batch[i_data]\n reference = reference_batch[i_data]\n result, prob, selected = greedy_nommr(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_greedy = get_rouge_single(result, reference)\n result, prob, selected = greedy_max(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_hi = get_rouge_single(result, reference)\n final_choice = selected\n reward_batch.append(reward_hi - reward_greedy)\n rl_label_batch[final_choice, i_data, :] = 1\n reward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)\n rl_label_batch = rl_label_batch.to(device)\n reward_batch.requires_grad_(False)\n return reward_batch, rl_label_batch\n",
"step-2": "<mask token>\n\n\ndef greedy_max(doc_length, px, sentence_embed, sentences, device,\n sentence_lengths, length_limit=200, lamb=0.2):\n \"\"\"\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t\"\"\"\n x = list(range(doc_length))\n px = px.cpu().numpy()\n score = px\n prob = 1\n summary_representation = []\n bias = np.ones(px.shape)\n selected = []\n wc = 0\n lengths = []\n summary = []\n while wc <= length_limit:\n sample = np.argmax(score)\n selected.append(sample)\n wc += sentence_lengths[sample]\n lengths.append(sentence_lengths[sample])\n summary.append(sentences[sample])\n summary_representation.append(sentence_embed[sample])\n s = torch.stack(summary_representation, 1).unsqueeze(0)\n all_sent = sentence_embed[:doc_length, :].unsqueeze(2)\n redundancy_score = torch.max(F.cosine_similarity(all_sent, s, 1), 1)[0\n ].cpu().numpy()\n score = lamb * px - (1 - lamb) * redundancy_score + (1 - lamb) * bias\n for i_sel in selected:\n score[i_sel] = 0\n summary = '\\n'.join(summary)\n return summary, prob, selected\n\n\n<mask token>\n\n\ndef compute_reward(score_batch, input_lengths, output, sentences_batch,\n reference_batch, device, sentence_lengths_batch, number_of_sample=5,\n lamb=0.1):\n reward_batch = []\n rl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)\n for i_data in range(len(input_lengths)):\n doc_length = input_lengths[i_data]\n scores = score_batch[i_data, :doc_length]\n sentence_lengths = sentence_lengths_batch[i_data]\n sentence_embed = output[:doc_length, i_data, :]\n sentences = sentences_batch[i_data]\n reference = reference_batch[i_data]\n result, prob, selected = greedy_nommr(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_greedy = get_rouge_single(result, reference)\n result, prob, selected = greedy_max(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_hi = get_rouge_single(result, reference)\n final_choice = selected\n reward_batch.append(reward_hi - reward_greedy)\n rl_label_batch[final_choice, i_data, :] = 1\n reward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)\n rl_label_batch = rl_label_batch.to(device)\n reward_batch.requires_grad_(False)\n return reward_batch, rl_label_batch\n",
"step-3": "<mask token>\n\n\ndef greedy_max(doc_length, px, sentence_embed, sentences, device,\n sentence_lengths, length_limit=200, lamb=0.2):\n \"\"\"\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t\"\"\"\n x = list(range(doc_length))\n px = px.cpu().numpy()\n score = px\n prob = 1\n summary_representation = []\n bias = np.ones(px.shape)\n selected = []\n wc = 0\n lengths = []\n summary = []\n while wc <= length_limit:\n sample = np.argmax(score)\n selected.append(sample)\n wc += sentence_lengths[sample]\n lengths.append(sentence_lengths[sample])\n summary.append(sentences[sample])\n summary_representation.append(sentence_embed[sample])\n s = torch.stack(summary_representation, 1).unsqueeze(0)\n all_sent = sentence_embed[:doc_length, :].unsqueeze(2)\n redundancy_score = torch.max(F.cosine_similarity(all_sent, s, 1), 1)[0\n ].cpu().numpy()\n score = lamb * px - (1 - lamb) * redundancy_score + (1 - lamb) * bias\n for i_sel in selected:\n score[i_sel] = 0\n summary = '\\n'.join(summary)\n return summary, prob, selected\n\n\ndef greedy_nommr(doc_length, px, sentence_embed, sentences, device,\n sentence_lengths, length_limit=200, lamb=0.2):\n \"\"\"\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t\"\"\"\n x = list(range(doc_length))\n px = px.cpu().numpy()\n score = px\n prob = 1\n bias = np.ones(px.shape)\n summary_representation = []\n selected = []\n wc = 0\n lengths = []\n summary = []\n while wc <= length_limit:\n sample = np.argmax(score)\n selected.append(sample)\n wc += sentence_lengths[sample]\n lengths.append(sentence_lengths[sample])\n summary.append(sentences[sample])\n for i_sel in selected:\n score[i_sel] = 0\n summary = '\\n'.join(summary)\n return summary, prob, selected\n\n\ndef compute_reward(score_batch, input_lengths, output, sentences_batch,\n reference_batch, device, sentence_lengths_batch, number_of_sample=5,\n lamb=0.1):\n reward_batch = []\n rl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)\n for i_data in range(len(input_lengths)):\n doc_length = input_lengths[i_data]\n scores = score_batch[i_data, :doc_length]\n sentence_lengths = sentence_lengths_batch[i_data]\n sentence_embed = output[:doc_length, i_data, :]\n sentences = sentences_batch[i_data]\n reference = reference_batch[i_data]\n result, prob, selected = greedy_nommr(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_greedy = get_rouge_single(result, reference)\n result, prob, selected = greedy_max(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_hi = get_rouge_single(result, reference)\n final_choice = selected\n reward_batch.append(reward_hi - reward_greedy)\n rl_label_batch[final_choice, i_data, :] = 1\n reward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)\n rl_label_batch = rl_label_batch.to(device)\n reward_batch.requires_grad_(False)\n return reward_batch, rl_label_batch\n",
"step-4": "from scipy.stats import rv_discrete\nimport torch\nimport torch.nn.functional as F\nimport numpy as np\nfrom utils import *\n\n\ndef greedy_max(doc_length, px, sentence_embed, sentences, device,\n sentence_lengths, length_limit=200, lamb=0.2):\n \"\"\"\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t\"\"\"\n x = list(range(doc_length))\n px = px.cpu().numpy()\n score = px\n prob = 1\n summary_representation = []\n bias = np.ones(px.shape)\n selected = []\n wc = 0\n lengths = []\n summary = []\n while wc <= length_limit:\n sample = np.argmax(score)\n selected.append(sample)\n wc += sentence_lengths[sample]\n lengths.append(sentence_lengths[sample])\n summary.append(sentences[sample])\n summary_representation.append(sentence_embed[sample])\n s = torch.stack(summary_representation, 1).unsqueeze(0)\n all_sent = sentence_embed[:doc_length, :].unsqueeze(2)\n redundancy_score = torch.max(F.cosine_similarity(all_sent, s, 1), 1)[0\n ].cpu().numpy()\n score = lamb * px - (1 - lamb) * redundancy_score + (1 - lamb) * bias\n for i_sel in selected:\n score[i_sel] = 0\n summary = '\\n'.join(summary)\n return summary, prob, selected\n\n\ndef greedy_nommr(doc_length, px, sentence_embed, sentences, device,\n sentence_lengths, length_limit=200, lamb=0.2):\n \"\"\"\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t\"\"\"\n x = list(range(doc_length))\n px = px.cpu().numpy()\n score = px\n prob = 1\n bias = np.ones(px.shape)\n summary_representation = []\n selected = []\n wc = 0\n lengths = []\n summary = []\n while wc <= length_limit:\n sample = np.argmax(score)\n selected.append(sample)\n wc += sentence_lengths[sample]\n lengths.append(sentence_lengths[sample])\n summary.append(sentences[sample])\n for i_sel in selected:\n score[i_sel] = 0\n summary = '\\n'.join(summary)\n return summary, prob, selected\n\n\ndef compute_reward(score_batch, input_lengths, output, sentences_batch,\n reference_batch, device, sentence_lengths_batch, number_of_sample=5,\n lamb=0.1):\n reward_batch = []\n rl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)\n for i_data in range(len(input_lengths)):\n doc_length = input_lengths[i_data]\n scores = score_batch[i_data, :doc_length]\n sentence_lengths = sentence_lengths_batch[i_data]\n sentence_embed = output[:doc_length, i_data, :]\n sentences = sentences_batch[i_data]\n reference = reference_batch[i_data]\n result, prob, selected = greedy_nommr(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_greedy = get_rouge_single(result, reference)\n result, prob, selected = greedy_max(doc_length, scores,\n sentence_embed, sentences, device, sentence_lengths, lamb=lamb)\n reward_hi = get_rouge_single(result, reference)\n final_choice = selected\n reward_batch.append(reward_hi - reward_greedy)\n rl_label_batch[final_choice, i_data, :] = 1\n reward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)\n rl_label_batch = rl_label_batch.to(device)\n reward_batch.requires_grad_(False)\n return reward_batch, rl_label_batch\n",
"step-5": "from scipy.stats import rv_discrete\nimport torch\nimport torch.nn.functional as F\nimport numpy as np\nfrom utils import *\n\n\ndef greedy_max(doc_length,px,sentence_embed,sentences,device,sentence_lengths,length_limit=200,lamb=0.2):\n\t'''\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t'''\n\tx = list(range(doc_length))\n\tpx = px.cpu().numpy()\n\tscore=px\n\tprob = 1\n\tsummary_representation = []\n\tbias = np.ones(px.shape)\n\tselected = []\n\twc=0\n\tlengths=[]\n\tsummary = []\n\twhile wc<=length_limit:\n\t\tsample = np.argmax(score)\n\n\t\tselected.append(sample)\n\t\twc+=sentence_lengths[sample]\n\t\tlengths.append(sentence_lengths[sample])\n\t\tsummary.append(sentences[sample])\n\n\t\tsummary_representation.append(sentence_embed[sample])\n\t\ts = torch.stack(summary_representation,1).unsqueeze(0)\n\t\tall_sent = sentence_embed[:doc_length,:].unsqueeze(2)\n\t\tredundancy_score =torch.max(F.cosine_similarity(all_sent,s,1),1)[0].cpu().numpy()\n\n\t\tscore = lamb*px - ((1-lamb)*redundancy_score) + (1-lamb)*bias\n\t\tfor i_sel in selected:\n\t\t\tscore[i_sel] = 0\n\t\t# print(len(selected))\n\tsummary ='\\n'.join(summary)\n\t# summary_representation= summary_representation.to(device)\n\treturn summary, prob, selected\n\n\ndef greedy_nommr(doc_length,px,sentence_embed,sentences,device,sentence_lengths,length_limit=200,lamb=0.2):\n\t'''\n\tprob: sum should be 1\n\tsentence embed: [doc_length, embed_dim]\n\t'''\n\tx = list(range(doc_length))\n\tpx = px.cpu().numpy()\n\tscore=px\n\tprob = 1\n\tbias = np.ones(px.shape)\n\tsummary_representation = []\n\n\tselected = []\n\twc=0\n\tlengths = []\n\tsummary=[]\n\twhile wc<=length_limit:\n\n\t\tsample = np.argmax(score)\n\t\tselected.append(sample)\n\t\twc+=sentence_lengths[sample]\n\t\tlengths.append(sentence_lengths[sample])\n\t\tsummary.append(sentences[sample])\n\n\t\tfor i_sel in selected:\n\t\t\tscore[i_sel] = 0\n\tsummary = '\\n'.join(summary)\n\treturn summary, prob, selected\n\n\ndef compute_reward(score_batch,input_lengths,output,sentences_batch,reference_batch,device,sentence_lengths_batch,number_of_sample=5,lamb=0.1):\n\treward_batch = []\n\trl_label_batch = torch.zeros(output.size()[:2]).unsqueeze(2)\n\tfor i_data in range(len(input_lengths)):\n\t\t# summary_i = summary_embed[i_data]\n\t\tdoc_length = input_lengths[i_data]\n\t\tscores = score_batch[i_data,:doc_length]\n\t\tsentence_lengths = sentence_lengths_batch[i_data]\n\t\tsentence_embed = output[:doc_length,i_data,:]\n\t\tsentences = sentences_batch[i_data]\n\t\treference = reference_batch[i_data]\n\n\t\t# final_choice = None\n\t\tresult,prob,selected = greedy_nommr(doc_length,scores,sentence_embed,sentences,device,sentence_lengths,lamb = lamb)\n\t\treward_greedy = get_rouge_single(result,reference)\n\n\t\tresult,prob,selected = greedy_max(doc_length,scores,sentence_embed,sentences,device,sentence_lengths,lamb = lamb)\n\t\treward_hi = get_rouge_single(result,reference)\n\t\tfinal_choice = selected\n\n\t\t# print(reward_hi-reward_greedy)\n\t\treward_batch.append(reward_hi-reward_greedy)\n\t\trl_label_batch[final_choice,i_data,:] = 1\n\n\treward_batch = torch.FloatTensor(reward_batch).unsqueeze(0).to(device)\n\trl_label_batch = rl_label_batch.to(device)\n\treward_batch.requires_grad_(False)\n\n\treturn reward_batch,rl_label_batch\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CAresRecipe(GnuRecipe):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CAresRecipe(GnuRecipe):
def __init__(self, *args, **kwargs):
super(CAresRecipe, self).__init__(*args, **kwargs)
self.sha256 = (
'45d3c1fd29263ceec2afc8ff9cd06d5f8f889636eb4e80ce3cc7f0eaf7aadc6e')
self.name = 'c-ares'
self.version = '1.14.0'
self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'
<|reserved_special_token_1|>
from .base import GnuRecipe
class CAresRecipe(GnuRecipe):
def __init__(self, *args, **kwargs):
super(CAresRecipe, self).__init__(*args, **kwargs)
self.sha256 = (
'45d3c1fd29263ceec2afc8ff9cd06d5f8f889636eb4e80ce3cc7f0eaf7aadc6e')
self.name = 'c-ares'
self.version = '1.14.0'
self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'
<|reserved_special_token_1|>
from .base import GnuRecipe
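# build recipe for the c-ares async DNS resolver; GnuRecipe (the shared base
# class, assumed to handle the usual configure/make flow) does the real work,
# so this subclass only pins the tarball URL, version, and SHA-256 checksum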
class CAresRecipe(GnuRecipe):
def __init__(self, *args, **kwargs):
super(CAresRecipe, self).__init__(*args, **kwargs)
self.sha256 = '45d3c1fd29263ceec2afc8ff9cd06d5f' \
'8f889636eb4e80ce3cc7f0eaf7aadc6e'
self.name = 'c-ares'
self.version = '1.14.0'
self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'
|
flexible
|
{
"blob_id": "bf7676dc2c47d9cd2f1ce2d436202ae2c5061265",
"index": 8634,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass CAresRecipe(GnuRecipe):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass CAresRecipe(GnuRecipe):\n\n def __init__(self, *args, **kwargs):\n super(CAresRecipe, self).__init__(*args, **kwargs)\n self.sha256 = (\n '45d3c1fd29263ceec2afc8ff9cd06d5f8f889636eb4e80ce3cc7f0eaf7aadc6e')\n self.name = 'c-ares'\n self.version = '1.14.0'\n self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'\n",
"step-4": "from .base import GnuRecipe\n\n\nclass CAresRecipe(GnuRecipe):\n\n def __init__(self, *args, **kwargs):\n super(CAresRecipe, self).__init__(*args, **kwargs)\n self.sha256 = (\n '45d3c1fd29263ceec2afc8ff9cd06d5f8f889636eb4e80ce3cc7f0eaf7aadc6e')\n self.name = 'c-ares'\n self.version = '1.14.0'\n self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'\n",
"step-5": "from .base import GnuRecipe\n\n\nclass CAresRecipe(GnuRecipe):\n def __init__(self, *args, **kwargs):\n super(CAresRecipe, self).__init__(*args, **kwargs)\n self.sha256 = '45d3c1fd29263ceec2afc8ff9cd06d5f' \\\n '8f889636eb4e80ce3cc7f0eaf7aadc6e'\n self.name = 'c-ares'\n self.version = '1.14.0'\n self.url = 'https://c-ares.haxx.se/download/$name-$version.tar.gz'\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def increment(number: int) ->int:
"""Increment a number.
Args:
number (int): The number to increment.
Returns:
int: The incremented number.
"""
return number + 1
|
flexible
|
{
"blob_id": "b0cc2efda4d6586b66e04b41dfe1bbce8d009e2e",
"index": 6871,
"step-1": "<mask token>\n",
"step-2": "def increment(number: int) ->int:\n \"\"\"Increment a number.\n\n Args:\n number (int): The number to increment.\n\n Returns:\n int: The incremented number.\n \"\"\"\n return number + 1\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import pickle
from absl import flags
from absl import app
from absl import logging
import time
import numpy as np
FLAGS = flags.FLAGS
flags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')
flags.DEFINE_integer('num_chunks', 36, 'how many files')
flags.DEFINE_string('out_dir', '2020-04-10/', 'out path')
logging.set_verbosity(logging.INFO)
def load_all_vectors(num_chunks):
all_vectors = []
meta_data = [] # (doc_id, section_id, sentence_id, sentence)
for chunk_id in range(num_chunks):
logging.info('Processing file %s', chunk_id)
t = time.time()
vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' % chunk_id).astype(np.float32)
with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id, 'rb') as fin:
meta = pickle.load(fin)
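        # L2-normalize each row so dot products act as cosine similarity;
        # rows with zero norm are left unscaled to avoid division by zero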
vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)
vector_norms[vector_norms == 0] = 1.0
vectors /= vector_norms
all_vectors.append(vectors)
meta_data.extend(meta)
e = time.time()
logging.info('Finished processing chunk %s in %s seconds', chunk_id, str(e-t))
all_vec = np.concatenate(all_vectors)
logging.info('Concatenated shape %s' % str(all_vec.shape))
return all_vec, meta_data
def main(argv):
logging.info('Running reduce vecs with args %s', str(argv))
logging.info('Running on %s files', str(FLAGS.num_chunks))
all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)
np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)
with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:
pickle.dump(all_meta, fout)
if __name__ == "__main__":
app.run(main)
|
normal
|
{
"blob_id": "8aa35bcaa4e564306125b37c70a8a92f26da736d",
"index": 7418,
"step-1": "<mask token>\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\n<mask token>\n",
"step-2": "<mask token>\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-3": "<mask token>\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-4": "import pickle\nfrom absl import flags\nfrom absl import app\nfrom absl import logging\nimport time\nimport numpy as np\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-5": "\nimport pickle\n\nfrom absl import flags\nfrom absl import app\nfrom absl import logging\nimport time\nimport numpy as np\n\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\n\nlogging.set_verbosity(logging.INFO)\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = [] # (doc_id, section_id, sentence_id, sentence)\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' % chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id, 'rb') as fin:\n meta = pickle.load(fin)\n\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n\n logging.info('Finished processing chunk %s in %s seconds', chunk_id, str(e-t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == \"__main__\":\n app.run(main)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class MyIde:
<|reserved_special_token_0|>
class Laptop:
def code(self, ide):
ide.execute()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class PyCharm:
<|reserved_special_token_0|>
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class PyCharm:
def execute(self):
        print('pycharm ide running')
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
<|reserved_special_token_0|>
obj.code(ide)
<|reserved_special_token_1|>
class PyCharm:
def execute(self):
        print('pycharm ide running')
class MyIde:
def execute(self):
print('MyIde running')
class Laptop:
def code(self, ide):
ide.execute()
ide = MyIde()
obj = Laptop()
obj.code(ide)
<|reserved_special_token_1|>
# Any object containing an execute(self) method is considered an IDE app
# this is the duck typing concept
class PyCharm:
def execute(self):
print("pycharm ide runnig")
class MyIde:
def execute(self):
print("MyIde running")
class Laptop:
def code(self,ide):
ide.execute()
ide=MyIde()
obj=Laptop()
obj.code(ide)
|
flexible
|
{
"blob_id": "9ab3dd87f17ac75a3831e9ec1f0746ad81fad70d",
"index": 501,
"step-1": "<mask token>\n\n\nclass MyIde:\n <mask token>\n\n\nclass Laptop:\n\n def code(self, ide):\n ide.execute()\n\n\n<mask token>\n",
"step-2": "class PyCharm:\n <mask token>\n\n\nclass MyIde:\n\n def execute(self):\n print('MyIde running')\n\n\nclass Laptop:\n\n def code(self, ide):\n ide.execute()\n\n\n<mask token>\n",
"step-3": "class PyCharm:\n\n def execute(self):\n print('pycharm ide runnig')\n\n\nclass MyIde:\n\n def execute(self):\n print('MyIde running')\n\n\nclass Laptop:\n\n def code(self, ide):\n ide.execute()\n\n\n<mask token>\nobj.code(ide)\n",
"step-4": "class PyCharm:\n\n def execute(self):\n print('pycharm ide runnig')\n\n\nclass MyIde:\n\n def execute(self):\n print('MyIde running')\n\n\nclass Laptop:\n\n def code(self, ide):\n ide.execute()\n\n\nide = MyIde()\nobj = Laptop()\nobj.code(ide)\n",
"step-5": "\r\n# Any object containing execute(self) method is considered to be IDE App\r\n# this is Duck typing concept\r\n\r\nclass PyCharm:\r\n def execute(self):\r\n print(\"pycharm ide runnig\")\r\n\r\nclass MyIde:\r\n def execute(self):\r\n print(\"MyIde running\")\r\n\r\nclass Laptop:\r\n\r\n def code(self,ide):\r\n ide.execute()\r\n\r\nide=MyIde()\r\n\r\nobj=Laptop()\r\n\r\nobj.code(ide)\r\n",
"step-ids": [
3,
5,
7,
8,
9
]
}
|
[
3,
5,
7,
8,
9
] |
<|reserved_special_token_0|>
class Collector:
<|reserved_special_token_0|>
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
<|reserved_special_token_0|>
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
<|reserved_special_token_0|>
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<|reserved_special_token_0|>
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Collector:
<|reserved_special_token_0|>
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
        mentioned = [x for x in new_tweet_list if '@BonneNick' in x.text]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
Collector.collect()
class Collector:
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
        mentioned = [x for x in new_tweet_list if '@BonneNick' in x.text]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
Collector.collect()
class Collector:
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys', 'consumer_key').strip("'")
consumer_secret = parser.get('Secrets', 'consumer_secret').strip("'")
access_token = parser.get('Tokens', 'access_token').strip("'")
access_token_secret = parser.get('Secrets', 'access_token_secret'
).strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute(
"""CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)"""
)
return c, conn
def new_f_check(api, c):
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
def download_to_limit(api, c, conn, friend_list):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=friend, count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
def mention_me(new_tweet_list, c, conn):
        mentioned = [x for x in new_tweet_list if '@BonneNick' in x.text]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
c.execute(
"""INSERT INTO served
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
def last_tweets(c, conn):
user_last_tweets = []
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute(
"""SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC"""
, [user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id, since_id=
tweet_id, count=200)
except Exception:
continue
if len(get_tweets) != 0:
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
get_tweets = api.user_timeline(id=user_id, count=
200, since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if (tweet.user.screen_name != 'BonneNick' and tweet.id not in
tweet_ids):
c.execute(
"""INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)"""
, [tweet.text, tweet.user.screen_name, tweet.created_at,
tweet.id_str, tweet.source, tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/home/nick/.virtualenvs/twitterbots/bin/python3.5
# -*- coding: utf-8 -*-
import tweepy
import sqlite3
from configparser import ConfigParser
'''
A little OOP would be good later for
authenticated user data, c, conn, api
'''
def main():
Collector.collect()
class Collector:
# Main function
def collect():
api = Collector.get_api()
tweet_dump = Collector.all_tweet_db()
c = tweet_dump[0]
conn = tweet_dump[1]
last_list = Collector.last_tweets(c, conn)
# Look for new friends, add to db
new_friends = Collector.new_f_check(api, c)
Collector.download_to_limit(api, c, conn, new_friends)
# Checks timelines of everyone in db already
# adds anything new to db
Collector.download_recent(api, c, conn, last_list)
def get_api():
parser = ConfigParser()
parser.read('twitter_auth.ini')
consumer_key = parser.get('Keys',
'consumer_key').strip("'")
consumer_secret = parser.get('Secrets',
'consumer_secret').strip("'")
access_token = parser.get('Tokens',
'access_token').strip("'")
access_token_secret = parser.get('Secrets',
'access_token_secret').strip("'")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
return api
    # connects to tweet_dump_main.db, creates tdump if not exists
# tdump stores all tweets from anyone in list
def all_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS tdump
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)''')
return c, conn
    # connects to tweet_dump_main.db, creates mentioned if not exists
    # mentioned stores tweets that mention the authenticated user
def mention_tweet_db():
conn = sqlite3.connect('tweet_dump_main.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS mentioned
(tweet TEXT,
username TEXT,
tweet_date TEXT,
tweet_id TEXT,
tweet_source TEXT,
user_id TEXT)''')
return c, conn
# looks for new friends by comparing authenticated
# user's friend list with list of friends in tdump
def new_f_check(api, c):
# get list of user's ids
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
# get list of friends_ids from twitter
friends_ids = api.friends_ids()
new_friends = [x for x in friends_ids if str(x) not in users]
return new_friends
# downloads up to 3200 of a user's most
    # recent tweets and commits them to tdump
def download_to_limit(api, c, conn, friend_list):
# List of tweet ids already in db
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = c.fetchall()
tweet_ids = [e[0] for e in tweet_ids]
new_tweets = []
for friend in friend_list:
try:
# try to get most recent 200 tweets from friend
get_tweets = api.user_timeline(id=friend, count=200)
except Exception as e:
continue
# add to list of all of this friend's tweets
new_tweets.extend(get_tweets)
# find oldest retrieved tweet's id number less 1
oldest = new_tweets[-1].id - 1
# get tweets until 3200 limit hit
while len(get_tweets) > 0:
try:
                # max_id arg looks for ids less than the arg's value
get_tweets = api.user_timeline(id=friend,
count=200,
max_id=oldest)
except Exception as e:
continue
new_tweets.extend(get_tweets)
oldest = new_tweets[-1].id - 1
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
c.execute('''INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
    # simply check if the tweet text contains my screen name
    # change from hard-coded value later
def mention_me(new_tweet_list, c, conn):
mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]
if len(new_tweet_list) != 0:
print('Insert Active')
for tweet in mentioned:
            c.execute('''INSERT INTO mentioned
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
if len(new_tweet_list) != 0:
print('Insert Done' + '\n')
# returns list of user_id and created_at pairs
# date associated with user_id is date of last
# tweet in database
def last_tweets(c, conn):
# list of user ids and the date of the
# last tweet in db
user_last_tweets = []
# get list of user's ids
c.execute('SELECT user_id FROM tdump')
users = c.fetchall()
users = list(set([user[0] for user in users]))
for user in users:
c.execute('''SELECT user_id, tweet_id
FROM tdump
WHERE user_id = ?
ORDER BY tweet_date DESC''',
[user])
last_tweet = c.fetchone()
user_last_tweets.append(last_tweet)
return user_last_tweets
    # downloads the most recent posts in each user's timeline
def download_recent(api, c, conn, last_tweets):
c.execute('SELECT tweet_id FROM tdump')
tweet_ids = [x[0] for x in c.fetchall()]
new_tweets = []
for pair in last_tweets:
user_id = pair[0]
tweet_id = pair[1]
try:
get_tweets = api.user_timeline(id=user_id,
since_id=tweet_id,
count=200)
except Exception:
continue
if len(get_tweets) != 0:
# add to list of all of this friend's tweets
new_tweets.extend(get_tweets)
# find newest retrieved tweet's id number plus 1
newest = get_tweets[0].id + 1
while len(get_tweets) > 0:
try:
                    # since_id arg looks for id's greater than arg's value
get_tweets = api.user_timeline(id=user_id,
count=200,
since_id=newest)
new_tweets.extend(get_tweets)
newest = get_tweets[0].id + 1
except Exception:
continue
if len(new_tweets) != 0:
print('Insert Active')
for tweet in new_tweets:
if tweet.user.screen_name != 'BonneNick' \
and tweet.id not in tweet_ids:
c.execute('''INSERT INTO tdump
(tweet,
username,
tweet_date,
tweet_id,
tweet_source,
user_id)
VALUES(?,?,?,?,?,?)''',
[tweet.text,
tweet.user.screen_name,
tweet.created_at,
tweet.id_str,
tweet.source,
tweet.user.id_str])
conn.commit()
conn.close()
if len(new_tweets) != 0:
print('Insert Done' + '\n')
if __name__ == '__main__':
main()
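A side note on the record above (not part of the original data): the per-row INSERT loops in download_to_limit and download_recent could be batched with sqlite3's executemany, which pushes the loop into the driver. A minimal sketch, assuming the same tdump schema and a list of tweepy status objects:

def insert_tweets(c, conn, tweets):
    # Batch variant of the row-at-a-time INSERT loops above; same tdump schema.
    rows = [(t.text, t.user.screen_name, str(t.created_at), t.id_str,
             t.source, t.user.id_str) for t in tweets]
    c.executemany("""INSERT INTO tdump
                     (tweet, username, tweet_date, tweet_id,
                      tweet_source, user_id)
                     VALUES(?,?,?,?,?,?)""", rows)
    conn.commit()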
|
flexible
|
{
"blob_id": "372d8c8cb9ec8f579db8588aff7799c73c5af255",
"index": 519,
"step-1": "<mask token>\n\n\nclass Collector:\n <mask token>\n\n def get_api():\n parser = ConfigParser()\n parser.read('twitter_auth.ini')\n consumer_key = parser.get('Keys', 'consumer_key').strip(\"'\")\n consumer_secret = parser.get('Secrets', 'consumer_secret').strip(\"'\")\n access_token = parser.get('Tokens', 'access_token').strip(\"'\")\n access_token_secret = parser.get('Secrets', 'access_token_secret'\n ).strip(\"'\")\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n api = tweepy.API(auth, wait_on_rate_limit=True)\n return api\n <mask token>\n\n def mention_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS mentioned\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n <mask token>\n\n def download_to_limit(api, c, conn, friend_list):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = c.fetchall()\n tweet_ids = [e[0] for e in tweet_ids]\n new_tweets = []\n for friend in friend_list:\n try:\n get_tweets = api.user_timeline(id=friend, count=200)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=friend, count=200,\n max_id=oldest)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n <mask token>\n\n def last_tweets(c, conn):\n user_last_tweets = []\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n for user in users:\n c.execute(\n \"\"\"SELECT user_id, tweet_id\n FROM tdump\n WHERE user_id = ?\n ORDER BY tweet_date DESC\"\"\"\n , [user])\n last_tweet = c.fetchone()\n user_last_tweets.append(last_tweet)\n return user_last_tweets\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Collector:\n <mask token>\n\n def get_api():\n parser = ConfigParser()\n parser.read('twitter_auth.ini')\n consumer_key = parser.get('Keys', 'consumer_key').strip(\"'\")\n consumer_secret = parser.get('Secrets', 'consumer_secret').strip(\"'\")\n access_token = parser.get('Tokens', 'access_token').strip(\"'\")\n access_token_secret = parser.get('Secrets', 'access_token_secret'\n ).strip(\"'\")\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n api = tweepy.API(auth, wait_on_rate_limit=True)\n return api\n\n def all_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS tdump\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def mention_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS mentioned\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def new_f_check(api, c):\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n friends_ids = api.friends_ids()\n new_friends = [x for x in friends_ids if str(x) not in users]\n return new_friends\n\n def download_to_limit(api, c, conn, friend_list):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = c.fetchall()\n tweet_ids = [e[0] for e in tweet_ids]\n new_tweets = []\n for friend in friend_list:\n try:\n get_tweets = api.user_timeline(id=friend, count=200)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=friend, count=200,\n max_id=oldest)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n def mention_me(new_tweet_list, c, conn):\n mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]\n if len(new_tweet_list) != 0:\n print('Insert Active')\n for tweet in mentioned:\n c.execute(\n \"\"\"INSERT INTO served\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweet_list) != 0:\n print('Insert Done' + '\\n')\n\n def last_tweets(c, conn):\n user_last_tweets = []\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n for user in users:\n c.execute(\n \"\"\"SELECT user_id, tweet_id\n FROM tdump\n WHERE user_id = ?\n ORDER BY tweet_date DESC\"\"\"\n , [user])\n last_tweet = c.fetchone()\n user_last_tweets.append(last_tweet)\n return user_last_tweets\n\n def download_recent(api, c, conn, last_tweets):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = [x[0] for x in c.fetchall()]\n new_tweets = []\n for pair in last_tweets:\n user_id = pair[0]\n 
tweet_id = pair[1]\n try:\n get_tweets = api.user_timeline(id=user_id, since_id=\n tweet_id, count=200)\n except Exception:\n continue\n if len(get_tweets) != 0:\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=user_id, count=\n 200, since_id=newest)\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n except Exception:\n continue\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n if (tweet.user.screen_name != 'BonneNick' and tweet.id not in\n tweet_ids):\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n conn.close()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n Collector.collect()\n\n\nclass Collector:\n\n def collect():\n api = Collector.get_api()\n tweet_dump = Collector.all_tweet_db()\n c = tweet_dump[0]\n conn = tweet_dump[1]\n last_list = Collector.last_tweets(c, conn)\n new_friends = Collector.new_f_check(api, c)\n Collector.download_to_limit(api, c, conn, new_friends)\n Collector.download_recent(api, c, conn, last_list)\n\n def get_api():\n parser = ConfigParser()\n parser.read('twitter_auth.ini')\n consumer_key = parser.get('Keys', 'consumer_key').strip(\"'\")\n consumer_secret = parser.get('Secrets', 'consumer_secret').strip(\"'\")\n access_token = parser.get('Tokens', 'access_token').strip(\"'\")\n access_token_secret = parser.get('Secrets', 'access_token_secret'\n ).strip(\"'\")\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n api = tweepy.API(auth, wait_on_rate_limit=True)\n return api\n\n def all_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS tdump\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def mention_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS mentioned\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def new_f_check(api, c):\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n friends_ids = api.friends_ids()\n new_friends = [x for x in friends_ids if str(x) not in users]\n return new_friends\n\n def download_to_limit(api, c, conn, friend_list):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = c.fetchall()\n tweet_ids = [e[0] for e in tweet_ids]\n new_tweets = []\n for friend in friend_list:\n try:\n get_tweets = api.user_timeline(id=friend, count=200)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=friend, count=200,\n max_id=oldest)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n def mention_me(new_tweet_list, c, conn):\n mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]\n if len(new_tweet_list) != 0:\n print('Insert Active')\n for tweet in mentioned:\n c.execute(\n \"\"\"INSERT INTO served\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweet_list) != 0:\n print('Insert Done' + '\\n')\n\n def last_tweets(c, conn):\n user_last_tweets = []\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n for user in users:\n c.execute(\n \"\"\"SELECT user_id, tweet_id\n FROM tdump\n WHERE 
user_id = ?\n ORDER BY tweet_date DESC\"\"\"\n , [user])\n last_tweet = c.fetchone()\n user_last_tweets.append(last_tweet)\n return user_last_tweets\n\n def download_recent(api, c, conn, last_tweets):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = [x[0] for x in c.fetchall()]\n new_tweets = []\n for pair in last_tweets:\n user_id = pair[0]\n tweet_id = pair[1]\n try:\n get_tweets = api.user_timeline(id=user_id, since_id=\n tweet_id, count=200)\n except Exception:\n continue\n if len(get_tweets) != 0:\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=user_id, count=\n 200, since_id=newest)\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n except Exception:\n continue\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n if (tweet.user.screen_name != 'BonneNick' and tweet.id not in\n tweet_ids):\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n conn.close()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef main():\n Collector.collect()\n\n\nclass Collector:\n\n def collect():\n api = Collector.get_api()\n tweet_dump = Collector.all_tweet_db()\n c = tweet_dump[0]\n conn = tweet_dump[1]\n last_list = Collector.last_tweets(c, conn)\n new_friends = Collector.new_f_check(api, c)\n Collector.download_to_limit(api, c, conn, new_friends)\n Collector.download_recent(api, c, conn, last_list)\n\n def get_api():\n parser = ConfigParser()\n parser.read('twitter_auth.ini')\n consumer_key = parser.get('Keys', 'consumer_key').strip(\"'\")\n consumer_secret = parser.get('Secrets', 'consumer_secret').strip(\"'\")\n access_token = parser.get('Tokens', 'access_token').strip(\"'\")\n access_token_secret = parser.get('Secrets', 'access_token_secret'\n ).strip(\"'\")\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n api = tweepy.API(auth, wait_on_rate_limit=True)\n return api\n\n def all_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS tdump\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def mention_tweet_db():\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n c.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS mentioned\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)\"\"\"\n )\n return c, conn\n\n def new_f_check(api, c):\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n friends_ids = api.friends_ids()\n new_friends = [x for x in friends_ids if str(x) not in users]\n return new_friends\n\n def download_to_limit(api, c, conn, friend_list):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = c.fetchall()\n tweet_ids = [e[0] for e in tweet_ids]\n new_tweets = []\n for friend in friend_list:\n try:\n get_tweets = api.user_timeline(id=friend, count=200)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=friend, count=200,\n max_id=oldest)\n except Exception as e:\n continue\n new_tweets.extend(get_tweets)\n oldest = new_tweets[-1].id - 1\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n def mention_me(new_tweet_list, c, conn):\n mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]\n if len(new_tweet_list) != 0:\n print('Insert Active')\n for tweet in mentioned:\n c.execute(\n \"\"\"INSERT INTO served\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n if len(new_tweet_list) != 0:\n print('Insert Done' + '\\n')\n\n def last_tweets(c, conn):\n user_last_tweets = []\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n for user in users:\n c.execute(\n \"\"\"SELECT user_id, tweet_id\n FROM tdump\n WHERE 
user_id = ?\n ORDER BY tweet_date DESC\"\"\"\n , [user])\n last_tweet = c.fetchone()\n user_last_tweets.append(last_tweet)\n return user_last_tweets\n\n def download_recent(api, c, conn, last_tweets):\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = [x[0] for x in c.fetchall()]\n new_tweets = []\n for pair in last_tweets:\n user_id = pair[0]\n tweet_id = pair[1]\n try:\n get_tweets = api.user_timeline(id=user_id, since_id=\n tweet_id, count=200)\n except Exception:\n continue\n if len(get_tweets) != 0:\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n while len(get_tweets) > 0:\n try:\n get_tweets = api.user_timeline(id=user_id, count=\n 200, since_id=newest)\n new_tweets.extend(get_tweets)\n newest = get_tweets[0].id + 1\n except Exception:\n continue\n if len(new_tweets) != 0:\n print('Insert Active')\n for tweet in new_tweets:\n if (tweet.user.screen_name != 'BonneNick' and tweet.id not in\n tweet_ids):\n c.execute(\n \"\"\"INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)\"\"\"\n , [tweet.text, tweet.user.screen_name, tweet.created_at,\n tweet.id_str, tweet.source, tweet.user.id_str])\n conn.commit()\n conn.close()\n if len(new_tweets) != 0:\n print('Insert Done' + '\\n')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/home/nick/.virtualenvs/twitterbots/bin/python3.5\n# -*- coding: utf-8 -*-\n\nimport tweepy\nimport sqlite3\n\nfrom configparser import ConfigParser\n\n'''\nA little OOP would be good later for\nauthenticated user data, c, conn, api\n'''\n\n\ndef main():\n\n Collector.collect()\n\n\nclass Collector:\n\n # Main function\n def collect():\n\n api = Collector.get_api()\n\n tweet_dump = Collector.all_tweet_db()\n c = tweet_dump[0]\n conn = tweet_dump[1]\n last_list = Collector.last_tweets(c, conn)\n\n # Look for new friends, add to db\n new_friends = Collector.new_f_check(api, c)\n\n Collector.download_to_limit(api, c, conn, new_friends)\n\n # Checks timelines of everyone in db already\n # adds anything new to db\n Collector.download_recent(api, c, conn, last_list)\n\n def get_api():\n\n parser = ConfigParser()\n parser.read('twitter_auth.ini')\n consumer_key = parser.get('Keys',\n 'consumer_key').strip(\"'\")\n consumer_secret = parser.get('Secrets',\n 'consumer_secret').strip(\"'\")\n access_token = parser.get('Tokens',\n 'access_token').strip(\"'\")\n access_token_secret = parser.get('Secrets',\n 'access_token_secret').strip(\"'\")\n\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n api = tweepy.API(auth, wait_on_rate_limit=True)\n\n return api\n\n # connects to tweet_dump.db creates tdump if not exists\n # tdump stores all tweets from anyone in list\n def all_tweet_db():\n\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n\n c.execute('''CREATE TABLE IF NOT EXISTS tdump\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)''')\n\n return c, conn\n\n # connects to tweet_dump.db creats served if not exists\n # served stores tweets that are mention authenticated user\n def mention_tweet_db():\n\n conn = sqlite3.connect('tweet_dump_main.db')\n c = conn.cursor()\n\n c.execute('''CREATE TABLE IF NOT EXISTS mentioned\n (tweet TEXT,\n username TEXT,\n tweet_date TEXT,\n tweet_id TEXT,\n tweet_source TEXT,\n user_id TEXT)''')\n\n return c, conn\n\n # looks for new friends by comparing authenticated\n # user's friend list with list of friends in tdump\n def new_f_check(api, c):\n\n # get list of user's ids\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n\n # get list of friends_ids from twitter\n friends_ids = api.friends_ids()\n\n new_friends = [x for x in friends_ids if str(x) not in users]\n\n return new_friends\n\n # downloads up to 3200 of a user's most\n # recent tweets commits to tdump\n def download_to_limit(api, c, conn, friend_list):\n\n # List of tweet ids already in db\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = c.fetchall()\n tweet_ids = [e[0] for e in tweet_ids]\n\n new_tweets = []\n\n for friend in friend_list:\n\n try:\n # try to get most recent 200 tweets from friend\n get_tweets = api.user_timeline(id=friend, count=200)\n\n except Exception as e:\n\n continue\n\n # add to list of all of this friend's tweets\n new_tweets.extend(get_tweets)\n\n # find oldest retrieved tweet's id number less 1\n oldest = new_tweets[-1].id - 1\n\n # get tweets until 3200 limit hit\n while len(get_tweets) > 0:\n\n try:\n # max_id arg looks for id's less than arg's value\n get_tweets = api.user_timeline(id=friend,\n count=200,\n max_id=oldest)\n\n except Exception as e:\n\n continue\n\n new_tweets.extend(get_tweets)\n\n oldest = new_tweets[-1].id - 1\n\n if len(new_tweets) != 
0:\n\n print('Insert Active')\n\n for tweet in new_tweets:\n\n c.execute('''INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)''',\n [tweet.text,\n tweet.user.screen_name,\n tweet.created_at,\n tweet.id_str,\n tweet.source,\n tweet.user.id_str])\n\n conn.commit()\n\n if len(new_tweets) != 0:\n\n print('Insert Done' + '\\n')\n\n # simply check if tweet text contains my screen name\n # change from hard code later\n def mention_me(new_tweet_list, c, conn):\n\n mentioned = [x for x in new_tweet_list if '@BonneNick' in x[0]]\n\n if len(new_tweet_list) != 0:\n\n print('Insert Active')\n\n for tweet in mentioned:\n\n c.execute('''INSERT INTO served\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)''',\n [tweet.text,\n tweet.user.screen_name,\n tweet.created_at,\n tweet.id_str,\n tweet.source,\n tweet.user.id_str])\n\n conn.commit()\n\n if len(new_tweet_list) != 0:\n\n print('Insert Done' + '\\n')\n\n # returns list of user_id and created_at pairs\n # date associated with user_id is date of last\n # tweet in database\n def last_tweets(c, conn):\n\n # list of user ids and the date of the\n # last tweet in db\n user_last_tweets = []\n\n # get list of user's ids\n c.execute('SELECT user_id FROM tdump')\n users = c.fetchall()\n users = list(set([user[0] for user in users]))\n\n for user in users:\n\n c.execute('''SELECT user_id, tweet_id\n FROM tdump\n WHERE user_id = ?\n ORDER BY tweet_date DESC''',\n [user])\n\n last_tweet = c.fetchone()\n user_last_tweets.append(last_tweet)\n\n return user_last_tweets\n\n # downloads most recent posts in each users timelines\n def download_recent(api, c, conn, last_tweets):\n\n c.execute('SELECT tweet_id FROM tdump')\n tweet_ids = [x[0] for x in c.fetchall()]\n\n new_tweets = []\n\n for pair in last_tweets:\n\n user_id = pair[0]\n tweet_id = pair[1]\n\n try:\n\n get_tweets = api.user_timeline(id=user_id,\n since_id=tweet_id,\n count=200)\n\n except Exception:\n\n continue\n\n if len(get_tweets) != 0:\n\n # add to list of all of this friend's tweets\n new_tweets.extend(get_tweets)\n\n # find newest retrieved tweet's id number plus 1\n newest = get_tweets[0].id + 1\n\n while len(get_tweets) > 0:\n\n try:\n # max_id arg looks for id's less than arg's value\n get_tweets = api.user_timeline(id=user_id,\n count=200,\n since_id=newest)\n\n new_tweets.extend(get_tweets)\n\n newest = get_tweets[0].id + 1\n\n except Exception:\n\n continue\n\n if len(new_tweets) != 0:\n\n print('Insert Active')\n\n for tweet in new_tweets:\n\n if tweet.user.screen_name != 'BonneNick' \\\n and tweet.id not in tweet_ids:\n\n c.execute('''INSERT INTO tdump\n (tweet,\n username,\n tweet_date,\n tweet_id,\n tweet_source,\n user_id)\n VALUES(?,?,?,?,?,?)''',\n [tweet.text,\n tweet.user.screen_name,\n tweet.created_at,\n tweet.id_str,\n tweet.source,\n tweet.user.id_str])\n\n conn.commit()\n conn.close()\n\n if len(new_tweets) != 0:\n\n print('Insert Done' + '\\n')\n\n\nif __name__ == '__main__':\n\n main()\n",
"step-ids": [
5,
9,
11,
12,
14
]
}
|
[
5,
9,
11,
12,
14
] |
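Another illustrative aside on the same record: the manual max_id bookkeeping in download_to_limit is the classic timeline-cursoring pattern, and tweepy's Cursor helper performs the same walk internally. A sketch assuming an authenticated api object like the one Collector.get_api() returns:

import tweepy

def fetch_timeline(api, user_id, limit=3200):
    # Cursor manages the max_id paging; items(limit) stops at the same
    # 3200-tweet cap the hand-rolled loop works around.
    return list(tweepy.Cursor(api.user_timeline, id=user_id, count=200).items(limit))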
<|reserved_special_token_0|>
def backup_db(cursor):
for table in tables_schema:
backup_table(cursor, table)
def backup_table(cursor, table):
cursor.execute(f'{tables_schema[table]}' +
f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
"FIELDS TERMINATED BY ',' " + "LINES TERMINATED BY '\\n' " +
f'FROM {table}')
def print_report(db_name):
print(
f"Database: '{db_name}' successfully backed up under '\\tmp' directory!"
)
<|reserved_special_token_0|>
def main():
password = getpass('MySQL password:')
db_name = 'WORLDMETRIC'
connect_db(password, db_name)
backup_db(cursor)
print_report(db_name)
dbconnector.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def backup_db(cursor):
for table in tables_schema:
backup_table(cursor, table)
def backup_table(cursor, table):
cursor.execute(f'{tables_schema[table]}' +
f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
"FIELDS TERMINATED BY ',' " + "LINES TERMINATED BY '\\n' " +
f'FROM {table}')
def print_report(db_name):
print(
f"Database: '{db_name}' successfully backed up under '\\tmp' directory!"
)
def connect_db(password, db_name):
global dbconnector, cursor
dbconnector = mysql.connector.connect(host='localhost', user='root',
passwd=password, database=db_name, autocommit=True)
cursor = dbconnector.cursor()
def main():
password = getpass('MySQL password:')
db_name = 'WORLDMETRIC'
connect_db(password, db_name)
backup_db(cursor)
print_report(db_name)
dbconnector.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def backup_db(cursor):
for table in tables_schema:
backup_table(cursor, table)
def backup_table(cursor, table):
cursor.execute(f'{tables_schema[table]}' +
f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
"FIELDS TERMINATED BY ',' " + "LINES TERMINATED BY '\\n' " +
f'FROM {table}')
def print_report(db_name):
print(
f"Database: '{db_name}' successfully backed up under '\\tmp' directory!"
)
def connect_db(password, db_name):
global dbconnector, cursor
dbconnector = mysql.connector.connect(host='localhost', user='root',
passwd=password, database=db_name, autocommit=True)
cursor = dbconnector.cursor()
def main():
password = getpass('MySQL password:')
db_name = 'WORLDMETRIC'
connect_db(password, db_name)
backup_db(cursor)
print_report(db_name)
dbconnector.close()
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
tables_schema = {'Country': "SELECT 'Id','Name','Code' " + 'UNION ALL ' +
'SELECT Id, Name, Code ', 'Indicator': "SELECT 'Id','Name','Code' " +
'UNION ALL ' + 'SELECT Id, Name, Code ', 'Year':
"SELECT 'Id','FiveYearPeriod','TenYearPeriod' " + 'UNION ALL ' +
'SELECT Id, FiveYearPeriod, TenYearPeriod ', 'Metric':
"SELECT 'CountryId','IndicatorId','YearId','Measurement' " +
'UNION ALL ' + 'SELECT CountryId, IndicatorId, YearId, Measurement '}
dbconnector = None
cursor = None
def backup_db(cursor):
for table in tables_schema:
backup_table(cursor, table)
def backup_table(cursor, table):
cursor.execute(f'{tables_schema[table]}' +
f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
"FIELDS TERMINATED BY ',' " + "LINES TERMINATED BY '\\n' " +
f'FROM {table}')
def print_report(db_name):
print(
f"Database: '{db_name}' successfully backed up under '\\tmp' directory!"
)
def connect_db(password, db_name):
global dbconnector, cursor
dbconnector = mysql.connector.connect(host='localhost', user='root',
passwd=password, database=db_name, autocommit=True)
cursor = dbconnector.cursor()
def main():
password = getpass('MySQL password:')
db_name = 'WORLDMETRIC'
connect_db(password, db_name)
backup_db(cursor)
print_report(db_name)
dbconnector.close()
main()
<|reserved_special_token_1|>
import mysql.connector
from getpass import getpass
tables_schema = {
"Country": "SELECT 'Id','Name','Code' " +
"UNION ALL " +
"SELECT Id, Name, Code ",
"Indicator": "SELECT 'Id','Name','Code' " +
"UNION ALL " +
"SELECT Id, Name, Code ",
"Year": "SELECT 'Id','FiveYearPeriod','TenYearPeriod' " +
"UNION ALL " +
"SELECT Id, FiveYearPeriod, TenYearPeriod ",
"Metric": "SELECT 'CountryId','IndicatorId','YearId','Measurement' " +
"UNION ALL " +
"SELECT CountryId, IndicatorId, YearId, Measurement "
}
dbconnector = None
cursor = None
def backup_db(cursor):
for table in tables_schema: backup_table(cursor, table)
def backup_table(cursor, table):
cursor.execute(f"{tables_schema[table]}" +
f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
"FIELDS TERMINATED BY ',' " +
"LINES TERMINATED BY '\\n' " +
f"FROM {table}")
def print_report(db_name):
print (f"Database: '{db_name}' successfully backed up under '\\tmp' directory!")
def connect_db(password, db_name):
global dbconnector, cursor
dbconnector = mysql.connector.connect(
host = "localhost",
user = "root",
passwd = password,
database = db_name,
autocommit = True
)
cursor = dbconnector.cursor()
def main():
password = getpass("MySQL password:")
db_name = "WORLDMETRIC"
connect_db(password, db_name)
backup_db(cursor)
print_report(db_name)
dbconnector.close()
main()
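A note on the record above (not part of the original data): each tables_schema entry front-loads a UNION ALL of string literals because SELECT ... INTO OUTFILE emits no header row, so the literals become the first line of the CSV. For example, the Country entry plus the clauses appended in backup_table expands to:

query = ("SELECT 'Id','Name','Code' "
         "UNION ALL "
         "SELECT Id, Name, Code "
         "INTO OUTFILE '/tmp/country_data.csv' "
         "FIELDS TERMINATED BY ',' "
         "LINES TERMINATED BY '\\n' "
         "FROM Country")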
|
flexible
|
{
"blob_id": "fd76a7dd90bac7c7ba9201b6db62e6cb3eedeced",
"index": 4390,
"step-1": "<mask token>\n\n\ndef backup_db(cursor):\n for table in tables_schema:\n backup_table(cursor, table)\n\n\ndef backup_table(cursor, table):\n cursor.execute(f'{tables_schema[table]}' +\n f\"INTO OUTFILE '/tmp/{table.lower()}_data.csv' \" +\n \"FIELDS TERMINATED BY ',' \" + \"LINES TERMINATED BY '\\\\n' \" +\n f'FROM {table}')\n\n\ndef print_report(db_name):\n print(\n f\"Database: '{db_name}' successfully backed up under '\\\\tmp' directory!\"\n )\n\n\n<mask token>\n\n\ndef main():\n password = getpass('MySQL password:')\n db_name = 'WORLDMETRIC'\n connect_db(password, db_name)\n backup_db(cursor)\n print_report(db_name)\n dbconnector.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef backup_db(cursor):\n for table in tables_schema:\n backup_table(cursor, table)\n\n\ndef backup_table(cursor, table):\n cursor.execute(f'{tables_schema[table]}' +\n f\"INTO OUTFILE '/tmp/{table.lower()}_data.csv' \" +\n \"FIELDS TERMINATED BY ',' \" + \"LINES TERMINATED BY '\\\\n' \" +\n f'FROM {table}')\n\n\ndef print_report(db_name):\n print(\n f\"Database: '{db_name}' successfully backed up under '\\\\tmp' directory!\"\n )\n\n\ndef connect_db(password, db_name):\n global dbconnector, cursor\n dbconnector = mysql.connector.connect(host='localhost', user='root',\n passwd=password, database=db_name, autocommit=True)\n cursor = dbconnector.cursor()\n\n\ndef main():\n password = getpass('MySQL password:')\n db_name = 'WORLDMETRIC'\n connect_db(password, db_name)\n backup_db(cursor)\n print_report(db_name)\n dbconnector.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef backup_db(cursor):\n for table in tables_schema:\n backup_table(cursor, table)\n\n\ndef backup_table(cursor, table):\n cursor.execute(f'{tables_schema[table]}' +\n f\"INTO OUTFILE '/tmp/{table.lower()}_data.csv' \" +\n \"FIELDS TERMINATED BY ',' \" + \"LINES TERMINATED BY '\\\\n' \" +\n f'FROM {table}')\n\n\ndef print_report(db_name):\n print(\n f\"Database: '{db_name}' successfully backed up under '\\\\tmp' directory!\"\n )\n\n\ndef connect_db(password, db_name):\n global dbconnector, cursor\n dbconnector = mysql.connector.connect(host='localhost', user='root',\n passwd=password, database=db_name, autocommit=True)\n cursor = dbconnector.cursor()\n\n\ndef main():\n password = getpass('MySQL password:')\n db_name = 'WORLDMETRIC'\n connect_db(password, db_name)\n backup_db(cursor)\n print_report(db_name)\n dbconnector.close()\n\n\nmain()\n",
"step-4": "<mask token>\ntables_schema = {'Country': \"SELECT 'Id','Name','Code' \" + 'UNION ALL ' +\n 'SELECT Id, Name, Code ', 'Indicator': \"SELECT 'Id','Name','Code' \" +\n 'UNION ALL ' + 'SELECT Id, Name, Code ', 'Year': \n \"SELECT 'Id','FiveYearPeriod','TenYearPeriod' \" + 'UNION ALL ' +\n 'SELECT Id, FiveYearPeriod, TenYearPeriod ', 'Metric': \n \"SELECT 'CountryId','IndicatorId','YearId','Measurement' \" +\n 'UNION ALL ' + 'SELECT CountryId, IndicatorId, YearId, Measurement '}\ndbconnector = None\ncursor = None\n\n\ndef backup_db(cursor):\n for table in tables_schema:\n backup_table(cursor, table)\n\n\ndef backup_table(cursor, table):\n cursor.execute(f'{tables_schema[table]}' +\n f\"INTO OUTFILE '/tmp/{table.lower()}_data.csv' \" +\n \"FIELDS TERMINATED BY ',' \" + \"LINES TERMINATED BY '\\\\n' \" +\n f'FROM {table}')\n\n\ndef print_report(db_name):\n print(\n f\"Database: '{db_name}' successfully backed up under '\\\\tmp' directory!\"\n )\n\n\ndef connect_db(password, db_name):\n global dbconnector, cursor\n dbconnector = mysql.connector.connect(host='localhost', user='root',\n passwd=password, database=db_name, autocommit=True)\n cursor = dbconnector.cursor()\n\n\ndef main():\n password = getpass('MySQL password:')\n db_name = 'WORLDMETRIC'\n connect_db(password, db_name)\n backup_db(cursor)\n print_report(db_name)\n dbconnector.close()\n\n\nmain()\n",
"step-5": "import mysql.connector\nfrom getpass import getpass\n\ntables_schema = {\n\t\t\t\t\"Country\": \"SELECT 'Id','Name','Code' \" +\n\t\t\t\t\t\t\t \"UNION ALL \" +\n\t\t\t\t\t\t\t \"SELECT Id, Name, Code \",\n\n\t\t\t\t\"Indicator\": \"SELECT 'Id','Name','Code' \" +\n\t\t\t\t\t\t\t \"UNION ALL \" +\n\t\t\t\t\t\t\t \"SELECT Id, Name, Code \",\n\n\t\t \t\t\"Year\": \"SELECT 'Id','FiveYearPeriod','TenYearPeriod' \" +\n\t\t\t\t\t\t \"UNION ALL \" +\n\t\t\t\t\t\t \"SELECT Id, FiveYearPeriod, TenYearPeriod \",\n\n\t\t\t\t\"Metric\": \"SELECT 'CountryId','IndicatorId','YearId','Measurement' \" +\n\t\t\t\t\t\t \"UNION ALL \" +\n\t\t\t\t\t\t \"SELECT CountryId, IndicatorId, YearId, Measurement \"\n\t\t\t\t}\n\ndbconnector = None\ncursor = None\n\ndef backup_db(cursor):\n\n\tfor table in tables_schema: backup_table(cursor, table)\n\ndef backup_table(cursor, table):\n\n\tcursor.execute(f\"{tables_schema[table]}\" +\n\t\t\t\t f\"INTO OUTFILE '/tmp/{table.lower()}_data.csv' \" +\n\t\t\t\t \"FIELDS TERMINATED BY ',' \" +\n\t\t\t\t \"LINES TERMINATED BY '\\\\n' \" +\n\t\t\t\t f\"FROM {table}\")\n\ndef print_report(db_name):\n\n\tprint (f\"Database: '{db_name}' successfully backed up under '\\\\tmp' directory!\")\n\ndef connect_db(password, db_name):\n\n\tglobal dbconnector, cursor\n\tdbconnector = mysql.connector.connect(\n\t\thost = \"localhost\",\n\t\tuser = \"root\",\n\t\tpasswd = password,\n\t\tdatabase = db_name,\n\t\tautocommit = True\n\t)\n\tcursor = dbconnector.cursor()\n\ndef main():\n\n\tpassword = getpass(\"MySQL password:\")\n\tdb_name = \"WORLDMETRIC\"\n\tconnect_db(password, db_name)\n\tbackup_db(cursor)\n\tprint_report(db_name)\n\tdbconnector.close()\n\nmain()",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
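One operational caveat worth attaching to this record: INTO OUTFILE writes from the MySQL server process, so it requires the FILE privilege, is constrained by the server's secure_file_priv setting, and fails if the target file already exists. A common client-side alternative is mysqldump; the following is an illustrative sketch, not part of the record:

import subprocess
from getpass import getpass

def dump_db(db_name):
    # Logical backup of the whole database instead of per-table CSVs.
    password = getpass('MySQL password:')
    with open(f'/tmp/{db_name.lower()}_backup.sql', 'w') as out:
        subprocess.run(['mysqldump', '-u', 'root', f'-p{password}', db_name],
                       stdout=out, check=True)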
<|reserved_special_token_0|>
class Player:
def __init__(self, name, location):
self.name = name
self.location = location
self.square = None
self.money = 0
self.quest = None
self.job = None
self.phase = 'day'
self.equipped_weapon = None
self.major_armor = None
self.minor_armor = None
self.building_local = None
self.inventory = []
self.skills = {}
self.health = 100
self.greeting_count = 0
self.body_count = 0
self.assassination_count = 0
self.hit_list = []
self.death_count = 0
self.food_count = 0
self.run_away_count = 0
self.speed_bonus = False
self.game_won = False
def game_over(self):
if self.game_won is False:
self.game_won = True
print(colored('You have won the game!', 'green'))
print(
'You may continue playing to earn more achievements if you wish.'
)
if self.run_away_count == 0:
print(
'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'
)
if self.run_away_count > 100:
print(
'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'
)
def clean_up_inventory(self):
""" Remove items with quantity of zero from the map inventory"""
self.inventory = [i for i in self.inventory if i.quantity != 0]
def phase_change(self, the_map):
self.phase = 'day' if self.phase == 'night' else 'night'
for k, square in the_map.items():
if self.location != k:
square.generate_items()
for b in square.buildings:
if b.ware_list:
b.wares = drop_item(b.ware_list)
while not b.wares:
b.wares = drop_item(b.ware_list)
if b.name not in ('a castle', 'a volcanic base'):
jobs = {}
buiding_dict = add_dicts_together(buildings[
'master'], buildings[square.square_type])
for key, v in buiding_dict.items():
if key == b.name and v.get('jobs'):
for name, values in v['jobs'].items():
jobs[name] = values
b.jobs = b.drop_job(jobs)
if self.phase == 'day':
self.speed_bonus = False
for mob in square.mobs:
mob.health = 100
mob.irritation_level = 0
mob.quest = None if self.quest is None else mob.quest
if not square.mobs:
square.mobs = drop_mob(add_dicts_together(wild_mobs
['master'], wild_mobs[self.square.square_type]),
self, limit=len(names), square=square)
def formatted_inventory(self):
formatted = []
for item in self.inventory:
if item.quantity > 1:
formatted.append(f'{int_to_words(item.quantity)} {item.plural}'
)
else:
formatted.append(item.name)
if formatted:
return comma_separated(formatted)
else:
return 'nothing'
def pretty_inventory(self):
w = self.equipped_weapon
major = self.major_armor.defense if self.major_armor else 0
minor = self.minor_armor.defense if self.minor_armor else 0
armor_defense = (major + minor) * 5
armors = [self.major_armor.name if self.major_armor else None, self
.minor_armor.name if self.minor_armor else None]
inventory = {'inventory_items':
f'You have {self.formatted_inventory()} in your inventory.',
'weapon':
f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if w.quantity == 1 else w.plural}.'
if w else None, 'armor':
f"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage."
if self.minor_armor or self.major_armor else None}
return '\n'.join(v for v in inventory.values() if v)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def increase_skill(self, skill, increase):
try:
self.skills[skill] += increase
except KeyError:
self.skills[skill] = increase
print(
f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'
)
class Item:
def __init__(self, name, quantity, plural, category=None, perishable=
None, flammable=None, rarity=None, price=None, weapon_rating=None,
defense=None):
self.name = name
self.quantity = quantity
self.plural = plural
self.category = category or None
self.perishable = perishable or None
self.flammable = flammable or None
self.rarity = rarity or None
self.price = price or None
self.weapon_rating = weapon_rating or None
self.defense = defense or None
def copy(self):
return Item(name=self.name, quantity=self.quantity, plural=self.
plural, category=self.category, perishable=self.perishable,
flammable=self.flammable, rarity=self.rarity, weapon_rating=
self.weapon_rating, defense=self.defense)
class Building(object):
def __init__(self, name, p, plural, category=None, rarity=None,
ware_list=None, mobs=None, jobs=None):
self.name = name
self.p = p
self.quantity = 1
self.plural = plural
self.category = category or None
self.rarity = rarity or None
self.ware_list = ware_list
self.wares = self.drop_wares()
self.mobs = drop_mob(mobs, p) if mobs else None
self.jobs = self.drop_job(jobs) if jobs else None
if self.name in ('a castle', 'a volcanic base'):
self.boss_mobs_and_jobs()
def drop_wares(self):
if self.ware_list:
wares = drop_item(self.ware_list)
while not wares:
wares = drop_item(self.ware_list)
return wares
else:
return []
def drop_job(self, jobs):
drops_i = []
for k, v in jobs.items():
if odds(2):
drops_i.append(Job(name=k, location=self.p.location, **v))
return drops_i
def boss_mobs_and_jobs(self):
boss_major_armors = [Item('a coat of impervious dragon scales',
plural='coats of dragon scales', quantity=1, category=
'major armor', rarity='super rare', defense=5), Item(
'an enchanted leather duster', plural=
'enchanted leather dusters', quantity=1, category='major armor',
defense=5, rarity='super rare'), Item(
'a coat of actual live grizzly bears', plural=
'coats of actual live grizzly bears', quantity=1, category=
'major armor', defense=5, rarity='super rare')]
boss_minor_armors = [Item('wings of an angel', plural=
'wings of angels', quantity=1, rarity='super rare', category=
'minor armor', defense=5), Item('an OSHA approved hard hat',
plural='OSHA approved hard hats', quantity=1, rarity=
'super rare', category='minor armor', defense=5), Item(
            'a pair of boots that were made for walkin', plural=
'pairs of boots that were made for walkin', quantity=1, rarity=
'super rare', category='minor armor', defense=5)]
boss_weapons = [Item('an apache helicopter', plural=
'apache helicopters', rarity='super rare', weapon_rating=6,
quantity=1), Item('a trebuchet', plural='trebuchets',
weapon_rating=6, quantity=1, rarity='super rare'), Item(
'an army of attacking wizards', plural=
'armies of attacking wizards', weapon_rating=6, quantity=1,
rarity='super rare')]
boss_names = ['the Terrifying Dragon of Soul Slaying',
'the Great Salamander of Darkness', 'the Squirrel of Destiny']
random.shuffle(boss_names)
random.shuffle(boss_weapons)
random.shuffle(boss_major_armors)
random.shuffle(boss_minor_armors)
boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=
'super rare')
boss.health = 500
boss.equipped_weapon = boss_weapons[0]
boss.major_armor = boss_major_armors[0]
boss.minor_armor = boss_minor_armors[0]
boss.irritation_level = 10
self.mobs = [boss]
if self.name == 'a castle':
self.jobs = [Job('king of the realm', location=self.p.location,
salary=1100)]
if self.name == 'a volcanic base':
self.jobs = [Job('evil overlord', location=self.p.location,
salary=1100)]
class Job:
def __init__(self, name, location, skills_needed=None, salary=0,
skills_learned=None, inventory_needed=None):
self.name = name
self.location = location
self.skills_needed = skills_needed or None
self.salary = salary or 0
self.skills_learned = skills_learned or None
self.inventory_needed = inventory_needed or None
self.application_attempts = 0
class Mob:
def __init__(self, name, p, plural, rarity, inventory=None):
self.name = name
self.p = p
self.plural = plural
self.quantity = 1
self.rarity = rarity
self.skills = self.skills()
self.quest = None
self.inventory = inventory or drop_item(add_dicts_together(items[
'master'], items[p.square.square_type]))
self.health = 100
self.equipped_weapon = self.equip()
major = [x for x in self.inventory if x.category == 'major armor']
minor = [x for x in self.inventory if x.category == 'minor armor']
self.major_armor = major[0] if major else None
self.minor_armor = minor[0] if minor else None
self.irritation_level = 0
def equip(self):
nice_weapons = []
for i in self.inventory:
try:
if i.weapon_rating:
nice_weapons.append(i)
except AttributeError:
pass
nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
if nice_weapons:
self.inventory.remove(nice_weapons[0])
return nice_weapons[0]
else:
return None
@staticmethod
def skills():
""" Pick the skills for a mob, these determine what a player can get from completing a quest """
all_skills = ['strength', 'patience', 'cleanliness', 'leadership',
'communication', 'science', 'math', 'engineering',
'intelligence', 'driving']
random.shuffle(all_skills)
return all_skills[0:2]
def generate_quest(self):
"""
inventory based
bring me x of an object to learn a skill
"""
if odds(3):
quest_items = add_dicts_together(items['master'], items[self.p.
square.square_type])
quest_item = random.choice(list(quest_items.keys()))
i = Item(quest_item, 0, **quest_items[quest_item])
self.inventory.append(i)
quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',
'common': '6', 'super common': '15'}
q = quantity[i.rarity]
self.quest = (i, int(q),
f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, I will teach you a valuable skill.'
)
return
elif odds(5):
mobs = []
for biome, building in buildings.items():
for b, attributes in building.items():
if attributes.get('mobs'):
for k in attributes['mobs'].keys():
mobs.append(k)
for biome, mob in wild_mobs.items():
for k in mob.keys():
mobs.append(k)
target = (
f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'
)
print(
f"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean..."
)
self.p.hit_list.append(target)
return False
else:
return None
<|reserved_special_token_1|>
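A reading aid between the masked variants of this record: the game logic calls several helpers (odds, drop_item, drop_mob, drop_building, add_dicts_together, int_to_words, remove_little_words, comma_separated) whose definitions are masked out of every step, so their real implementations are not recoverable here. Purely as a guess consistent with call sites such as odds(2) and odds(3), the two simplest might look like:

import random

def odds(n):
    # Hypothetical: True roughly 1 time in n, matching its use as a drop gate.
    return random.randint(1, n) == 1

def add_dicts_together(a, b):
    # Hypothetical: merge two spec dicts, with b winning on key collisions.
    merged = dict(a)
    merged.update(b)
    return merged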
<|reserved_special_token_0|>
class MapSquare:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def generate_buildings(self, p):
self.buildings = drop_building(add_dicts_together(buildings[
'master'], buildings[self.square_type]), p)
def generate_mobs(self, p):
self.mobs = drop_mob(add_dicts_together(wild_mobs['master'],
wild_mobs[self.square_type]), p)
def clean_up_map(self):
""" Remove items with quantity of zero from the map inventory"""
self.items = [i for i in self.items if i.quantity != 0]
@staticmethod
def map_picture(the_map, p):
"""With the player's location in the center, draw a 5 x 5 map with map square type
and coordinates in each square"""
xy = p.location[0] - 2, p.location[1] + 2
map_coords = []
for y in range(0, 5):
row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]
map_coords.append(row)
pretty_map = []
for r in map_coords:
row = []
for coordinates in r:
if coordinates in the_map.keys():
if p.quest and p.job and p.quest[1
] == coordinates and p.job.location == coordinates:
star = '*$ '
elif p.quest and p.quest[1] == coordinates:
star = ' * '
elif p.job and p.job.location == coordinates:
star = ' $ '
else:
star = ' '
row.append('|{!s:9}{}|'.format(the_map[coordinates].
square_type, star))
else:
row.append('|{!s:12}|'.format(' '))
pretty_map.append(row)
for row in pretty_map:
print(''.join(row))
class Player:
def __init__(self, name, location):
self.name = name
self.location = location
self.square = None
self.money = 0
self.quest = None
self.job = None
self.phase = 'day'
self.equipped_weapon = None
self.major_armor = None
self.minor_armor = None
self.building_local = None
self.inventory = []
self.skills = {}
self.health = 100
self.greeting_count = 0
self.body_count = 0
self.assassination_count = 0
self.hit_list = []
self.death_count = 0
self.food_count = 0
self.run_away_count = 0
self.speed_bonus = False
self.game_won = False
def game_over(self):
if self.game_won is False:
self.game_won = True
print(colored('You have won the game!', 'green'))
print(
'You may continue playing to earn more achievements if you wish.'
)
if self.run_away_count == 0:
print(
'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'
)
if self.run_away_count > 100:
print(
'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'
)
def clean_up_inventory(self):
""" Remove items with quantity of zero from the map inventory"""
self.inventory = [i for i in self.inventory if i.quantity != 0]
def phase_change(self, the_map):
self.phase = 'day' if self.phase == 'night' else 'night'
for k, square in the_map.items():
if self.location != k:
square.generate_items()
for b in square.buildings:
if b.ware_list:
b.wares = drop_item(b.ware_list)
while not b.wares:
b.wares = drop_item(b.ware_list)
if b.name not in ('a castle', 'a volcanic base'):
jobs = {}
buiding_dict = add_dicts_together(buildings[
'master'], buildings[square.square_type])
for key, v in buiding_dict.items():
if key == b.name and v.get('jobs'):
for name, values in v['jobs'].items():
jobs[name] = values
b.jobs = b.drop_job(jobs)
if self.phase == 'day':
self.speed_bonus = False
for mob in square.mobs:
mob.health = 100
mob.irritation_level = 0
mob.quest = None if self.quest is None else mob.quest
if not square.mobs:
square.mobs = drop_mob(add_dicts_together(wild_mobs
['master'], wild_mobs[self.square.square_type]),
self, limit=len(names), square=square)
def formatted_inventory(self):
formatted = []
for item in self.inventory:
if item.quantity > 1:
formatted.append(f'{int_to_words(item.quantity)} {item.plural}'
)
else:
formatted.append(item.name)
if formatted:
return comma_separated(formatted)
else:
return 'nothing'
def pretty_inventory(self):
w = self.equipped_weapon
major = self.major_armor.defense if self.major_armor else 0
minor = self.minor_armor.defense if self.minor_armor else 0
armor_defense = (major + minor) * 5
armors = [self.major_armor.name if self.major_armor else None, self
.minor_armor.name if self.minor_armor else None]
inventory = {'inventory_items':
f'You have {self.formatted_inventory()} in your inventory.',
'weapon':
f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if w.quantity == 1 else w.plural}.'
if w else None, 'armor':
f"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage."
if self.minor_armor or self.major_armor else None}
return '\n'.join(v for v in inventory.values() if v)
def status(self):
skills = [f'{k}: {v}%.' for k, v in self.skills.items()]
job = f'You have a job as a {self.job.name}.' if self.job else None
quest = 'You have a quest.' if self.quest else None
if job and quest:
job_string = '\n'.join([job, quest])
elif job or quest:
job_string = job if job else quest
else:
job_string = (
'You do not have a job, and you are not contributing to society.'
)
status_string = {'health':
f'Currently, you have {self.health} health.', 'location':
f'You are located on map coordinates {self.location}, which is {self.square.square_type}.'
, 'building_local':
f'You are inside {self.building_local.name}.' if self.
building_local else None, 'skills': '\n'.join(skills) if skills
else "You don't have any skills.", 'money':
f'You have ${self.money} in your wallet.', 'job': job_string}
return '\n'.join(v for v in status_string.values() if v)
def statistics(self):
print(f'You have killed {self.body_count} mobs.')
        print(f'You have run away from {self.run_away_count} battles.')
print(f'You have eaten {self.food_count} items.')
print(f'You have performed {self.assassination_count} assassinations.')
print(f'You have talked to mobs {self.greeting_count} times.')
def view_hit_list(self):
if self.hit_list:
print(
f'If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}'
)
else:
print("Looks like you don't know of anyone who needs to be dead.")
def increase_skill(self, skill, increase):
try:
self.skills[skill] += increase
except KeyError:
self.skills[skill] = increase
print(
f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'
)
class Item:
def __init__(self, name, quantity, plural, category=None, perishable=
None, flammable=None, rarity=None, price=None, weapon_rating=None,
defense=None):
self.name = name
self.quantity = quantity
self.plural = plural
self.category = category or None
self.perishable = perishable or None
self.flammable = flammable or None
self.rarity = rarity or None
self.price = price or None
self.weapon_rating = weapon_rating or None
self.defense = defense or None
def copy(self):
return Item(name=self.name, quantity=self.quantity, plural=self.
plural, category=self.category, perishable=self.perishable,
flammable=self.flammable, rarity=self.rarity, weapon_rating=
self.weapon_rating, defense=self.defense)
class Building(object):
def __init__(self, name, p, plural, category=None, rarity=None,
ware_list=None, mobs=None, jobs=None):
self.name = name
self.p = p
self.quantity = 1
self.plural = plural
self.category = category or None
self.rarity = rarity or None
self.ware_list = ware_list
self.wares = self.drop_wares()
self.mobs = drop_mob(mobs, p) if mobs else None
self.jobs = self.drop_job(jobs) if jobs else None
if self.name in ('a castle', 'a volcanic base'):
self.boss_mobs_and_jobs()
def drop_wares(self):
if self.ware_list:
wares = drop_item(self.ware_list)
while not wares:
wares = drop_item(self.ware_list)
return wares
else:
return []
def drop_job(self, jobs):
drops_i = []
for k, v in jobs.items():
if odds(2):
drops_i.append(Job(name=k, location=self.p.location, **v))
return drops_i
def boss_mobs_and_jobs(self):
boss_major_armors = [Item('a coat of impervious dragon scales',
plural='coats of dragon scales', quantity=1, category=
'major armor', rarity='super rare', defense=5), Item(
'an enchanted leather duster', plural=
'enchanted leather dusters', quantity=1, category='major armor',
defense=5, rarity='super rare'), Item(
'a coat of actual live grizzly bears', plural=
'coats of actual live grizzly bears', quantity=1, category=
'major armor', defense=5, rarity='super rare')]
boss_minor_armors = [Item('wings of an angel', plural=
'wings of angels', quantity=1, rarity='super rare', category=
'minor armor', defense=5), Item('an OSHA approved hard hat',
plural='OSHA approved hard hats', quantity=1, rarity=
'super rare', category='minor armor', defense=5), Item(
            'a pair of boots that were made for walkin', plural=
'pairs of boots that were made for walkin', quantity=1, rarity=
'super rare', category='minor armor', defense=5)]
boss_weapons = [Item('an apache helicopter', plural=
'apache helicopters', rarity='super rare', weapon_rating=6,
quantity=1), Item('a trebuchet', plural='trebuchets',
weapon_rating=6, quantity=1, rarity='super rare'), Item(
'an army of attacking wizards', plural=
'armies of attacking wizards', weapon_rating=6, quantity=1,
rarity='super rare')]
boss_names = ['the Terrifying Dragon of Soul Slaying',
'the Great Salamander of Darkness', 'the Squirrel of Destiny']
random.shuffle(boss_names)
random.shuffle(boss_weapons)
random.shuffle(boss_major_armors)
random.shuffle(boss_minor_armors)
boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=
'super rare')
boss.health = 500
boss.equipped_weapon = boss_weapons[0]
boss.major_armor = boss_major_armors[0]
boss.minor_armor = boss_minor_armors[0]
boss.irritation_level = 10
self.mobs = [boss]
if self.name == 'a castle':
self.jobs = [Job('king of the realm', location=self.p.location,
salary=1100)]
if self.name == 'a volcanic base':
self.jobs = [Job('evil overlord', location=self.p.location,
salary=1100)]
class Job:
def __init__(self, name, location, skills_needed=None, salary=0,
skills_learned=None, inventory_needed=None):
self.name = name
self.location = location
self.skills_needed = skills_needed or None
self.salary = salary or 0
self.skills_learned = skills_learned or None
self.inventory_needed = inventory_needed or None
self.application_attempts = 0
class Mob:
def __init__(self, name, p, plural, rarity, inventory=None):
self.name = name
self.p = p
self.plural = plural
self.quantity = 1
self.rarity = rarity
self.skills = self.skills()
self.quest = None
self.inventory = inventory or drop_item(add_dicts_together(items[
'master'], items[p.square.square_type]))
self.health = 100
self.equipped_weapon = self.equip()
major = [x for x in self.inventory if x.category == 'major armor']
minor = [x for x in self.inventory if x.category == 'minor armor']
self.major_armor = major[0] if major else None
self.minor_armor = minor[0] if minor else None
self.irritation_level = 0
def equip(self):
nice_weapons = []
for i in self.inventory:
try:
if i.weapon_rating:
nice_weapons.append(i)
except AttributeError:
pass
nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
if nice_weapons:
self.inventory.remove(nice_weapons[0])
return nice_weapons[0]
else:
return None
@staticmethod
def skills():
""" Pick the skills for a mob, these determine what a player can get from completing a quest """
all_skills = ['strength', 'patience', 'cleanliness', 'leadership',
'communication', 'science', 'math', 'engineering',
'intelligence', 'driving']
random.shuffle(all_skills)
return all_skills[0:2]
def generate_quest(self):
"""
inventory based
bring me x of an object to learn a skill
"""
if odds(3):
quest_items = add_dicts_together(items['master'], items[self.p.
square.square_type])
quest_item = random.choice(list(quest_items.keys()))
i = Item(quest_item, 0, **quest_items[quest_item])
self.inventory.append(i)
quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',
'common': '6', 'super common': '15'}
q = quantity[i.rarity]
self.quest = (i, int(q),
f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, I will teach you a valuable skill.'
)
return
elif odds(5):
mobs = []
for biome, building in buildings.items():
for b, attributes in building.items():
if attributes.get('mobs'):
for k in attributes['mobs'].keys():
mobs.append(k)
for biome, mob in wild_mobs.items():
for k in mob.keys():
mobs.append(k)
target = (
f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'
)
print(
f"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean..."
)
self.p.hit_list.append(target)
return False
else:
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MapSquare:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def generate_items(self):
self.items = drop_item(add_dicts_together(items['master'], items[
self.square_type]))
def generate_buildings(self, p):
self.buildings = drop_building(add_dicts_together(buildings[
'master'], buildings[self.square_type]), p)
def generate_mobs(self, p):
self.mobs = drop_mob(add_dicts_together(wild_mobs['master'],
wild_mobs[self.square_type]), p)
def clean_up_map(self):
""" Remove items with quantity of zero from the map inventory"""
self.items = [i for i in self.items if i.quantity != 0]
@staticmethod
def map_picture(the_map, p):
"""With the player's location in the center, draw a 5 x 5 map with map square type
and coordinates in each square"""
xy = p.location[0] - 2, p.location[1] + 2
map_coords = []
for y in range(0, 5):
row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]
map_coords.append(row)
pretty_map = []
for r in map_coords:
row = []
for coordinates in r:
if coordinates in the_map.keys():
if p.quest and p.job and p.quest[1
] == coordinates and p.job.location == coordinates:
star = '*$ '
elif p.quest and p.quest[1] == coordinates:
star = ' * '
elif p.job and p.job.location == coordinates:
star = ' $ '
else:
star = ' '
row.append('|{!s:9}{}|'.format(the_map[coordinates].
square_type, star))
else:
row.append('|{!s:12}|'.format(' '))
pretty_map.append(row)
for row in pretty_map:
print(''.join(row))
class Player:
def __init__(self, name, location):
self.name = name
self.location = location
self.square = None
self.money = 0
self.quest = None
self.job = None
self.phase = 'day'
self.equipped_weapon = None
self.major_armor = None
self.minor_armor = None
self.building_local = None
self.inventory = []
self.skills = {}
self.health = 100
self.greeting_count = 0
self.body_count = 0
self.assassination_count = 0
self.hit_list = []
self.death_count = 0
self.food_count = 0
self.run_away_count = 0
self.speed_bonus = False
self.game_won = False
def game_over(self):
if self.game_won is False:
self.game_won = True
print(colored('You have won the game!', 'green'))
print(
'You may continue playing to earn more achievements if you wish.'
)
if self.run_away_count == 0:
print(
'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'
)
if self.run_away_count > 100:
print(
'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'
)
def clean_up_inventory(self):
""" Remove items with quantity of zero from the map inventory"""
self.inventory = [i for i in self.inventory if i.quantity != 0]
def phase_change(self, the_map):
self.phase = 'day' if self.phase == 'night' else 'night'
for k, square in the_map.items():
if self.location != k:
square.generate_items()
for b in square.buildings:
if b.ware_list:
b.wares = drop_item(b.ware_list)
while not b.wares:
b.wares = drop_item(b.ware_list)
if b.name not in ('a castle', 'a volcanic base'):
jobs = {}
buiding_dict = add_dicts_together(buildings[
'master'], buildings[square.square_type])
for key, v in buiding_dict.items():
if key == b.name and v.get('jobs'):
for name, values in v['jobs'].items():
jobs[name] = values
b.jobs = b.drop_job(jobs)
if self.phase == 'day':
self.speed_bonus = False
for mob in square.mobs:
mob.health = 100
mob.irritation_level = 0
mob.quest = None if self.quest is None else mob.quest
if not square.mobs:
square.mobs = drop_mob(add_dicts_together(wild_mobs
['master'], wild_mobs[self.square.square_type]),
self, limit=len(names), square=square)
def formatted_inventory(self):
formatted = []
for item in self.inventory:
if item.quantity > 1:
formatted.append(f'{int_to_words(item.quantity)} {item.plural}'
)
else:
formatted.append(item.name)
if formatted:
return comma_separated(formatted)
else:
return 'nothing'
def pretty_inventory(self):
w = self.equipped_weapon
major = self.major_armor.defense if self.major_armor else 0
minor = self.minor_armor.defense if self.minor_armor else 0
armor_defense = (major + minor) * 5
armors = [self.major_armor.name if self.major_armor else None, self
.minor_armor.name if self.minor_armor else None]
inventory = {'inventory_items':
f'You have {self.formatted_inventory()} in your inventory.',
'weapon':
f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if w.quantity == 1 else w.plural}.'
if w else None, 'armor':
f"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage."
if self.minor_armor or self.major_armor else None}
return '\n'.join(v for v in inventory.values() if v)
def status(self):
skills = [f'{k}: {v}%.' for k, v in self.skills.items()]
job = f'You have a job as a {self.job.name}.' if self.job else None
quest = 'You have a quest.' if self.quest else None
if job and quest:
job_string = '\n'.join([job, quest])
elif job or quest:
job_string = job if job else quest
else:
job_string = (
'You do not have a job, and you are not contributing to society.'
)
status_string = {'health':
f'Currently, you have {self.health} health.', 'location':
f'You are located on map coordinates {self.location}, which is {self.square.square_type}.'
, 'building_local':
f'You are inside {self.building_local.name}.' if self.
building_local else None, 'skills': '\n'.join(skills) if skills
else "You don't have any skills.", 'money':
f'You have ${self.money} in your wallet.', 'job': job_string}
return '\n'.join(v for v in status_string.values() if v)
def statistics(self):
print(f'You have killed {self.body_count} mobs.')
print(f'You have ran away from {self.run_away_count} battles.')
print(f'You have eaten {self.food_count} items.')
print(f'You have performed {self.assassination_count} assassinations.')
print(f'You have talked to mobs {self.greeting_count} times.')
def view_hit_list(self):
if self.hit_list:
print(
f'If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}'
)
else:
print("Looks like you don't know of anyone who needs to be dead.")
def increase_skill(self, skill, increase):
try:
self.skills[skill] += increase
except KeyError:
self.skills[skill] = increase
print(
f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'
)
class Item:
def __init__(self, name, quantity, plural, category=None, perishable=
None, flammable=None, rarity=None, price=None, weapon_rating=None,
defense=None):
self.name = name
self.quantity = quantity
self.plural = plural
self.category = category or None
self.perishable = perishable or None
self.flammable = flammable or None
self.rarity = rarity or None
self.price = price or None
self.weapon_rating = weapon_rating or None
self.defense = defense or None
def copy(self):
return Item(name=self.name, quantity=self.quantity, plural=self.
plural, category=self.category, perishable=self.perishable,
flammable=self.flammable, rarity=self.rarity, weapon_rating=
self.weapon_rating, defense=self.defense)
class Building(object):
def __init__(self, name, p, plural, category=None, rarity=None,
ware_list=None, mobs=None, jobs=None):
self.name = name
self.p = p
self.quantity = 1
self.plural = plural
self.category = category or None
self.rarity = rarity or None
self.ware_list = ware_list
self.wares = self.drop_wares()
self.mobs = drop_mob(mobs, p) if mobs else None
self.jobs = self.drop_job(jobs) if jobs else None
if self.name in ('a castle', 'a volcanic base'):
self.boss_mobs_and_jobs()
def drop_wares(self):
if self.ware_list:
wares = drop_item(self.ware_list)
while not wares:
wares = drop_item(self.ware_list)
return wares
else:
return []
def drop_job(self, jobs):
drops_i = []
for k, v in jobs.items():
if odds(2):
drops_i.append(Job(name=k, location=self.p.location, **v))
return drops_i
def boss_mobs_and_jobs(self):
boss_major_armors = [Item('a coat of impervious dragon scales',
plural='coats of dragon scales', quantity=1, category=
'major armor', rarity='super rare', defense=5), Item(
'an enchanted leather duster', plural=
'enchanted leather dusters', quantity=1, category='major armor',
defense=5, rarity='super rare'), Item(
'a coat of actual live grizzly bears', plural=
'coats of actual live grizzly bears', quantity=1, category=
'major armor', defense=5, rarity='super rare')]
boss_minor_armors = [Item('wings of an angel', plural=
'wings of angels', quantity=1, rarity='super rare', category=
'minor armor', defense=5), Item('an OSHA approved hard hat',
plural='OSHA approved hard hats', quantity=1, rarity=
'super rare', category='minor armor', defense=5), Item(
'a pair boots that were made for walkin', plural=
'pairs of boots that were made for walkin', quantity=1, rarity=
'super rare', category='minor armor', defense=5)]
boss_weapons = [Item('an apache helicopter', plural=
'apache helicopters', rarity='super rare', weapon_rating=6,
quantity=1), Item('a trebuchet', plural='trebuchets',
weapon_rating=6, quantity=1, rarity='super rare'), Item(
'an army of attacking wizards', plural=
'armies of attacking wizards', weapon_rating=6, quantity=1,
rarity='super rare')]
boss_names = ['the Terrifying Dragon of Soul Slaying',
'the Great Salamander of Darkness', 'the Squirrel of Destiny']
random.shuffle(boss_names)
random.shuffle(boss_weapons)
random.shuffle(boss_major_armors)
random.shuffle(boss_minor_armors)
boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=
'super rare')
boss.health = 500
boss.equipped_weapon = boss_weapons[0]
boss.major_armor = boss_major_armors[0]
boss.minor_armor = boss_minor_armors[0]
boss.irritation_level = 10
self.mobs = [boss]
if self.name == 'a castle':
self.jobs = [Job('king of the realm', location=self.p.location,
salary=1100)]
if self.name == 'a volcanic base':
self.jobs = [Job('evil overlord', location=self.p.location,
salary=1100)]
class Job:
def __init__(self, name, location, skills_needed=None, salary=0,
skills_learned=None, inventory_needed=None):
self.name = name
self.location = location
self.skills_needed = skills_needed or None
self.salary = salary or 0
self.skills_learned = skills_learned or None
self.inventory_needed = inventory_needed or None
self.application_attempts = 0
class Mob:
def __init__(self, name, p, plural, rarity, inventory=None):
self.name = name
self.p = p
self.plural = plural
self.quantity = 1
self.rarity = rarity
self.skills = self.skills()
self.quest = None
self.inventory = inventory or drop_item(add_dicts_together(items[
'master'], items[p.square.square_type]))
self.health = 100
self.equipped_weapon = self.equip()
major = [x for x in self.inventory if x.category == 'major armor']
minor = [x for x in self.inventory if x.category == 'minor armor']
self.major_armor = major[0] if major else None
self.minor_armor = minor[0] if minor else None
self.irritation_level = 0
def equip(self):
nice_weapons = []
for i in self.inventory:
try:
if i.weapon_rating:
nice_weapons.append(i)
except AttributeError:
pass
nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
if nice_weapons:
self.inventory.remove(nice_weapons[0])
return nice_weapons[0]
else:
return None
@staticmethod
def skills():
""" Pick the skills for a mob, these determine what a player can get from completing a quest """
all_skills = ['strength', 'patience', 'cleanliness', 'leadership',
'communication', 'science', 'math', 'engineering',
'intelligence', 'driving']
random.shuffle(all_skills)
return all_skills[0:2]
def generate_quest(self):
"""
inventory based
bring me x of an object to learn a skill
"""
if odds(3):
quest_items = add_dicts_together(items['master'], items[self.p.
square.square_type])
quest_item = random.choice(list(quest_items.keys()))
i = Item(quest_item, 0, **quest_items[quest_item])
self.inventory.append(i)
quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',
'common': '6', 'super common': '15'}
q = quantity[i.rarity]
self.quest = (i, int(q),
f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, I will teach you a valuable skill.'
)
return
elif odds(5):
mobs = []
for biome, building in buildings.items():
for b, attributes in building.items():
if attributes.get('mobs'):
for k in attributes['mobs'].keys():
mobs.append(k)
for biome, mob in wild_mobs.items():
for k in mob.keys():
mobs.append(k)
target = (
f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'
)
print(
f"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean..."
)
self.p.hit_list.append(target)
return False
else:
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_unique_names(quantity, name_list, taken_names):
free_names = [x for x in name_list if x not in taken_names]
random.shuffle(free_names)
return free_names[:quantity]
def dropper(rarity):
results = {'super rare': 100, 'rare': 50, 'uncommon': 25, 'common': 5,
'super common': 2}
quantity = 0
countdown = random.randint(0, 10)
while countdown > 0:
if random.randint(0, results[rarity]) == 1:
quantity += 1
countdown -= 1
return quantity
def drop_building(dictionary, p, limit=None):
limit = limit or len(adjectives)
drops_i = []
for k, v in dictionary.items():
quantity = dropper(v['rarity'])
quantity = quantity if quantity < limit else limit
limit -= quantity
if quantity:
if quantity > 1 and v['category'] != 'residence':
n = random.randint(0, quantity)
unique_names = find_unique_names(quantity - n, names, p.
square.unique_building_names)
p.square.unique_building_names += unique_names
for i in range(0, quantity - n):
drops_i.append(Building(name=
f"{unique_names[i]}'s {remove_little_words(k).capitalize()}"
, p=p, **v))
unique_adjectives = find_unique_names(n, adjectives, p.
square.unique_building_names)
p.square.unique_building_names += unique_adjectives
for i in range(0, n):
drops_i.append(Building(name=
f'the {unique_adjectives[i]} {remove_little_words(k).capitalize()}'
, p=p, **v))
elif quantity > 1 and v['category'] == 'residence':
unique_house_names = find_unique_names(quantity, names, p.
square.unique_house_names)
p.square.unique_house_names += unique_house_names
for i in range(0, quantity):
drops_i.append(Building(name=
f"{unique_house_names[i]}'s {remove_little_words(k)}",
p=p, **v))
else:
drops_i.append(Building(name=k, p=p, **v))
return drops_i
<|reserved_special_token_0|>
def drop_item(dictionary):
""" Randomly generates objects based on rarity """
drops_i = []
for k, v in dictionary.items():
quantity = dropper(v['rarity'])
if quantity:
drops_i.append(Item(name=k, quantity=quantity, **v))
return drops_i
class MapSquare:
def __init__(self, name='', square_type=None):
square_types = ['forest', 'mountains', 'desert', 'city', 'swamp',
'ocean']
self.square_type = square_type or square_types[random.randint(0,
len(square_types) - 1)]
self.name = name
self.unique_mob_names = []
self.unique_building_names = []
self.unique_house_names = []
mobs = []
items = []
buildings = []
def generate_items(self):
self.items = drop_item(add_dicts_together(items['master'], items[
self.square_type]))
def generate_buildings(self, p):
self.buildings = drop_building(add_dicts_together(buildings[
'master'], buildings[self.square_type]), p)
def generate_mobs(self, p):
self.mobs = drop_mob(add_dicts_together(wild_mobs['master'],
wild_mobs[self.square_type]), p)
def clean_up_map(self):
""" Remove items with quantity of zero from the map inventory"""
self.items = [i for i in self.items if i.quantity != 0]
@staticmethod
def map_picture(the_map, p):
"""With the player's location in the center, draw a 5 x 5 map with map square type
and coordinates in each square"""
xy = p.location[0] - 2, p.location[1] + 2
map_coords = []
for y in range(0, 5):
row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]
map_coords.append(row)
pretty_map = []
for r in map_coords:
row = []
for coordinates in r:
if coordinates in the_map.keys():
if p.quest and p.job and p.quest[1
] == coordinates and p.job.location == coordinates:
star = '*$ '
elif p.quest and p.quest[1] == coordinates:
star = ' * '
elif p.job and p.job.location == coordinates:
star = ' $ '
else:
star = ' '
row.append('|{!s:9}{}|'.format(the_map[coordinates].
square_type, star))
else:
row.append('|{!s:12}|'.format(' '))
pretty_map.append(row)
for row in pretty_map:
print(''.join(row))
class Player:
def __init__(self, name, location):
self.name = name
self.location = location
self.square = None
self.money = 0
self.quest = None
self.job = None
self.phase = 'day'
self.equipped_weapon = None
self.major_armor = None
self.minor_armor = None
self.building_local = None
self.inventory = []
self.skills = {}
self.health = 100
self.greeting_count = 0
self.body_count = 0
self.assassination_count = 0
self.hit_list = []
self.death_count = 0
self.food_count = 0
self.run_away_count = 0
self.speed_bonus = False
self.game_won = False
def game_over(self):
if self.game_won is False:
self.game_won = True
print(colored('You have won the game!', 'green'))
print(
'You may continue playing to earn more achievements if you wish.'
)
if self.run_away_count == 0:
print(
'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'
)
if self.run_away_count > 100:
print(
'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'
)
def clean_up_inventory(self):
""" Remove items with quantity of zero from the map inventory"""
self.inventory = [i for i in self.inventory if i.quantity != 0]
def phase_change(self, the_map):
self.phase = 'day' if self.phase == 'night' else 'night'
for k, square in the_map.items():
if self.location != k:
square.generate_items()
for b in square.buildings:
if b.ware_list:
b.wares = drop_item(b.ware_list)
while not b.wares:
b.wares = drop_item(b.ware_list)
if b.name not in ('a castle', 'a volcanic base'):
jobs = {}
buiding_dict = add_dicts_together(buildings[
'master'], buildings[square.square_type])
for key, v in buiding_dict.items():
if key == b.name and v.get('jobs'):
for name, values in v['jobs'].items():
jobs[name] = values
b.jobs = b.drop_job(jobs)
if self.phase == 'day':
self.speed_bonus = False
for mob in square.mobs:
mob.health = 100
mob.irritation_level = 0
mob.quest = None if self.quest is None else mob.quest
if not square.mobs:
square.mobs = drop_mob(add_dicts_together(wild_mobs
['master'], wild_mobs[self.square.square_type]),
self, limit=len(names), square=square)
def formatted_inventory(self):
formatted = []
for item in self.inventory:
if item.quantity > 1:
formatted.append(f'{int_to_words(item.quantity)} {item.plural}'
)
else:
formatted.append(item.name)
if formatted:
return comma_separated(formatted)
else:
return 'nothing'
def pretty_inventory(self):
w = self.equipped_weapon
major = self.major_armor.defense if self.major_armor else 0
minor = self.minor_armor.defense if self.minor_armor else 0
armor_defense = (major + minor) * 5
armors = [self.major_armor.name if self.major_armor else None, self
.minor_armor.name if self.minor_armor else None]
inventory = {'inventory_items':
f'You have {self.formatted_inventory()} in your inventory.',
'weapon':
f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if w.quantity == 1 else w.plural}.'
if w else None, 'armor':
f"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage."
if self.minor_armor or self.major_armor else None}
return '\n'.join(v for v in inventory.values() if v)
def status(self):
skills = [f'{k}: {v}%.' for k, v in self.skills.items()]
job = f'You have a job as a {self.job.name}.' if self.job else None
quest = 'You have a quest.' if self.quest else None
if job and quest:
job_string = '\n'.join([job, quest])
elif job or quest:
job_string = job if job else quest
else:
job_string = (
'You do not have a job, and you are not contributing to society.'
)
status_string = {'health':
f'Currently, you have {self.health} health.', 'location':
f'You are located on map coordinates {self.location}, which is {self.square.square_type}.'
, 'building_local':
f'You are inside {self.building_local.name}.' if self.
building_local else None, 'skills': '\n'.join(skills) if skills
else "You don't have any skills.", 'money':
f'You have ${self.money} in your wallet.', 'job': job_string}
return '\n'.join(v for v in status_string.values() if v)
def statistics(self):
print(f'You have killed {self.body_count} mobs.')
print(f'You have ran away from {self.run_away_count} battles.')
print(f'You have eaten {self.food_count} items.')
print(f'You have performed {self.assassination_count} assassinations.')
print(f'You have talked to mobs {self.greeting_count} times.')
def view_hit_list(self):
if self.hit_list:
print(
f'If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}'
)
else:
print("Looks like you don't know of anyone who needs to be dead.")
def increase_skill(self, skill, increase):
try:
self.skills[skill] += increase
except KeyError:
self.skills[skill] = increase
print(
f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'
)
class Item:
def __init__(self, name, quantity, plural, category=None, perishable=
None, flammable=None, rarity=None, price=None, weapon_rating=None,
defense=None):
self.name = name
self.quantity = quantity
self.plural = plural
self.category = category or None
self.perishable = perishable or None
self.flammable = flammable or None
self.rarity = rarity or None
self.price = price or None
self.weapon_rating = weapon_rating or None
self.defense = defense or None
def copy(self):
return Item(name=self.name, quantity=self.quantity, plural=self.
plural, category=self.category, perishable=self.perishable,
flammable=self.flammable, rarity=self.rarity, weapon_rating=
self.weapon_rating, defense=self.defense)
class Building(object):
def __init__(self, name, p, plural, category=None, rarity=None,
ware_list=None, mobs=None, jobs=None):
self.name = name
self.p = p
self.quantity = 1
self.plural = plural
self.category = category or None
self.rarity = rarity or None
self.ware_list = ware_list
self.wares = self.drop_wares()
self.mobs = drop_mob(mobs, p) if mobs else None
self.jobs = self.drop_job(jobs) if jobs else None
if self.name in ('a castle', 'a volcanic base'):
self.boss_mobs_and_jobs()
def drop_wares(self):
if self.ware_list:
wares = drop_item(self.ware_list)
while not wares:
wares = drop_item(self.ware_list)
return wares
else:
return []
def drop_job(self, jobs):
drops_i = []
for k, v in jobs.items():
if odds(2):
drops_i.append(Job(name=k, location=self.p.location, **v))
return drops_i
def boss_mobs_and_jobs(self):
boss_major_armors = [Item('a coat of impervious dragon scales',
plural='coats of dragon scales', quantity=1, category=
'major armor', rarity='super rare', defense=5), Item(
'an enchanted leather duster', plural=
'enchanted leather dusters', quantity=1, category='major armor',
defense=5, rarity='super rare'), Item(
'a coat of actual live grizzly bears', plural=
'coats of actual live grizzly bears', quantity=1, category=
'major armor', defense=5, rarity='super rare')]
boss_minor_armors = [Item('wings of an angel', plural=
'wings of angels', quantity=1, rarity='super rare', category=
'minor armor', defense=5), Item('an OSHA approved hard hat',
plural='OSHA approved hard hats', quantity=1, rarity=
'super rare', category='minor armor', defense=5), Item(
'a pair boots that were made for walkin', plural=
'pairs of boots that were made for walkin', quantity=1, rarity=
'super rare', category='minor armor', defense=5)]
boss_weapons = [Item('an apache helicopter', plural=
'apache helicopters', rarity='super rare', weapon_rating=6,
quantity=1), Item('a trebuchet', plural='trebuchets',
weapon_rating=6, quantity=1, rarity='super rare'), Item(
'an army of attacking wizards', plural=
'armies of attacking wizards', weapon_rating=6, quantity=1,
rarity='super rare')]
boss_names = ['the Terrifying Dragon of Soul Slaying',
'the Great Salamander of Darkness', 'the Squirrel of Destiny']
random.shuffle(boss_names)
random.shuffle(boss_weapons)
random.shuffle(boss_major_armors)
random.shuffle(boss_minor_armors)
boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=
'super rare')
boss.health = 500
boss.equipped_weapon = boss_weapons[0]
boss.major_armor = boss_major_armors[0]
boss.minor_armor = boss_minor_armors[0]
boss.irritation_level = 10
self.mobs = [boss]
if self.name == 'a castle':
self.jobs = [Job('king of the realm', location=self.p.location,
salary=1100)]
if self.name == 'a volcanic base':
self.jobs = [Job('evil overlord', location=self.p.location,
salary=1100)]
class Job:
def __init__(self, name, location, skills_needed=None, salary=0,
skills_learned=None, inventory_needed=None):
self.name = name
self.location = location
self.skills_needed = skills_needed or None
self.salary = salary or 0
self.skills_learned = skills_learned or None
self.inventory_needed = inventory_needed or None
self.application_attempts = 0
class Mob:
def __init__(self, name, p, plural, rarity, inventory=None):
self.name = name
self.p = p
self.plural = plural
self.quantity = 1
self.rarity = rarity
self.skills = self.skills()
self.quest = None
self.inventory = inventory or drop_item(add_dicts_together(items[
'master'], items[p.square.square_type]))
self.health = 100
self.equipped_weapon = self.equip()
major = [x for x in self.inventory if x.category == 'major armor']
minor = [x for x in self.inventory if x.category == 'minor armor']
self.major_armor = major[0] if major else None
self.minor_armor = minor[0] if minor else None
self.irritation_level = 0
def equip(self):
nice_weapons = []
for i in self.inventory:
try:
if i.weapon_rating:
nice_weapons.append(i)
except AttributeError:
pass
nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
if nice_weapons:
self.inventory.remove(nice_weapons[0])
return nice_weapons[0]
else:
return None
@staticmethod
def skills():
""" Pick the skills for a mob, these determine what a player can get from completing a quest """
all_skills = ['strength', 'patience', 'cleanliness', 'leadership',
'communication', 'science', 'math', 'engineering',
'intelligence', 'driving']
random.shuffle(all_skills)
return all_skills[0:2]
def generate_quest(self):
"""
inventory based
bring me x of an object to learn a skill
"""
if odds(3):
quest_items = add_dicts_together(items['master'], items[self.p.
square.square_type])
quest_item = random.choice(list(quest_items.keys()))
i = Item(quest_item, 0, **quest_items[quest_item])
self.inventory.append(i)
quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',
'common': '6', 'super common': '15'}
q = quantity[i.rarity]
self.quest = (i, int(q),
f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, I will teach you a valuable skill.'
)
return
elif odds(5):
mobs = []
for biome, building in buildings.items():
for b, attributes in building.items():
if attributes.get('mobs'):
for k in attributes['mobs'].keys():
mobs.append(k)
for biome, mob in wild_mobs.items():
for k in mob.keys():
mobs.append(k)
target = (
f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'
)
print(
f"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean..."
)
self.p.hit_list.append(target)
return False
else:
return None
<|reserved_special_token_1|>
import random
import colorama
from termcolor import colored
from reusables.string_manipulation import int_to_words
from app.common_functions import comma_separated, add_dicts_together, remove_little_words, odds
from app.load_data import items, buildings, wild_mobs, names, adjectives
colorama.init()
def find_unique_names(quantity, name_list, taken_names):
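    """ Return up to `quantity` names from name_list that are not already taken"""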
free_names = [x for x in name_list if x not in taken_names]
random.shuffle(free_names)
return free_names[:quantity]
def dropper(rarity):
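    """ Roll up to ten times against the rarity's odds and count the successes;
    a return of zero means nothing dropped"""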
results = {'super rare': 100,
'rare': 50,
'uncommon': 25,
'common': 5,
'super common': 2}
quantity = 0
countdown = random.randint(0, 10)
while countdown > 0:
if random.randint(0, results[rarity]) == 1:
quantity += 1
countdown -= 1
return quantity
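

# A minimal sketch (not part of the original module) to make dropper()'s
# distribution concrete: rarer tiers roll against a wider range of numbers,
# so they succeed less often. The trial count is an arbitrary assumption.
def _demo_dropper(trials=1000):
    """ Print the rough average quantity dropped per call for each rarity tier"""
    for rarity in ('super rare', 'rare', 'uncommon', 'common', 'super common'):
        total = sum(dropper(rarity) for _ in range(trials))
        print(f"{rarity}: ~{total / trials:.2f} per call")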
def drop_building(dictionary, p, limit=None):
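    """ Roll each building type against its rarity and instantiate the drops,
    naming duplicates uniquely after an owner ("Bob's Tavern") or an adjective
    ("the Rusty Tavern"); duplicate residences always get owner names"""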
limit = limit or len(adjectives)
drops_i = []
for k, v in dictionary.items():
quantity = dropper(v['rarity'])
quantity = quantity if quantity < limit else limit
limit -= quantity
if quantity:
if quantity > 1 and v['category'] != 'residence':
n = random.randint(0, quantity)
unique_names = find_unique_names(quantity - n, names, p.square.unique_building_names)
p.square.unique_building_names += unique_names
for i in range(0, quantity - n):
drops_i.append(Building(name=f"{unique_names[i]}'s {remove_little_words(k).capitalize()}", p=p, **v))
unique_adjectives = find_unique_names(n, adjectives, p.square.unique_building_names)
p.square.unique_building_names += unique_adjectives
for i in range(0, n):
drops_i.append(Building(name=f"the {unique_adjectives[i]} {remove_little_words(k).capitalize()}", p=p, **v))
elif quantity > 1 and v['category'] == 'residence':
unique_house_names = find_unique_names(quantity, names, p.square.unique_house_names)
p.square.unique_house_names += unique_house_names
for i in range(0, quantity):
drops_i.append(Building(name=f"{unique_house_names[i]}'s {remove_little_words(k)}", p=p, **v))
else:
drops_i.append(Building(name=k, p=p, **v))
return drops_i
def drop_mob(dictionary, p, limit=None, square=None):
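    """ Like drop_building, but for mobs: duplicates of the same kind are told
    apart by unique personal names drawn from the shared name pool"""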
square = square or p.square
limit = limit or len(names) - len(square.unique_mob_names)
drops_i = []
for k, v in dictionary.items():
quantity = dropper(v['rarity'])
quantity = quantity if quantity < limit else limit
limit -= quantity
if quantity:
            if quantity > 1:
                unique_names = find_unique_names(quantity, names, square.unique_mob_names)
                # Record the taken names on the square actually being populated.
                square.unique_mob_names += unique_names
                for i in range(0, len(unique_names)):
                    drops_i.append(Mob(name=f"{k} named {unique_names[i]}", p=p, **v))
            else:
                if k not in [n.name for n in square.mobs]:
                    drops_i.append(Mob(name=k, p=p, **v))
                else:
                    name = find_unique_names(1, names, square.unique_mob_names)[0]
                    drops_i.append(Mob(name=f"{k} named {name}", p=p, **v))
return drops_i
def drop_item(dictionary):
""" Randomly generates objects based on rarity """
drops_i = []
for k, v in dictionary.items():
quantity = dropper(v['rarity'])
if quantity:
drops_i.append(Item(name=k, quantity=quantity, **v))
return drops_i
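

# Illustrative sketch using a hypothetical two-entry drop table (the real
# tables come from app.load_data): drop_item() rolls each entry's rarity and
# returns Item objects only for the entries that actually dropped.
def _demo_drop_item():
    table = {'a rock': {'plural': 'rocks', 'rarity': 'super common'},
             'a ruby': {'plural': 'rubies', 'rarity': 'super rare'}}
    for item in drop_item(table):
        print(f"{item.quantity} x {item.name}")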
class MapSquare:
def __init__(self, name="", square_type=None):
square_types = ["forest", "mountains", "desert", "city", "swamp", "ocean"]
self.square_type = square_type or square_types[random.randint(0, len(square_types) - 1)]
self.name = name
self.unique_mob_names = []
self.unique_building_names = []
self.unique_house_names = []
mobs = []
items = []
buildings = []
def generate_items(self):
self.items = drop_item(add_dicts_together(items["master"], items[self.square_type]))
def generate_buildings(self, p):
self.buildings = drop_building(add_dicts_together(buildings["master"], buildings[self.square_type]), p)
def generate_mobs(self, p):
self.mobs = drop_mob(add_dicts_together(wild_mobs["master"], wild_mobs[self.square_type]), p)
def clean_up_map(self):
""" Remove items with quantity of zero from the map inventory"""
self.items = [i for i in self.items if i.quantity != 0]
@staticmethod
def map_picture(the_map, p):
"""With the player's location in the center, draw a 5 x 5 map with map square type
and coordinates in each square"""
xy = (p.location[0] - 2, p.location[1] + 2)
map_coords = []
for y in range(0, 5):
row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]
map_coords.append(row)
pretty_map = []
for r in map_coords:
row = []
for coordinates in r:
if coordinates in the_map.keys():
if p.quest and p.job and p.quest[1] == coordinates and p.job.location == coordinates:
star = '*$ '
elif p.quest and p.quest[1] == coordinates:
star = ' * '
elif p.job and p.job.location == coordinates:
star = ' $ '
else:
star = ' '
row.append("|{!s:9}{}|".format(the_map[coordinates].square_type, star))
else:
row.append("|{!s:12}|".format(' '))
pretty_map.append(row)
for row in pretty_map:
print(''.join(row))
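
    # Illustrative rendering of one row of the 5 x 5 grid (star codes:
    # ' * ' quest square, ' $ ' job square, '*$ ' both, blank = unexplored):
    #   |forest      ||mountains   ||            ||city      * ||swamp       |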
class Player:
def __init__(self, name, location):
self.name = name
self.location = location
self.square = None
self.money = 0
self.quest = None
self.job = None
self.phase = "day"
self.equipped_weapon = None
self.major_armor = None
self.minor_armor = None
self.building_local = None
self.inventory = []
self.skills = {}
self.health = 100
self.greeting_count = 0
self.body_count = 0
self.assassination_count = 0
self.hit_list = []
self.death_count = 0
# TODO increase insurance cost every death?
self.food_count = 0
self.run_away_count = 0
self.speed_bonus = False
self.game_won = False
def game_over(self):
if self.game_won is False:
self.game_won = True
print(colored("You have won the game!", "green"))
print("You may continue playing to earn more achievements if you wish.")
if self.run_away_count == 0:
print("Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.")
if self.run_away_count > 100:
print("Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.")
def clean_up_inventory(self):
""" Remove items with quantity of zero from the map inventory"""
self.inventory = [i for i in self.inventory if i.quantity != 0]
def phase_change(self, the_map):
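        """ Flip the day/night phase and refresh the world: squares the player
        is not standing on regenerate their items, restock building wares, and
        re-post jobs; every mob is healed and calmed, and emptied squares are
        repopulated"""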
self.phase = 'day' if self.phase == 'night' else 'night'
for k, square in the_map.items():
if self.location != k:
square.generate_items()
for b in square.buildings:
if b.ware_list:
b.wares = drop_item(b.ware_list)
while not b.wares:
b.wares = drop_item(b.ware_list)
if b.name not in ('a castle', 'a volcanic base'):
jobs = {}
                        building_dict = add_dicts_together(buildings['master'], buildings[square.square_type])
                        for key, v in building_dict.items():
if key == b.name and v.get('jobs'):
for name, values in v['jobs'].items():
jobs[name] = values
b.jobs = b.drop_job(jobs)
if self.phase == 'day':
self.speed_bonus = False
for mob in square.mobs:
mob.health = 100
mob.irritation_level = 0
mob.quest = None if self.quest is None else mob.quest
            if not square.mobs:
                # Repopulate an emptied square from its own biome's spawn table.
                square.mobs = drop_mob(add_dicts_together(wild_mobs["master"], wild_mobs[square.square_type]),
                                       self, limit=len(names), square=square)
def formatted_inventory(self):
formatted = []
for item in self.inventory:
if item.quantity > 1:
formatted.append(f"{int_to_words(item.quantity)} {item.plural}")
else:
formatted.append(item.name)
if formatted:
return comma_separated(formatted)
else:
return "nothing"
def pretty_inventory(self):
w = self.equipped_weapon
major = self.major_armor.defense if self.major_armor else 0
minor = self.minor_armor.defense if self.minor_armor else 0
armor_defense = (major + minor) * 5
armors = [self.major_armor.name if self.major_armor else None, self.minor_armor.name if self.minor_armor else None]
inventory = {'inventory_items': f"You have {self.formatted_inventory()} in your inventory.",
'weapon': f"You are wielding {int_to_words(w.quantity)} "
f"{remove_little_words(w.name) if w.quantity == 1 else w.plural}." if w else None,
'armor': f"You are wearing {' and '.join(x for x in armors if x)}, "
f"giving you a {armor_defense}% reduction in incoming damage." if self.minor_armor or self.major_armor else None}
return '\n'.join(v for v in inventory.values() if v)
def status(self):
skills = [f"{k}: {v}%." for k, v in self.skills.items()]
job = f"You have a job as a {self.job.name}." if self.job else None
quest = "You have a quest." if self.quest else None
if job and quest:
job_string = "\n".join([job, quest])
elif job or quest:
job_string = job if job else quest
else:
job_string = "You do not have a job, and you are not contributing to society."
status_string = {
'health': f'Currently, you have {self.health} health.',
'location': f'You are located on map coordinates {self.location}, '
f'which is {self.square.square_type}.',
'building_local': f'You are inside {self.building_local.name}.' if self.building_local else None,
'skills': '\n'.join(skills) if skills else "You don't have any skills.",
'money': f"You have ${self.money} in your wallet.",
'job': job_string}
return '\n'.join(v for v in status_string.values() if v)
def statistics(self):
print(f"You have killed {self.body_count} mobs.")
print(f"You have ran away from {self.run_away_count} battles.")
print(f"You have eaten {self.food_count} items.")
print(f"You have performed {self.assassination_count} assassinations.")
print(f"You have talked to mobs {self.greeting_count} times.")
def view_hit_list(self):
if self.hit_list:
print(f"If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}")
else:
print("Looks like you don't know of anyone who needs to be dead.")
def increase_skill(self, skill, increase):
try:
self.skills[skill] += increase
except KeyError:
self.skills[skill] = increase
print(f"You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.")
class Item:
def __init__(self, name, quantity, plural, category=None, perishable=None,
flammable=None, rarity=None, price=None, weapon_rating=None, defense=None):
self.name = name
self.quantity = quantity
self.plural = plural
self.category = category or None
self.perishable = perishable or None
self.flammable = flammable or None
self.rarity = rarity or None
self.price = price or None
self.weapon_rating = weapon_rating or None
self.defense = defense or None
    def copy(self):
        return Item(name=self.name, quantity=self.quantity, plural=self.plural, category=self.category,
                    perishable=self.perishable, flammable=self.flammable, rarity=self.rarity,
                    price=self.price, weapon_rating=self.weapon_rating, defense=self.defense)
class Building(object):
def __init__(self, name, p, plural, category=None, rarity=None, ware_list=None, mobs=None, jobs=None):
self.name = name
self.p = p
self.quantity = 1
self.plural = plural
self.category = category or None
self.rarity = rarity or None
self.ware_list = ware_list
self.wares = self.drop_wares()
self.mobs = drop_mob(mobs, p) if mobs else None
self.jobs = self.drop_job(jobs) if jobs else None
if self.name in ('a castle', 'a volcanic base'):
self.boss_mobs_and_jobs()
def drop_wares(self):
if self.ware_list:
wares = drop_item(self.ware_list)
while not wares:
wares = drop_item(self.ware_list)
return wares
else:
return []
def drop_job(self, jobs):
drops_i = []
for k, v in jobs.items():
if odds(2):
drops_i.append(Job(name=k, location=self.p.location, **v))
return drops_i
def boss_mobs_and_jobs(self):
boss_major_armors = [Item('a coat of impervious dragon scales', plural='coats of dragon scales', quantity=1, category='major armor', rarity='super rare', defense=5),
Item('an enchanted leather duster', plural='enchanted leather dusters', quantity=1, category='major armor', defense=5, rarity='super rare'),
Item('a coat of actual live grizzly bears', plural='coats of actual live grizzly bears', quantity=1, category='major armor', defense=5, rarity='super rare')]
boss_minor_armors = [Item('wings of an angel', plural='wings of angels', quantity=1, rarity='super rare', category='minor armor', defense=5),
Item('an OSHA approved hard hat', plural='OSHA approved hard hats', quantity=1, rarity='super rare', category='minor armor', defense=5),
                             Item('a pair of boots that were made for walkin', plural='pairs of boots that were made for walkin', quantity=1, rarity='super rare', category='minor armor', defense=5)]
boss_weapons = [Item('an apache helicopter', plural='apache helicopters', rarity='super rare', weapon_rating=6, quantity=1),
Item('a trebuchet', plural='trebuchets', weapon_rating=6, quantity=1, rarity='super rare'),
Item('an army of attacking wizards', plural='armies of attacking wizards', weapon_rating=6, quantity=1, rarity='super rare')]
boss_names = ["the Terrifying Dragon of Soul Slaying", "the Great Salamander of Darkness", "the Squirrel of Destiny", ]
random.shuffle(boss_names)
random.shuffle(boss_weapons)
random.shuffle(boss_major_armors)
random.shuffle(boss_minor_armors)
boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity='super rare')
boss.health = 500
boss.equipped_weapon = boss_weapons[0]
boss.major_armor = boss_major_armors[0]
boss.minor_armor = boss_minor_armors[0]
boss.irritation_level = 10
self.mobs = [boss]
if self.name == 'a castle':
self.jobs = [Job('king of the realm', location=self.p.location, salary=1100)]
if self.name == 'a volcanic base':
self.jobs = [Job('evil overlord', location=self.p.location, salary=1100)]
class Job:
def __init__(self, name, location, skills_needed=None, salary=0, skills_learned=None, inventory_needed=None):
self.name = name
self.location = location
self.skills_needed = skills_needed or None
self.salary = salary or 0
self.skills_learned = skills_learned or None
self.inventory_needed = inventory_needed or None
self.application_attempts = 0
class Mob:
def __init__(self, name, p, plural, rarity, inventory=None):
self.name = name
self.p = p
self.plural = plural
self.quantity = 1
self.rarity = rarity
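        # Note: this replaces the bound skills() staticmethod with its
        # two-skill result, shadowing the method on the instance.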
self.skills = self.skills()
self.quest = None
self.inventory = inventory or drop_item(add_dicts_together(items['master'], items[p.square.square_type]))
self.health = 100
self.equipped_weapon = self.equip()
major = [x for x in self.inventory if x.category == 'major armor']
minor = [x for x in self.inventory if x.category == 'minor armor']
self.major_armor = major[0] if major else None
self.minor_armor = minor[0] if minor else None
self.irritation_level = 0
def equip(self):
nice_weapons = []
for i in self.inventory:
try:
if i.weapon_rating:
nice_weapons.append(i)
except AttributeError:
pass
nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
if nice_weapons:
self.inventory.remove(nice_weapons[0])
return nice_weapons[0]
else:
return None
@staticmethod
def skills():
""" Pick the skills for a mob, these determine what a player can get from completing a quest """
all_skills = ["strength", "patience", "cleanliness", "leadership", "communication",
"science", "math", "engineering", "intelligence", "driving"]
random.shuffle(all_skills)
return all_skills[0:2]
def generate_quest(self):
"""
inventory based
bring me x of an object to learn a skill
"""
if odds(3):
quest_items = add_dicts_together(items["master"], items[self.p.square.square_type])
quest_item = random.choice(list(quest_items.keys()))
i = Item(quest_item, 0, **quest_items[quest_item])
self.inventory.append(i)
quantity = {'super rare': '1',
'rare': '2',
'uncommon': '3',
'common': '6',
'super common': '15'}
q = quantity[i.rarity]
self.quest = i, int(q), f"{self.p.name}, if you bring " \
f"me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, " \
f"I will teach you a valuable skill."
return
elif odds(5):
mobs = []
for biome, building in buildings.items():
for b, attributes in building.items():
if attributes.get('mobs'):
for k in attributes['mobs'].keys():
mobs.append(k)
for biome, mob in wild_mobs.items():
for k in mob.keys():
mobs.append(k)
target = f"{mobs[random.randint(0, len(mobs)-1)]} named {names[random.randint(0, len(names)-1)]}"
print(f"Well, we'll keep this off the record, but I can arrange for some money to find its way "
f"into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...")
self.p.hit_list.append(target)
return False
else:
return None
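

# --- Minimal usage sketch (illustration only; assumes the data tables from
# app.load_data are loadable and that 'forest' is a valid biome key) ---
if __name__ == '__main__':
    square = MapSquare(square_type='forest')
    player = Player('Alex', location=(0, 0))
    player.square = square
    square.generate_items()
    square.generate_buildings(player)
    square.generate_mobs(player)
    print(player.status())
    print(player.pretty_inventory())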
that were made for walkin', plural=\n 'pairs of boots that were made for walkin', quantity=1, rarity=\n 'super rare', category='minor armor', defense=5)]\n boss_weapons = [Item('an apache helicopter', plural=\n 'apache helicopters', rarity='super rare', weapon_rating=6,\n quantity=1), Item('a trebuchet', plural='trebuchets',\n weapon_rating=6, quantity=1, rarity='super rare'), Item(\n 'an army of attacking wizards', plural=\n 'armies of attacking wizards', weapon_rating=6, quantity=1,\n rarity='super rare')]\n boss_names = ['the Terrifying Dragon of Soul Slaying',\n 'the Great Salamander of Darkness', 'the Squirrel of Destiny']\n random.shuffle(boss_names)\n random.shuffle(boss_weapons)\n random.shuffle(boss_major_armors)\n random.shuffle(boss_minor_armors)\n boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=\n 'super rare')\n boss.health = 500\n boss.equipped_weapon = boss_weapons[0]\n boss.major_armor = boss_major_armors[0]\n boss.minor_armor = boss_minor_armors[0]\n boss.irritation_level = 10\n self.mobs = [boss]\n if self.name == 'a castle':\n self.jobs = [Job('king of the realm', location=self.p.location,\n salary=1100)]\n if self.name == 'a volcanic base':\n self.jobs = [Job('evil overlord', location=self.p.location,\n salary=1100)]\n\n\nclass Job:\n\n def __init__(self, name, location, skills_needed=None, salary=0,\n skills_learned=None, inventory_needed=None):\n self.name = name\n self.location = location\n self.skills_needed = skills_needed or None\n self.salary = salary or 0\n self.skills_learned = skills_learned or None\n self.inventory_needed = inventory_needed or None\n self.application_attempts = 0\n\n\nclass Mob:\n\n def __init__(self, name, p, plural, rarity, inventory=None):\n self.name = name\n self.p = p\n self.plural = plural\n self.quantity = 1\n self.rarity = rarity\n self.skills = self.skills()\n self.quest = None\n self.inventory = inventory or drop_item(add_dicts_together(items[\n 'master'], items[p.square.square_type]))\n self.health = 100\n self.equipped_weapon = self.equip()\n major = [x for x in self.inventory if x.category == 'major armor']\n minor = [x for x in self.inventory if x.category == 'minor armor']\n self.major_armor = major[0] if major else None\n self.minor_armor = minor[0] if minor else None\n self.irritation_level = 0\n\n def equip(self):\n nice_weapons = []\n for i in self.inventory:\n try:\n if i.weapon_rating:\n nice_weapons.append(i)\n except AttributeError:\n pass\n nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)\n if nice_weapons:\n self.inventory.remove(nice_weapons[0])\n return nice_weapons[0]\n else:\n return None\n\n @staticmethod\n def skills():\n \"\"\" Pick the skills for a mob, these determine what a player can get from completing a quest \"\"\"\n all_skills = ['strength', 'patience', 'cleanliness', 'leadership',\n 'communication', 'science', 'math', 'engineering',\n 'intelligence', 'driving']\n random.shuffle(all_skills)\n return all_skills[0:2]\n\n def generate_quest(self):\n \"\"\"\n inventory based\n bring me x of an object to learn a skill\n \"\"\"\n if odds(3):\n quest_items = add_dicts_together(items['master'], items[self.p.\n square.square_type])\n quest_item = random.choice(list(quest_items.keys()))\n i = Item(quest_item, 0, **quest_items[quest_item])\n self.inventory.append(i)\n quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',\n 'common': '6', 'super common': '15'}\n q = quantity[i.rarity]\n self.quest = (i, int(q),\n f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else 
remove_little_words(i.name)}, I will teach you a valuable skill.'\n )\n return\n elif odds(5):\n mobs = []\n for biome, building in buildings.items():\n for b, attributes in building.items():\n if attributes.get('mobs'):\n for k in attributes['mobs'].keys():\n mobs.append(k)\n for biome, mob in wild_mobs.items():\n for k in mob.keys():\n mobs.append(k)\n target = (\n f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'\n )\n print(\n f\"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...\"\n )\n self.p.hit_list.append(target)\n return False\n else:\n return None\n",
"step-3": "<mask token>\n\n\nclass MapSquare:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def generate_items(self):\n self.items = drop_item(add_dicts_together(items['master'], items[\n self.square_type]))\n\n def generate_buildings(self, p):\n self.buildings = drop_building(add_dicts_together(buildings[\n 'master'], buildings[self.square_type]), p)\n\n def generate_mobs(self, p):\n self.mobs = drop_mob(add_dicts_together(wild_mobs['master'],\n wild_mobs[self.square_type]), p)\n\n def clean_up_map(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.items = [i for i in self.items if i.quantity != 0]\n\n @staticmethod\n def map_picture(the_map, p):\n \"\"\"With the player's location in the center, draw a 5 x 5 map with map square type\n and coordinates in each square\"\"\"\n xy = p.location[0] - 2, p.location[1] + 2\n map_coords = []\n for y in range(0, 5):\n row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]\n map_coords.append(row)\n pretty_map = []\n for r in map_coords:\n row = []\n for coordinates in r:\n if coordinates in the_map.keys():\n if p.quest and p.job and p.quest[1\n ] == coordinates and p.job.location == coordinates:\n star = '*$ '\n elif p.quest and p.quest[1] == coordinates:\n star = ' * '\n elif p.job and p.job.location == coordinates:\n star = ' $ '\n else:\n star = ' '\n row.append('|{!s:9}{}|'.format(the_map[coordinates].\n square_type, star))\n else:\n row.append('|{!s:12}|'.format(' '))\n pretty_map.append(row)\n for row in pretty_map:\n print(''.join(row))\n\n\nclass Player:\n\n def __init__(self, name, location):\n self.name = name\n self.location = location\n self.square = None\n self.money = 0\n self.quest = None\n self.job = None\n self.phase = 'day'\n self.equipped_weapon = None\n self.major_armor = None\n self.minor_armor = None\n self.building_local = None\n self.inventory = []\n self.skills = {}\n self.health = 100\n self.greeting_count = 0\n self.body_count = 0\n self.assassination_count = 0\n self.hit_list = []\n self.death_count = 0\n self.food_count = 0\n self.run_away_count = 0\n self.speed_bonus = False\n self.game_won = False\n\n def game_over(self):\n if self.game_won is False:\n self.game_won = True\n print(colored('You have won the game!', 'green'))\n print(\n 'You may continue playing to earn more achievements if you wish.'\n )\n if self.run_away_count == 0:\n print(\n 'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'\n )\n if self.run_away_count > 100:\n print(\n 'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'\n )\n\n def clean_up_inventory(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.inventory = [i for i in self.inventory if i.quantity != 0]\n\n def phase_change(self, the_map):\n self.phase = 'day' if self.phase == 'night' else 'night'\n for k, square in the_map.items():\n if self.location != k:\n square.generate_items()\n for b in square.buildings:\n if b.ware_list:\n b.wares = drop_item(b.ware_list)\n while not b.wares:\n b.wares = drop_item(b.ware_list)\n if b.name not in ('a castle', 'a volcanic base'):\n jobs = {}\n buiding_dict = add_dicts_together(buildings[\n 'master'], buildings[square.square_type])\n for key, v in buiding_dict.items():\n if key == b.name and v.get('jobs'):\n for name, values in v['jobs'].items():\n jobs[name] = values\n b.jobs = b.drop_job(jobs)\n if self.phase == 
'day':\n self.speed_bonus = False\n for mob in square.mobs:\n mob.health = 100\n mob.irritation_level = 0\n mob.quest = None if self.quest is None else mob.quest\n if not square.mobs:\n square.mobs = drop_mob(add_dicts_together(wild_mobs\n ['master'], wild_mobs[self.square.square_type]),\n self, limit=len(names), square=square)\n\n def formatted_inventory(self):\n formatted = []\n for item in self.inventory:\n if item.quantity > 1:\n formatted.append(f'{int_to_words(item.quantity)} {item.plural}'\n )\n else:\n formatted.append(item.name)\n if formatted:\n return comma_separated(formatted)\n else:\n return 'nothing'\n\n def pretty_inventory(self):\n w = self.equipped_weapon\n major = self.major_armor.defense if self.major_armor else 0\n minor = self.minor_armor.defense if self.minor_armor else 0\n armor_defense = (major + minor) * 5\n armors = [self.major_armor.name if self.major_armor else None, self\n .minor_armor.name if self.minor_armor else None]\n inventory = {'inventory_items':\n f'You have {self.formatted_inventory()} in your inventory.',\n 'weapon': \n f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if w.quantity == 1 else w.plural}.'\n if w else None, 'armor': \n f\"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage.\"\n if self.minor_armor or self.major_armor else None}\n return '\\n'.join(v for v in inventory.values() if v)\n\n def status(self):\n skills = [f'{k}: {v}%.' for k, v in self.skills.items()]\n job = f'You have a job as a {self.job.name}.' if self.job else None\n quest = 'You have a quest.' if self.quest else None\n if job and quest:\n job_string = '\\n'.join([job, quest])\n elif job or quest:\n job_string = job if job else quest\n else:\n job_string = (\n 'You do not have a job, and you are not contributing to society.'\n )\n status_string = {'health':\n f'Currently, you have {self.health} health.', 'location':\n f'You are located on map coordinates {self.location}, which is {self.square.square_type}.'\n , 'building_local': \n f'You are inside {self.building_local.name}.' 
if self.\n building_local else None, 'skills': '\\n'.join(skills) if skills\n else \"You don't have any skills.\", 'money':\n f'You have ${self.money} in your wallet.', 'job': job_string}\n return '\\n'.join(v for v in status_string.values() if v)\n\n def statistics(self):\n print(f'You have killed {self.body_count} mobs.')\n print(f'You have ran away from {self.run_away_count} battles.')\n print(f'You have eaten {self.food_count} items.')\n print(f'You have performed {self.assassination_count} assassinations.')\n print(f'You have talked to mobs {self.greeting_count} times.')\n\n def view_hit_list(self):\n if self.hit_list:\n print(\n f'If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}'\n )\n else:\n print(\"Looks like you don't know of anyone who needs to be dead.\")\n\n def increase_skill(self, skill, increase):\n try:\n self.skills[skill] += increase\n except KeyError:\n self.skills[skill] = increase\n print(\n f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'\n )\n\n\nclass Item:\n\n def __init__(self, name, quantity, plural, category=None, perishable=\n None, flammable=None, rarity=None, price=None, weapon_rating=None,\n defense=None):\n self.name = name\n self.quantity = quantity\n self.plural = plural\n self.category = category or None\n self.perishable = perishable or None\n self.flammable = flammable or None\n self.rarity = rarity or None\n self.price = price or None\n self.weapon_rating = weapon_rating or None\n self.defense = defense or None\n\n def copy(self):\n return Item(name=self.name, quantity=self.quantity, plural=self.\n plural, category=self.category, perishable=self.perishable,\n flammable=self.flammable, rarity=self.rarity, weapon_rating=\n self.weapon_rating, defense=self.defense)\n\n\nclass Building(object):\n\n def __init__(self, name, p, plural, category=None, rarity=None,\n ware_list=None, mobs=None, jobs=None):\n self.name = name\n self.p = p\n self.quantity = 1\n self.plural = plural\n self.category = category or None\n self.rarity = rarity or None\n self.ware_list = ware_list\n self.wares = self.drop_wares()\n self.mobs = drop_mob(mobs, p) if mobs else None\n self.jobs = self.drop_job(jobs) if jobs else None\n if self.name in ('a castle', 'a volcanic base'):\n self.boss_mobs_and_jobs()\n\n def drop_wares(self):\n if self.ware_list:\n wares = drop_item(self.ware_list)\n while not wares:\n wares = drop_item(self.ware_list)\n return wares\n else:\n return []\n\n def drop_job(self, jobs):\n drops_i = []\n for k, v in jobs.items():\n if odds(2):\n drops_i.append(Job(name=k, location=self.p.location, **v))\n return drops_i\n\n def boss_mobs_and_jobs(self):\n boss_major_armors = [Item('a coat of impervious dragon scales',\n plural='coats of dragon scales', quantity=1, category=\n 'major armor', rarity='super rare', defense=5), Item(\n 'an enchanted leather duster', plural=\n 'enchanted leather dusters', quantity=1, category='major armor',\n defense=5, rarity='super rare'), Item(\n 'a coat of actual live grizzly bears', plural=\n 'coats of actual live grizzly bears', quantity=1, category=\n 'major armor', defense=5, rarity='super rare')]\n boss_minor_armors = [Item('wings of an angel', plural=\n 'wings of angels', quantity=1, rarity='super rare', category=\n 'minor armor', defense=5), Item('an OSHA approved hard hat',\n plural='OSHA approved hard hats', quantity=1, rarity=\n 'super rare', category='minor armor', defense=5), Item(\n 'a pair boots 
that were made for walkin', plural=\n 'pairs of boots that were made for walkin', quantity=1, rarity=\n 'super rare', category='minor armor', defense=5)]\n boss_weapons = [Item('an apache helicopter', plural=\n 'apache helicopters', rarity='super rare', weapon_rating=6,\n quantity=1), Item('a trebuchet', plural='trebuchets',\n weapon_rating=6, quantity=1, rarity='super rare'), Item(\n 'an army of attacking wizards', plural=\n 'armies of attacking wizards', weapon_rating=6, quantity=1,\n rarity='super rare')]\n boss_names = ['the Terrifying Dragon of Soul Slaying',\n 'the Great Salamander of Darkness', 'the Squirrel of Destiny']\n random.shuffle(boss_names)\n random.shuffle(boss_weapons)\n random.shuffle(boss_major_armors)\n random.shuffle(boss_minor_armors)\n boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=\n 'super rare')\n boss.health = 500\n boss.equipped_weapon = boss_weapons[0]\n boss.major_armor = boss_major_armors[0]\n boss.minor_armor = boss_minor_armors[0]\n boss.irritation_level = 10\n self.mobs = [boss]\n if self.name == 'a castle':\n self.jobs = [Job('king of the realm', location=self.p.location,\n salary=1100)]\n if self.name == 'a volcanic base':\n self.jobs = [Job('evil overlord', location=self.p.location,\n salary=1100)]\n\n\nclass Job:\n\n def __init__(self, name, location, skills_needed=None, salary=0,\n skills_learned=None, inventory_needed=None):\n self.name = name\n self.location = location\n self.skills_needed = skills_needed or None\n self.salary = salary or 0\n self.skills_learned = skills_learned or None\n self.inventory_needed = inventory_needed or None\n self.application_attempts = 0\n\n\nclass Mob:\n\n def __init__(self, name, p, plural, rarity, inventory=None):\n self.name = name\n self.p = p\n self.plural = plural\n self.quantity = 1\n self.rarity = rarity\n self.skills = self.skills()\n self.quest = None\n self.inventory = inventory or drop_item(add_dicts_together(items[\n 'master'], items[p.square.square_type]))\n self.health = 100\n self.equipped_weapon = self.equip()\n major = [x for x in self.inventory if x.category == 'major armor']\n minor = [x for x in self.inventory if x.category == 'minor armor']\n self.major_armor = major[0] if major else None\n self.minor_armor = minor[0] if minor else None\n self.irritation_level = 0\n\n def equip(self):\n nice_weapons = []\n for i in self.inventory:\n try:\n if i.weapon_rating:\n nice_weapons.append(i)\n except AttributeError:\n pass\n nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)\n if nice_weapons:\n self.inventory.remove(nice_weapons[0])\n return nice_weapons[0]\n else:\n return None\n\n @staticmethod\n def skills():\n \"\"\" Pick the skills for a mob, these determine what a player can get from completing a quest \"\"\"\n all_skills = ['strength', 'patience', 'cleanliness', 'leadership',\n 'communication', 'science', 'math', 'engineering',\n 'intelligence', 'driving']\n random.shuffle(all_skills)\n return all_skills[0:2]\n\n def generate_quest(self):\n \"\"\"\n inventory based\n bring me x of an object to learn a skill\n \"\"\"\n if odds(3):\n quest_items = add_dicts_together(items['master'], items[self.p.\n square.square_type])\n quest_item = random.choice(list(quest_items.keys()))\n i = Item(quest_item, 0, **quest_items[quest_item])\n self.inventory.append(i)\n quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',\n 'common': '6', 'super common': '15'}\n q = quantity[i.rarity]\n self.quest = (i, int(q),\n f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else 
remove_little_words(i.name)}, I will teach you a valuable skill.'\n )\n return\n elif odds(5):\n mobs = []\n for biome, building in buildings.items():\n for b, attributes in building.items():\n if attributes.get('mobs'):\n for k in attributes['mobs'].keys():\n mobs.append(k)\n for biome, mob in wild_mobs.items():\n for k in mob.keys():\n mobs.append(k)\n target = (\n f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'\n )\n print(\n f\"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...\"\n )\n self.p.hit_list.append(target)\n return False\n else:\n return None\n",
"step-4": "<mask token>\n\n\ndef find_unique_names(quantity, name_list, taken_names):\n free_names = [x for x in name_list if x not in taken_names]\n random.shuffle(free_names)\n return free_names[:quantity]\n\n\ndef dropper(rarity):\n results = {'super rare': 100, 'rare': 50, 'uncommon': 25, 'common': 5,\n 'super common': 2}\n quantity = 0\n countdown = random.randint(0, 10)\n while countdown > 0:\n if random.randint(0, results[rarity]) == 1:\n quantity += 1\n countdown -= 1\n return quantity\n\n\ndef drop_building(dictionary, p, limit=None):\n limit = limit or len(adjectives)\n drops_i = []\n for k, v in dictionary.items():\n quantity = dropper(v['rarity'])\n quantity = quantity if quantity < limit else limit\n limit -= quantity\n if quantity:\n if quantity > 1 and v['category'] != 'residence':\n n = random.randint(0, quantity)\n unique_names = find_unique_names(quantity - n, names, p.\n square.unique_building_names)\n p.square.unique_building_names += unique_names\n for i in range(0, quantity - n):\n drops_i.append(Building(name=\n f\"{unique_names[i]}'s {remove_little_words(k).capitalize()}\"\n , p=p, **v))\n unique_adjectives = find_unique_names(n, adjectives, p.\n square.unique_building_names)\n p.square.unique_building_names += unique_adjectives\n for i in range(0, n):\n drops_i.append(Building(name=\n f'the {unique_adjectives[i]} {remove_little_words(k).capitalize()}'\n , p=p, **v))\n elif quantity > 1 and v['category'] == 'residence':\n unique_house_names = find_unique_names(quantity, names, p.\n square.unique_house_names)\n p.square.unique_house_names += unique_house_names\n for i in range(0, quantity):\n drops_i.append(Building(name=\n f\"{unique_house_names[i]}'s {remove_little_words(k)}\",\n p=p, **v))\n else:\n drops_i.append(Building(name=k, p=p, **v))\n return drops_i\n\n\n<mask token>\n\n\ndef drop_item(dictionary):\n \"\"\" Randomly generates objects based on rarity \"\"\"\n drops_i = []\n for k, v in dictionary.items():\n quantity = dropper(v['rarity'])\n if quantity:\n drops_i.append(Item(name=k, quantity=quantity, **v))\n return drops_i\n\n\nclass MapSquare:\n\n def __init__(self, name='', square_type=None):\n square_types = ['forest', 'mountains', 'desert', 'city', 'swamp',\n 'ocean']\n self.square_type = square_type or square_types[random.randint(0, \n len(square_types) - 1)]\n self.name = name\n self.unique_mob_names = []\n self.unique_building_names = []\n self.unique_house_names = []\n mobs = []\n items = []\n buildings = []\n\n def generate_items(self):\n self.items = drop_item(add_dicts_together(items['master'], items[\n self.square_type]))\n\n def generate_buildings(self, p):\n self.buildings = drop_building(add_dicts_together(buildings[\n 'master'], buildings[self.square_type]), p)\n\n def generate_mobs(self, p):\n self.mobs = drop_mob(add_dicts_together(wild_mobs['master'],\n wild_mobs[self.square_type]), p)\n\n def clean_up_map(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.items = [i for i in self.items if i.quantity != 0]\n\n @staticmethod\n def map_picture(the_map, p):\n \"\"\"With the player's location in the center, draw a 5 x 5 map with map square type\n and coordinates in each square\"\"\"\n xy = p.location[0] - 2, p.location[1] + 2\n map_coords = []\n for y in range(0, 5):\n row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]\n map_coords.append(row)\n pretty_map = []\n for r in map_coords:\n row = []\n for coordinates in r:\n if coordinates in the_map.keys():\n if p.quest and p.job and p.quest[1\n ] == 
coordinates and p.job.location == coordinates:\n star = '*$ '\n elif p.quest and p.quest[1] == coordinates:\n star = ' * '\n elif p.job and p.job.location == coordinates:\n star = ' $ '\n else:\n star = ' '\n row.append('|{!s:9}{}|'.format(the_map[coordinates].\n square_type, star))\n else:\n row.append('|{!s:12}|'.format(' '))\n pretty_map.append(row)\n for row in pretty_map:\n print(''.join(row))\n\n\nclass Player:\n\n def __init__(self, name, location):\n self.name = name\n self.location = location\n self.square = None\n self.money = 0\n self.quest = None\n self.job = None\n self.phase = 'day'\n self.equipped_weapon = None\n self.major_armor = None\n self.minor_armor = None\n self.building_local = None\n self.inventory = []\n self.skills = {}\n self.health = 100\n self.greeting_count = 0\n self.body_count = 0\n self.assassination_count = 0\n self.hit_list = []\n self.death_count = 0\n self.food_count = 0\n self.run_away_count = 0\n self.speed_bonus = False\n self.game_won = False\n\n def game_over(self):\n if self.game_won is False:\n self.game_won = True\n print(colored('You have won the game!', 'green'))\n print(\n 'You may continue playing to earn more achievements if you wish.'\n )\n if self.run_away_count == 0:\n print(\n 'Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.'\n )\n if self.run_away_count > 100:\n print(\n 'Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.'\n )\n\n def clean_up_inventory(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.inventory = [i for i in self.inventory if i.quantity != 0]\n\n def phase_change(self, the_map):\n self.phase = 'day' if self.phase == 'night' else 'night'\n for k, square in the_map.items():\n if self.location != k:\n square.generate_items()\n for b in square.buildings:\n if b.ware_list:\n b.wares = drop_item(b.ware_list)\n while not b.wares:\n b.wares = drop_item(b.ware_list)\n if b.name not in ('a castle', 'a volcanic base'):\n jobs = {}\n buiding_dict = add_dicts_together(buildings[\n 'master'], buildings[square.square_type])\n for key, v in buiding_dict.items():\n if key == b.name and v.get('jobs'):\n for name, values in v['jobs'].items():\n jobs[name] = values\n b.jobs = b.drop_job(jobs)\n if self.phase == 'day':\n self.speed_bonus = False\n for mob in square.mobs:\n mob.health = 100\n mob.irritation_level = 0\n mob.quest = None if self.quest is None else mob.quest\n if not square.mobs:\n square.mobs = drop_mob(add_dicts_together(wild_mobs\n ['master'], wild_mobs[self.square.square_type]),\n self, limit=len(names), square=square)\n\n def formatted_inventory(self):\n formatted = []\n for item in self.inventory:\n if item.quantity > 1:\n formatted.append(f'{int_to_words(item.quantity)} {item.plural}'\n )\n else:\n formatted.append(item.name)\n if formatted:\n return comma_separated(formatted)\n else:\n return 'nothing'\n\n def pretty_inventory(self):\n w = self.equipped_weapon\n major = self.major_armor.defense if self.major_armor else 0\n minor = self.minor_armor.defense if self.minor_armor else 0\n armor_defense = (major + minor) * 5\n armors = [self.major_armor.name if self.major_armor else None, self\n .minor_armor.name if self.minor_armor else None]\n inventory = {'inventory_items':\n f'You have {self.formatted_inventory()} in your inventory.',\n 'weapon': \n f'You are wielding {int_to_words(w.quantity)} {remove_little_words(w.name) if 
w.quantity == 1 else w.plural}.'\n if w else None, 'armor': \n f\"You are wearing {' and '.join(x for x in armors if x)}, giving you a {armor_defense}% reduction in incoming damage.\"\n if self.minor_armor or self.major_armor else None}\n return '\\n'.join(v for v in inventory.values() if v)\n\n def status(self):\n skills = [f'{k}: {v}%.' for k, v in self.skills.items()]\n job = f'You have a job as a {self.job.name}.' if self.job else None\n quest = 'You have a quest.' if self.quest else None\n if job and quest:\n job_string = '\\n'.join([job, quest])\n elif job or quest:\n job_string = job if job else quest\n else:\n job_string = (\n 'You do not have a job, and you are not contributing to society.'\n )\n status_string = {'health':\n f'Currently, you have {self.health} health.', 'location':\n f'You are located on map coordinates {self.location}, which is {self.square.square_type}.'\n , 'building_local': \n f'You are inside {self.building_local.name}.' if self.\n building_local else None, 'skills': '\\n'.join(skills) if skills\n else \"You don't have any skills.\", 'money':\n f'You have ${self.money} in your wallet.', 'job': job_string}\n return '\\n'.join(v for v in status_string.values() if v)\n\n def statistics(self):\n print(f'You have killed {self.body_count} mobs.')\n print(f'You have ran away from {self.run_away_count} battles.')\n print(f'You have eaten {self.food_count} items.')\n print(f'You have performed {self.assassination_count} assassinations.')\n print(f'You have talked to mobs {self.greeting_count} times.')\n\n def view_hit_list(self):\n if self.hit_list:\n print(\n f'If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}'\n )\n else:\n print(\"Looks like you don't know of anyone who needs to be dead.\")\n\n def increase_skill(self, skill, increase):\n try:\n self.skills[skill] += increase\n except KeyError:\n self.skills[skill] = increase\n print(\n f'You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.'\n )\n\n\nclass Item:\n\n def __init__(self, name, quantity, plural, category=None, perishable=\n None, flammable=None, rarity=None, price=None, weapon_rating=None,\n defense=None):\n self.name = name\n self.quantity = quantity\n self.plural = plural\n self.category = category or None\n self.perishable = perishable or None\n self.flammable = flammable or None\n self.rarity = rarity or None\n self.price = price or None\n self.weapon_rating = weapon_rating or None\n self.defense = defense or None\n\n def copy(self):\n return Item(name=self.name, quantity=self.quantity, plural=self.\n plural, category=self.category, perishable=self.perishable,\n flammable=self.flammable, rarity=self.rarity, weapon_rating=\n self.weapon_rating, defense=self.defense)\n\n\nclass Building(object):\n\n def __init__(self, name, p, plural, category=None, rarity=None,\n ware_list=None, mobs=None, jobs=None):\n self.name = name\n self.p = p\n self.quantity = 1\n self.plural = plural\n self.category = category or None\n self.rarity = rarity or None\n self.ware_list = ware_list\n self.wares = self.drop_wares()\n self.mobs = drop_mob(mobs, p) if mobs else None\n self.jobs = self.drop_job(jobs) if jobs else None\n if self.name in ('a castle', 'a volcanic base'):\n self.boss_mobs_and_jobs()\n\n def drop_wares(self):\n if self.ware_list:\n wares = drop_item(self.ware_list)\n while not wares:\n wares = drop_item(self.ware_list)\n return wares\n else:\n return []\n\n def drop_job(self, jobs):\n 
drops_i = []\n for k, v in jobs.items():\n if odds(2):\n drops_i.append(Job(name=k, location=self.p.location, **v))\n return drops_i\n\n def boss_mobs_and_jobs(self):\n boss_major_armors = [Item('a coat of impervious dragon scales',\n plural='coats of dragon scales', quantity=1, category=\n 'major armor', rarity='super rare', defense=5), Item(\n 'an enchanted leather duster', plural=\n 'enchanted leather dusters', quantity=1, category='major armor',\n defense=5, rarity='super rare'), Item(\n 'a coat of actual live grizzly bears', plural=\n 'coats of actual live grizzly bears', quantity=1, category=\n 'major armor', defense=5, rarity='super rare')]\n boss_minor_armors = [Item('wings of an angel', plural=\n 'wings of angels', quantity=1, rarity='super rare', category=\n 'minor armor', defense=5), Item('an OSHA approved hard hat',\n plural='OSHA approved hard hats', quantity=1, rarity=\n 'super rare', category='minor armor', defense=5), Item(\n 'a pair boots that were made for walkin', plural=\n 'pairs of boots that were made for walkin', quantity=1, rarity=\n 'super rare', category='minor armor', defense=5)]\n boss_weapons = [Item('an apache helicopter', plural=\n 'apache helicopters', rarity='super rare', weapon_rating=6,\n quantity=1), Item('a trebuchet', plural='trebuchets',\n weapon_rating=6, quantity=1, rarity='super rare'), Item(\n 'an army of attacking wizards', plural=\n 'armies of attacking wizards', weapon_rating=6, quantity=1,\n rarity='super rare')]\n boss_names = ['the Terrifying Dragon of Soul Slaying',\n 'the Great Salamander of Darkness', 'the Squirrel of Destiny']\n random.shuffle(boss_names)\n random.shuffle(boss_weapons)\n random.shuffle(boss_major_armors)\n random.shuffle(boss_minor_armors)\n boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity=\n 'super rare')\n boss.health = 500\n boss.equipped_weapon = boss_weapons[0]\n boss.major_armor = boss_major_armors[0]\n boss.minor_armor = boss_minor_armors[0]\n boss.irritation_level = 10\n self.mobs = [boss]\n if self.name == 'a castle':\n self.jobs = [Job('king of the realm', location=self.p.location,\n salary=1100)]\n if self.name == 'a volcanic base':\n self.jobs = [Job('evil overlord', location=self.p.location,\n salary=1100)]\n\n\nclass Job:\n\n def __init__(self, name, location, skills_needed=None, salary=0,\n skills_learned=None, inventory_needed=None):\n self.name = name\n self.location = location\n self.skills_needed = skills_needed or None\n self.salary = salary or 0\n self.skills_learned = skills_learned or None\n self.inventory_needed = inventory_needed or None\n self.application_attempts = 0\n\n\nclass Mob:\n\n def __init__(self, name, p, plural, rarity, inventory=None):\n self.name = name\n self.p = p\n self.plural = plural\n self.quantity = 1\n self.rarity = rarity\n self.skills = self.skills()\n self.quest = None\n self.inventory = inventory or drop_item(add_dicts_together(items[\n 'master'], items[p.square.square_type]))\n self.health = 100\n self.equipped_weapon = self.equip()\n major = [x for x in self.inventory if x.category == 'major armor']\n minor = [x for x in self.inventory if x.category == 'minor armor']\n self.major_armor = major[0] if major else None\n self.minor_armor = minor[0] if minor else None\n self.irritation_level = 0\n\n def equip(self):\n nice_weapons = []\n for i in self.inventory:\n try:\n if i.weapon_rating:\n nice_weapons.append(i)\n except AttributeError:\n pass\n nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)\n if nice_weapons:\n 
self.inventory.remove(nice_weapons[0])\n return nice_weapons[0]\n else:\n return None\n\n @staticmethod\n def skills():\n \"\"\" Pick the skills for a mob, these determine what a player can get from completing a quest \"\"\"\n all_skills = ['strength', 'patience', 'cleanliness', 'leadership',\n 'communication', 'science', 'math', 'engineering',\n 'intelligence', 'driving']\n random.shuffle(all_skills)\n return all_skills[0:2]\n\n def generate_quest(self):\n \"\"\"\n inventory based\n bring me x of an object to learn a skill\n \"\"\"\n if odds(3):\n quest_items = add_dicts_together(items['master'], items[self.p.\n square.square_type])\n quest_item = random.choice(list(quest_items.keys()))\n i = Item(quest_item, 0, **quest_items[quest_item])\n self.inventory.append(i)\n quantity = {'super rare': '1', 'rare': '2', 'uncommon': '3',\n 'common': '6', 'super common': '15'}\n q = quantity[i.rarity]\n self.quest = (i, int(q),\n f'{self.p.name}, if you bring me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, I will teach you a valuable skill.'\n )\n return\n elif odds(5):\n mobs = []\n for biome, building in buildings.items():\n for b, attributes in building.items():\n if attributes.get('mobs'):\n for k in attributes['mobs'].keys():\n mobs.append(k)\n for biome, mob in wild_mobs.items():\n for k in mob.keys():\n mobs.append(k)\n target = (\n f'{mobs[random.randint(0, len(mobs) - 1)]} named {names[random.randint(0, len(names) - 1)]}'\n )\n print(\n f\"Well, we'll keep this off the record, but I can arrange for some money to find its way into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...\"\n )\n self.p.hit_list.append(target)\n return False\n else:\n return None\n",
"step-5": "import random\n\nimport colorama\nfrom termcolor import colored\nfrom reusables.string_manipulation import int_to_words\n\nfrom app.common_functions import comma_separated, add_dicts_together, remove_little_words, odds\nfrom app.load_data import items, buildings, wild_mobs, names, adjectives\n\n\ncolorama.init()\n\n\ndef find_unique_names(quantity, name_list, taken_names):\n free_names = [x for x in name_list if x not in taken_names]\n random.shuffle(free_names)\n return free_names[:quantity]\n\n\ndef dropper(rarity):\n results = {'super rare': 100,\n 'rare': 50,\n 'uncommon': 25,\n 'common': 5,\n 'super common': 2}\n quantity = 0\n countdown = random.randint(0, 10)\n while countdown > 0:\n if random.randint(0, results[rarity]) == 1:\n quantity += 1\n countdown -= 1\n return quantity\n\n\ndef drop_building(dictionary, p, limit=None):\n limit = limit or len(adjectives)\n drops_i = []\n\n for k, v in dictionary.items():\n quantity = dropper(v['rarity'])\n quantity = quantity if quantity < limit else limit\n limit -= quantity\n if quantity:\n if quantity > 1 and v['category'] != 'residence':\n n = random.randint(0, quantity)\n unique_names = find_unique_names(quantity - n, names, p.square.unique_building_names)\n p.square.unique_building_names += unique_names\n for i in range(0, quantity - n):\n drops_i.append(Building(name=f\"{unique_names[i]}'s {remove_little_words(k).capitalize()}\", p=p, **v))\n unique_adjectives = find_unique_names(n, adjectives, p.square.unique_building_names)\n p.square.unique_building_names += unique_adjectives\n for i in range(0, n):\n drops_i.append(Building(name=f\"the {unique_adjectives[i]} {remove_little_words(k).capitalize()}\", p=p, **v))\n\n elif quantity > 1 and v['category'] == 'residence':\n unique_house_names = find_unique_names(quantity, names, p.square.unique_house_names)\n p.square.unique_house_names += unique_house_names\n for i in range(0, quantity):\n drops_i.append(Building(name=f\"{unique_house_names[i]}'s {remove_little_words(k)}\", p=p, **v))\n else:\n drops_i.append(Building(name=k, p=p, **v))\n return drops_i\n\n\ndef drop_mob(dictionary, p, limit=None, square=None):\n square = square or p.square\n limit = limit or len(names) - len(square.unique_mob_names)\n drops_i = []\n\n for k, v in dictionary.items():\n quantity = dropper(v['rarity'])\n quantity = quantity if quantity < limit else limit\n limit -= quantity\n if quantity:\n if quantity > 1:\n unique_names = find_unique_names(quantity, names, square.unique_mob_names)\n p.square.unique_mob_names += unique_names\n for i in range(0, len(unique_names)):\n drops_i.append(Mob(name=f\"{k} named {unique_names[i]}\", p=p, **v))\n else:\n if k not in [n.name for n in p.square.mobs]:\n drops_i.append(Mob(name=k, p=p, **v))\n else:\n name = find_unique_names(1, names, square.unique_mob_names)[0]\n drops_i.append(Mob(name=f\"{k} named {name}\", p=p, **v))\n return drops_i\n\n\ndef drop_item(dictionary):\n \"\"\" Randomly generates objects based on rarity \"\"\"\n drops_i = []\n\n for k, v in dictionary.items():\n quantity = dropper(v['rarity'])\n if quantity:\n drops_i.append(Item(name=k, quantity=quantity, **v))\n\n return drops_i\n\n\nclass MapSquare:\n def __init__(self, name=\"\", square_type=None):\n square_types = [\"forest\", \"mountains\", \"desert\", \"city\", \"swamp\", \"ocean\"]\n self.square_type = square_type or square_types[random.randint(0, len(square_types) - 1)]\n self.name = name\n self.unique_mob_names = []\n self.unique_building_names = []\n self.unique_house_names = 
[]\n\n mobs = []\n items = []\n buildings = []\n\n def generate_items(self):\n self.items = drop_item(add_dicts_together(items[\"master\"], items[self.square_type]))\n\n def generate_buildings(self, p):\n self.buildings = drop_building(add_dicts_together(buildings[\"master\"], buildings[self.square_type]), p)\n\n def generate_mobs(self, p):\n self.mobs = drop_mob(add_dicts_together(wild_mobs[\"master\"], wild_mobs[self.square_type]), p)\n\n def clean_up_map(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.items = [i for i in self.items if i.quantity != 0]\n\n @staticmethod\n def map_picture(the_map, p):\n \"\"\"With the player's location in the center, draw a 5 x 5 map with map square type\n and coordinates in each square\"\"\"\n xy = (p.location[0] - 2, p.location[1] + 2)\n map_coords = []\n for y in range(0, 5):\n row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]\n map_coords.append(row)\n\n pretty_map = []\n for r in map_coords:\n row = []\n for coordinates in r:\n if coordinates in the_map.keys():\n if p.quest and p.job and p.quest[1] == coordinates and p.job.location == coordinates:\n star = '*$ '\n elif p.quest and p.quest[1] == coordinates:\n star = ' * '\n elif p.job and p.job.location == coordinates:\n star = ' $ '\n else:\n star = ' '\n row.append(\"|{!s:9}{}|\".format(the_map[coordinates].square_type, star))\n else:\n row.append(\"|{!s:12}|\".format(' '))\n pretty_map.append(row)\n for row in pretty_map:\n print(''.join(row))\n\n\nclass Player:\n def __init__(self, name, location):\n self.name = name\n self.location = location\n self.square = None\n self.money = 0\n self.quest = None\n self.job = None\n self.phase = \"day\"\n self.equipped_weapon = None\n self.major_armor = None\n self.minor_armor = None\n self.building_local = None\n self.inventory = []\n self.skills = {}\n self.health = 100\n self.greeting_count = 0\n self.body_count = 0\n self.assassination_count = 0\n self.hit_list = []\n self.death_count = 0\n # TODO increase insurance cost every death?\n self.food_count = 0\n self.run_away_count = 0\n self.speed_bonus = False\n self.game_won = False\n\n def game_over(self):\n if self.game_won is False:\n self.game_won = True\n print(colored(\"You have won the game!\", \"green\"))\n print(\"You may continue playing to earn more achievements if you wish.\")\n if self.run_away_count == 0:\n print(\"Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.\")\n if self.run_away_count > 100:\n print(\"Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.\")\n\n def clean_up_inventory(self):\n \"\"\" Remove items with quantity of zero from the map inventory\"\"\"\n self.inventory = [i for i in self.inventory if i.quantity != 0]\n\n def phase_change(self, the_map):\n self.phase = 'day' if self.phase == 'night' else 'night'\n for k, square in the_map.items():\n if self.location != k:\n square.generate_items()\n for b in square.buildings:\n if b.ware_list:\n b.wares = drop_item(b.ware_list)\n while not b.wares:\n b.wares = drop_item(b.ware_list)\n if b.name not in ('a castle', 'a volcanic base'):\n jobs = {}\n buiding_dict = add_dicts_together(buildings['master'], buildings[square.square_type])\n for key, v in buiding_dict.items():\n if key == b.name and v.get('jobs'):\n for name, values in v['jobs'].items():\n jobs[name] = values\n b.jobs = b.drop_job(jobs)\n if self.phase == 'day':\n self.speed_bonus 
= False\n for mob in square.mobs:\n mob.health = 100\n mob.irritation_level = 0\n mob.quest = None if self.quest is None else mob.quest\n if not square.mobs:\n square.mobs = drop_mob(add_dicts_together(wild_mobs[\"master\"], wild_mobs[self.square.square_type]),\n self, limit=len(names), square=square)\n\n def formatted_inventory(self):\n formatted = []\n for item in self.inventory:\n\n if item.quantity > 1:\n formatted.append(f\"{int_to_words(item.quantity)} {item.plural}\")\n else:\n formatted.append(item.name)\n if formatted:\n return comma_separated(formatted)\n else:\n return \"nothing\"\n\n def pretty_inventory(self):\n w = self.equipped_weapon\n major = self.major_armor.defense if self.major_armor else 0\n minor = self.minor_armor.defense if self.minor_armor else 0\n armor_defense = (major + minor) * 5\n\n armors = [self.major_armor.name if self.major_armor else None, self.minor_armor.name if self.minor_armor else None]\n\n inventory = {'inventory_items': f\"You have {self.formatted_inventory()} in your inventory.\",\n 'weapon': f\"You are wielding {int_to_words(w.quantity)} \"\n f\"{remove_little_words(w.name) if w.quantity == 1 else w.plural}.\" if w else None,\n 'armor': f\"You are wearing {' and '.join(x for x in armors if x)}, \"\n f\"giving you a {armor_defense}% reduction in incoming damage.\" if self.minor_armor or self.major_armor else None}\n return '\\n'.join(v for v in inventory.values() if v)\n\n def status(self):\n skills = [f\"{k}: {v}%.\" for k, v in self.skills.items()]\n\n job = f\"You have a job as a {self.job.name}.\" if self.job else None\n quest = \"You have a quest.\" if self.quest else None\n if job and quest:\n job_string = \"\\n\".join([job, quest])\n elif job or quest:\n job_string = job if job else quest\n else:\n job_string = \"You do not have a job, and you are not contributing to society.\"\n\n status_string = {\n 'health': f'Currently, you have {self.health} health.',\n 'location': f'You are located on map coordinates {self.location}, '\n f'which is {self.square.square_type}.',\n 'building_local': f'You are inside {self.building_local.name}.' 
if self.building_local else None,\n 'skills': '\\n'.join(skills) if skills else \"You don't have any skills.\",\n 'money': f\"You have ${self.money} in your wallet.\",\n 'job': job_string}\n\n return '\\n'.join(v for v in status_string.values() if v)\n\n def statistics(self):\n print(f\"You have killed {self.body_count} mobs.\")\n print(f\"You have ran away from {self.run_away_count} battles.\")\n print(f\"You have eaten {self.food_count} items.\")\n print(f\"You have performed {self.assassination_count} assassinations.\")\n print(f\"You have talked to mobs {self.greeting_count} times.\")\n\n def view_hit_list(self):\n if self.hit_list:\n print(f\"If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}\")\n else:\n print(\"Looks like you don't know of anyone who needs to be dead.\")\n\n def increase_skill(self, skill, increase):\n try:\n self.skills[skill] += increase\n except KeyError:\n self.skills[skill] = increase\n print(f\"You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.\")\n\n\nclass Item:\n def __init__(self, name, quantity, plural, category=None, perishable=None,\n flammable=None, rarity=None, price=None, weapon_rating=None, defense=None):\n self.name = name\n self.quantity = quantity\n self.plural = plural\n self.category = category or None\n self.perishable = perishable or None\n self.flammable = flammable or None\n self.rarity = rarity or None\n self.price = price or None\n self.weapon_rating = weapon_rating or None\n self.defense = defense or None\n\n def copy(self):\n return Item(name=self.name, quantity=self.quantity, plural=self.plural, category=self.category,\n perishable=self.perishable, flammable=self.flammable, rarity=self.rarity,\n weapon_rating=self.weapon_rating, defense=self.defense)\n\n\nclass Building(object):\n def __init__(self, name, p, plural, category=None, rarity=None, ware_list=None, mobs=None, jobs=None):\n self.name = name\n self.p = p\n self.quantity = 1\n self.plural = plural\n self.category = category or None\n self.rarity = rarity or None\n self.ware_list = ware_list\n self.wares = self.drop_wares()\n self.mobs = drop_mob(mobs, p) if mobs else None\n self.jobs = self.drop_job(jobs) if jobs else None\n\n if self.name in ('a castle', 'a volcanic base'):\n self.boss_mobs_and_jobs()\n\n def drop_wares(self):\n if self.ware_list:\n wares = drop_item(self.ware_list)\n while not wares:\n wares = drop_item(self.ware_list)\n return wares\n else:\n return []\n\n def drop_job(self, jobs):\n drops_i = []\n for k, v in jobs.items():\n if odds(2):\n drops_i.append(Job(name=k, location=self.p.location, **v))\n return drops_i\n\n def boss_mobs_and_jobs(self):\n boss_major_armors = [Item('a coat of impervious dragon scales', plural='coats of dragon scales', quantity=1, category='major armor', rarity='super rare', defense=5),\n Item('an enchanted leather duster', plural='enchanted leather dusters', quantity=1, category='major armor', defense=5, rarity='super rare'),\n Item('a coat of actual live grizzly bears', plural='coats of actual live grizzly bears', quantity=1, category='major armor', defense=5, rarity='super rare')]\n boss_minor_armors = [Item('wings of an angel', plural='wings of angels', quantity=1, rarity='super rare', category='minor armor', defense=5),\n Item('an OSHA approved hard hat', plural='OSHA approved hard hats', quantity=1, rarity='super rare', category='minor armor', defense=5),\n Item('a pair boots that were made for walkin', 
plural='pairs of boots that were made for walkin', quantity=1, rarity='super rare', category='minor armor', defense=5)]\n boss_weapons = [Item('an apache helicopter', plural='apache helicopters', rarity='super rare', weapon_rating=6, quantity=1),\n Item('a trebuchet', plural='trebuchets', weapon_rating=6, quantity=1, rarity='super rare'),\n Item('an army of attacking wizards', plural='armies of attacking wizards', weapon_rating=6, quantity=1, rarity='super rare')]\n boss_names = [\"the Terrifying Dragon of Soul Slaying\", \"the Great Salamander of Darkness\", \"the Squirrel of Destiny\", ]\n random.shuffle(boss_names)\n random.shuffle(boss_weapons)\n random.shuffle(boss_major_armors)\n random.shuffle(boss_minor_armors)\n\n boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity='super rare')\n boss.health = 500\n boss.equipped_weapon = boss_weapons[0]\n boss.major_armor = boss_major_armors[0]\n boss.minor_armor = boss_minor_armors[0]\n boss.irritation_level = 10\n self.mobs = [boss]\n if self.name == 'a castle':\n self.jobs = [Job('king of the realm', location=self.p.location, salary=1100)]\n if self.name == 'a volcanic base':\n self.jobs = [Job('evil overlord', location=self.p.location, salary=1100)]\n\n\nclass Job:\n def __init__(self, name, location, skills_needed=None, salary=0, skills_learned=None, inventory_needed=None):\n self.name = name\n self.location = location\n self.skills_needed = skills_needed or None\n self.salary = salary or 0\n self.skills_learned = skills_learned or None\n self.inventory_needed = inventory_needed or None\n self.application_attempts = 0\n\n\nclass Mob:\n def __init__(self, name, p, plural, rarity, inventory=None):\n self.name = name\n self.p = p\n self.plural = plural\n self.quantity = 1\n self.rarity = rarity\n\n self.skills = self.skills()\n self.quest = None\n\n self.inventory = inventory or drop_item(add_dicts_together(items['master'], items[p.square.square_type]))\n self.health = 100\n self.equipped_weapon = self.equip()\n major = [x for x in self.inventory if x.category == 'major armor']\n minor = [x for x in self.inventory if x.category == 'minor armor']\n self.major_armor = major[0] if major else None\n self.minor_armor = minor[0] if minor else None\n self.irritation_level = 0\n\n def equip(self):\n nice_weapons = []\n for i in self.inventory:\n try:\n if i.weapon_rating:\n nice_weapons.append(i)\n except AttributeError:\n pass\n nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)\n if nice_weapons:\n self.inventory.remove(nice_weapons[0])\n return nice_weapons[0]\n else:\n return None\n\n @staticmethod\n def skills():\n \"\"\" Pick the skills for a mob, these determine what a player can get from completing a quest \"\"\"\n all_skills = [\"strength\", \"patience\", \"cleanliness\", \"leadership\", \"communication\",\n \"science\", \"math\", \"engineering\", \"intelligence\", \"driving\"]\n\n random.shuffle(all_skills)\n return all_skills[0:2]\n\n def generate_quest(self):\n \"\"\"\n inventory based\n bring me x of an object to learn a skill\n \"\"\"\n\n if odds(3):\n\n quest_items = add_dicts_together(items[\"master\"], items[self.p.square.square_type])\n quest_item = random.choice(list(quest_items.keys()))\n\n i = Item(quest_item, 0, **quest_items[quest_item])\n self.inventory.append(i)\n\n quantity = {'super rare': '1',\n 'rare': '2',\n 'uncommon': '3',\n 'common': '6',\n 'super common': '15'}\n q = quantity[i.rarity]\n\n self.quest = i, int(q), f\"{self.p.name}, if you bring \" \\\n f\"me {q} {i.plural if int(q) > 1 else 
remove_little_words(i.name)}, \" \\\n f\"I will teach you a valuable skill.\"\n return\n elif odds(5):\n mobs = []\n for biome, building in buildings.items():\n for b, attributes in building.items():\n if attributes.get('mobs'):\n for k in attributes['mobs'].keys():\n mobs.append(k)\n for biome, mob in wild_mobs.items():\n for k in mob.keys():\n mobs.append(k)\n target = f\"{mobs[random.randint(0, len(mobs)-1)]} named {names[random.randint(0, len(names)-1)]}\"\n print(f\"Well, we'll keep this off the record, but I can arrange for some money to find its way \"\n f\"into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...\")\n self.p.hit_list.append(target)\n return False\n\n else:\n return None\n",
"step-ids": [
23,
31,
32,
38,
42
]
}
|
[
23,
31,
32,
38,
42
] |
<mask token>
class bcolors:
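    # ANSI escape codes used to color the map diff in the terminal.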
RED = '\x1b[31m'
GREEN = '\x1b[32m'
NORMAL = '\x1b[0m'
def check_result(title, map1, map2):
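    # Print map1 row by row, coloring each cell green where it matches map2
    # and red where it differs; returns True only when all cells match.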
result = True
print(title)
for y in range(0, common.constants.MAP_HEIGHT):
v = ''
for x in range(0, common.constants.MAP_WIDTH):
if map1[y][x] == map2[y][x]:
v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL
else:
result = False
v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL
print(v)
if result:
print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)
else:
print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)
return result
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class bcolors:
RED = '\x1b[31m'
GREEN = '\x1b[32m'
NORMAL = '\x1b[0m'
def check_result(title, map1, map2):
result = True
print(title)
for y in range(0, common.constants.MAP_HEIGHT):
v = ''
for x in range(0, common.constants.MAP_WIDTH):
if map1[y][x] == map2[y][x]:
v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL
else:
result = False
v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL
print(v)
if result:
print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)
else:
print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)
return result
<|reserved_special_token_0|>
common.set_map(gold_dfmap1, gold_df1)
<|reserved_special_token_0|>
common.set_map(dfmap1, data1)
<|reserved_special_token_0|>
common.set_map(gold_dfmap2, gold_df2)
<|reserved_special_token_0|>
common.set_map(dfmap2, data2)
<|reserved_special_token_0|>
common.set_map(gold_dfmap3, gold_df3)
<|reserved_special_token_0|>
common.set_map(dfmap3, data3)
<|reserved_special_token_0|>
if all_passed:
exit(0)
else:
exit(1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class bcolors:
RED = '\x1b[31m'
GREEN = '\x1b[32m'
NORMAL = '\x1b[0m'
def check_result(title, map1, map2):
result = True
print(title)
for y in range(0, common.constants.MAP_HEIGHT):
v = ''
for x in range(0, common.constants.MAP_WIDTH):
if map1[y][x] == map2[y][x]:
v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL
else:
result = False
v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL
print(v)
if result:
print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)
else:
print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)
return result
data1 = '100000011110111011111111011110000003111111011111020000'
gold_df1 = '100000011110111011111111011110000555111111511111055540'
data2 = '200000011011111011000001011111011003111111011111000011111111011'
gold_df2 = '555555511411111511444441511111411555111111011111000011111111011'
data3 = '100000011111011011000011011111011003110011011111200011'
gold_df3 = '100000011111011011000011011111411555110411511111555511'
all_passed = True
gold_dfmap1 = common.init_map()
common.set_map(gold_dfmap1, gold_df1)
dfmap1 = common.init_map()
common.set_map(dfmap1, data1)
df1 = student_code.astar_search(dfmap1)
tdf1 = 'Reachable goal:'
cdf1 = check_result(tdf1, dfmap1, gold_dfmap1)
all_passed = all_passed and cdf1 and df1
gold_dfmap2 = common.init_map()
common.set_map(gold_dfmap2, gold_df2)
dfmap2 = common.init_map()
common.set_map(dfmap2, data2)
df2 = student_code.astar_search(dfmap2)
tdf2 = 'Reachable goal:'
cdf2 = check_result(tdf2, dfmap2, gold_dfmap2)
all_passed = all_passed and cdf2 and df2
gold_dfmap3 = common.init_map()
common.set_map(gold_dfmap3, gold_df3)
dfmap3 = common.init_map()
common.set_map(dfmap3, data3)
df3 = student_code.astar_search(dfmap3)
tdf3 = 'Reachable goal:'
cdf3 = check_result(tdf3, dfmap3, gold_dfmap3)
all_passed = all_passed and cdf3 and df3
if all_passed:
exit(0)
else:
exit(1)
<|reserved_special_token_1|>
import common
import student_code
class bcolors:
RED = '\x1b[31m'
GREEN = '\x1b[32m'
NORMAL = '\x1b[0m'
def check_result(title, map1, map2):
result = True
print(title)
for y in range(0, common.constants.MAP_HEIGHT):
v = ''
for x in range(0, common.constants.MAP_WIDTH):
if map1[y][x] == map2[y][x]:
v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL
else:
result = False
v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL
print(v)
if result:
print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)
else:
print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)
return result
data1 = '100000011110111011111111011110000003111111011111020000'
gold_df1 = '100000011110111011111111011110000555111111511111055540'
data2 = '200000011011111011000001011111011003111111011111000011111111011'
gold_df2 = '555555511411111511444441511111411555111111011111000011111111011'
data3 = '100000011111011011000011011111011003110011011111200011'
gold_df3 = '100000011111011011000011011111411555110411511111555511'
all_passed = True
gold_dfmap1 = common.init_map()
common.set_map(gold_dfmap1, gold_df1)
dfmap1 = common.init_map()
common.set_map(dfmap1, data1)
df1 = student_code.astar_search(dfmap1)
tdf1 = 'Reachable goal:'
cdf1 = check_result(tdf1, dfmap1, gold_dfmap1)
all_passed = all_passed and cdf1 and df1
gold_dfmap2 = common.init_map()
common.set_map(gold_dfmap2, gold_df2)
dfmap2 = common.init_map()
common.set_map(dfmap2, data2)
df2 = student_code.astar_search(dfmap2)
tdf2 = 'Reachable goal:'
cdf2 = check_result(tdf2, dfmap2, gold_dfmap2)
all_passed = all_passed and cdf2 and df2
gold_dfmap3 = common.init_map()
common.set_map(gold_dfmap3, gold_df3)
dfmap3 = common.init_map()
common.set_map(dfmap3, data3)
df3 = student_code.astar_search(dfmap3)
tdf3 = 'Reachable goal:'
cdf3 = check_result(tdf3, dfmap3, gold_dfmap3)
all_passed = all_passed and cdf3 and df3
if all_passed:
exit(0)
else:
exit(1)
<|reserved_special_token_1|>
import common
import student_code
class bcolors:
RED = "\x1b[31m"
GREEN = "\x1b[32m"
NORMAL = "\x1b[0m"
def check_result(title, map1, map2):
result=True
print(title)
for y in range(0,common.constants.MAP_HEIGHT):
v=""
for x in range(0,common.constants.MAP_WIDTH):
if (map1[y][x]==map2[y][x]):
v+=bcolors.GREEN+str(map1[y][x])+bcolors.NORMAL
else:
result = False
v+=bcolors.RED+str(map1[y][x])+bcolors.NORMAL
print(v)
if (result):
print("Test Result: " + bcolors.GREEN+"Passed"+bcolors.NORMAL)
else:
print("Test Result: " + bcolors.RED+"Failed"+bcolors.NORMAL)
return result
data1 = (
"100000011"
"110111011"
"111111011"
"110000003"
"111111011"
"111020000")
gold_df1 = ("100000011"
"110111011"
"111111011"
"110000555"
"111111511"
"111055540")
data2 = (
"200000011"
"011111011"
"000001011"
"111011003"
"111111011"
"111000011"
"111111011")
gold_df2 = ("555555511"
"411111511"
"444441511"
"111411555"
"111111011"
"111000011"
"111111011")
data3 = (
"100000011"
"111011011"
"000011011"
"111011003"
"110011011"
"111200011")
gold_df3 = (
"100000011"
"111011011"
"000011011"
"111411555"
"110411511"
"111555511")
all_passed = True
gold_dfmap1 = common.init_map();
common.set_map(gold_dfmap1, gold_df1)
dfmap1 = common.init_map()
common.set_map(dfmap1, data1)
df1 = student_code.astar_search(dfmap1)
tdf1 ="Reachable goal:"
cdf1 = check_result(tdf1,dfmap1,gold_dfmap1)
all_passed = all_passed and cdf1 and df1
gold_dfmap2 = common.init_map();
common.set_map(gold_dfmap2, gold_df2)
dfmap2 = common.init_map()
common.set_map(dfmap2, data2)
df2 = student_code.astar_search(dfmap2)
tdf2 ="Reachable goal:"
cdf2 = check_result(tdf2,dfmap2,gold_dfmap2)
all_passed = all_passed and cdf2 and df2
gold_dfmap3 = common.init_map();
common.set_map(gold_dfmap3, gold_df3)
dfmap3 = common.init_map()
common.set_map(dfmap3, data3)
df3 = student_code.astar_search(dfmap3)
tdf3 ="Reachable goal:"
cdf3 = check_result(tdf3,dfmap3,gold_dfmap3)
all_passed = all_passed and cdf3 and df3
if all_passed:
exit(0)
else:
exit(1)
|
flexible
|
{
"blob_id": "602d2c545c6e3eabe5c6285d2ab0c7f4216a00f5",
"index": 1563,
"step-1": "<mask token>\n\n\nclass bcolors:\n RED = '\\x1b[31m'\n GREEN = '\\x1b[32m'\n NORMAL = '\\x1b[0m'\n\n\ndef check_result(title, map1, map2):\n result = True\n print(title)\n for y in range(0, common.constants.MAP_HEIGHT):\n v = ''\n for x in range(0, common.constants.MAP_WIDTH):\n if map1[y][x] == map2[y][x]:\n v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL\n else:\n result = False\n v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL\n print(v)\n if result:\n print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)\n else:\n print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass bcolors:\n RED = '\\x1b[31m'\n GREEN = '\\x1b[32m'\n NORMAL = '\\x1b[0m'\n\n\ndef check_result(title, map1, map2):\n result = True\n print(title)\n for y in range(0, common.constants.MAP_HEIGHT):\n v = ''\n for x in range(0, common.constants.MAP_WIDTH):\n if map1[y][x] == map2[y][x]:\n v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL\n else:\n result = False\n v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL\n print(v)\n if result:\n print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)\n else:\n print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)\n return result\n\n\n<mask token>\ncommon.set_map(gold_dfmap1, gold_df1)\n<mask token>\ncommon.set_map(dfmap1, data1)\n<mask token>\ncommon.set_map(gold_dfmap2, gold_df2)\n<mask token>\ncommon.set_map(dfmap2, data2)\n<mask token>\ncommon.set_map(gold_dfmap3, gold_df3)\n<mask token>\ncommon.set_map(dfmap3, data3)\n<mask token>\nif all_passed:\n exit(0)\nelse:\n exit(1)\n",
"step-3": "<mask token>\n\n\nclass bcolors:\n RED = '\\x1b[31m'\n GREEN = '\\x1b[32m'\n NORMAL = '\\x1b[0m'\n\n\ndef check_result(title, map1, map2):\n result = True\n print(title)\n for y in range(0, common.constants.MAP_HEIGHT):\n v = ''\n for x in range(0, common.constants.MAP_WIDTH):\n if map1[y][x] == map2[y][x]:\n v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL\n else:\n result = False\n v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL\n print(v)\n if result:\n print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)\n else:\n print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)\n return result\n\n\ndata1 = '100000011110111011111111011110000003111111011111020000'\ngold_df1 = '100000011110111011111111011110000555111111511111055540'\ndata2 = '200000011011111011000001011111011003111111011111000011111111011'\ngold_df2 = '555555511411111511444441511111411555111111011111000011111111011'\ndata3 = '100000011111011011000011011111011003110011011111200011'\ngold_df3 = '100000011111011011000011011111411555110411511111555511'\nall_passed = True\ngold_dfmap1 = common.init_map()\ncommon.set_map(gold_dfmap1, gold_df1)\ndfmap1 = common.init_map()\ncommon.set_map(dfmap1, data1)\ndf1 = student_code.astar_search(dfmap1)\ntdf1 = 'Reachable goal:'\ncdf1 = check_result(tdf1, dfmap1, gold_dfmap1)\nall_passed = all_passed and cdf1 and df1\ngold_dfmap2 = common.init_map()\ncommon.set_map(gold_dfmap2, gold_df2)\ndfmap2 = common.init_map()\ncommon.set_map(dfmap2, data2)\ndf2 = student_code.astar_search(dfmap2)\ntdf2 = 'Reachable goal:'\ncdf2 = check_result(tdf2, dfmap2, gold_dfmap2)\nall_passed = all_passed and cdf2 and df2\ngold_dfmap3 = common.init_map()\ncommon.set_map(gold_dfmap3, gold_df3)\ndfmap3 = common.init_map()\ncommon.set_map(dfmap3, data3)\ndf3 = student_code.astar_search(dfmap3)\ntdf3 = 'Reachable goal:'\ncdf3 = check_result(tdf3, dfmap3, gold_dfmap3)\nall_passed = all_passed and cdf3 and df3\nall_passed = all_passed and cdf5 and df5\nif all_passed:\n exit(0)\nelse:\n exit(1)\n",
"step-4": "import common\nimport student_code\n\n\nclass bcolors:\n RED = '\\x1b[31m'\n GREEN = '\\x1b[32m'\n NORMAL = '\\x1b[0m'\n\n\ndef check_result(title, map1, map2):\n result = True\n print(title)\n for y in range(0, common.constants.MAP_HEIGHT):\n v = ''\n for x in range(0, common.constants.MAP_WIDTH):\n if map1[y][x] == map2[y][x]:\n v += bcolors.GREEN + str(map1[y][x]) + bcolors.NORMAL\n else:\n result = False\n v += bcolors.RED + str(map1[y][x]) + bcolors.NORMAL\n print(v)\n if result:\n print('Test Result: ' + bcolors.GREEN + 'Passed' + bcolors.NORMAL)\n else:\n print('Test Result: ' + bcolors.RED + 'Failed' + bcolors.NORMAL)\n return result\n\n\ndata1 = '100000011110111011111111011110000003111111011111020000'\ngold_df1 = '100000011110111011111111011110000555111111511111055540'\ndata2 = '200000011011111011000001011111011003111111011111000011111111011'\ngold_df2 = '555555511411111511444441511111411555111111011111000011111111011'\ndata3 = '100000011111011011000011011111011003110011011111200011'\ngold_df3 = '100000011111011011000011011111411555110411511111555511'\nall_passed = True\ngold_dfmap1 = common.init_map()\ncommon.set_map(gold_dfmap1, gold_df1)\ndfmap1 = common.init_map()\ncommon.set_map(dfmap1, data1)\ndf1 = student_code.astar_search(dfmap1)\ntdf1 = 'Reachable goal:'\ncdf1 = check_result(tdf1, dfmap1, gold_dfmap1)\nall_passed = all_passed and cdf1 and df1\ngold_dfmap2 = common.init_map()\ncommon.set_map(gold_dfmap2, gold_df2)\ndfmap2 = common.init_map()\ncommon.set_map(dfmap2, data2)\ndf2 = student_code.astar_search(dfmap2)\ntdf2 = 'Reachable goal:'\ncdf2 = check_result(tdf2, dfmap2, gold_dfmap2)\nall_passed = all_passed and cdf2 and df2\ngold_dfmap3 = common.init_map()\ncommon.set_map(gold_dfmap3, gold_df3)\ndfmap3 = common.init_map()\ncommon.set_map(dfmap3, data3)\ndf3 = student_code.astar_search(dfmap3)\ntdf3 = 'Reachable goal:'\ncdf3 = check_result(tdf3, dfmap3, gold_dfmap3)\nall_passed = all_passed and cdf3 and df3\nall_passed = all_passed and cdf5 and df5\nif all_passed:\n exit(0)\nelse:\n exit(1)\n",
"step-5": "import common\r\nimport student_code\r\n\r\nclass bcolors:\r\n\tRED = \"\\x1b[31m\"\r\n\tGREEN = \"\\x1b[32m\"\r\n\tNORMAL = \"\\x1b[0m\"\r\n\r\ndef check_result(title, map1, map2):\r\n\tresult=True\r\n\tprint(title)\r\n\tfor y in range(0,common.constants.MAP_HEIGHT):\r\n\t\tv=\"\"\r\n\t\tfor x in range(0,common.constants.MAP_WIDTH):\r\n\t\t\tif (map1[y][x]==map2[y][x]):\r\n\t\t\t\tv+=bcolors.GREEN+str(map1[y][x])+bcolors.NORMAL\r\n\t\t\telse:\r\n\t\t\t\tresult = False\r\n\t\t\t\tv+=bcolors.RED+str(map1[y][x])+bcolors.NORMAL\r\n\t\tprint(v)\r\n\tif (result):\r\n\t\tprint(\"Test Result: \" + bcolors.GREEN+\"Passed\"+bcolors.NORMAL)\r\n\telse:\r\n\t\tprint(\"Test Result: \" + bcolors.RED+\"Failed\"+bcolors.NORMAL)\r\n\treturn result\r\n\r\ndata1 = (\r\n\"100000011\"\r\n\"110111011\"\r\n\"111111011\"\r\n\"110000003\"\r\n\"111111011\"\r\n\"111020000\")\r\n\r\ngold_df1 = (\"100000011\"\r\n\"110111011\"\r\n\"111111011\"\r\n\"110000555\"\r\n\"111111511\"\r\n\"111055540\")\r\n\r\ndata2 = (\r\n\"200000011\"\r\n\"011111011\"\r\n\"000001011\"\r\n\"111011003\"\r\n\"111111011\"\r\n\"111000011\"\r\n\"111111011\")\r\n\r\ngold_df2 = (\"555555511\"\r\n\"411111511\"\r\n\"444441511\"\r\n\"111411555\"\r\n\"111111011\"\r\n\"111000011\"\r\n\"111111011\")\r\n\r\n\r\ndata3 = (\r\n\"100000011\"\r\n\"111011011\"\r\n\"000011011\"\r\n\"111011003\"\r\n\"110011011\"\r\n\"111200011\")\r\n\r\ngold_df3 = (\r\n\"100000011\"\r\n\"111011011\"\r\n\"000011011\"\r\n\"111411555\"\r\n\"110411511\"\r\n\"111555511\")\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n \r\nall_passed = True\r\n\r\ngold_dfmap1 = common.init_map();\r\ncommon.set_map(gold_dfmap1, gold_df1)\r\n\r\ndfmap1 = common.init_map()\r\ncommon.set_map(dfmap1, data1)\r\ndf1 = student_code.astar_search(dfmap1)\r\ntdf1 =\"Reachable goal:\"\r\ncdf1 = check_result(tdf1,dfmap1,gold_dfmap1)\r\n\r\nall_passed = all_passed and cdf1 and df1 \r\n\r\ngold_dfmap2 = common.init_map();\r\ncommon.set_map(gold_dfmap2, gold_df2)\r\n\r\ndfmap2 = common.init_map()\r\ncommon.set_map(dfmap2, data2)\r\ndf2 = student_code.astar_search(dfmap2)\r\ntdf2 =\"Reachable goal:\"\r\ncdf2 = check_result(tdf2,dfmap2,gold_dfmap2)\r\n\r\nall_passed = all_passed and cdf2 and df2 \r\n\r\ngold_dfmap3 = common.init_map();\r\ncommon.set_map(gold_dfmap3, gold_df3)\r\n\r\ndfmap3 = common.init_map()\r\ncommon.set_map(dfmap3, data3)\r\ndf3 = student_code.astar_search(dfmap3)\r\ntdf3 =\"Reachable goal:\"\r\ncdf3 = check_result(tdf3,dfmap3,gold_dfmap3)\r\n\r\n\r\nall_passed = all_passed and cdf3 and df3 \r\n\r\n\r\n\r\n\r\nall_passed = all_passed and cdf5 and df5\r\n\r\nif all_passed:\r\n\texit(0)\r\nelse:\r\n\texit(1)\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
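For reference, the harness above treats each map as a mutable grid of ints and expects `student_code.astar_search` to both return truthiness for a reachable goal and paint the grid in place; judging from the data/gold strings, 0 is free, 1 a wall, 2 the start, 3 the goal, 4 an expanded cell and 5 the found path. A minimal sketch under those inferred conventions — it honors the contract, but its tie-breaking need not reproduce the gold expansions exactly, and the list-of-lists grid shape is an assumption about what `common.init_map` returns:

import heapq

def astar_search(grid):
    # Locate start (2) and goal (3) in a row-major grid of ints.
    rows, cols = len(grid), len(grid[0])
    start = goal = None
    for y in range(rows):
        for x in range(cols):
            if grid[y][x] == 2:
                start = (y, x)
            elif grid[y][x] == 3:
                goal = (y, x)
    if start is None or goal is None:
        return False

    def h(cell):
        # Manhattan distance: admissible for unit-cost 4-connected moves.
        return abs(cell[0] - goal[0]) + abs(cell[1] - goal[1])

    open_heap = [(h(start), 0, start)]
    came_from = {start: None}
    g_score = {start: 0}
    while open_heap:
        f, g, current = heapq.heappop(open_heap)
        if g > g_score.get(current, float('inf')):
            continue  # stale heap entry; a cheaper route was found since
        if current == goal:
            while current is not None:  # trace parents, painting the path
                grid[current[0]][current[1]] = 5
                current = came_from[current]
            return True
        grid[current[0]][current[1]] = 4  # mark the cell as expanded
        for dy, dx in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            ny, nx = current[0] + dy, current[1] + dx
            if 0 <= ny < rows and 0 <= nx < cols and grid[ny][nx] != 1:
                tentative = g + 1
                if tentative < g_score.get((ny, nx), float('inf')):
                    g_score[(ny, nx)] = tentative
                    came_from[(ny, nx)] = current
                    heapq.heappush(open_heap, (tentative + h((ny, nx)), tentative, (ny, nx)))
    return False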
import json
import os
import pickle
import random
import urllib.request
from pathlib import Path
import tensorflow as tf
from matplotlib import pyplot as plt
class CNN(object):
def __init__(self):
self.model = tf.keras.Sequential([
tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 1)),
tf.keras.layers.MaxPool2D((2, 2)),
tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(512, activation='relu'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
self.last_training_history = {}
def print_model_info(self):
print(self.model.summary())
def get_model(self):
return self.model
def load_weights(self, filepath='model.h5'):
self.model.load_weights(filepath)
self.model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['acc']
)
def load_last_training_history(self, filepath='result.pk'):
with open(filepath, 'rb') as f:
self.last_training_history = pickle.load(f)
def get_last_training_history(self):
return self.last_training_history
def plot_last_training_history(self, save_plot=False):
for key in self.last_training_history:
y = self.last_training_history[key]
plt.plot([i + 1 for i in range(len(y))], y, label=key)
plt.legend()
plt.grid()
plt.xlabel('epoch')
if save_plot:
plt.savefig('training_history.png', dpi=300)
else:
plt.show()
def train(self, directory, epochs=100, save_model=False, save_history=False):
train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255,
rotation_range=20,
width_shift_range=0.15,
height_shift_range=0.15,
shear_range=0.15,
zoom_range=0.15,
fill_mode='nearest',
horizontal_flip=True,
vertical_flip=False,
brightness_range=None,
channel_shift_range=0
)
test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(
directory,
target_size=(150, 150),
batch_size=32,
color_mode='grayscale',
class_mode='binary'
)
test_generator = test_datagen.flow_from_directory(
directory,
target_size=(150, 150),
batch_size=32,
color_mode='grayscale',
class_mode='binary'
)
self.model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['acc']
)
history = self.model.fit(
train_generator,
epochs=epochs,
validation_data=test_generator
)
if save_model:
self.model.save('model.h5')
if save_history:
with open('result.pk', 'wb') as f:
pickle.dump(history.history, f)
self.last_training_history = history.history
return history.history
def predict_directory(self, directory, probabilities=True):
if directory[-1] != '\\' and directory[-1] != '/':
directory += '/'
predictions = {}
onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
for image_file in onlyfiles:
img = tf.keras.preprocessing.image.load_img(directory + image_file, target_size=(150, 150),
color_mode='grayscale')
            x = tf.keras.preprocessing.image.img_to_array(img)
x = x.reshape((1,) + x.shape)
x = x / 255
y = self.model.predict(x)[0][0]
if probabilities:
predictions[image_file] = y
else:
predictions[image_file] = y > 0.5
return predictions
def predict_single_image(self, file_url):
self.load_weights()
self.load_last_training_history()
file_name = "image.jpg"
urllib.request.urlretrieve(file_url, file_name)
img = tf.keras.preprocessing.image.load_img(file_name, target_size=(150, 150),
color_mode='grayscale')
        x = tf.keras.preprocessing.image.img_to_array(img)
x = x.reshape((1,) + x.shape)
x = x / 255
prediction = self.model.predict(x)[0][0]
is_default_image = prediction < 0.5
print(prediction)
os.remove(file_name)
return json.dumps(True) if is_default_image else json.dumps(False)
    def evaluate_on_directory(self, directory):
        val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1. / 255)
        val_generator = val_datagen.flow_from_directory(
            directory,
            target_size=(150, 150),
            batch_size=32,
            color_mode='grayscale',
            class_mode='binary'
        )
        return self.model.evaluate(val_generator)
def split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):
assert train_size + test_size + val_size == 1
assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1
subdirs = next(os.walk(directory))[1]
if train_size > 0:
os.mkdir(directory + '/train')
for subdir in subdirs:
os.mkdir(directory + '/train/' + subdir)
if test_size > 0:
os.mkdir(directory + '/test')
for subdir in subdirs:
os.mkdir(directory + '/test/' + subdir)
if val_size > 0:
os.mkdir(directory + '/val')
for subdir in subdirs:
os.mkdir(directory + '/val/' + subdir)
pathlist = Path(directory).rglob('*.*')
for path in pathlist:
instance_path = str(path)
instance_properties = instance_path.split('/') if '/' in instance_path else instance_path.split('\\')
instance_name = instance_properties[-1]
instance_class = instance_properties[-2]
r = random.random()
if r < val_size:
subfolder = '/val/'
elif r < test_size + val_size:
subfolder = '/test/'
else:
subfolder = '/train/'
os.rename(instance_path, '/'.join(instance_properties[:-2]) + subfolder + instance_class + '/' + instance_name)
if __name__ == '__main__':
cnn = CNN()
cnn.load_weights()
cnn.load_last_training_history()
cnn.print_model_info()
|
normal
|
{
"blob_id": "9535335c70129f997d7b8739444a503d0b984ac8",
"index": 9753,
"step-1": "<mask token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = 
tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<mask token>\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = 
tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = 
tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"step-4": "import json\nimport os\nimport pickle\nimport random\nimport urllib.request\nfrom pathlib import Path\nimport tensorflow as tf\nfrom matplotlib import pyplot as plt\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, 
file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"step-5": "import json\nimport os\nimport pickle\nimport random\nimport urllib.request\nfrom pathlib import Path\n\nimport tensorflow as tf\nfrom matplotlib import pyplot as plt\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([\n tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 1)),\n tf.keras.layers.MaxPool2D((2, 2)),\n tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Flatten(),\n tf.keras.layers.Dropout(0.5),\n tf.keras.layers.Dense(512, activation='relu'),\n tf.keras.layers.Dense(1, activation='sigmoid')\n ])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(\n optimizer='adam',\n loss='binary_crossentropy',\n metrics=['acc']\n )\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([i + 1 for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n rescale=1. / 255,\n rotation_range=20,\n width_shift_range=0.15,\n height_shift_range=0.15,\n shear_range=0.15,\n zoom_range=0.15,\n fill_mode='nearest',\n horizontal_flip=True,\n vertical_flip=False,\n brightness_range=None,\n channel_shift_range=0\n )\n\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n rescale=1. 
/ 255) \n\n train_generator = train_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n\n test_generator = test_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n\n self.model.compile(\n optimizer='adam',\n loss='binary_crossentropy',\n metrics=['acc']\n )\n\n history = self.model.fit(\n train_generator,\n epochs=epochs,\n validation_data=test_generator\n )\n\n if save_model:\n self.model.save('model.h5')\n\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n\n self.last_training_history = history.history\n\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory + image_file, target_size=(150, 150),\n color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img, )\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = \"image.jpg\"\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=(150, 150),\n color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img, )\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1. 
/ 255)\n val_generator = val_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/') if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) + subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"step-ids": [
12,
13,
14,
15,
16
]
}
|
[
12,
13,
14,
15,
16
] |
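A short usage sketch for the CNN class above; the directory paths are placeholders, not taken from the source, but every call matches a method shown in the entry:

cnn = CNN()
cnn.print_model_info()

# split_directory reshuffles a flat class-per-subfolder tree into
# train/test/val subtrees; 'data' is a placeholder path.
split_directory('data', train_size=0.75, test_size=0.2, val_size=0.05)

# Train on the training split. Note that train() as written builds both
# of its generators from the same directory, so it validates on augmented
# training data rather than a held-out set.
history = cnn.train('data/train', epochs=10, save_model=True, save_history=True)
cnn.plot_last_training_history(save_plot=True)

# Later: restore the saved weights and score a folder of images.
cnn2 = CNN()
cnn2.load_weights('model.h5')
print(cnn2.predict_directory('data/train/some_class', probabilities=True))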
from django.contrib import admin
from django.urls import path, include
from accounts import views
urlpatterns = [
path('google/login', views.google_login),
path('google/callback/', views.google_callback),
    path('accounts/google/login/finish/', views.GoogleLogin.as_view(), name='google_login_todjango'),
]
|
normal
|
{
"blob_id": "68319663aad13b562e56b8ee25f25c7b548417df",
"index": 4739,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('google/login', views.google_login), path(\n 'google/callback/', views.google_callback), path(\n 'accounts/google/login/finish/', views.GoogleLogin.as_view(), name=\n 'google_login_todjango')]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path, include\nfrom accounts import views\nurlpatterns = [path('google/login', views.google_login), path(\n 'google/callback/', views.google_callback), path(\n 'accounts/google/login/finish/', views.GoogleLogin.as_view(), name=\n 'google_login_todjango')]\n",
"step-4": "from django.contrib import admin\nfrom django.urls import path, include\n\nfrom accounts import views\n\nurlpatterns = [\n path('google/login', views.google_login),\n path('google/callback/', views.google_callback),\n path('accounts/google/login/finish/', views.GoogleLogin.as_view(), name = 'google_login_todjango'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
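A minimal sketch of how these routes would typically be mounted from the project urlconf — the module path 'accounts.urls' is an assumption based on the app name in the import above, and this file itself is hypothetical:

# project/urls.py (hypothetical)
from django.urls import path, include

urlpatterns = [
    path('', include('accounts.urls')),
]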
<|reserved_special_token_0|>
class Entity(Agent):
<|reserved_special_token_0|>
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.type = ''
self.position = ''
self.log = []
self.move_probability = None
self.retire_probability = None
self._next_state = None
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def retire(self, other):
"""
swap with an agent and mark yourself as retired
:param other: an Entity-class object
"""
self.swap(other)
self.model.schedule.add(other)
self.model.schedule.remove(self)
self.model.retirees[self.type][self.model.schedule.steps] = self
self.model.per_step_movement[self.type] += 1
def swap(self, other):
"""
swap positions with an entity
:param other: an Entity-class object
"""
new_position = other.position
other.position = self.position
other.log.append(other.position)
your_old_level = int(self.position[0])
self.model.positions[your_old_level][self.position].dual = [other.
unique_id, other.type]
self.position = new_position
self.log.append(self.position)
if self.position != '':
your_new_level = int(self.position[0])
self.model.positions[your_new_level][self.position].dual = [self
.unique_id, self.type]
self.model.per_step_movement[self.type] += 1
def unmoving_update_log(self):
"""update own log if not moving."""
self.log.append(self.log[-1])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Entity(Agent):
<|reserved_special_token_0|>
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.type = ''
self.position = ''
self.log = []
self.move_probability = None
self.retire_probability = None
self._next_state = None
<|reserved_special_token_0|>
def get_next_position(self, next_level):
"""
randomly pick a position in some level and return its ID and the ID of its current occupant.
:param next_level: int
"""
next_positions = list(self.model.positions[int(next_level)].values())
shuffle(next_positions)
for p in next_positions:
if p.dual[1] != self.type:
self.model.desired_positions.append(p.unique_id)
return p.unique_id, p.dual[0]
def retire(self, other):
"""
swap with an agent and mark yourself as retired
:param other: an Entity-class object
"""
self.swap(other)
self.model.schedule.add(other)
self.model.schedule.remove(self)
self.model.retirees[self.type][self.model.schedule.steps] = self
self.model.per_step_movement[self.type] += 1
def swap(self, other):
"""
swap positions with an entity
:param other: an Entity-class object
"""
new_position = other.position
other.position = self.position
other.log.append(other.position)
your_old_level = int(self.position[0])
self.model.positions[your_old_level][self.position].dual = [other.
unique_id, other.type]
self.position = new_position
self.log.append(self.position)
if self.position != '':
your_new_level = int(self.position[0])
self.model.positions[your_new_level][self.position].dual = [self
.unique_id, self.type]
self.model.per_step_movement[self.type] += 1
def unmoving_update_log(self):
"""update own log if not moving."""
self.log.append(self.log[-1])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Entity(Agent):
<|reserved_special_token_0|>
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.type = ''
self.position = ''
self.log = []
self.move_probability = None
self.retire_probability = None
self._next_state = None
def pick_move(self):
"""
given a vector of probabilities that sums to one, pick which level you'll go to
e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]
:return: the draw, an int
"""
cum_sum = np.cumsum(self.move_probability)
cum_sum = np.insert(cum_sum, 0, 0)
rd = np.random.uniform(0.0, 1.0)
m = np.asarray(cum_sum < rd).nonzero()[0]
next_level = m[len(m) - 1]
return next_level
def get_next_position(self, next_level):
"""
randomly pick a position in some level and return its ID and the ID of its current occupant.
:param next_level: int
"""
next_positions = list(self.model.positions[int(next_level)].values())
shuffle(next_positions)
for p in next_positions:
if p.dual[1] != self.type:
self.model.desired_positions.append(p.unique_id)
return p.unique_id, p.dual[0]
def retire(self, other):
"""
swap with an agent and mark yourself as retired
:param other: an Entity-class object
"""
self.swap(other)
self.model.schedule.add(other)
self.model.schedule.remove(self)
self.model.retirees[self.type][self.model.schedule.steps] = self
self.model.per_step_movement[self.type] += 1
def swap(self, other):
"""
swap positions with an entity
:param other: an Entity-class object
"""
new_position = other.position
other.position = self.position
other.log.append(other.position)
your_old_level = int(self.position[0])
self.model.positions[your_old_level][self.position].dual = [other.
unique_id, other.type]
self.position = new_position
self.log.append(self.position)
if self.position != '':
your_new_level = int(self.position[0])
self.model.positions[your_new_level][self.position].dual = [self
.unique_id, self.type]
self.model.per_step_movement[self.type] += 1
def unmoving_update_log(self):
"""update own log if not moving."""
self.log.append(self.log[-1])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from mesa import Agent
from random import shuffle
import numpy as np
class Entity(Agent):
"""
superclass for vacancy and actor agents
not intended to be used on its own, but to inherit its methods to multiple other agents
"""
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.type = ''
self.position = ''
self.log = []
self.move_probability = None
self.retire_probability = None
self._next_state = None
def pick_move(self):
"""
given a vector of probabilities that sums to one, pick which level you'll go to
e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]
:return: the draw, an int
"""
cum_sum = np.cumsum(self.move_probability)
cum_sum = np.insert(cum_sum, 0, 0)
rd = np.random.uniform(0.0, 1.0)
m = np.asarray(cum_sum < rd).nonzero()[0]
next_level = m[len(m) - 1]
return next_level
def get_next_position(self, next_level):
"""
randomly pick a position in some level and return its ID and the ID of its current occupant.
:param next_level: int
"""
next_positions = list(self.model.positions[int(next_level)].values())
shuffle(next_positions)
for p in next_positions:
if p.dual[1] != self.type:
self.model.desired_positions.append(p.unique_id)
return p.unique_id, p.dual[0]
def retire(self, other):
"""
swap with an agent and mark yourself as retired
:param other: an Entity-class object
"""
self.swap(other)
self.model.schedule.add(other)
self.model.schedule.remove(self)
self.model.retirees[self.type][self.model.schedule.steps] = self
self.model.per_step_movement[self.type] += 1
def swap(self, other):
"""
swap positions with an entity
:param other: an Entity-class object
"""
new_position = other.position
other.position = self.position
other.log.append(other.position)
your_old_level = int(self.position[0])
self.model.positions[your_old_level][self.position].dual = [other.
unique_id, other.type]
self.position = new_position
self.log.append(self.position)
if self.position != '':
your_new_level = int(self.position[0])
self.model.positions[your_new_level][self.position].dual = [self
.unique_id, self.type]
self.model.per_step_movement[self.type] += 1
def unmoving_update_log(self):
"""update own log if not moving."""
self.log.append(self.log[-1])
<|reserved_special_token_1|>
"""
generalised behaviour for actors and vacancies
"""
from mesa import Agent
from random import shuffle
import numpy as np
class Entity(Agent):
"""
superclass for vacancy and actor agents
not intended to be used on its own, but to inherit its methods to multiple other agents
"""
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.type = '' # type of entity: vacancy, or actor
self.position = '' # ID of current position
self.log = [] # log of moves
self.move_probability = None # for in-system moves; float [0,1]
self.retire_probability = None # for leaving the system; float [0,1]
self._next_state = None
def pick_move(self):
"""
given a vector of probabilities that sums to one, pick which level you'll go to
e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]
:return: the draw, an int
"""
cum_sum = np.cumsum(self.move_probability)
cum_sum = np.insert(cum_sum, 0, 0)
# throw random dart
rd = np.random.uniform(0.0, 1.0)
# see where dart hit
m = np.asarray(cum_sum < rd).nonzero()[0]
next_level = m[len(m) - 1]
return next_level
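    # Worked illustration (comments only; not part of the original class):
    # move_probability = [0.3, 0.1, 0.3, 0.3] gives
    # cum_sum = [0, 0.3, 0.4, 0.7, 1.0]; a draw rd = 0.55 satisfies
    # cum_sum < rd at indices 0, 1, 2, so next_level = 2 -- an outcome
    # hit with probability 0.7 - 0.4 = 0.3, as intended.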
def get_next_position(self, next_level):
"""
randomly pick a position in some level and return its ID and the ID of its current occupant.
:param next_level: int
"""
next_positions = list(self.model.positions[int(next_level)].values())
shuffle(next_positions)
for p in next_positions:
if p.dual[1] != self.type: # vacancies only pick positions occupied by actors, and vice versa
self.model.desired_positions.append(p.unique_id) # mark position as desired
                return p.unique_id, p.dual[0] # return the position's ID and the ID of its current dual/occupant
def retire(self, other):
"""
swap with an agent and mark yourself as retired
:param other: an Entity-class object
"""
self.swap(other)
self.model.schedule.add(other) # put new entity into scheduler
self.model.schedule.remove(self) # take yourself out of it
self.model.retirees[self.type][self.model.schedule.steps] = self # mark yourself as retiree
self.model.per_step_movement[self.type] += 1
def swap(self, other):
"""
swap positions with an entity
:param other: an Entity-class object
"""
new_position = other.position # mark where you're going
other.position = self.position # put swapee in your position
other.log.append(other.position) # update swapee's log
# update your old position's dual
your_old_level = int(self.position[0])
self.model.positions[your_old_level][self.position].dual = [other.unique_id, other.type]
self.position = new_position # take your new position
self.log.append(self.position) # update your log
# if you have a new position, update its dual
if self.position != '':
your_new_level = int(self.position[0])
self.model.positions[your_new_level][self.position].dual = [self.unique_id, self.type]
# increment movement counters
self.model.per_step_movement[self.type] += 1
def unmoving_update_log(self):
"""update own log if not moving."""
self.log.append(self.log[-1])
|
flexible
|
{
"blob_id": "68b967ecf18d576758cf05e889919944cfc34dcd",
"index": 250,
"step-1": "<mask token>\n\n\nclass Entity(Agent):\n <mask token>\n\n def __init__(self, unique_id, model):\n super().__init__(unique_id, model)\n self.type = ''\n self.position = ''\n self.log = []\n self.move_probability = None\n self.retire_probability = None\n self._next_state = None\n <mask token>\n <mask token>\n\n def retire(self, other):\n \"\"\"\n swap with an agent and mark yourself as retired\n :param other: an Entity-class object\n \"\"\"\n self.swap(other)\n self.model.schedule.add(other)\n self.model.schedule.remove(self)\n self.model.retirees[self.type][self.model.schedule.steps] = self\n self.model.per_step_movement[self.type] += 1\n\n def swap(self, other):\n \"\"\"\n swap positions with an entity\n :param other: an Entity-class object\n \"\"\"\n new_position = other.position\n other.position = self.position\n other.log.append(other.position)\n your_old_level = int(self.position[0])\n self.model.positions[your_old_level][self.position].dual = [other.\n unique_id, other.type]\n self.position = new_position\n self.log.append(self.position)\n if self.position != '':\n your_new_level = int(self.position[0])\n self.model.positions[your_new_level][self.position].dual = [self\n .unique_id, self.type]\n self.model.per_step_movement[self.type] += 1\n\n def unmoving_update_log(self):\n \"\"\"update own log if not moving.\"\"\"\n self.log.append(self.log[-1])\n",
"step-2": "<mask token>\n\n\nclass Entity(Agent):\n <mask token>\n\n def __init__(self, unique_id, model):\n super().__init__(unique_id, model)\n self.type = ''\n self.position = ''\n self.log = []\n self.move_probability = None\n self.retire_probability = None\n self._next_state = None\n <mask token>\n\n def get_next_position(self, next_level):\n \"\"\"\n randomly pick a position in some level and return its ID and the ID of its current occupant.\n :param next_level: int\n \"\"\"\n next_positions = list(self.model.positions[int(next_level)].values())\n shuffle(next_positions)\n for p in next_positions:\n if p.dual[1] != self.type:\n self.model.desired_positions.append(p.unique_id)\n return p.unique_id, p.dual[0]\n\n def retire(self, other):\n \"\"\"\n swap with an agent and mark yourself as retired\n :param other: an Entity-class object\n \"\"\"\n self.swap(other)\n self.model.schedule.add(other)\n self.model.schedule.remove(self)\n self.model.retirees[self.type][self.model.schedule.steps] = self\n self.model.per_step_movement[self.type] += 1\n\n def swap(self, other):\n \"\"\"\n swap positions with an entity\n :param other: an Entity-class object\n \"\"\"\n new_position = other.position\n other.position = self.position\n other.log.append(other.position)\n your_old_level = int(self.position[0])\n self.model.positions[your_old_level][self.position].dual = [other.\n unique_id, other.type]\n self.position = new_position\n self.log.append(self.position)\n if self.position != '':\n your_new_level = int(self.position[0])\n self.model.positions[your_new_level][self.position].dual = [self\n .unique_id, self.type]\n self.model.per_step_movement[self.type] += 1\n\n def unmoving_update_log(self):\n \"\"\"update own log if not moving.\"\"\"\n self.log.append(self.log[-1])\n",
"step-3": "<mask token>\n\n\nclass Entity(Agent):\n <mask token>\n\n def __init__(self, unique_id, model):\n super().__init__(unique_id, model)\n self.type = ''\n self.position = ''\n self.log = []\n self.move_probability = None\n self.retire_probability = None\n self._next_state = None\n\n def pick_move(self):\n \"\"\"\n given a vector of probabilities that sums to one, pick which level you'll go to\n e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]\n :return: the draw, an int\n \"\"\"\n cum_sum = np.cumsum(self.move_probability)\n cum_sum = np.insert(cum_sum, 0, 0)\n rd = np.random.uniform(0.0, 1.0)\n m = np.asarray(cum_sum < rd).nonzero()[0]\n next_level = m[len(m) - 1]\n return next_level\n\n def get_next_position(self, next_level):\n \"\"\"\n randomly pick a position in some level and return its ID and the ID of its current occupant.\n :param next_level: int\n \"\"\"\n next_positions = list(self.model.positions[int(next_level)].values())\n shuffle(next_positions)\n for p in next_positions:\n if p.dual[1] != self.type:\n self.model.desired_positions.append(p.unique_id)\n return p.unique_id, p.dual[0]\n\n def retire(self, other):\n \"\"\"\n swap with an agent and mark yourself as retired\n :param other: an Entity-class object\n \"\"\"\n self.swap(other)\n self.model.schedule.add(other)\n self.model.schedule.remove(self)\n self.model.retirees[self.type][self.model.schedule.steps] = self\n self.model.per_step_movement[self.type] += 1\n\n def swap(self, other):\n \"\"\"\n swap positions with an entity\n :param other: an Entity-class object\n \"\"\"\n new_position = other.position\n other.position = self.position\n other.log.append(other.position)\n your_old_level = int(self.position[0])\n self.model.positions[your_old_level][self.position].dual = [other.\n unique_id, other.type]\n self.position = new_position\n self.log.append(self.position)\n if self.position != '':\n your_new_level = int(self.position[0])\n self.model.positions[your_new_level][self.position].dual = [self\n .unique_id, self.type]\n self.model.per_step_movement[self.type] += 1\n\n def unmoving_update_log(self):\n \"\"\"update own log if not moving.\"\"\"\n self.log.append(self.log[-1])\n",
"step-4": "<mask token>\nfrom mesa import Agent\nfrom random import shuffle\nimport numpy as np\n\n\nclass Entity(Agent):\n \"\"\"\n superclass for vacancy and actor agents\n not intended to be used on its own, but to inherit its methods to multiple other agents\n \"\"\"\n\n def __init__(self, unique_id, model):\n super().__init__(unique_id, model)\n self.type = ''\n self.position = ''\n self.log = []\n self.move_probability = None\n self.retire_probability = None\n self._next_state = None\n\n def pick_move(self):\n \"\"\"\n given a vector of probabilities that sums to one, pick which level you'll go to\n e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]\n :return: the draw, an int\n \"\"\"\n cum_sum = np.cumsum(self.move_probability)\n cum_sum = np.insert(cum_sum, 0, 0)\n rd = np.random.uniform(0.0, 1.0)\n m = np.asarray(cum_sum < rd).nonzero()[0]\n next_level = m[len(m) - 1]\n return next_level\n\n def get_next_position(self, next_level):\n \"\"\"\n randomly pick a position in some level and return its ID and the ID of its current occupant.\n :param next_level: int\n \"\"\"\n next_positions = list(self.model.positions[int(next_level)].values())\n shuffle(next_positions)\n for p in next_positions:\n if p.dual[1] != self.type:\n self.model.desired_positions.append(p.unique_id)\n return p.unique_id, p.dual[0]\n\n def retire(self, other):\n \"\"\"\n swap with an agent and mark yourself as retired\n :param other: an Entity-class object\n \"\"\"\n self.swap(other)\n self.model.schedule.add(other)\n self.model.schedule.remove(self)\n self.model.retirees[self.type][self.model.schedule.steps] = self\n self.model.per_step_movement[self.type] += 1\n\n def swap(self, other):\n \"\"\"\n swap positions with an entity\n :param other: an Entity-class object\n \"\"\"\n new_position = other.position\n other.position = self.position\n other.log.append(other.position)\n your_old_level = int(self.position[0])\n self.model.positions[your_old_level][self.position].dual = [other.\n unique_id, other.type]\n self.position = new_position\n self.log.append(self.position)\n if self.position != '':\n your_new_level = int(self.position[0])\n self.model.positions[your_new_level][self.position].dual = [self\n .unique_id, self.type]\n self.model.per_step_movement[self.type] += 1\n\n def unmoving_update_log(self):\n \"\"\"update own log if not moving.\"\"\"\n self.log.append(self.log[-1])\n",
"step-5": "\"\"\"\ngeneralised behaviour for actors and vacancies\n\"\"\"\n\nfrom mesa import Agent\nfrom random import shuffle\nimport numpy as np\n\n\nclass Entity(Agent):\n \"\"\"\n superclass for vacancy and actor agents\n not intended to be used on its own, but to inherit its methods to multiple other agents\n \"\"\"\n def __init__(self, unique_id, model):\n super().__init__(unique_id, model)\n self.type = '' # type of entity: vacancy, or actor\n self.position = '' # ID of current position\n self.log = [] # log of moves\n self.move_probability = None # for in-system moves; float [0,1]\n self.retire_probability = None # for leaving the system; float [0,1]\n self._next_state = None\n\n def pick_move(self):\n \"\"\"\n given a vector of probabilities that sums to one, pick which level you'll go to\n e.g. vector of probabilities = [0.3, 0.1, 0.3, 0.3]\n :return: the draw, an int\n \"\"\"\n cum_sum = np.cumsum(self.move_probability)\n cum_sum = np.insert(cum_sum, 0, 0)\n # throw random dart\n rd = np.random.uniform(0.0, 1.0)\n # see where dart hit\n m = np.asarray(cum_sum < rd).nonzero()[0]\n next_level = m[len(m) - 1]\n return next_level\n\n\n def get_next_position(self, next_level):\n \"\"\"\n randomly pick a position in some level and return its ID and the ID of its current occupant.\n :param next_level: int\n \"\"\"\n next_positions = list(self.model.positions[int(next_level)].values())\n shuffle(next_positions)\n for p in next_positions:\n if p.dual[1] != self.type: # vacancies only pick positions occupied by actors, and vice versa\n self.model.desired_positions.append(p.unique_id) # mark position as desired\n return p.unique_id, p.dual[0] # return positions ID and ID of current dual/occupant\n\n def retire(self, other):\n \"\"\"\n swap with an agent and mark yourself as retired\n :param other: an Entity-class object\n \"\"\"\n self.swap(other)\n self.model.schedule.add(other) # put new entity into scheduler\n self.model.schedule.remove(self) # take yourself out of it\n self.model.retirees[self.type][self.model.schedule.steps] = self # mark yourself as retiree\n self.model.per_step_movement[self.type] += 1\n\n def swap(self, other):\n \"\"\"\n swap positions with an entity\n :param other: an Entity-class object\n \"\"\"\n new_position = other.position # mark where you're going\n other.position = self.position # put swapee in your position\n other.log.append(other.position) # update swapee's log\n # update your old position's dual\n your_old_level = int(self.position[0])\n self.model.positions[your_old_level][self.position].dual = [other.unique_id, other.type]\n\n self.position = new_position # take your new position\n self.log.append(self.position) # update your log\n # if you have a new position, update its dual\n if self.position != '':\n your_new_level = int(self.position[0])\n self.model.positions[your_new_level][self.position].dual = [self.unique_id, self.type]\n # increment movement counters\n self.model.per_step_movement[self.type] += 1\n\n def unmoving_update_log(self):\n \"\"\"update own log if not moving.\"\"\"\n self.log.append(self.log[-1])\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
#!/usr/bin/env python3
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import train_test_split
import numpy as np
import seaborn as sb
import pandas as pd
from pmlb import fetch_data, classification_dataset_names
import util
# from os.path import exists, join
# from os import makedirs
# scores a model on the data [X y]
def score_model(X, y, model):
train_X, test_X, train_y, test_y = train_test_split(X, y)
model.fit(train_X, train_y) # train the model
return model.score(test_X, test_y)
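# Illustration: with scikit-learn's default 75/25 split, a call such as
# score_model(X, y, LogisticRegression()) returns the held-out accuracy in [0, 1].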
# returns dict of scores (keyed by names) after running each model on the provided data
@util.timeout(180)
def compare(X, y, model_list, model_names, n_times=10):
total = {}
for i, m in enumerate(model_list):
print(" Tring model {}: ".format(i), end="", flush=True)
results = []
for t in range(n_times):
results.append(score_model(X, y, m()))
mean = np.mean(results)
print(mean)
total[model_names[i]] = [mean]
return total
def main():
ds_names = classification_dataset_names
models = [LogisticRegression, GradientBoostingClassifier]
model_names = ["LogisticRegression", "GradientBoosting"]
results = {}
for i, n in enumerate(ds_names):
try:
print("Iteration: {}/{} '{}'".format(i+1, len(ds_names), n))
X, y = fetch_data(n, return_X_y=True)
results = util.merge_dicts(results,
compare(X, y, models, model_names)) # updates results
pd.DataFrame(results).to_pickle('labels.pkl')
except util.TimeoutError:
print("Timed Out!")
print("Done!")
df = pd.DataFrame(results)
df = df.rename(index=util.list_to_idx_dict(ds_names))
df.to_pickle("labels.pkl")
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "4c010f9d9e7813a4ae4f592ade60130933b51958",
"index": 6125,
"step-1": "<mask token>\n\n\ndef score_model(X, y, model):\n train_X, test_X, train_y, test_y = train_test_split(X, y)\n model.fit(train_X, train_y)\n return model.score(test_X, test_y)\n\n\n<mask token>\n\n\ndef main():\n ds_names = classification_dataset_names\n models = [LogisticRegression, GradientBoostingClassifier]\n model_names = ['LogisticRegression', 'GradientBoosting']\n results = {}\n for i, n in enumerate(ds_names):\n try:\n print(\"Iteration: {}/{} '{}'\".format(i + 1, len(ds_names), n))\n X, y = fetch_data(n, return_X_y=True)\n results = util.merge_dicts(results, compare(X, y, models,\n model_names))\n pd.DataFrame(results).to_pickle('labels.pkl')\n except util.TimeoutError:\n print('Timed Out!')\n print('Done!')\n df = pd.DataFrame(results)\n df = df.rename(index=util.list_to_idx_dict(ds_names))\n df.to_pickle('labels.pkl')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef score_model(X, y, model):\n train_X, test_X, train_y, test_y = train_test_split(X, y)\n model.fit(train_X, train_y)\n return model.score(test_X, test_y)\n\n\n@util.timeout(180)\ndef compare(X, y, model_list, model_names, n_times=10):\n total = {}\n for i, m in enumerate(model_list):\n print(' Tring model {}: '.format(i), end='', flush=True)\n results = []\n for t in range(n_times):\n results.append(score_model(X, y, m()))\n mean = np.mean(results)\n print(mean)\n total[model_names[i]] = [mean]\n return total\n\n\ndef main():\n ds_names = classification_dataset_names\n models = [LogisticRegression, GradientBoostingClassifier]\n model_names = ['LogisticRegression', 'GradientBoosting']\n results = {}\n for i, n in enumerate(ds_names):\n try:\n print(\"Iteration: {}/{} '{}'\".format(i + 1, len(ds_names), n))\n X, y = fetch_data(n, return_X_y=True)\n results = util.merge_dicts(results, compare(X, y, models,\n model_names))\n pd.DataFrame(results).to_pickle('labels.pkl')\n except util.TimeoutError:\n print('Timed Out!')\n print('Done!')\n df = pd.DataFrame(results)\n df = df.rename(index=util.list_to_idx_dict(ds_names))\n df.to_pickle('labels.pkl')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef score_model(X, y, model):\n train_X, test_X, train_y, test_y = train_test_split(X, y)\n model.fit(train_X, train_y)\n return model.score(test_X, test_y)\n\n\n@util.timeout(180)\ndef compare(X, y, model_list, model_names, n_times=10):\n total = {}\n for i, m in enumerate(model_list):\n print(' Tring model {}: '.format(i), end='', flush=True)\n results = []\n for t in range(n_times):\n results.append(score_model(X, y, m()))\n mean = np.mean(results)\n print(mean)\n total[model_names[i]] = [mean]\n return total\n\n\ndef main():\n ds_names = classification_dataset_names\n models = [LogisticRegression, GradientBoostingClassifier]\n model_names = ['LogisticRegression', 'GradientBoosting']\n results = {}\n for i, n in enumerate(ds_names):\n try:\n print(\"Iteration: {}/{} '{}'\".format(i + 1, len(ds_names), n))\n X, y = fetch_data(n, return_X_y=True)\n results = util.merge_dicts(results, compare(X, y, models,\n model_names))\n pd.DataFrame(results).to_pickle('labels.pkl')\n except util.TimeoutError:\n print('Timed Out!')\n print('Done!')\n df = pd.DataFrame(results)\n df = df.rename(index=util.list_to_idx_dict(ds_names))\n df.to_pickle('labels.pkl')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import GradientBoostingClassifier\nfrom sklearn.model_selection import train_test_split\nimport numpy as np\nimport seaborn as sb\nimport pandas as pd\nfrom pmlb import fetch_data, classification_dataset_names\nimport util\n\n\ndef score_model(X, y, model):\n train_X, test_X, train_y, test_y = train_test_split(X, y)\n model.fit(train_X, train_y)\n return model.score(test_X, test_y)\n\n\n@util.timeout(180)\ndef compare(X, y, model_list, model_names, n_times=10):\n total = {}\n for i, m in enumerate(model_list):\n print(' Tring model {}: '.format(i), end='', flush=True)\n results = []\n for t in range(n_times):\n results.append(score_model(X, y, m()))\n mean = np.mean(results)\n print(mean)\n total[model_names[i]] = [mean]\n return total\n\n\ndef main():\n ds_names = classification_dataset_names\n models = [LogisticRegression, GradientBoostingClassifier]\n model_names = ['LogisticRegression', 'GradientBoosting']\n results = {}\n for i, n in enumerate(ds_names):\n try:\n print(\"Iteration: {}/{} '{}'\".format(i + 1, len(ds_names), n))\n X, y = fetch_data(n, return_X_y=True)\n results = util.merge_dicts(results, compare(X, y, models,\n model_names))\n pd.DataFrame(results).to_pickle('labels.pkl')\n except util.TimeoutError:\n print('Timed Out!')\n print('Done!')\n df = pd.DataFrame(results)\n df = df.rename(index=util.list_to_idx_dict(ds_names))\n df.to_pickle('labels.pkl')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#/usr/share/python3\n\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import GradientBoostingClassifier\n\nfrom sklearn.model_selection import train_test_split\n\nimport numpy as np\nimport seaborn as sb\nimport pandas as pd\n\nfrom pmlb import fetch_data, classification_dataset_names\n\nimport util\n\n# from os.path import exists, join\n# from os import makedirs\n\n# scores a model on the data [X y]\ndef score_model(X, y, model):\n train_X, test_X, train_y, test_y = train_test_split(X, y)\n model.fit(train_X, train_y) # train the model\n return model.score(test_X, test_y)\n\n# returns dict of scores (keyed by names) after running each model on the provided data\n@util.timeout(180)\ndef compare(X, y, model_list, model_names, n_times=10):\n total = {}\n for i, m in enumerate(model_list):\n print(\" Tring model {}: \".format(i), end=\"\", flush=True)\n results = []\n for t in range(n_times):\n results.append(score_model(X, y, m()))\n mean = np.mean(results)\n print(mean)\n total[model_names[i]] = [mean]\n return total\n\ndef main():\n ds_names = classification_dataset_names\n models = [LogisticRegression, GradientBoostingClassifier]\n model_names = [\"LogisticRegression\", \"GradientBoosting\"]\n results = {}\n for i, n in enumerate(ds_names):\n try:\n print(\"Iteration: {}/{} '{}'\".format(i+1, len(ds_names), n))\n X, y = fetch_data(n, return_X_y=True)\n results = util.merge_dicts(results,\n compare(X, y, models, model_names)) # updates results\n pd.DataFrame(results).to_pickle('labels.pkl')\n except util.TimeoutError:\n print(\"Timed Out!\")\n print(\"Done!\")\n df = pd.DataFrame(results)\n df = df.rename(index=util.list_to_idx_dict(ds_names))\n df.to_pickle(\"labels.pkl\")\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class WINRM(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'
self.conn = Protocol(endpoint=self.host_win_ip, transport=
'ntlm', username=self.usr, password=self.pwd,
server_cert_validation='ignore')
logger.warn('Connecting Windows ...')
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = 'Exception raised: %s ' % error
raise msg_exception_error
def run_cmd(self, cmd):
"""
Generic Method for passing command and run it on windows machine and return output.
- **parameters**, **types**, **return** and **return types**::
:param cmd: Command to be executed on windows machine.
:return stdout,stderr,status_code : output,errormessage and statuscode of output.
:rtype stdout,stderr,status_code: tuple
"""
if 'shell_id' in dir(self):
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(self
.shell_id, command_id)
return std_out, std_err, status_code
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WINRM(object):
<|reserved_special_token_0|>
def __init__(self, host_ip, usr, pwd):
"""
- **parameters**, **types**, **return** and **return types**::
:param os_type : windows/linux
:param host_ip: ip address of the Windows host
:param usr: username of the Windows Host
:param pwd: Password of the Windows Host
:type os_type: string
:type host_ip: string
:type u_name: string
:type pwd: string
"""
self.os_type = 'windows'
self.host_ip = host_ip
self.usr = usr
self.pwd = pwd
self.shell_id = None
self.host_win_ip = None
self.conn = None
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'
self.conn = Protocol(endpoint=self.host_win_ip, transport=
'ntlm', username=self.usr, password=self.pwd,
server_cert_validation='ignore')
logger.warn('Connecting Windows ...')
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = 'Exception raised: %s ' % error
raise msg_exception_error
def run_cmd(self, cmd):
"""
Generic Method for passing command and run it on windows machine and return output.
- **parameters**, **types**, **return** and **return types**::
:param cmd: Command to be executed on windows machine.
:return stdout,stderr,status_code : output,errormessage and statuscode of output.
:rtype stdout,stderr,status_code: tuple
"""
if 'shell_id' in dir(self):
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(self
.shell_id, command_id)
return std_out, std_err, status_code
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class WINRM(object):
"""
WINRM Module to connect to windows host
"""
def __init__(self, host_ip, usr, pwd):
"""
- **parameters**, **types**, **return** and **return types**::
:param os_type : windows/linux
:param host_ip: ip address of the Windows host
:param usr: username of the Windows Host
:param pwd: Password of the Windows Host
:type os_type: string
:type host_ip: string
:type u_name: string
:type pwd: string
"""
self.os_type = 'windows'
self.host_ip = host_ip
self.usr = usr
self.pwd = pwd
self.shell_id = None
self.host_win_ip = None
self.conn = None
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'
self.conn = Protocol(endpoint=self.host_win_ip, transport=
'ntlm', username=self.usr, password=self.pwd,
server_cert_validation='ignore')
logger.warn('Connecting Windows ...')
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = 'Exception raised: %s ' % error
raise msg_exception_error
def run_cmd(self, cmd):
"""
Generic Method for passing command and run it on windows machine and return output.
- **parameters**, **types**, **return** and **return types**::
:param cmd: Command to be executed on windows machine.
:return stdout,stderr,status_code : output,errormessage and statuscode of output.
:rtype stdout,stderr,status_code: tuple
"""
if 'shell_id' in dir(self):
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(self
.shell_id, command_id)
return std_out, std_err, status_code
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from winrm.protocol import Protocol
from lib import logger
class WINRM(object):
"""
WINRM Module to connect to windows host
"""
def __init__(self, host_ip, usr, pwd):
"""
- **parameters**, **types**, **return** and **return types**::
:param os_type : windows/linux
:param host_ip: ip address of the Windows host
:param usr: username of the Windows Host
:param pwd: Password of the Windows Host
:type os_type: string
:type host_ip: string
:type u_name: string
:type pwd: string
"""
self.os_type = 'windows'
self.host_ip = host_ip
self.usr = usr
self.pwd = pwd
self.shell_id = None
self.host_win_ip = None
self.conn = None
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'
self.conn = Protocol(endpoint=self.host_win_ip, transport=
'ntlm', username=self.usr, password=self.pwd,
server_cert_validation='ignore')
logger.warn('Connecting Windows ...')
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = 'Exception raised: %s ' % error
raise msg_exception_error
def run_cmd(self, cmd):
"""
Generic Method for passing command and run it on windows machine and return output.
- **parameters**, **types**, **return** and **return types**::
:param cmd: Command to be executed on windows machine.
:return stdout,stderr,status_code : output,errormessage and statuscode of output.
:rtype stdout,stderr,status_code: tuple
"""
if 'shell_id' in dir(self):
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(self
.shell_id, command_id)
return std_out, std_err, status_code
<|reserved_special_token_1|>
"""
WINRM Module to connect to windows host
"""
from winrm.protocol import Protocol
from lib import logger
class WINRM(object):
"""
WINRM Module to connect to windows host
"""
def __init__(self, host_ip, usr, pwd):
"""
- **parameters**, **types**, **return** and **return types**::
:param os_type : windows/linux
:param host_ip: ip address of the Windows host
:param usr: username of the Windows Host
:param pwd: Password of the Windows Host
:type os_type: string
:type host_ip: string
:type u_name: string
:type pwd: string
"""
self.os_type = 'windows'
self.host_ip = host_ip
self.usr = usr
self.pwd = pwd
self.shell_id = None
self.host_win_ip = None
self.conn = None
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = "http://" + self.host_ip + ":5985/wsman"
self.conn = Protocol(
endpoint=self.host_win_ip,
transport="ntlm",
username=self.usr,
password=self.pwd,
server_cert_validation="ignore")
logger.warn("Connecting Windows ...")
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = "Exception raised: %s " % error
            raise RuntimeError(msg_exception_error)  # raising a bare string is invalid in Python 3
def run_cmd(self, cmd):
"""
        Generic method to run a command on the Windows machine and return its output.
- **parameters**, **types**, **return** and **return types**::
:param cmd: Command to be executed on windows machine.
:return stdout,stderr,status_code : output,errormessage and statuscode of output.
:rtype stdout,stderr,status_code: tuple
"""
if 'shell_id' in dir(self):
#checking for the shell_id created in winrm object
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(
self.shell_id, command_id)
#runs the command and returns output,error,statuscode
return std_out, std_err, status_code
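# Illustrative usage sketch (the host and credentials below are placeholders,
# not values from the original module):
#
#   win = WINRM('192.0.2.10', 'Administrator', 'secret')
#   win.connect()
#   out, err, rc = win.run_cmd('ipconfig')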
|
flexible
|
{
"blob_id": "96ac9088650490a7da00c7a20f634b76e673ca2d",
"index": 1174,
"step-1": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n <mask token>\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-2": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-3": "<mask token>\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-4": "<mask token>\nfrom winrm.protocol import Protocol\nfrom lib import logger\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-5": "\"\"\"\r\nWINRM Module to connect to windows host\r\n\"\"\"\r\nfrom winrm.protocol import Protocol\r\nfrom lib import logger\r\n\r\n\r\nclass WINRM(object):\r\n \"\"\"\r\n WINRM Module to connect to windows host\r\n \"\"\"\r\n def __init__(self, host_ip, usr, pwd):\r\n \"\"\"\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param os_type : windows/linux\r\n :param host_ip: ip address of the Windows host\r\n :param usr: username of the Windows Host\r\n :param pwd: Password of the Windows Host\r\n :type os_type: string\r\n :type host_ip: string\r\n :type u_name: string\r\n :type pwd: string\r\n \"\"\"\r\n self.os_type = 'windows'\r\n self.host_ip = host_ip\r\n self.usr = usr\r\n self.pwd = pwd\r\n self.shell_id = None\r\n self.host_win_ip = None\r\n self.conn = None\r\n\r\n def connect(self):\r\n \"\"\"\r\n Method to connect to a Windows machine.\r\n \"\"\"\r\n try:\r\n self.host_win_ip = \"http://\" + self.host_ip + \":5985/wsman\"\r\n self.conn = Protocol(\r\n endpoint=self.host_win_ip,\r\n transport=\"ntlm\",\r\n username=self.usr,\r\n password=self.pwd,\r\n server_cert_validation=\"ignore\")\r\n logger.warn(\"Connecting Windows ...\")\r\n self.shell_id = self.conn.open_shell()\r\n logger.warn(self.shell_id)\r\n logger.warn('Connected to Windows.')\r\n except Exception as error:\r\n msg_exception_error = \"Exception raised: %s \" % error\r\n raise(msg_exception_error)\r\n\r\n def run_cmd(self, cmd):\r\n \"\"\"\r\n Generic Method for passing command and run it on windows machine and return output.\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param cmd: Command to be executed on windows machine.\r\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\r\n :rtype stdout,stderr,status_code: tuple\r\n \"\"\"\r\n if 'shell_id' in dir(self):\r\n #checking for the shell_id created in winrm object\r\n command_id = self.conn.run_command(self.shell_id, cmd)\r\n std_out, std_err, status_code = self.conn.get_command_output(\r\n self.shell_id, command_id)\r\n #runs the command and returns output,error,statuscode\r\n return std_out, std_err, status_code\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from __future__ import annotations
from typing import TYPE_CHECKING
from datetime import datetime
from sqlalchemy import Column, ForeignKey, String, DateTime, Float, Integer
from sqlalchemy.orm import relationship
from app.db.base_class import Base
if TYPE_CHECKING:
from .account import Account # noqa: F401
from .code import Code # noqa: F401
class Voucher(Base):
__tablename__ = 't_juju_voucher'
code = Column(String(100), index=True, unique=True)
serial_no = Column(String(120), index=True, unique=True)
amount = Column(Float, default=0, nullable=False)
vtime = Column(DateTime(), nullable=False)
vtype = Column(String(50), ForeignKey("t_juju_code.vtype"))
comment = Column(String(150), nullable=True)
create_time = Column(DateTime(), default=datetime.now)
update_time = Column(DateTime(), default=datetime.now,
onupdate=datetime.now)
owner_id = Column(Integer, ForeignKey("t_juju_account.id"))
modifier_id = Column(Integer, ForeignKey("t_juju_account.id"))
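# Illustrative usage sketch; it assumes a SQLAlchemy Session (e.g. from
# app.db), which this module does not itself import:
#
#   voucher = Voucher(code='ABC-123', serial_no='SN-001', amount=9.99,
#                     vtime=datetime.now(), vtype='gift')
#   session.add(voucher)
#   session.commit()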
|
normal
|
{
"blob_id": "60d8276a5715899823b12ffdf132925c6f2693bd",
"index": 8675,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Voucher(Base):\n __tablename__ = 't_juju_voucher'\n code = Column(String(100), index=True, unique=True)\n serial_no = Column(String(120), index=True, unique=True)\n amount = Column(Float, default=0, nullable=False)\n vtime = Column(DateTime(), nullable=False)\n vtype = Column(String(50), ForeignKey('t_juju_code.vtype'))\n comment = Column(String(150), nullable=True)\n create_time = Column(DateTime(), default=datetime.now)\n update_time = Column(DateTime(), default=datetime.now, onupdate=\n datetime.now)\n owner_id = Column(Integer, ForeignKey('t_juju_account.id'))\n modifier_id = Column(Integer, ForeignKey('t_juju_account.id'))\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from .account import Account\n from .code import Code\n\n\nclass Voucher(Base):\n __tablename__ = 't_juju_voucher'\n code = Column(String(100), index=True, unique=True)\n serial_no = Column(String(120), index=True, unique=True)\n amount = Column(Float, default=0, nullable=False)\n vtime = Column(DateTime(), nullable=False)\n vtype = Column(String(50), ForeignKey('t_juju_code.vtype'))\n comment = Column(String(150), nullable=True)\n create_time = Column(DateTime(), default=datetime.now)\n update_time = Column(DateTime(), default=datetime.now, onupdate=\n datetime.now)\n owner_id = Column(Integer, ForeignKey('t_juju_account.id'))\n modifier_id = Column(Integer, ForeignKey('t_juju_account.id'))\n",
"step-4": "from __future__ import annotations\nfrom typing import TYPE_CHECKING\nfrom datetime import datetime\nfrom sqlalchemy import Column, ForeignKey, String, DateTime, Float, Integer\nfrom sqlalchemy.orm import relationship\nfrom app.db.base_class import Base\nif TYPE_CHECKING:\n from .account import Account\n from .code import Code\n\n\nclass Voucher(Base):\n __tablename__ = 't_juju_voucher'\n code = Column(String(100), index=True, unique=True)\n serial_no = Column(String(120), index=True, unique=True)\n amount = Column(Float, default=0, nullable=False)\n vtime = Column(DateTime(), nullable=False)\n vtype = Column(String(50), ForeignKey('t_juju_code.vtype'))\n comment = Column(String(150), nullable=True)\n create_time = Column(DateTime(), default=datetime.now)\n update_time = Column(DateTime(), default=datetime.now, onupdate=\n datetime.now)\n owner_id = Column(Integer, ForeignKey('t_juju_account.id'))\n modifier_id = Column(Integer, ForeignKey('t_juju_account.id'))\n",
"step-5": "from __future__ import annotations\n\n\nfrom typing import TYPE_CHECKING\nfrom datetime import datetime\n\nfrom sqlalchemy import Column, ForeignKey, String, DateTime, Float, Integer\nfrom sqlalchemy.orm import relationship\n\nfrom app.db.base_class import Base\n\nif TYPE_CHECKING:\n from .account import Account # noqa: F401\n from .code import Code # noqa: F401\n\n\nclass Voucher(Base):\n __tablename__ = 't_juju_voucher'\n code = Column(String(100), index=True, unique=True)\n serial_no = Column(String(120), index=True, unique=True)\n amount = Column(Float, default=0, nullable=False)\n vtime = Column(DateTime(), nullable=False)\n\n vtype = Column(String(50), ForeignKey(\"t_juju_code.vtype\"))\n\n comment = Column(String(150), nullable=True)\n create_time = Column(DateTime(), default=datetime.now)\n update_time = Column(DateTime(), default=datetime.now,\n onupdate=datetime.now)\n\n owner_id = Column(Integer, ForeignKey(\"t_juju_account.id\"))\n modifier_id = Column(Integer, ForeignKey(\"t_juju_account.id\"))\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
SSMDocumentName = 'AWS-RunPowerShellScript'
InstanceId = ['i-081a7260c79feb260']
Querytimeoutseconds = 3600
OutputS3BucketName = 'hccake'
OutputS3KeyPrefix = 'log_'
region_name = 'us-east-2'
aws_access_key_id = ''
aws_secret_access_key = ''
workingdirectory = ['c:\\']
executiontimeout = ['3600']
<|reserved_special_token_1|>
SSMDocumentName = 'AWS-RunPowerShellScript'
InstanceId = ['i-081a7260c79feb260']
Querytimeoutseconds = 3600
OutputS3BucketName = 'hccake'
OutputS3KeyPrefix = 'log_'
region_name = 'us-east-2'
aws_access_key_id = ''
aws_secret_access_key = ''
workingdirectory = ["c:\\"]
executiontimeout = ["3600"]
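# Illustrative sketch of how these settings could feed boto3's SSM client
# (boto3 is an assumption here -- this module does not import it):
#
#   import boto3
#   ssm = boto3.client('ssm', region_name=region_name,
#                      aws_access_key_id=aws_access_key_id,
#                      aws_secret_access_key=aws_secret_access_key)
#   ssm.send_command(InstanceIds=InstanceId, DocumentName=SSMDocumentName,
#                    TimeoutSeconds=Querytimeoutseconds,
#                    OutputS3BucketName=OutputS3BucketName,
#                    OutputS3KeyPrefix=OutputS3KeyPrefix,
#                    Parameters={'commands': ['Get-Date'],
#                                'workingDirectory': workingdirectory,
#                                'executionTimeout': executiontimeout})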
|
flexible
|
{
"blob_id": "e55fe845c18ff70ba12bb7c2db28ceded8ae9129",
"index": 1580,
"step-1": "<mask token>\n",
"step-2": "SSMDocumentName = 'AWS-RunPowerShellScript'\nInstanceId = ['i-081a7260c79feb260']\nQuerytimeoutseconds = 3600\nOutputS3BucketName = 'hccake'\nOutputS3KeyPrefix = 'log_'\nregion_name = 'us-east-2'\naws_access_key_id = ''\naws_secret_access_key = ''\nworkingdirectory = ['c:\\\\']\nexecutiontimeout = ['3600']\n",
"step-3": "SSMDocumentName ='AWS-RunPowerShellScript'\nInstanceId = ['i-081a7260c79feb260']\nQuerytimeoutseconds = 3600\nOutputS3BucketName = 'hccake'\nOutputS3KeyPrefix = 'log_'\nregion_name ='us-east-2'\naws_access_key_id =''\naws_secret_access_key =''\nworkingdirectory =[\"c:\\\\\"]\nexecutiontimeout =[\"3600\"]",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from urllib.request import urlopen
from bs4 import BeautifulSoup
import json
def get_webcasts(year):
url = "https://www.sans.org/webcasts/archive/" + str(year)
page = urlopen(url)
soup = BeautifulSoup(page, 'html.parser')
table = soup.find('table', {"class": "table table-bordered table-striped"})
webcasts = []
for row in table.find_all('tr'):
title_content = row.find('td', {"class": "table_data table_data_title"})
if title_content is None:
continue
title_anchor = title_content.find('a')
title_link = title_anchor.get("href")
title = title_anchor.string
date = row.find('td', {"class": "table_data table_data_date"})
sponsor = row.find('td', {"class": "table_data table_data_sponsor"})
speaker = row.find('td', {"class": "table_data table_data_speaker"})
webcast = {"title": title, "date": date.string, "sponsor": sponsor.string,
"speaker": speaker.string}
webcasts.append(webcast)
return webcasts
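# Each list element built above is a dict of the form (keys only; values
# come from the scraped page):
#   {"title": ..., "date": ..., "sponsor": ..., "speaker": ...}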
result = {}
for year in range(2013, 2019):
webcasts = get_webcasts(year)
result[str(year)] = webcasts
print(json.dumps(result))
|
normal
|
{
"blob_id": "14971842092c7aa41477f28cec87628a73a8ffd6",
"index": 8407,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_webcasts(year):\n url = 'https://www.sans.org/webcasts/archive/' + str(year)\n page = urlopen(url)\n soup = BeautifulSoup(page, 'html.parser')\n table = soup.find('table', {'class': 'table table-bordered table-striped'})\n webcasts = []\n for row in table.find_all('tr'):\n title_content = row.find('td', {'class': 'table_data table_data_title'}\n )\n if title_content is None:\n continue\n title_anchor = title_content.find('a')\n title_link = title_anchor.get('href')\n title = title_anchor.string\n date = row.find('td', {'class': 'table_data table_data_date'})\n sponsor = row.find('td', {'class': 'table_data table_data_sponsor'})\n speaker = row.find('td', {'class': 'table_data table_data_speaker'})\n webcast = {'title': title, 'date': date.string, 'sponsor': sponsor.\n string, 'speaker': speaker.string}\n webcasts.append(webcast)\n return webcasts\n\n\n<mask token>\nfor year in range(2013, 2019):\n webcasts = get_webcasts(year)\n result[str(year)] = webcasts\nprint(json.dumps(result))\n",
"step-3": "<mask token>\n\n\ndef get_webcasts(year):\n url = 'https://www.sans.org/webcasts/archive/' + str(year)\n page = urlopen(url)\n soup = BeautifulSoup(page, 'html.parser')\n table = soup.find('table', {'class': 'table table-bordered table-striped'})\n webcasts = []\n for row in table.find_all('tr'):\n title_content = row.find('td', {'class': 'table_data table_data_title'}\n )\n if title_content is None:\n continue\n title_anchor = title_content.find('a')\n title_link = title_anchor.get('href')\n title = title_anchor.string\n date = row.find('td', {'class': 'table_data table_data_date'})\n sponsor = row.find('td', {'class': 'table_data table_data_sponsor'})\n speaker = row.find('td', {'class': 'table_data table_data_speaker'})\n webcast = {'title': title, 'date': date.string, 'sponsor': sponsor.\n string, 'speaker': speaker.string}\n webcasts.append(webcast)\n return webcasts\n\n\nresult = {}\nfor year in range(2013, 2019):\n webcasts = get_webcasts(year)\n result[str(year)] = webcasts\nprint(json.dumps(result))\n",
"step-4": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport json\n\n\ndef get_webcasts(year):\n url = 'https://www.sans.org/webcasts/archive/' + str(year)\n page = urlopen(url)\n soup = BeautifulSoup(page, 'html.parser')\n table = soup.find('table', {'class': 'table table-bordered table-striped'})\n webcasts = []\n for row in table.find_all('tr'):\n title_content = row.find('td', {'class': 'table_data table_data_title'}\n )\n if title_content is None:\n continue\n title_anchor = title_content.find('a')\n title_link = title_anchor.get('href')\n title = title_anchor.string\n date = row.find('td', {'class': 'table_data table_data_date'})\n sponsor = row.find('td', {'class': 'table_data table_data_sponsor'})\n speaker = row.find('td', {'class': 'table_data table_data_speaker'})\n webcast = {'title': title, 'date': date.string, 'sponsor': sponsor.\n string, 'speaker': speaker.string}\n webcasts.append(webcast)\n return webcasts\n\n\nresult = {}\nfor year in range(2013, 2019):\n webcasts = get_webcasts(year)\n result[str(year)] = webcasts\nprint(json.dumps(result))\n",
"step-5": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport json\n\ndef get_webcasts(year):\n url = \"https://www.sans.org/webcasts/archive/\" + str(year)\n page = urlopen(url)\n soup = BeautifulSoup(page, 'html.parser')\n table = soup.find('table', {\"class\": \"table table-bordered table-striped\"})\n\n webcasts = []\n for row in table.find_all('tr'):\n title_content = row.find('td', {\"class\": \"table_data table_data_title\"})\n\n if title_content is None:\n continue\n\n title_anchor = title_content.find('a')\n title_link = title_anchor.get(\"href\")\n title = title_anchor.string\n\n date = row.find('td', {\"class\": \"table_data table_data_date\"})\n sponsor = row.find('td', {\"class\": \"table_data table_data_sponsor\"})\n speaker = row.find('td', {\"class\": \"table_data table_data_speaker\"})\n\n webcast = {\"title\": title, \"date\": date.string, \"sponsor\": sponsor.string,\n \"speaker\": speaker.string}\n webcasts.append(webcast)\n\n return webcasts\n\nresult = {}\nfor year in range(2013, 2019):\n webcasts = get_webcasts(year)\n result[str(year)] = webcasts\n\nprint(json.dumps(result))\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-27 21:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
dependencies = [
('regions', '0002_auto_20171024_1707'),
]
operations = [
migrations.AlterField(
model_name='region',
name='email',
field=models.EmailField(max_length=100, null=True, verbose_name='email'),
),
migrations.AlterField(
model_name='region',
name='governor',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='public_servants.PublicServant', verbose_name='governor'),
),
migrations.AlterField(
model_name='region',
name='phone',
field=phonenumber_field.modelfields.PhoneNumberField(max_length=128, null=True, verbose_name='phone'),
),
migrations.AlterField(
model_name='region',
name='twitter',
field=models.CharField(blank=True, max_length=50, null=True),
),
]
|
normal
|
{
"blob_id": "1330addd53c6187a41dfea6957bf47aaecca1135",
"index": 7180,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('regions', '0002_auto_20171024_1707')]\n operations = [migrations.AlterField(model_name='region', name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name=\n 'email')), migrations.AlterField(model_name='region', name=\n 'governor', field=models.ForeignKey(null=True, on_delete=django.db.\n models.deletion.CASCADE, to='public_servants.PublicServant',\n verbose_name='governor')), migrations.AlterField(model_name=\n 'region', name='phone', field=phonenumber_field.modelfields.\n PhoneNumberField(max_length=128, null=True, verbose_name='phone')),\n migrations.AlterField(model_name='region', name='twitter', field=\n models.CharField(blank=True, max_length=50, null=True))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport phonenumber_field.modelfields\n\n\nclass Migration(migrations.Migration):\n dependencies = [('regions', '0002_auto_20171024_1707')]\n operations = [migrations.AlterField(model_name='region', name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name=\n 'email')), migrations.AlterField(model_name='region', name=\n 'governor', field=models.ForeignKey(null=True, on_delete=django.db.\n models.deletion.CASCADE, to='public_servants.PublicServant',\n verbose_name='governor')), migrations.AlterField(model_name=\n 'region', name='phone', field=phonenumber_field.modelfields.\n PhoneNumberField(max_length=128, null=True, verbose_name='phone')),\n migrations.AlterField(model_name='region', name='twitter', field=\n models.CharField(blank=True, max_length=50, null=True))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.6 on 2017-10-27 21:59\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport phonenumber_field.modelfields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('regions', '0002_auto_20171024_1707'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='region',\n name='email',\n field=models.EmailField(max_length=100, null=True, verbose_name='email'),\n ),\n migrations.AlterField(\n model_name='region',\n name='governor',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='public_servants.PublicServant', verbose_name='governor'),\n ),\n migrations.AlterField(\n model_name='region',\n name='phone',\n field=phonenumber_field.modelfields.PhoneNumberField(max_length=128, null=True, verbose_name='phone'),\n ),\n migrations.AlterField(\n model_name='region',\n name='twitter',\n field=models.CharField(blank=True, max_length=50, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def parse_msg(msg):
line_org = msg.split('\n')
N = len(line_org) - 2
line = line_org[N]
return line
def get_vals(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('/')
min_num = float(nums[0])
ave_num = float(nums[1])
max_num = float(nums[2])
std_num = nums[3].split(' ')
std_num = float(std_num[0])
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('ms')
min_num = float(nums[0])
nums = rhs[2].split('ms')
ave_num = float(nums[0])
nums = rhs[3].split('ms')
max_num = float(nums[0])
std_num = float('nan')
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_date_and_time():
return datetime.datetime.now()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def ping_address(host, n):
ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess
.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
return out, error
<|reserved_special_token_0|>
def parse_msg(msg):
line_org = msg.split('\n')
N = len(line_org) - 2
line = line_org[N]
return line
def get_vals(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('/')
min_num = float(nums[0])
ave_num = float(nums[1])
max_num = float(nums[2])
std_num = nums[3].split(' ')
std_num = float(std_num[0])
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('ms')
min_num = float(nums[0])
nums = rhs[2].split('ms')
ave_num = float(nums[0])
nums = rhs[3].split('ms')
max_num = float(nums[0])
std_num = float('nan')
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_date_and_time():
return datetime.datetime.now()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def ping_address(host, n):
ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess
.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
return out, error
def ping_address_windows(host, n):
ping = subprocess.Popen(['ping', '-n', str(n), host], stdout=subprocess
.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
return out, error
def parse_msg(msg):
line_org = msg.split('\n')
N = len(line_org) - 2
line = line_org[N]
return line
def get_vals(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('/')
min_num = float(nums[0])
ave_num = float(nums[1])
max_num = float(nums[2])
std_num = nums[3].split(' ')
std_num = float(std_num[0])
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('ms')
min_num = float(nums[0])
nums = rhs[2].split('ms')
ave_num = float(nums[0])
nums = rhs[3].split('ms')
max_num = float(nums[0])
std_num = float('nan')
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_date_and_time():
return datetime.datetime.now()
<|reserved_special_token_1|>
import subprocess
import datetime
def ping_address(host, n):
ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess
.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
return out, error
def ping_address_windows(host, n):
ping = subprocess.Popen(['ping', '-n', str(n), host], stdout=subprocess
.PIPE, stderr=subprocess.PIPE)
out, error = ping.communicate()
return out, error
def parse_msg(msg):
line_org = msg.split('\n')
N = len(line_org) - 2
line = line_org[N]
return line
def get_vals(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('/')
min_num = float(nums[0])
ave_num = float(nums[1])
max_num = float(nums[2])
std_num = nums[3].split(' ')
std_num = float(std_num[0])
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('ms')
min_num = float(nums[0])
nums = rhs[2].split('ms')
ave_num = float(nums[0])
nums = rhs[3].split('ms')
max_num = float(nums[0])
std_num = float('nan')
except:
print('Could not Ping Website...')
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_date_and_time():
return datetime.datetime.now()
<|reserved_special_token_1|>
import subprocess
import datetime
def ping_address(host,n):
ping = subprocess.Popen(
["ping","-c",str(n),host],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
out,error = ping.communicate()
return out, error
def ping_address_windows(host,n):
ping = subprocess.Popen(
["ping","-n",str(n),host], # Need -c for linux
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
out,error = ping.communicate()
return out, error
def parse_msg(msg):
line_org = msg.split('\n')
N = len(line_org)-2
line = line_org[N]
return line
def get_vals(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('/')
min_num = float(nums[0])
ave_num = float(nums[1])
max_num = float(nums[2])
std_num = nums[3].split(' ')
std_num = float(std_num[0])
except:
print("Could not Ping Website...")
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
rhs = msg.split('=')
try:
nums = rhs[1].split('ms')
min_num = float(nums[0])
nums = rhs[2].split('ms')
ave_num = float(nums[0])
nums = rhs[3].split('ms')
max_num = float(nums[0])
std_num = float('nan')
except:
print("Could not Ping Website...")
min_num = float('nan')
ave_num = float('nan')
max_num = float('nan')
std_num = float('nan')
return min_num, ave_num, max_num, std_num
def get_date_and_time():
return datetime.datetime.now()
|
flexible
|
{
"blob_id": "3f2221f5f3a699020dd5986acb793e3083976dff",
"index": 7176,
"step-1": "<mask token>\n\n\ndef parse_msg(msg):\n line_org = msg.split('\\n')\n N = len(line_org) - 2\n line = line_org[N]\n return line\n\n\ndef get_vals(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('/')\n min_num = float(nums[0])\n ave_num = float(nums[1])\n max_num = float(nums[2])\n std_num = nums[3].split(' ')\n std_num = float(std_num[0])\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_vals_windows(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('ms')\n min_num = float(nums[0])\n nums = rhs[2].split('ms')\n ave_num = float(nums[0])\n nums = rhs[3].split('ms')\n max_num = float(nums[0])\n std_num = float('nan')\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_date_and_time():\n return datetime.datetime.now()\n",
"step-2": "<mask token>\n\n\ndef ping_address(host, n):\n ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n out, error = ping.communicate()\n return out, error\n\n\n<mask token>\n\n\ndef parse_msg(msg):\n line_org = msg.split('\\n')\n N = len(line_org) - 2\n line = line_org[N]\n return line\n\n\ndef get_vals(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('/')\n min_num = float(nums[0])\n ave_num = float(nums[1])\n max_num = float(nums[2])\n std_num = nums[3].split(' ')\n std_num = float(std_num[0])\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_vals_windows(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('ms')\n min_num = float(nums[0])\n nums = rhs[2].split('ms')\n ave_num = float(nums[0])\n nums = rhs[3].split('ms')\n max_num = float(nums[0])\n std_num = float('nan')\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_date_and_time():\n return datetime.datetime.now()\n",
"step-3": "<mask token>\n\n\ndef ping_address(host, n):\n ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n out, error = ping.communicate()\n return out, error\n\n\ndef ping_address_windows(host, n):\n ping = subprocess.Popen(['ping', '-n', str(n), host], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n out, error = ping.communicate()\n return out, error\n\n\ndef parse_msg(msg):\n line_org = msg.split('\\n')\n N = len(line_org) - 2\n line = line_org[N]\n return line\n\n\ndef get_vals(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('/')\n min_num = float(nums[0])\n ave_num = float(nums[1])\n max_num = float(nums[2])\n std_num = nums[3].split(' ')\n std_num = float(std_num[0])\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_vals_windows(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('ms')\n min_num = float(nums[0])\n nums = rhs[2].split('ms')\n ave_num = float(nums[0])\n nums = rhs[3].split('ms')\n max_num = float(nums[0])\n std_num = float('nan')\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_date_and_time():\n return datetime.datetime.now()\n",
"step-4": "import subprocess\nimport datetime\n\n\ndef ping_address(host, n):\n ping = subprocess.Popen(['ping', '-c', str(n), host], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n out, error = ping.communicate()\n return out, error\n\n\ndef ping_address_windows(host, n):\n ping = subprocess.Popen(['ping', '-n', str(n), host], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n out, error = ping.communicate()\n return out, error\n\n\ndef parse_msg(msg):\n line_org = msg.split('\\n')\n N = len(line_org) - 2\n line = line_org[N]\n return line\n\n\ndef get_vals(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('/')\n min_num = float(nums[0])\n ave_num = float(nums[1])\n max_num = float(nums[2])\n std_num = nums[3].split(' ')\n std_num = float(std_num[0])\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_vals_windows(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('ms')\n min_num = float(nums[0])\n nums = rhs[2].split('ms')\n ave_num = float(nums[0])\n nums = rhs[3].split('ms')\n max_num = float(nums[0])\n std_num = float('nan')\n except:\n print('Could not Ping Website...')\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\n\ndef get_date_and_time():\n return datetime.datetime.now()\n",
"step-5": "import subprocess\nimport datetime\n\ndef ping_address(host,n):\n ping = subprocess.Popen(\n [\"ping\",\"-c\",str(n),host],\n stdout = subprocess.PIPE,\n stderr = subprocess.PIPE)\n out,error = ping.communicate()\n return out, error\n\ndef ping_address_windows(host,n):\n ping = subprocess.Popen(\n [\"ping\",\"-n\",str(n),host], # Need -c for linux\n stdout = subprocess.PIPE,\n stderr = subprocess.PIPE)\n out,error = ping.communicate()\n return out, error\n\ndef parse_msg(msg):\n line_org = msg.split('\\n')\n N = len(line_org)-2\n line = line_org[N]\n return line\n\ndef get_vals(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('/')\n min_num = float(nums[0])\n ave_num = float(nums[1])\n max_num = float(nums[2])\n std_num = nums[3].split(' ')\n std_num = float(std_num[0])\n except:\n print(\"Could not Ping Website...\")\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\ndef get_vals_windows(msg):\n rhs = msg.split('=')\n try:\n nums = rhs[1].split('ms')\n min_num = float(nums[0])\n nums = rhs[2].split('ms')\n ave_num = float(nums[0])\n nums = rhs[3].split('ms')\n max_num = float(nums[0])\n std_num = float('nan')\n except:\n print(\"Could not Ping Website...\")\n min_num = float('nan')\n ave_num = float('nan')\n max_num = float('nan')\n std_num = float('nan')\n return min_num, ave_num, max_num, std_num\n\ndef get_date_and_time():\n return datetime.datetime.now()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
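The ping helpers above split naturally into run, parse, and extract stages. Below is a minimal end-to-end sketch under the assumption of Unix-style `ping -c` output, whose final statistics line looks like `round-trip min/avg/max/stddev = ...`; note that `communicate()` returns bytes on Python 3, so the output is decoded before parsing.

import subprocess
import datetime

def ping_summary(host, n=4):
    # Run ping, then pull min/avg/max (ms) from the final statistics line.
    ping = subprocess.Popen(['ping', '-c', str(n), host],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = ping.communicate()
    msg = out.decode('utf-8', errors='replace')
    stats_line = msg.rstrip('\n').split('\n')[-1]
    nums = stats_line.split('=')[-1].strip().split(' ')[0].split('/')
    mn, ave, mx = (float(v) for v in nums[:3])
    return datetime.datetime.now(), mn, ave, mx

# e.g. (needs a reachable host): when, mn, ave, mx = ping_summary('127.0.0.1')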
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def integrate_sine(f, a, b, n=2):
I_t = trapezoidal(f, a, b, n)
I_m = midpoint()
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def integrate_sine(f, a, b, n=2):
I_t = trapezoidal(f, a, b, n)
I_m = midpoint()
return None
a = 0.0
b = pi
f = lambda x: sin(x)
<|reserved_special_token_1|>
from trapezoidal import trapezoidal
from midpoint import midpoint
from math import pi, sin
def integrate_sine(f, a, b, n=2):
I_t = trapezoidal(f, a, b, n)
I_m = midpoint()
return None
a = 0.0
b = pi
f = lambda x: sin(x)
<|reserved_special_token_1|>
from trapezoidal import trapezoidal
from midpoint import midpoint
from math import pi, sin
def integrate_sine(f, a, b, n = 2):
I_t = trapezoidal(f, a, b, n)
I_m = midpoint()
return None
a = 0.0; b = pi
f = lambda x: sin(x)
|
flexible
|
{
"blob_id": "d99278c8f539322fd83ae5459c3121effc044b88",
"index": 5193,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef integrate_sine(f, a, b, n=2):\n I_t = trapezoidal(f, a, b, n)\n I_m = midpoint()\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef integrate_sine(f, a, b, n=2):\n I_t = trapezoidal(f, a, b, n)\n I_m = midpoint()\n return None\n\n\na = 0.0\nb = pi\nf = lambda x: sin(x)\n",
"step-4": "from trapezoidal import trapezoidal\nfrom midpoint import midpoint\nfrom math import pi, sin\n\n\ndef integrate_sine(f, a, b, n=2):\n I_t = trapezoidal(f, a, b, n)\n I_m = midpoint()\n return None\n\n\na = 0.0\nb = pi\nf = lambda x: sin(x)\n",
"step-5": "from trapezoidal import trapezoidal\r\nfrom midpoint import midpoint\r\nfrom math import pi, sin\r\n\r\ndef integrate_sine(f, a, b, n = 2):\r\n\tI_t = trapezoidal(f, a, b, n)\r\n\tI_m = midpoint()\r\n\treturn None\r\n\r\na = 0.0; b = pi\r\nf = lambda x: sin(x)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
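`integrate_sine` above is left unfinished and the imported `trapezoidal`/`midpoint` modules are not shown. As a hedged, self-contained sketch of the two composite rules the exercise assumes (signatures guessed from the `trapezoidal(f, a, b, n)` call):

from math import pi, sin

def trapezoidal(f, a, b, n):
    # Composite trapezoidal rule over n subintervals.
    h = (b - a) / float(n)
    total = 0.5 * (f(a) + f(b))
    for i in range(1, n):
        total += f(a + i * h)
    return h * total

def midpoint(f, a, b, n):
    # Composite midpoint rule over n subintervals.
    h = (b - a) / float(n)
    return h * sum(f(a + (i + 0.5) * h) for i in range(n))

# The integral of sin over [0, pi] is exactly 2:
print(trapezoidal(sin, 0.0, pi, 100))  # ~1.99984
print(midpoint(sin, 0.0, pi, 100))     # ~2.00008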
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def plot3D(xValues, labels, figure=0):
minClass = min(labels)
numberOfClasses = int(max(labels) - minClass)
fig = plt.figure(figure)
ax = plt.axes(projection='3d')
colors = ['r', 'b', 'y', 'c', 'm']
for i in range(numberOfClasses + 1):
classLocation = np.argwhere(labels == i + minClass)
ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1],
xValues[classLocation, 2])
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
def plot3D(xValues, labels, figure=0):
minClass = min(labels)
numberOfClasses = int(max(labels) - minClass)
fig = plt.figure(figure)
ax = plt.axes(projection='3d')
colors = ['r', 'b', 'y', 'c', 'm']
for i in range(numberOfClasses + 1):
classLocation = np.argwhere(labels == i + minClass)
ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1],
xValues[classLocation, 2])
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
def plot3D(xValues, labels, figure = 0):
minClass = min(labels)
numberOfClasses = int(max(labels) - minClass)
fig = plt.figure(figure)
ax = plt.axes(projection='3d')
colors = ["r", "b", "y", "c", "m"]
for i in range(numberOfClasses+1):
classLocation = np.argwhere(labels == i+minClass)
ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1], xValues[classLocation, 2]) #3D
|
flexible
|
{
"blob_id": "8dfd92ab0ce0e71b41ce94bd8fcf057c8995a2a4",
"index": 1668,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef plot3D(xValues, labels, figure=0):\n minClass = min(labels)\n numberOfClasses = int(max(labels) - minClass)\n fig = plt.figure(figure)\n ax = plt.axes(projection='3d')\n colors = ['r', 'b', 'y', 'c', 'm']\n for i in range(numberOfClasses + 1):\n classLocation = np.argwhere(labels == i + minClass)\n ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1],\n xValues[classLocation, 2])\n",
"step-3": "import matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nimport numpy as np\n\n\ndef plot3D(xValues, labels, figure=0):\n minClass = min(labels)\n numberOfClasses = int(max(labels) - minClass)\n fig = plt.figure(figure)\n ax = plt.axes(projection='3d')\n colors = ['r', 'b', 'y', 'c', 'm']\n for i in range(numberOfClasses + 1):\n classLocation = np.argwhere(labels == i + minClass)\n ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1],\n xValues[classLocation, 2])\n",
"step-4": "import matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nimport numpy as np\n\ndef plot3D(xValues, labels, figure = 0):\n minClass = min(labels)\n numberOfClasses = int(max(labels) - minClass)\n\n fig = plt.figure(figure)\n ax = plt.axes(projection='3d')\n colors = [\"r\", \"b\", \"y\", \"c\", \"m\"]\n for i in range(numberOfClasses+1):\n classLocation = np.argwhere(labels == i+minClass)\n ax.scatter3D(xValues[classLocation, 0], xValues[classLocation, 1], xValues[classLocation, 2]) #3D\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
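A quick smoke test for `plot3D`, assuming only numpy and matplotlib are available; two Gaussian clusters with integer labels exercise the per-class scatter loop.

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)                       # assumes numpy >= 1.17
points = np.vstack([rng.normal(0.0, 1.0, (50, 3)),   # class 0 around the origin
                    rng.normal(4.0, 1.0, (50, 3))])  # class 1 offset by 4
labels = np.array([0] * 50 + [1] * 50)

plot3D(points, labels)
plt.show()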
#loadconc.py - possibly these classes will be added to ajustador/loader.py when ready
# -*- coding:utf-8 -*-
from __future__ import print_function, division
import numpy as np
from ajustador import xml,nrd_fitness
import glob
import os
import operator
msec_per_sec=1000
nM_per_uM=1000
nM_per_mM=1e6
class trace(object):
def __init__(self, molname, x, y,stim_time):
molname_parts=molname.split()
self.molname=molname_parts[0]
self.norm=False
if len(molname_parts)>1:
self.units=molname_parts[1]
if '%' in self.units:
self.norm=True
if len(molname_parts)>2:
                #strip out any trailing non-numeric characters

self.scale=int(''.join([c for c in molname_parts[2] if c.isdigit()]))
else:
self.scale=1
else:
self.units='nM'
self.scale=1
if self.units.startswith('m') or self.units.startswith('(m'):
yvalue=y*nM_per_mM
elif self.units.startswith('u') or self.units.startswith('(u'):
yvalue=y*nM_per_uM
else:
#assume nM (or percent if fret)
yvalue=y
self.wave=np.rec.fromarrays((x, yvalue), names='x,y')
#calculate features: baseline, peaktime, peak value
start_index,basal=nrd_fitness.basal(x,yvalue,stim_time)
self.exp_basal=basal
pt,peak=nrd_fitness.peak(x,yvalue,start_index)
self.features={'basal':basal, 'stim_pt': start_index,'peaktime':pt,'peakval': peak}
class CSV_conc(object):
"""Load a series of concentration measurements from a CSV file
Each CSV file contains data for one or more molecules:
Time time_units, mol_name1 (nM), [mol_name2]
read time_units (sec,msec,min allowed) and convert to msec
"""
def __init__(self, fname,rootname,stim_time,features=[]):
import pandas as pd
model_num=xml.modelname_to_param(fname,rootname)
self.name=os.path.basename(fname)[0:os.path.basename(fname).rfind('.')]
self.injection=model_num
self.features=features
csv = pd.read_csv(fname, index_col=0)
x_head=csv.index.name.split()
if len(x_head)>1:
time_units=x_head[-1]
if time_units.startswith('sec') or time_units.startswith('(sec'):
time_factor=msec_per_sec
elif time_units.startswith('min') or time_units.startswith('(min'):
time_factor=msec_per_sec*60 #sec_per_min
else:
time_factor=1
print('x column header: {}, time_units: {}, conversion factor: {}'.format(x_head,time_units,time_factor))
else:
time_factor=1
x = csv.index.values*time_factor #time values
#may want to read units of y value, e.g. allow uM or mM and convert to nM
self.waves = {col.split()[0]:trace(col, x, csv[col].values,stim_time) for col in csv.columns}
class CSV_conc_set(object):
#set of files, each one a CSV_conc object, differing in stim protocol
def __init__(self,rootname,stim_time=0,features=[]):
self.stim_time=stim_time*msec_per_sec
self.features=features
if os.path.isdir(rootname): #if directory, look for all csv files
dirname = rootname
filenames=glob.glob(rootname+'/*.csv')
self.name=rootname
else:
if rootname.endswith('.csv'):
#case with single filename specified
filenames=[rootname]
else:
#case with a set of filenames specified, with common "prefix" + variable "suffix"
filenames=glob.glob(rootname+'*.csv')
dirname = os.path.dirname(rootname)
self.name=os.path.basename(rootname)
print('CSV_conc_set:',self.name, 'dir',dirname,'files',filenames,'stim_start (ms)', self.stim_time)
if len(filenames)==0:
print('**************** CSV_conc_set: NO FILES FOUND **************************')
csv_list=[CSV_conc(fn,rootname,self.stim_time,features) for fn in filenames]
csv_list.sort(key=operator.attrgetter('injection'))
self.data=csv_list
|
normal
|
{
"blob_id": "20649decd3ff21b1aa814d0a04180195cac3629b",
"index": 498,
"step-1": "<mask token>\n\n\nclass CSV_conc(object):\n <mask token>\n <mask token>\n\n\nclass CSV_conc_set(object):\n\n def __init__(self, rootname, stim_time=0, features=[]):\n self.stim_time = stim_time * msec_per_sec\n self.features = features\n if os.path.isdir(rootname):\n dirname = rootname\n filenames = glob.glob(rootname + '/*.csv')\n self.name = rootname\n else:\n if rootname.endswith('.csv'):\n filenames = [rootname]\n else:\n filenames = glob.glob(rootname + '*.csv')\n dirname = os.path.dirname(rootname)\n self.name = os.path.basename(rootname)\n print('CSV_conc_set:', self.name, 'dir', dirname, 'files',\n filenames, 'stim_start (ms)', self.stim_time)\n if len(filenames) == 0:\n print(\n '**************** CSV_conc_set: NO FILES FOUND **************************'\n )\n csv_list = [CSV_conc(fn, rootname, self.stim_time, features) for fn in\n filenames]\n csv_list.sort(key=operator.attrgetter('injection'))\n self.data = csv_list\n",
"step-2": "<mask token>\n\n\nclass trace(object):\n\n def __init__(self, molname, x, y, stim_time):\n molname_parts = molname.split()\n self.molname = molname_parts[0]\n self.norm = False\n if len(molname_parts) > 1:\n self.units = molname_parts[1]\n if '%' in self.units:\n self.norm = True\n if len(molname_parts) > 2:\n self.scale = int(''.join([c for c in molname_parts[2] if c.\n isdigit()]))\n else:\n self.scale = 1\n else:\n self.units = 'nM'\n self.scale = 1\n if self.units.startswith('m') or self.units.startswith('(m'):\n yvalue = y * nM_per_mM\n elif self.units.startswith('u') or self.units.startswith('(u'):\n yvalue = y * nM_per_uM\n else:\n yvalue = y\n self.wave = np.rec.fromarrays((x, yvalue), names='x,y')\n start_index, basal = nrd_fitness.basal(x, yvalue, stim_time)\n self.exp_basal = basal\n pt, peak = nrd_fitness.peak(x, yvalue, start_index)\n self.features = {'basal': basal, 'stim_pt': start_index, 'peaktime':\n pt, 'peakval': peak}\n\n\nclass CSV_conc(object):\n \"\"\"Load a series of concentration measurements from a CSV file\n Each CSV file contains data for one or more molecules:\n Time time_units, mol_name1 (nM), [mol_name2]\n read time_units (sec,msec,min allowed) and convert to msec\n \"\"\"\n\n def __init__(self, fname, rootname, stim_time, features=[]):\n import pandas as pd\n model_num = xml.modelname_to_param(fname, rootname)\n self.name = os.path.basename(fname)[0:os.path.basename(fname).rfind\n ('.')]\n self.injection = model_num\n self.features = features\n csv = pd.read_csv(fname, index_col=0)\n x_head = csv.index.name.split()\n if len(x_head) > 1:\n time_units = x_head[-1]\n if time_units.startswith('sec') or time_units.startswith('(sec'):\n time_factor = msec_per_sec\n elif time_units.startswith('min') or time_units.startswith('(min'):\n time_factor = msec_per_sec * 60\n else:\n time_factor = 1\n print('x column header: {}, time_units: {}, conversion factor: {}'\n .format(x_head, time_units, time_factor))\n else:\n time_factor = 1\n x = csv.index.values * time_factor\n self.waves = {col.split()[0]: trace(col, x, csv[col].values,\n stim_time) for col in csv.columns}\n\n\nclass CSV_conc_set(object):\n\n def __init__(self, rootname, stim_time=0, features=[]):\n self.stim_time = stim_time * msec_per_sec\n self.features = features\n if os.path.isdir(rootname):\n dirname = rootname\n filenames = glob.glob(rootname + '/*.csv')\n self.name = rootname\n else:\n if rootname.endswith('.csv'):\n filenames = [rootname]\n else:\n filenames = glob.glob(rootname + '*.csv')\n dirname = os.path.dirname(rootname)\n self.name = os.path.basename(rootname)\n print('CSV_conc_set:', self.name, 'dir', dirname, 'files',\n filenames, 'stim_start (ms)', self.stim_time)\n if len(filenames) == 0:\n print(\n '**************** CSV_conc_set: NO FILES FOUND **************************'\n )\n csv_list = [CSV_conc(fn, rootname, self.stim_time, features) for fn in\n filenames]\n csv_list.sort(key=operator.attrgetter('injection'))\n self.data = csv_list\n",
"step-3": "<mask token>\nmsec_per_sec = 1000\nnM_per_uM = 1000\nnM_per_mM = 1000000.0\n\n\nclass trace(object):\n\n def __init__(self, molname, x, y, stim_time):\n molname_parts = molname.split()\n self.molname = molname_parts[0]\n self.norm = False\n if len(molname_parts) > 1:\n self.units = molname_parts[1]\n if '%' in self.units:\n self.norm = True\n if len(molname_parts) > 2:\n self.scale = int(''.join([c for c in molname_parts[2] if c.\n isdigit()]))\n else:\n self.scale = 1\n else:\n self.units = 'nM'\n self.scale = 1\n if self.units.startswith('m') or self.units.startswith('(m'):\n yvalue = y * nM_per_mM\n elif self.units.startswith('u') or self.units.startswith('(u'):\n yvalue = y * nM_per_uM\n else:\n yvalue = y\n self.wave = np.rec.fromarrays((x, yvalue), names='x,y')\n start_index, basal = nrd_fitness.basal(x, yvalue, stim_time)\n self.exp_basal = basal\n pt, peak = nrd_fitness.peak(x, yvalue, start_index)\n self.features = {'basal': basal, 'stim_pt': start_index, 'peaktime':\n pt, 'peakval': peak}\n\n\nclass CSV_conc(object):\n \"\"\"Load a series of concentration measurements from a CSV file\n Each CSV file contains data for one or more molecules:\n Time time_units, mol_name1 (nM), [mol_name2]\n read time_units (sec,msec,min allowed) and convert to msec\n \"\"\"\n\n def __init__(self, fname, rootname, stim_time, features=[]):\n import pandas as pd\n model_num = xml.modelname_to_param(fname, rootname)\n self.name = os.path.basename(fname)[0:os.path.basename(fname).rfind\n ('.')]\n self.injection = model_num\n self.features = features\n csv = pd.read_csv(fname, index_col=0)\n x_head = csv.index.name.split()\n if len(x_head) > 1:\n time_units = x_head[-1]\n if time_units.startswith('sec') or time_units.startswith('(sec'):\n time_factor = msec_per_sec\n elif time_units.startswith('min') or time_units.startswith('(min'):\n time_factor = msec_per_sec * 60\n else:\n time_factor = 1\n print('x column header: {}, time_units: {}, conversion factor: {}'\n .format(x_head, time_units, time_factor))\n else:\n time_factor = 1\n x = csv.index.values * time_factor\n self.waves = {col.split()[0]: trace(col, x, csv[col].values,\n stim_time) for col in csv.columns}\n\n\nclass CSV_conc_set(object):\n\n def __init__(self, rootname, stim_time=0, features=[]):\n self.stim_time = stim_time * msec_per_sec\n self.features = features\n if os.path.isdir(rootname):\n dirname = rootname\n filenames = glob.glob(rootname + '/*.csv')\n self.name = rootname\n else:\n if rootname.endswith('.csv'):\n filenames = [rootname]\n else:\n filenames = glob.glob(rootname + '*.csv')\n dirname = os.path.dirname(rootname)\n self.name = os.path.basename(rootname)\n print('CSV_conc_set:', self.name, 'dir', dirname, 'files',\n filenames, 'stim_start (ms)', self.stim_time)\n if len(filenames) == 0:\n print(\n '**************** CSV_conc_set: NO FILES FOUND **************************'\n )\n csv_list = [CSV_conc(fn, rootname, self.stim_time, features) for fn in\n filenames]\n csv_list.sort(key=operator.attrgetter('injection'))\n self.data = csv_list\n",
"step-4": "from __future__ import print_function, division\nimport numpy as np\nfrom ajustador import xml, nrd_fitness\nimport glob\nimport os\nimport operator\nmsec_per_sec = 1000\nnM_per_uM = 1000\nnM_per_mM = 1000000.0\n\n\nclass trace(object):\n\n def __init__(self, molname, x, y, stim_time):\n molname_parts = molname.split()\n self.molname = molname_parts[0]\n self.norm = False\n if len(molname_parts) > 1:\n self.units = molname_parts[1]\n if '%' in self.units:\n self.norm = True\n if len(molname_parts) > 2:\n self.scale = int(''.join([c for c in molname_parts[2] if c.\n isdigit()]))\n else:\n self.scale = 1\n else:\n self.units = 'nM'\n self.scale = 1\n if self.units.startswith('m') or self.units.startswith('(m'):\n yvalue = y * nM_per_mM\n elif self.units.startswith('u') or self.units.startswith('(u'):\n yvalue = y * nM_per_uM\n else:\n yvalue = y\n self.wave = np.rec.fromarrays((x, yvalue), names='x,y')\n start_index, basal = nrd_fitness.basal(x, yvalue, stim_time)\n self.exp_basal = basal\n pt, peak = nrd_fitness.peak(x, yvalue, start_index)\n self.features = {'basal': basal, 'stim_pt': start_index, 'peaktime':\n pt, 'peakval': peak}\n\n\nclass CSV_conc(object):\n \"\"\"Load a series of concentration measurements from a CSV file\n Each CSV file contains data for one or more molecules:\n Time time_units, mol_name1 (nM), [mol_name2]\n read time_units (sec,msec,min allowed) and convert to msec\n \"\"\"\n\n def __init__(self, fname, rootname, stim_time, features=[]):\n import pandas as pd\n model_num = xml.modelname_to_param(fname, rootname)\n self.name = os.path.basename(fname)[0:os.path.basename(fname).rfind\n ('.')]\n self.injection = model_num\n self.features = features\n csv = pd.read_csv(fname, index_col=0)\n x_head = csv.index.name.split()\n if len(x_head) > 1:\n time_units = x_head[-1]\n if time_units.startswith('sec') or time_units.startswith('(sec'):\n time_factor = msec_per_sec\n elif time_units.startswith('min') or time_units.startswith('(min'):\n time_factor = msec_per_sec * 60\n else:\n time_factor = 1\n print('x column header: {}, time_units: {}, conversion factor: {}'\n .format(x_head, time_units, time_factor))\n else:\n time_factor = 1\n x = csv.index.values * time_factor\n self.waves = {col.split()[0]: trace(col, x, csv[col].values,\n stim_time) for col in csv.columns}\n\n\nclass CSV_conc_set(object):\n\n def __init__(self, rootname, stim_time=0, features=[]):\n self.stim_time = stim_time * msec_per_sec\n self.features = features\n if os.path.isdir(rootname):\n dirname = rootname\n filenames = glob.glob(rootname + '/*.csv')\n self.name = rootname\n else:\n if rootname.endswith('.csv'):\n filenames = [rootname]\n else:\n filenames = glob.glob(rootname + '*.csv')\n dirname = os.path.dirname(rootname)\n self.name = os.path.basename(rootname)\n print('CSV_conc_set:', self.name, 'dir', dirname, 'files',\n filenames, 'stim_start (ms)', self.stim_time)\n if len(filenames) == 0:\n print(\n '**************** CSV_conc_set: NO FILES FOUND **************************'\n )\n csv_list = [CSV_conc(fn, rootname, self.stim_time, features) for fn in\n filenames]\n csv_list.sort(key=operator.attrgetter('injection'))\n self.data = csv_list\n",
"step-5": "#loadconc.py - possibly these classes will be added to ajustador/loader.py when ready\n# -*- coding:utf-8 -*-\n\nfrom __future__ import print_function, division\nimport numpy as np\nfrom ajustador import xml,nrd_fitness\nimport glob \nimport os\nimport operator\n\nmsec_per_sec=1000\nnM_per_uM=1000\nnM_per_mM=1e6\n\nclass trace(object):\n def __init__(self, molname, x, y,stim_time):\n molname_parts=molname.split()\n self.molname=molname_parts[0]\n self.norm=False\n if len(molname_parts)>1:\n self.units=molname_parts[1]\n if '%' in self.units:\n self.norm=True\n if len(molname_parts)>2:\n #strip out any trailing non-numeric characteris\n self.scale=int(''.join([c for c in molname_parts[2] if c.isdigit()]))\n else:\n self.scale=1\n else:\n self.units='nM'\n self.scale=1\n if self.units.startswith('m') or self.units.startswith('(m'):\n yvalue=y*nM_per_mM\n elif self.units.startswith('u') or self.units.startswith('(u'):\n yvalue=y*nM_per_uM\n else:\n #assume nM (or percent if fret)\n yvalue=y\n self.wave=np.rec.fromarrays((x, yvalue), names='x,y')\n #calculate features: baseline, peaktime, peak value\n start_index,basal=nrd_fitness.basal(x,yvalue,stim_time)\n self.exp_basal=basal\n pt,peak=nrd_fitness.peak(x,yvalue,start_index)\n self.features={'basal':basal, 'stim_pt': start_index,'peaktime':pt,'peakval': peak}\n\nclass CSV_conc(object):\n \"\"\"Load a series of concentration measurements from a CSV file\n Each CSV file contains data for one or more molecules:\n Time time_units, mol_name1 (nM), [mol_name2]\n read time_units (sec,msec,min allowed) and convert to msec\n \"\"\"\n def __init__(self, fname,rootname,stim_time,features=[]):\n import pandas as pd\n model_num=xml.modelname_to_param(fname,rootname)\n self.name=os.path.basename(fname)[0:os.path.basename(fname).rfind('.')]\n self.injection=model_num\n self.features=features\n \n csv = pd.read_csv(fname, index_col=0)\n x_head=csv.index.name.split()\n if len(x_head)>1:\n time_units=x_head[-1]\n if time_units.startswith('sec') or time_units.startswith('(sec'):\n time_factor=msec_per_sec\n elif time_units.startswith('min') or time_units.startswith('(min'):\n time_factor=msec_per_sec*60 #sec_per_min\n else:\n time_factor=1\n print('x column header: {}, time_units: {}, conversion factor: {}'.format(x_head,time_units,time_factor))\n else:\n time_factor=1\n x = csv.index.values*time_factor #time values\n #may want to read units of y value, e.g. 
allow uM or mM and convert to nM\n self.waves = {col.split()[0]:trace(col, x, csv[col].values,stim_time) for col in csv.columns}\n\nclass CSV_conc_set(object):\n #set of files, each one a CSV_conc object, differing in stim protocol\n def __init__(self,rootname,stim_time=0,features=[]):\n self.stim_time=stim_time*msec_per_sec\n self.features=features\n if os.path.isdir(rootname): #if directory, look for all csv files\n dirname = rootname\n filenames=glob.glob(rootname+'/*.csv')\n self.name=rootname\n else:\n if rootname.endswith('.csv'):\n #case with single filename specified\n filenames=[rootname]\n else:\n #case with a set of filenames specified, with common \"prefix\" + variable \"suffix\"\n filenames=glob.glob(rootname+'*.csv')\n dirname = os.path.dirname(rootname)\n self.name=os.path.basename(rootname)\n print('CSV_conc_set:',self.name, 'dir',dirname,'files',filenames,'stim_start (ms)', self.stim_time)\n if len(filenames)==0:\n print('**************** CSV_conc_set: NO FILES FOUND **************************')\n \n csv_list=[CSV_conc(fn,rootname,self.stim_time,features) for fn in filenames]\n csv_list.sort(key=operator.attrgetter('injection'))\n self.data=csv_list\n\n\n",
"step-ids": [
3,
7,
8,
9,
10
]
}
|
[
3,
7,
8,
9,
10
] |
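Usage of the loader is easiest to see end to end. Everything below is a placeholder sketch: the `ajustador` package, the directory layout, and the molecule names are assumed rather than taken from this snippet.

# Hypothetical files: experiments/model-stim1.csv, experiments/model-stim2.csv, ...
exp = CSV_conc_set('experiments/model-stim', stim_time=30)  # stimulation at 30 s
for csv_file in exp.data:
    for mol, tr in csv_file.waves.items():
        feats = tr.features
        print(csv_file.name, mol, 'basal:', feats['basal'], 'peak:', feats['peakval'])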
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def execute():
for ps in frappe.get_all('Property Setter', filters={'property': '_idx'
}, fields=['doc_type', 'value']):
custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.
doc_type}, fields=['name', 'fieldname'])
if custom_fields:
_idx = json.loads(ps.value)
for custom_field in custom_fields:
if custom_field.fieldname in _idx:
custom_field_idx = _idx.index(custom_field.fieldname)
if custom_field_idx == 0:
prev_fieldname = ''
else:
prev_fieldname = _idx[custom_field_idx - 1]
else:
prev_fieldname = _idx[-1]
custom_field_idx = len(_idx)
frappe.db.set_value('Custom Field', custom_field.name,
'insert_after', prev_fieldname)
frappe.db.set_value('Custom Field', custom_field.name,
'idx', custom_field_idx)
<|reserved_special_token_1|>
from __future__ import unicode_literals
import frappe, json
def execute():
for ps in frappe.get_all('Property Setter', filters={'property': '_idx'
}, fields=['doc_type', 'value']):
custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.
doc_type}, fields=['name', 'fieldname'])
if custom_fields:
_idx = json.loads(ps.value)
for custom_field in custom_fields:
if custom_field.fieldname in _idx:
custom_field_idx = _idx.index(custom_field.fieldname)
if custom_field_idx == 0:
prev_fieldname = ''
else:
prev_fieldname = _idx[custom_field_idx - 1]
else:
prev_fieldname = _idx[-1]
custom_field_idx = len(_idx)
frappe.db.set_value('Custom Field', custom_field.name,
'insert_after', prev_fieldname)
frappe.db.set_value('Custom Field', custom_field.name,
'idx', custom_field_idx)
<|reserved_special_token_1|>
from __future__ import unicode_literals
import frappe, json
def execute():
for ps in frappe.get_all('Property Setter', filters={'property': '_idx'},
fields = ['doc_type', 'value']):
custom_fields = frappe.get_all('Custom Field',
filters = {'dt': ps.doc_type}, fields=['name', 'fieldname'])
if custom_fields:
_idx = json.loads(ps.value)
for custom_field in custom_fields:
if custom_field.fieldname in _idx:
custom_field_idx = _idx.index(custom_field.fieldname)
if custom_field_idx == 0:
prev_fieldname = ""
else:
prev_fieldname = _idx[custom_field_idx - 1]
else:
prev_fieldname = _idx[-1]
custom_field_idx = len(_idx)
frappe.db.set_value('Custom Field', custom_field.name, 'insert_after', prev_fieldname)
frappe.db.set_value('Custom Field', custom_field.name, 'idx', custom_field_idx)
|
flexible
|
{
"blob_id": "6f951815d0edafb08e7734d0e95e6564ab1be1f7",
"index": 2375,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef execute():\n for ps in frappe.get_all('Property Setter', filters={'property': '_idx'\n }, fields=['doc_type', 'value']):\n custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.\n doc_type}, fields=['name', 'fieldname'])\n if custom_fields:\n _idx = json.loads(ps.value)\n for custom_field in custom_fields:\n if custom_field.fieldname in _idx:\n custom_field_idx = _idx.index(custom_field.fieldname)\n if custom_field_idx == 0:\n prev_fieldname = ''\n else:\n prev_fieldname = _idx[custom_field_idx - 1]\n else:\n prev_fieldname = _idx[-1]\n custom_field_idx = len(_idx)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'insert_after', prev_fieldname)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'idx', custom_field_idx)\n",
"step-3": "from __future__ import unicode_literals\nimport frappe, json\n\n\ndef execute():\n for ps in frappe.get_all('Property Setter', filters={'property': '_idx'\n }, fields=['doc_type', 'value']):\n custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.\n doc_type}, fields=['name', 'fieldname'])\n if custom_fields:\n _idx = json.loads(ps.value)\n for custom_field in custom_fields:\n if custom_field.fieldname in _idx:\n custom_field_idx = _idx.index(custom_field.fieldname)\n if custom_field_idx == 0:\n prev_fieldname = ''\n else:\n prev_fieldname = _idx[custom_field_idx - 1]\n else:\n prev_fieldname = _idx[-1]\n custom_field_idx = len(_idx)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'insert_after', prev_fieldname)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'idx', custom_field_idx)\n",
"step-4": "from __future__ import unicode_literals\nimport frappe, json\n\ndef execute():\n\tfor ps in frappe.get_all('Property Setter', filters={'property': '_idx'},\n\t\tfields = ['doc_type', 'value']):\n\t\tcustom_fields = frappe.get_all('Custom Field',\n\t\t\tfilters = {'dt': ps.doc_type}, fields=['name', 'fieldname'])\n\n\t\tif custom_fields:\n\t\t\t_idx = json.loads(ps.value)\n\n\t\t\tfor custom_field in custom_fields:\n\t\t\t\tif custom_field.fieldname in _idx:\n\t\t\t\t\tcustom_field_idx = _idx.index(custom_field.fieldname)\n\t\t\t\t\tif custom_field_idx == 0:\n\t\t\t\t\t\tprev_fieldname = \"\"\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tprev_fieldname = _idx[custom_field_idx - 1]\n\n\t\t\t\telse:\n\t\t\t\t\tprev_fieldname = _idx[-1]\n\t\t\t\t\tcustom_field_idx = len(_idx)\n\n\t\t\t\tfrappe.db.set_value('Custom Field', custom_field.name, 'insert_after', prev_fieldname)\n\t\t\t\tfrappe.db.set_value('Custom Field', custom_field.name, 'idx', custom_field_idx)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
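The heart of the patch above is a positional lookup: each custom field's `insert_after` is whatever fieldname precedes it in the saved `_idx` order, falling back to the end of the list when the field is absent. A Frappe-free illustration of that lookup (the field and list contents here are hypothetical):

import json

_idx = json.loads('["title", "status", "my_custom_field", "notes"]')
fieldname = 'my_custom_field'  # hypothetical custom field

if fieldname in _idx:
    pos = _idx.index(fieldname)
    insert_after = '' if pos == 0 else _idx[pos - 1]
else:
    pos, insert_after = len(_idx), _idx[-1]

print(insert_after, pos)  # -> status 2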
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
@app.route('/index')
def index():
return 'Hello world'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
@app.route('/index')
def index():
return 'Hello world'
@app.route('/api_post', methods=['POST'])
def postJsonHandler():
print(request.is_json)
content = request.get_json()
print(content)
return 'JSON posted'
<|reserved_special_token_1|>
from app import app
from flask import request
@app.route('/')
@app.route('/index')
def index():
return 'Hello world'
@app.route('/api_post', methods=['POST'])
def postJsonHandler():
print(request.is_json)
content = request.get_json()
print(content)
return 'JSON posted'
|
flexible
|
{
"blob_id": "9d8c4bf9f9279d5e30d0e9742cdd31713e5f4b9e",
"index": 2104,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\n@app.route('/index')\ndef index():\n return 'Hello world'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@app.route('/')\n@app.route('/index')\ndef index():\n return 'Hello world'\n\n\n@app.route('/api_post', methods=['POST'])\ndef postJsonHandler():\n print(request.is_json)\n content = request.get_json()\n print(content)\n return 'JSON posted'\n",
"step-4": "from app import app\nfrom flask import request\n\n\n@app.route('/')\n@app.route('/index')\ndef index():\n return 'Hello world'\n\n\n@app.route('/api_post', methods=['POST'])\ndef postJsonHandler():\n print(request.is_json)\n content = request.get_json()\n print(content)\n return 'JSON posted'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
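One hedged way to exercise the POST handler above is Flask's built-in test client; the `app` package is assumed to expose the Flask instance, as the import suggests.

from app import app

with app.test_client() as client:
    resp = client.post('/api_post', json={'sensor': 'A1', 'value': 3.7})
    print(resp.status_code, resp.data)  # 200 b'JSON posted'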
<|reserved_special_token_0|>
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'], 'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AttrNameFakeModel(object):
<|reserved_special_token_0|>
class EmptyFakeModel(object):
def __getattr__(self, name):
raise AttributeError
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'], 'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AttrNameFakeModel(object):
def __getattr__(self, name):
return name
class EmptyFakeModel(object):
def __getattr__(self, name):
raise AttributeError
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'], 'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.test import TestCase
from django.core.urlresolvers import reverse
from google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS
CSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\r\n'
class AttrNameFakeModel(object):
def __getattr__(self, name):
return name
class EmptyFakeModel(object):
def __getattr__(self, name):
raise AttributeError
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'], 'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
<|reserved_special_token_1|>
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.core.urlresolvers import reverse
from google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS
CSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\r\n'
class AttrNameFakeModel(object):
# A fake model that returns the attribute name upon attribute access.
def __getattr__(self, name):
return name
class EmptyFakeModel(object):
# A fake model with no attributes.
def __getattr__(self, name):
raise AttributeError
class UppercaseBrandFeed(CSVMerchantFeed):
def get_brand(self, obj):
return obj.brand.upper()
class CSVMerchantFeedTest(TestCase):
def test_csv_empty(self):
feed = CSVMerchantFeed([])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS)
def test_csv(self):
feed = CSVMerchantFeed([AttrNameFakeModel()])
output = feed.get_content()
self.assertEquals(output, CSV_HEADINGS * 2)
def test_csv_missing_attribute(self):
feed = CSVMerchantFeed([EmptyFakeModel()])
output = feed.get_content()
empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\r\n'
self.assertEquals(output, CSV_HEADINGS + empty_data_row)
def test_csv_with_get_method(self):
feed = UppercaseBrandFeed([AttrNameFakeModel()])
output = feed.get_content()
data_row = CSV_HEADINGS.replace('brand', 'BRAND')
self.assertEquals(output, CSV_HEADINGS + data_row)
class CSVFeedViewTest(TestCase):
def test_view_empty(self):
url = reverse('google_feed')
response = self.client.get(url)
self.assertEquals(response.content, CSV_HEADINGS)
def test_has_correct_headers(self):
# content-type is 'text/csv', content-disposition is 'attachment',
# filename is 'google.csv'
url = reverse('google_feed')
response = self.client.get(url)
self.assertEqual(response['Content-Type'],
'text/csv')
self.assertEqual(response['Content-Disposition'],
'attachment; filename="google.csv"')
|
flexible
|
{
"blob_id": "924fd89a835528fa28e1226912a2e4be9c4e1d5d",
"index": 152,
"step-1": "<mask token>\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-2": "<mask token>\n\n\nclass AttrNameFakeModel(object):\n <mask token>\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-3": "<mask token>\n\n\nclass AttrNameFakeModel(object):\n\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-4": "from __future__ import unicode_literals\nfrom django.test import TestCase\nfrom django.core.urlresolvers import reverse\nfrom google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS\nCSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\\r\\n'\n\n\nclass AttrNameFakeModel(object):\n\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'], 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.test import TestCase\nfrom django.core.urlresolvers import reverse\n\nfrom google_product_feeder.feed import CSVMerchantFeed, MERCHANT_FEED_COLUMNS\n\n\nCSV_HEADINGS = ','.join(MERCHANT_FEED_COLUMNS) + '\\r\\n'\n\n\nclass AttrNameFakeModel(object):\n # A fake model that returns the attribute name upon attribute access.\n def __getattr__(self, name):\n return name\n\n\nclass EmptyFakeModel(object):\n # A fake model with no attributes.\n def __getattr__(self, name):\n raise AttributeError\n\n\nclass UppercaseBrandFeed(CSVMerchantFeed):\n def get_brand(self, obj):\n return obj.brand.upper()\n\n\nclass CSVMerchantFeedTest(TestCase):\n\n def test_csv_empty(self):\n feed = CSVMerchantFeed([])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS)\n\n def test_csv(self):\n feed = CSVMerchantFeed([AttrNameFakeModel()])\n output = feed.get_content()\n self.assertEquals(output, CSV_HEADINGS * 2)\n\n def test_csv_missing_attribute(self):\n feed = CSVMerchantFeed([EmptyFakeModel()])\n output = feed.get_content()\n empty_data_row = ',' * (len(MERCHANT_FEED_COLUMNS) - 1) + '\\r\\n'\n self.assertEquals(output, CSV_HEADINGS + empty_data_row)\n\n def test_csv_with_get_method(self):\n feed = UppercaseBrandFeed([AttrNameFakeModel()])\n output = feed.get_content()\n data_row = CSV_HEADINGS.replace('brand', 'BRAND')\n self.assertEquals(output, CSV_HEADINGS + data_row)\n\n\nclass CSVFeedViewTest(TestCase):\n\n def test_view_empty(self):\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEquals(response.content, CSV_HEADINGS)\n\n def test_has_correct_headers(self):\n # content-type is 'text/csv', content-disposition is 'attachment',\n # filename is 'google.csv'\n url = reverse('google_feed')\n response = self.client.get(url)\n self.assertEqual(response['Content-Type'],\n 'text/csv')\n self.assertEqual(response['Content-Disposition'],\n 'attachment; filename=\"google.csv\"')\n",
"step-ids": [
10,
13,
14,
16,
17
]
}
|
[
10,
13,
14,
16,
17
] |
"""Test suite for phlsys_tryloop."""
from __future__ import absolute_import
import datetime
import itertools
import unittest
import phlsys_tryloop
# =============================================================================
# TEST PLAN
# -----------------------------------------------------------------------------
# Here we detail the things we are concerned to test and specify which tests
# cover those concerns.
#
# Concerns:
# [ B] tryLoopDelay returns the value from the supplied 'toTry' func on success
# [ B] tryLoopDelay accepts [] for 'delays' and still calls toTry once
# [ C] tryLoopDelay ignores exceptionToIgnore until delays is empty
# [ C] tryLoopDelay re-raises exceptionToIgnore when delays is empty
# [ D] exceptions not derived from exceptionToIgnore raise through tryLoopDelay
# [ E] tryLoopDelay calls 'onException' if exceptionToIgnore is intercepted
# [ ] tryLoopDelay waits 'delay' seconds between attempts
# [ F] endless_retry makes many valid increasing delays
# [ G] short_retry makes a finite amount of valid delays
# -----------------------------------------------------------------------------
# Tests:
# [ A] test_A_Breathing
# [ B] test_B_ReturnsValue
# [ C] test_C_RetriesEachDelay
# [ D] test_D_RaiseThrough
# [ E] test_E_CallsOnException
# [ F] test_F_ValidLongIncreasingEndlessRetry
# [ G] test_G_ValidFiniteShortRetry
# =============================================================================
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_A_Breathing(self):
pass
def test_B_ReturnsResult(self):
self.assertEqual(1, phlsys_tryloop.try_loop_delay(lambda: 1, []))
self.assertEqual("hi", phlsys_tryloop.try_loop_delay(lambda: "hi", []))
def test_C_RetriesEachDelay(self):
class TestException(Exception):
pass
counter = []
def failer():
counter.append(1)
raise TestException()
numDelays = 4
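        # datetime.timedelta() is a zero delay, so the retries run back-to-back;
        # one initial attempt plus numDelays retries gives 1 + numDelays calls.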
delays = [datetime.timedelta() for _ in range(0, numDelays)]
try:
phlsys_tryloop.try_loop_delay(failer, delays, TestException)
except TestException:
pass
else:
raise Exception("did not receive TestException")
self.assertEqual(1 + numDelays, len(counter))
def test_D_RaiseThrough(self):
class TestException(Exception):
pass
counter = []
def failer():
counter.append(1)
raise TypeError()
numDelays = 4
delays = [datetime.timedelta() for _ in range(0, numDelays)]
try:
phlsys_tryloop.try_loop_delay(failer, delays, TestException)
except TypeError:
pass
else:
raise Exception("did not receive TypeError")
self.assertEqual(1, len(counter))
def test_E_CallsOnException(self):
fail_counter = []
on_exception_counter = []
class TestException(Exception):
pass
def failer():
fail_counter.append(1)
raise TestException()
def on_exception(e, delay):
print e
if delay is not None:
print delay.total_seconds()
on_exception_counter.append(1)
numDelays = 4
delays = [datetime.timedelta() for _ in range(0, numDelays)]
try:
phlsys_tryloop.try_loop_delay(
failer, delays, onException=on_exception)
except TestException:
pass
else:
raise Exception("did not receive TestException")
self.assertEqual(1 + numDelays, len(fail_counter))
self.assertEqual(len(fail_counter), len(on_exception_counter))
def test_F_ValidLongIncreasingEndlessRetry(self):
# [ F] endless_retry makes many valid increasing delays
delays = phlsys_tryloop.make_default_endless_retry()
first_secs = None
last_secs = None
for i in itertools.islice(delays, 1000):
secs = i.total_seconds()
self.assertGreaterEqual(secs, 0)
self.assertTrue(last_secs is None or secs >= last_secs)
if first_secs is None:
first_secs = secs
last_secs = secs
self.assertGreater(last_secs, first_secs)
def test_G_ValidFiniteShortRetry(self):
# [ G] short_retry makes a finite amount of valid delays
is_empty = True
for i in phlsys_tryloop.make_default_short_retry():
is_empty = False
secs = i.total_seconds()
self.assertGreaterEqual(secs, 0)
self.assertLess(secs, 3600) # one hour is definitely not short
self.assertFalse(is_empty)
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ------------------------------ END-OF-FILE ----------------------------------
|
normal
|
{
"blob_id": "87130c2bbf919cacd3d5dd823cd310dcad4dc790",
"index": 8157,
"step-1": "\"\"\"Test suite for phlsys_tryloop.\"\"\"\n\nfrom __future__ import absolute_import\n\nimport datetime\nimport itertools\nimport unittest\n\nimport phlsys_tryloop\n\n# =============================================================================\n# TEST PLAN\n# -----------------------------------------------------------------------------\n# Here we detail the things we are concerned to test and specify which tests\n# cover those concerns.\n#\n# Concerns:\n# [ B] tryLoopDelay returns the value from the supplied 'toTry' func on success\n# [ B] tryLoopDelay accepts [] for 'delays' and still calls toTry once\n# [ C] tryLoopDelay ignores exceptionToIgnore until delays is empty\n# [ C] tryLoopDelay re-raises exceptionToIgnore when delays is empty\n# [ D] exceptions not derived from exceptionToIgnore raise through tryLoopDelay\n# [ E] tryLoopDelay calls 'onException' if exceptionToIgnore is intercepted\n# [ ] tryLoopDelay waits 'delay' seconds between attempts\n# [ F] endless_retry makes many valid increasing delays\n# [ G] short_retry makes a finite amount of valid delays\n# -----------------------------------------------------------------------------\n# Tests:\n# [ A] test_A_Breathing\n# [ B] test_B_ReturnsValue\n# [ C] test_C_RetriesEachDelay\n# [ D] test_D_RaiseThrough\n# [ E] test_E_CallsOnException\n# [ F] test_F_ValidLongIncreasingEndlessRetry\n# [ G] test_G_ValidFiniteShortRetry\n# =============================================================================\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def test_A_Breathing(self):\n pass\n\n def test_B_ReturnsResult(self):\n self.assertEqual(1, phlsys_tryloop.try_loop_delay(lambda: 1, []))\n self.assertEqual(\"hi\", phlsys_tryloop.try_loop_delay(lambda: \"hi\", []))\n\n def test_C_RetriesEachDelay(self):\n\n class TestException(Exception):\n pass\n\n counter = []\n\n def failer():\n counter.append(1)\n raise TestException()\n\n numDelays = 4\n delays = [datetime.timedelta() for _ in range(0, numDelays)]\n try:\n phlsys_tryloop.try_loop_delay(failer, delays, TestException)\n except TestException:\n pass\n else:\n raise Exception(\"did not receive TestException\")\n\n self.assertEqual(1 + numDelays, len(counter))\n\n def test_D_RaiseThrough(self):\n\n class TestException(Exception):\n pass\n\n counter = []\n\n def failer():\n counter.append(1)\n raise TypeError()\n\n numDelays = 4\n delays = [datetime.timedelta() for _ in range(0, numDelays)]\n try:\n phlsys_tryloop.try_loop_delay(failer, delays, TestException)\n except TypeError:\n pass\n else:\n raise Exception(\"did not receive TypeError\")\n\n self.assertEqual(1, len(counter))\n\n def test_E_CallsOnException(self):\n\n fail_counter = []\n on_exception_counter = []\n\n class TestException(Exception):\n pass\n\n def failer():\n fail_counter.append(1)\n raise TestException()\n\n def on_exception(e, delay):\n print e\n if delay is not None:\n print delay.total_seconds()\n on_exception_counter.append(1)\n\n numDelays = 4\n delays = [datetime.timedelta() for _ in range(0, numDelays)]\n try:\n phlsys_tryloop.try_loop_delay(\n failer, delays, onException=on_exception)\n except TestException:\n pass\n else:\n raise Exception(\"did not receive TestException\")\n\n self.assertEqual(1 + numDelays, len(fail_counter))\n self.assertEqual(len(fail_counter), len(on_exception_counter))\n\n def test_F_ValidLongIncreasingEndlessRetry(self):\n # [ F] endless_retry makes many valid increasing delays\n delays = 
phlsys_tryloop.make_default_endless_retry()\n\n first_secs = None\n last_secs = None\n for i in itertools.islice(delays, 1000):\n secs = i.total_seconds()\n self.assertGreaterEqual(secs, 0)\n self.assertTrue(last_secs is None or secs >= last_secs)\n if first_secs is None:\n first_secs = secs\n last_secs = secs\n\n self.assertGreater(last_secs, first_secs)\n\n def test_G_ValidFiniteShortRetry(self):\n # [ G] short_retry makes a finite amount of valid delays\n is_empty = True\n for i in phlsys_tryloop.make_default_short_retry():\n is_empty = False\n secs = i.total_seconds()\n self.assertGreaterEqual(secs, 0)\n self.assertLess(secs, 3600) # one hour is definitely not short\n self.assertFalse(is_empty)\n\n\n# -----------------------------------------------------------------------------\n# Copyright (C) 2013-2014 Bloomberg Finance L.P.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n# ------------------------------ END-OF-FILE ----------------------------------\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import matplotlib as plt
import math
from DoublePendulum import DP #imports useful modules and double pendulum class from DoublePendulum.py
import json
import pandas as pd
import copy
from pathlib import Path
#accessing config file
with open('config.json') as config_file:
initdata = json.load(config_file)
#retrieving variables from config file
initMA = initdata['Mass A']
initMB = initdata['Mass B']
initLA = initdata['Length A']
initLB = initdata['Length B']
initAA = initdata['Angle A']
initAB = initdata['Angle B']
method = initdata['Method']
timeStep = initdata['Time Step']
nCycles = initdata['Number of Cycles']
# Setting Initial Conditions based on the config file
pend = DP(initMA,initMB,initLA,initLB,math.radians(initAA),math.radians(initAB),[0,0],[0,0],[0,0],[0,0],0,0,1,1,1,1,1,1,1)
pend.updCartesian()
pend.updEnergies()
data = []
time = 0
x1 = 0
x2 = 0
y1 = 0
y2 = 0
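# Integrator selection (per the config 'Method' field and the DP class's
# update methods): 1 = explicit Euler, 2 = Euler-Cromer (semi-implicit),
# 3 = Runge-Kutta (the order is not visible from this file).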
if method == 1:
for n in range(nCycles):
#print(n)
time += timeStep
pend.updEuler(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
print(p22)
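        # copy.deepcopy on these plain floats is effectively a no-op; it would
        # only matter if the pendulum attributes were mutable objects.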
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12), copy.deepcopy(p21), copy.deepcopy(p22)]
data.append(item)
elif method == 2:
for n in range(nCycles):
print(n)
time += timeStep
pend.updEulerCromer(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]
data.append(item)
elif method == 3:
for n in range(nCycles):
print(n)
time += timeStep
pend.updRungeKutta(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]
data.append(item)
else:
print('invalid method selection, update config file')
exit()
np.save(Path.cwd()/'datafile', data, allow_pickle=True)
print('data file saved')
|
normal
|
{
"blob_id": "c2b6e51622681ac916e860ed4ff5715808dff102",
"index": 9725,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\n<mask token>\npend.updCartesian()\npend.updEnergies()\n<mask token>\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"step-3": "<mask token>\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\ninitMA = initdata['Mass A']\ninitMB = initdata['Mass B']\ninitLA = initdata['Length A']\ninitLB = initdata['Length B']\ninitAA = initdata['Angle A']\ninitAB = initdata['Angle B']\nmethod = initdata['Method']\ntimeStep = initdata['Time Step']\nnCycles = initdata['Number of Cycles']\npend = DP(initMA, initMB, initLA, initLB, math.radians(initAA), math.\n radians(initAB), [0, 0], [0, 0], [0, 0], [0, 0], 0, 0, 1, 1, 1, 1, 1, 1, 1)\npend.updCartesian()\npend.updEnergies()\ndata = []\ntime = 0\nx1 = 0\nx2 = 0\ny1 = 0\ny2 = 0\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"step-4": "import numpy as np\nimport matplotlib as plt\nimport math\nfrom DoublePendulum import DP\nimport json\nimport pandas as pd\nimport copy\nfrom pathlib import Path\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\ninitMA = initdata['Mass A']\ninitMB = initdata['Mass B']\ninitLA = initdata['Length A']\ninitLB = initdata['Length B']\ninitAA = initdata['Angle A']\ninitAB = initdata['Angle B']\nmethod = initdata['Method']\ntimeStep = initdata['Time Step']\nnCycles = initdata['Number of Cycles']\npend = DP(initMA, initMB, initLA, initLB, math.radians(initAA), math.\n radians(initAB), [0, 0], [0, 0], [0, 0], [0, 0], 0, 0, 1, 1, 1, 1, 1, 1, 1)\npend.updCartesian()\npend.updEnergies()\ndata = []\ntime = 0\nx1 = 0\nx2 = 0\ny1 = 0\ny2 = 0\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"step-5": "import numpy as np \r\nimport matplotlib as plt\r\nimport math\r\nfrom DoublePendulum import DP #imports useful modules and double pendulum class from DoublePendulum.py\r\nimport json\r\nimport pandas as pd\r\nimport copy\r\nfrom pathlib import Path\r\n\r\n#accessing config file\r\nwith open('config.json') as config_file:\r\n initdata = json.load(config_file)\r\n\r\n#retrieving variables from config file\r\ninitMA = initdata['Mass A']\r\ninitMB = initdata['Mass B']\r\ninitLA = initdata['Length A']\r\ninitLB = initdata['Length B']\r\ninitAA = initdata['Angle A']\r\ninitAB = initdata['Angle B']\r\nmethod = initdata['Method']\r\ntimeStep = initdata['Time Step']\r\nnCycles = initdata['Number of Cycles']\r\n\r\n# Setting Initial Conditions based on the config file\r\npend = DP(initMA,initMB,initLA,initLB,math.radians(initAA),math.radians(initAB),[0,0],[0,0],[0,0],[0,0],0,0,1,1,1,1,1,1,1)\r\npend.updCartesian()\r\npend.updEnergies()\r\ndata = []\r\ntime = 0\r\nx1 = 0\r\nx2 = 0\r\ny1 = 0\r\ny2 = 0\r\n\r\nif method == 1:\r\n for n in range(nCycles):\r\n #print(n)\r\n time += timeStep\r\n pend.updEuler(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 = pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n print(p22)\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12), copy.deepcopy(p21), copy.deepcopy(p22)]\r\n data.append(item)\r\nelif method == 2:\r\n for n in range(nCycles):\r\n print(n)\r\n time += timeStep\r\n pend.updEulerCromer(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 = pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]\r\n data.append(item)\r\nelif method == 3:\r\n for n in range(nCycles):\r\n print(n)\r\n time += timeStep\r\n pend.updRungeKutta(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 = pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]\r\n data.append(item)\r\nelse:\r\n print('invalid method selection, update config file')\r\n exit()\r\n\r\nnp.save(Path.cwd()/'datafile', data, allow_pickle=True)\r\nprint('data file saved')\r\n\r\n\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django import forms
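# (value, label) pairs for the bet outcome; the form submits '1', 'x' or '2'.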
BET_CHOICES = (
('1', 'Will rise'),
('x', 'Will stay'),
('2', 'Will fall'),
)
class NormalBetForm(forms.Form):
song = forms.CharField()
data = forms.ChoiceField(BET_CHOICES)
|
normal
|
{
"blob_id": "2f6d51d5c14ddc1f6cd60ab9f3b5d4a879d14af0",
"index": 4590,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass NormalBetForm(forms.Form):\n song = forms.CharField()\n data = forms.ChoiceField(BET_CHOICES)\n",
"step-3": "<mask token>\nBET_CHOICES = ('1', 'Will rise'), ('x', 'Will stay'), ('2', 'Will fall')\n\n\nclass NormalBetForm(forms.Form):\n song = forms.CharField()\n data = forms.ChoiceField(BET_CHOICES)\n",
"step-4": "from django import forms\nBET_CHOICES = ('1', 'Will rise'), ('x', 'Will stay'), ('2', 'Will fall')\n\n\nclass NormalBetForm(forms.Form):\n song = forms.CharField()\n data = forms.ChoiceField(BET_CHOICES)\n",
"step-5": "from django import forms\n\n\nBET_CHOICES = (\n ('1', 'Will rise'),\n ('x', 'Will stay'),\n ('2', 'Will fall'),\n)\n\n\nclass NormalBetForm(forms.Form):\n song = forms.CharField()\n data = forms.ChoiceField(BET_CHOICES)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
# Benthic Parameters - USEPA OPP defaults from EXAMS
import numpy as np
from numba import njit  # the @njit-decorated solvers below depend on numba
benthic_params = {
"depth": 0.05, # benthic depth (m)
"porosity": 0.65, # benthic porosity
"bulk_density": 1, # bulk density, dry solid mass/total vol (g/cm3)
"froc": 0, # benthic organic carbon fraction
"doc": 5, # benthic dissolved organic carbon content (mg/L)
"bnmas": 0, # benthic biomass intensity (g/m2)
"d_over_dx": 1 # mass transfer coefficient for exchange between benthic and water column (m/s)
# (can be modified later if data exists)
}
def partition_benthic(reach, runoff, runoff_mass, erosion_mass):
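    # NOTE: the body references self.region and self.i, so this appears to have
    # been extracted from a class method; it will not run standalone until
    # those objects are supplied (or the function is re-attached to its class).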
from .parameters import soil, stream_channel, benthic
try:
reach = self.region.flow_file.fetch(reach)
q, v, l = reach.q, reach.v, reach.l
except AttributeError:
return None, None, (None, None)
mean_runoff = runoff.mean() # m3/d
baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates), where=(q > mean_runoff))
total_flow = runoff + baseflow
mixing_cell = 40. # meters
cross_section = total_flow / v
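    # Hydraulic geometry: channel width follows a power law in cross-sectional
    # area, width = a * A**b, with a and b taken from stream_channel.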
width = stream_channel.a * np.power(cross_section, stream_channel.b)
depth = cross_section / width
surface_area = width * l
volume = np.array([(depth * surface_area), # Water column
(benthic.depth * surface_area * benthic.porosity)]) # Benthic zone
# Compute concentration in runoff of runoff mass and erosion mass
runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.n_dates), where=(runoff != 0))
daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.zeros(self.i.n_dates),
where=(runoff_mass + erosion_mass > 0.0) & (mixing_cell > 0.0))
# Divide mass loading between water column and benthic zones
mass_input = np.vstack([runoff_mass + ((1. - soil.prben) * erosion_mass), # Water Column
soil.prben * erosion_mass]).T # Benthic
# Partition concentration into benthic and water column concentrations
# This needs to be cleaned up
# Compute benthic solute holding capacity
fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth, surface_area, self.i.koc)
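    # Lumped first-order dissipation rate: flushing through the mixing cell,
    # plus photolysis/hydrolysis and water-column degradation weighted by the
    # dissolved fraction fw1, plus benthic degradation weighted by (1 - fw1).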
k_adj = np.array((total_flow / mixing_cell) + (self.i.deg_photolysis + self.i.deg_hydrolysis) * fw1 + \
(self.i.deg_wc * fw1) + self.i.deg_benthic * (1 - fw1))
aqconc_avg_wb, daily_avg, daily_peak = \
concentration_loop(self.i.n_dates, daily_conc, k_adj, volume,
mass_input, fw1, fw2, omega, theta, self.i.deg_aqueous)
return map(lambda x: x * 1000000., (runoff_conc, aqconc_avg_wb, daily_avg, daily_peak))
@njit
def concentration_loop(n_dates, daily_concentration, k_adj, daily_volume, mass_input, fw1, fw2, omega, theta, deg_aq):
# Beginning day aquatic concentrations, considered Peak Aqueous Daily Conc in Water Column
daily_peak = np.zeros((2, n_dates))
daily_avg = np.zeros((2, n_dates))
aqconc_avg_wb = np.zeros(n_dates)
# Reset starting values
exp_k = np.exp(-k_adj)
aqconc_wb = 0
antecedent_mass = np.zeros(2) # mn
for day in range(daily_concentration.size):
# Add mass input to antecedent mass
daily_mass = antecedent_mass + mass_input[day]
# Convert to aqueous concentrations (peak) at beginning of day
# JCH - fw comes from solute_holding_capacity. Fraction going into each section. Should fw[0] + fw[1] = 1?
daily_peak[0, day] = daily_mass[0] * fw1[day] / daily_volume[day, 0]
daily_peak[1, day] = daily_mass[1] * fw2[day] / daily_volume[day, 1]
# Compute daily average concentration in the water body - when no Benthic layer considered
aqconc_wb += daily_concentration[day] # initial water body concentration for current time step
# Daily avg aq conc in water body, area under curve/t = Ci/k*(1-e^-k), NO benthic
aqconc_avg_wb[day] = aqconc_wb / k_adj[day] * (1 - exp_k[day])
# initial water body concentration for next time step
aqconc_wb *= exp_k[day]
# For simul diffeq soln: mn1,mn2,mavg1,mavg2 = new_aqconc1, new_aqconc2, aqconc_avg1[d], aqconc_avg2[d]
# Note: aqconc_avg1 and aqconc_avg2 are outputted - Daily avg aq conc in WC and Benthic regions
new_aqconc, wc_avg, benthic_avg = simultaneous_diffeq(k_adj[day], deg_aq, omega, theta[day], daily_peak[:, day])
daily_avg[0, day] = wc_avg
daily_avg[1, day] = benthic_avg
# Masses m1 and m2 after time step, t_end
antecedent_mass[0] = new_aqconc[0] / fw1[day] * daily_volume[day, 0]
antecedent_mass[1] = new_aqconc[1] / fw2[day] * daily_volume[day, 1]
return aqconc_avg_wb, daily_avg, daily_peak
@njit
def simultaneous_diffeq(gamma1, gamma2, omega, theta, daily_aq_peak):
"""
ANALYTICAL SOLUTION FOR THE TWO SIMULTANEOUS DIFFERENTIAL EQNS:
dm1/dt = Am1 + Bm2
dm2/dt = Em1 + Fm2
WITH INITIAL VALUES m1 AND m2 FOR m1 AND m2
mn1 IS OUTPUT VALUE FOR m1 AFTER TIME T
mn2 IS OUTPUT VALUE FOR m2 AFTER TIME T
mavg1 IS AVERAGE VALUE OF m1 OVER TIME T
"""
t_end = 86400. # seconds, time step of ONE DAY
m1, m2 = daily_aq_peak
# Calculate constants for simultaneous_diffeq: A,B,E,F
# This reduces the model equivalent parameters to the coefficients needed for solving simultaneous_diffeq
a = -gamma1 - omega * theta
b = omega * theta
e = omega
f = -gamma2 - omega
af = a + f
dif = 4 * ((f * a) - (b * e))
bbb = np.sqrt(af * af - dif)
root1 = (af + bbb) / 2.
root2 = (af - bbb) / 2.
dd = (root1 - a) / b
ee = (root2 - a) / b
ff = ee - dd
x1 = (ee * m1 - m2) / ff
y1 = (m2 - dd * m1) / ff
# Calculate new concentrations for next step
rt1 = root1 * t_end
rt2 = root2 * t_end
exrt1 = np.exp(rt1)
exrt2 = np.exp(rt2)
ccc = x1 * exrt1
ddd = y1 * exrt2
# values for m1 and m2 after time step t_end
mn = np.zeros(2)
mn[0] = ccc + ddd # Water column
mn[1] = dd * ccc + ee * ddd # Benthic
# AVERAGE DAILY CONCENTRATION SOLUTION: set up for daily average, but can be changed by adjusting time step
gx = x1 / root1
hx = y1 / root2
term1 = gx * exrt1 # term3 = -X1/root1*exp(root1*T1)
term2 = hx * exrt2 # term4 = -Y1/root2*exp(root2*T1
term3 = -gx
term4 = -hx
mavg_wc = (term1 + term2 + term3 + term4) / t_end # Water column
mavg_ben = (term1 * dd + term2 * ee + term3 * dd + term4 * ee) / t_end # Benthic
return mn, mavg_wc, mavg_ben
def solute_holding_capacity(depth, surface_area, koc):
"""Calculates Solute Holding capacities and mass transfer between water column and benthic regions"""
from .parameters import benthic, water_column
# Aqueous volumes in each region
vol1 = depth * surface_area # total volume in water column, approximately equal to water volume alone
vol2a = benthic.depth * surface_area # total benthic volume
vol2 = vol2a * benthic.porosity # total benthic pore water volume
# Default EXAMS conditions for partitioning
kow = koc / .35 # DEFAULT EXAMS CONDITION ON Kow p.35
kpdoc1 = kow * .074 # DEFAULT RELATION IN EXAMS (LITTORAL)
kpdoc2 = koc # DEFAULT RELATION IN EXAMS (BENTHIC) p.16 of EXAMS 2.98 (or is it Kow*.46 ?)
xkpb = 0.436 * kow ** .907 # DEFAULT RELATION IN EXAMS
# mass in littoral region
vol1a = depth[0] * surface_area # initial volume corresponding with suspended matter reference
m_sed_1 = water_column.sused * vol1a * .001 # SEDIMENT MASS LITTORAL
m_bio_1 = water_column.plmas * vol1a * .001 # BIOLOGICAL MASS LITTORAL
m_doc_1 = water_column.doc * vol1a * .001 # DOC MASS LITTORAL
# partitioning coefficients of individual media
kd_sed_1 = koc * water_column.froc * .001 # Kd of sediment in littoral [m3/kg]
kd_sed_2 = koc * benthic.froc * .001 # Kd of sediment in benthic
kd_bio = xkpb / 1000. # Kd of biological organisms
kd_doc_1 = kpdoc1 / 1000. # Kd of DOC in littoral region
kd_doc_2 = kpdoc2 / 1000. # Kd of DOC in benthic region
# mass in benthic region
m_sed_2 = benthic.bulk_density * vol2a * 1000. # as defined by EXAMS parameters m_sed_2 = BULKD/PCTWA*VOL2*100000.
m_bio_2 = benthic.bnmas * surface_area * .001
m_doc_2 = benthic.doc * vol2 * .001
# solute holding capacity in regions 1 and 2
capacity_1 = kd_sed_1 * m_sed_1 + kd_bio * m_bio_1 + kd_doc_1 * m_doc_1 + vol1
capacity_2 = kd_sed_2 * m_sed_2 + kd_bio * m_bio_2 + kd_doc_2 * m_doc_2 + vol2
# Fraction going to water column and benthic
fw1 = vol1 / capacity_1 # fw1 is daily, vol1 is daily
fw2 = vol2 / capacity_2
theta = capacity_2 / capacity_1
sed_conv_factor = vol2 / fw2 / m_sed_2 # converts pore water to [Total Conc normalized to sed mass]
# Omega mass transfer - Calculates littoral to benthic mass transfer coefficient
omega = benthic.d_over_dx / benthic.depth # (m3/hr)/(3600 s/hr)
return fw1, fw2, theta, sed_conv_factor, omega
|
normal
|
{
"blob_id": "5890525b16b42578ac06e7ab2170c5613feea0a5",
"index": 6494,
"step-1": "<mask token>\n\n\ndef partition_benthic(reach, runoff, runoff_mass, erosion_mass):\n from .parameters import soil, stream_channel, benthic\n try:\n reach = self.region.flow_file.fetch(reach)\n q, v, l = reach.q, reach.v, reach.l\n except AttributeError:\n return None, None, (None, None)\n mean_runoff = runoff.mean()\n baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates),\n where=q > mean_runoff)\n total_flow = runoff + baseflow\n mixing_cell = 40.0\n cross_section = total_flow / v\n width = stream_channel.a * np.power(cross_section, stream_channel.b)\n depth = cross_section / width\n surface_area = width * l\n volume = np.array([depth * surface_area, benthic.depth * surface_area *\n benthic.porosity])\n runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.\n n_dates), where=runoff != 0)\n daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.\n zeros(self.i.n_dates), where=(runoff_mass + erosion_mass > 0.0) & (\n mixing_cell > 0.0))\n mass_input = np.vstack([runoff_mass + (1.0 - soil.prben) * erosion_mass,\n soil.prben * erosion_mass]).T\n fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth,\n surface_area, self.i.koc)\n k_adj = np.array(total_flow / mixing_cell + (self.i.deg_photolysis +\n self.i.deg_hydrolysis) * fw1 + self.i.deg_wc * fw1 + self.i.\n deg_benthic * (1 - fw1))\n aqconc_avg_wb, daily_avg, daily_peak = concentration_loop(self.i.\n n_dates, daily_conc, k_adj, volume, mass_input, fw1, fw2, omega,\n theta, self.i.deg_aqueous)\n return map(lambda x: x * 1000000.0, (runoff_conc, aqconc_avg_wb,\n daily_avg, daily_peak))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef partition_benthic(reach, runoff, runoff_mass, erosion_mass):\n from .parameters import soil, stream_channel, benthic\n try:\n reach = self.region.flow_file.fetch(reach)\n q, v, l = reach.q, reach.v, reach.l\n except AttributeError:\n return None, None, (None, None)\n mean_runoff = runoff.mean()\n baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates),\n where=q > mean_runoff)\n total_flow = runoff + baseflow\n mixing_cell = 40.0\n cross_section = total_flow / v\n width = stream_channel.a * np.power(cross_section, stream_channel.b)\n depth = cross_section / width\n surface_area = width * l\n volume = np.array([depth * surface_area, benthic.depth * surface_area *\n benthic.porosity])\n runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.\n n_dates), where=runoff != 0)\n daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.\n zeros(self.i.n_dates), where=(runoff_mass + erosion_mass > 0.0) & (\n mixing_cell > 0.0))\n mass_input = np.vstack([runoff_mass + (1.0 - soil.prben) * erosion_mass,\n soil.prben * erosion_mass]).T\n fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth,\n surface_area, self.i.koc)\n k_adj = np.array(total_flow / mixing_cell + (self.i.deg_photolysis +\n self.i.deg_hydrolysis) * fw1 + self.i.deg_wc * fw1 + self.i.\n deg_benthic * (1 - fw1))\n aqconc_avg_wb, daily_avg, daily_peak = concentration_loop(self.i.\n n_dates, daily_conc, k_adj, volume, mass_input, fw1, fw2, omega,\n theta, self.i.deg_aqueous)\n return map(lambda x: x * 1000000.0, (runoff_conc, aqconc_avg_wb,\n daily_avg, daily_peak))\n\n\n@njit\ndef concentration_loop(n_dates, daily_concentration, k_adj, daily_volume,\n mass_input, fw1, fw2, omega, theta, deg_aq):\n daily_peak = np.zeros((2, n_dates))\n daily_avg = np.zeros((2, n_dates))\n aqconc_avg_wb = np.zeros(n_dates)\n exp_k = np.exp(-k_adj)\n aqconc_wb = 0\n antecedent_mass = np.zeros(2)\n for day in range(daily_concentration.size):\n daily_mass = antecedent_mass + mass_input[day]\n daily_peak[0, day] = daily_mass[0] * fw1[day] / daily_volume[day, 0]\n daily_peak[1, day] = daily_mass[1] * fw2[day] / daily_volume[day, 1]\n aqconc_wb += daily_concentration[day]\n aqconc_avg_wb[day] = aqconc_wb / k_adj[day] * (1 - exp_k[day])\n aqconc_wb *= exp_k[day]\n new_aqconc, wc_avg, benthic_avg = simultaneous_diffeq(k_adj[day],\n deg_aq, omega, theta[day], daily_peak[:, day])\n daily_avg[0, day] = wc_avg\n daily_avg[1, day] = benthic_avg\n antecedent_mass[0] = new_aqconc[0] / fw1[day] * daily_volume[day, 0]\n antecedent_mass[1] = new_aqconc[1] / fw2[day] * daily_volume[day, 1]\n return aqconc_avg_wb, daily_avg, daily_peak\n\n\n<mask token>\n\n\ndef solute_holding_capacity(depth, surface_area, koc):\n \"\"\"Calculates Solute Holding capacities and mass transfer between water column and benthic regions\"\"\"\n from .parameters import benthic, water_column\n vol1 = depth * surface_area\n vol2a = benthic.depth * surface_area\n vol2 = vol2a * benthic.porosity\n kow = koc / 0.35\n kpdoc1 = kow * 0.074\n kpdoc2 = koc\n xkpb = 0.436 * kow ** 0.907\n vol1a = depth[0] * surface_area\n m_sed_1 = water_column.sused * vol1a * 0.001\n m_bio_1 = water_column.plmas * vol1a * 0.001\n m_doc_1 = water_column.doc * vol1a * 0.001\n kd_sed_1 = koc * water_column.froc * 0.001\n kd_sed_2 = koc * benthic.froc * 0.001\n kd_bio = xkpb / 1000.0\n kd_doc_1 = kpdoc1 / 1000.0\n kd_doc_2 = kpdoc2 / 1000.0\n m_sed_2 = benthic.bulk_density * vol2a * 1000.0\n m_bio_2 = benthic.bnmas * surface_area * 0.001\n 
m_doc_2 = benthic.doc * vol2 * 0.001\n capacity_1 = (kd_sed_1 * m_sed_1 + kd_bio * m_bio_1 + kd_doc_1 *\n m_doc_1 + vol1)\n capacity_2 = (kd_sed_2 * m_sed_2 + kd_bio * m_bio_2 + kd_doc_2 *\n m_doc_2 + vol2)\n fw1 = vol1 / capacity_1\n fw2 = vol2 / capacity_2\n theta = capacity_2 / capacity_1\n sed_conv_factor = vol2 / fw2 / m_sed_2\n omega = benthic.d_over_dx / benthic.depth\n return fw1, fw2, theta, sed_conv_factor, omega\n",
"step-3": "<mask token>\n\n\ndef partition_benthic(reach, runoff, runoff_mass, erosion_mass):\n from .parameters import soil, stream_channel, benthic\n try:\n reach = self.region.flow_file.fetch(reach)\n q, v, l = reach.q, reach.v, reach.l\n except AttributeError:\n return None, None, (None, None)\n mean_runoff = runoff.mean()\n baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates),\n where=q > mean_runoff)\n total_flow = runoff + baseflow\n mixing_cell = 40.0\n cross_section = total_flow / v\n width = stream_channel.a * np.power(cross_section, stream_channel.b)\n depth = cross_section / width\n surface_area = width * l\n volume = np.array([depth * surface_area, benthic.depth * surface_area *\n benthic.porosity])\n runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.\n n_dates), where=runoff != 0)\n daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.\n zeros(self.i.n_dates), where=(runoff_mass + erosion_mass > 0.0) & (\n mixing_cell > 0.0))\n mass_input = np.vstack([runoff_mass + (1.0 - soil.prben) * erosion_mass,\n soil.prben * erosion_mass]).T\n fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth,\n surface_area, self.i.koc)\n k_adj = np.array(total_flow / mixing_cell + (self.i.deg_photolysis +\n self.i.deg_hydrolysis) * fw1 + self.i.deg_wc * fw1 + self.i.\n deg_benthic * (1 - fw1))\n aqconc_avg_wb, daily_avg, daily_peak = concentration_loop(self.i.\n n_dates, daily_conc, k_adj, volume, mass_input, fw1, fw2, omega,\n theta, self.i.deg_aqueous)\n return map(lambda x: x * 1000000.0, (runoff_conc, aqconc_avg_wb,\n daily_avg, daily_peak))\n\n\n@njit\ndef concentration_loop(n_dates, daily_concentration, k_adj, daily_volume,\n mass_input, fw1, fw2, omega, theta, deg_aq):\n daily_peak = np.zeros((2, n_dates))\n daily_avg = np.zeros((2, n_dates))\n aqconc_avg_wb = np.zeros(n_dates)\n exp_k = np.exp(-k_adj)\n aqconc_wb = 0\n antecedent_mass = np.zeros(2)\n for day in range(daily_concentration.size):\n daily_mass = antecedent_mass + mass_input[day]\n daily_peak[0, day] = daily_mass[0] * fw1[day] / daily_volume[day, 0]\n daily_peak[1, day] = daily_mass[1] * fw2[day] / daily_volume[day, 1]\n aqconc_wb += daily_concentration[day]\n aqconc_avg_wb[day] = aqconc_wb / k_adj[day] * (1 - exp_k[day])\n aqconc_wb *= exp_k[day]\n new_aqconc, wc_avg, benthic_avg = simultaneous_diffeq(k_adj[day],\n deg_aq, omega, theta[day], daily_peak[:, day])\n daily_avg[0, day] = wc_avg\n daily_avg[1, day] = benthic_avg\n antecedent_mass[0] = new_aqconc[0] / fw1[day] * daily_volume[day, 0]\n antecedent_mass[1] = new_aqconc[1] / fw2[day] * daily_volume[day, 1]\n return aqconc_avg_wb, daily_avg, daily_peak\n\n\n@njit\ndef simultaneous_diffeq(gamma1, gamma2, omega, theta, daily_aq_peak):\n \"\"\"\n ANALYTICAL SOLUTION FOR THE TWO SIMULTANEOUS DIFFERENTIAL EQNS:\n dm1/dt = Am1 + Bm2\n dm2/dt = Em1 + Fm2\n WITH INITIAL VALUES m1 AND m2 FOR m1 AND m2\n mn1 IS OUTPUT VALUE FOR m1 AFTER TIME T\n mn2 IS OUTPUT VALUE FOR m2 AFTER TIME T\n mavg1 IS AVERAGE VALUE OF m1 OVER TIME T\n \"\"\"\n t_end = 86400.0\n m1, m2 = daily_aq_peak\n a = -gamma1 - omega * theta\n b = omega * theta\n e = omega\n f = -gamma2 - omega\n af = a + f\n dif = 4 * (f * a - b * e)\n bbb = np.sqrt(af * af - dif)\n root1 = (af + bbb) / 2.0\n root2 = (af - bbb) / 2.0\n dd = (root1 - a) / b\n ee = (root2 - a) / b\n ff = ee - dd\n x1 = (ee * m1 - m2) / ff\n y1 = (m2 - dd * m1) / ff\n rt1 = root1 * t_end\n rt2 = root2 * t_end\n exrt1 = np.exp(rt1)\n exrt2 = np.exp(rt2)\n ccc = x1 * exrt1\n ddd = y1 * 
exrt2\n mn = np.zeros(2)\n mn[0] = ccc + ddd\n mn[1] = dd * ccc + ee * ddd\n gx = x1 / root1\n hx = y1 / root2\n term1 = gx * exrt1\n term2 = hx * exrt2\n term3 = -gx\n term4 = -hx\n mavg_wc = (term1 + term2 + term3 + term4) / t_end\n mavg_ben = (term1 * dd + term2 * ee + term3 * dd + term4 * ee) / t_end\n return mn, mavg_wc, mavg_ben\n\n\ndef solute_holding_capacity(depth, surface_area, koc):\n \"\"\"Calculates Solute Holding capacities and mass transfer between water column and benthic regions\"\"\"\n from .parameters import benthic, water_column\n vol1 = depth * surface_area\n vol2a = benthic.depth * surface_area\n vol2 = vol2a * benthic.porosity\n kow = koc / 0.35\n kpdoc1 = kow * 0.074\n kpdoc2 = koc\n xkpb = 0.436 * kow ** 0.907\n vol1a = depth[0] * surface_area\n m_sed_1 = water_column.sused * vol1a * 0.001\n m_bio_1 = water_column.plmas * vol1a * 0.001\n m_doc_1 = water_column.doc * vol1a * 0.001\n kd_sed_1 = koc * water_column.froc * 0.001\n kd_sed_2 = koc * benthic.froc * 0.001\n kd_bio = xkpb / 1000.0\n kd_doc_1 = kpdoc1 / 1000.0\n kd_doc_2 = kpdoc2 / 1000.0\n m_sed_2 = benthic.bulk_density * vol2a * 1000.0\n m_bio_2 = benthic.bnmas * surface_area * 0.001\n m_doc_2 = benthic.doc * vol2 * 0.001\n capacity_1 = (kd_sed_1 * m_sed_1 + kd_bio * m_bio_1 + kd_doc_1 *\n m_doc_1 + vol1)\n capacity_2 = (kd_sed_2 * m_sed_2 + kd_bio * m_bio_2 + kd_doc_2 *\n m_doc_2 + vol2)\n fw1 = vol1 / capacity_1\n fw2 = vol2 / capacity_2\n theta = capacity_2 / capacity_1\n sed_conv_factor = vol2 / fw2 / m_sed_2\n omega = benthic.d_over_dx / benthic.depth\n return fw1, fw2, theta, sed_conv_factor, omega\n",
"step-4": "benthic_params = {'depth': 0.05, 'porosity': 0.65, 'bulk_density': 1,\n 'froc': 0, 'doc': 5, 'bnmas': 0, 'd_over_dx': 1}\n\n\ndef partition_benthic(reach, runoff, runoff_mass, erosion_mass):\n from .parameters import soil, stream_channel, benthic\n try:\n reach = self.region.flow_file.fetch(reach)\n q, v, l = reach.q, reach.v, reach.l\n except AttributeError:\n return None, None, (None, None)\n mean_runoff = runoff.mean()\n baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates),\n where=q > mean_runoff)\n total_flow = runoff + baseflow\n mixing_cell = 40.0\n cross_section = total_flow / v\n width = stream_channel.a * np.power(cross_section, stream_channel.b)\n depth = cross_section / width\n surface_area = width * l\n volume = np.array([depth * surface_area, benthic.depth * surface_area *\n benthic.porosity])\n runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.\n n_dates), where=runoff != 0)\n daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.\n zeros(self.i.n_dates), where=(runoff_mass + erosion_mass > 0.0) & (\n mixing_cell > 0.0))\n mass_input = np.vstack([runoff_mass + (1.0 - soil.prben) * erosion_mass,\n soil.prben * erosion_mass]).T\n fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth,\n surface_area, self.i.koc)\n k_adj = np.array(total_flow / mixing_cell + (self.i.deg_photolysis +\n self.i.deg_hydrolysis) * fw1 + self.i.deg_wc * fw1 + self.i.\n deg_benthic * (1 - fw1))\n aqconc_avg_wb, daily_avg, daily_peak = concentration_loop(self.i.\n n_dates, daily_conc, k_adj, volume, mass_input, fw1, fw2, omega,\n theta, self.i.deg_aqueous)\n return map(lambda x: x * 1000000.0, (runoff_conc, aqconc_avg_wb,\n daily_avg, daily_peak))\n\n\n@njit\ndef concentration_loop(n_dates, daily_concentration, k_adj, daily_volume,\n mass_input, fw1, fw2, omega, theta, deg_aq):\n daily_peak = np.zeros((2, n_dates))\n daily_avg = np.zeros((2, n_dates))\n aqconc_avg_wb = np.zeros(n_dates)\n exp_k = np.exp(-k_adj)\n aqconc_wb = 0\n antecedent_mass = np.zeros(2)\n for day in range(daily_concentration.size):\n daily_mass = antecedent_mass + mass_input[day]\n daily_peak[0, day] = daily_mass[0] * fw1[day] / daily_volume[day, 0]\n daily_peak[1, day] = daily_mass[1] * fw2[day] / daily_volume[day, 1]\n aqconc_wb += daily_concentration[day]\n aqconc_avg_wb[day] = aqconc_wb / k_adj[day] * (1 - exp_k[day])\n aqconc_wb *= exp_k[day]\n new_aqconc, wc_avg, benthic_avg = simultaneous_diffeq(k_adj[day],\n deg_aq, omega, theta[day], daily_peak[:, day])\n daily_avg[0, day] = wc_avg\n daily_avg[1, day] = benthic_avg\n antecedent_mass[0] = new_aqconc[0] / fw1[day] * daily_volume[day, 0]\n antecedent_mass[1] = new_aqconc[1] / fw2[day] * daily_volume[day, 1]\n return aqconc_avg_wb, daily_avg, daily_peak\n\n\n@njit\ndef simultaneous_diffeq(gamma1, gamma2, omega, theta, daily_aq_peak):\n \"\"\"\n ANALYTICAL SOLUTION FOR THE TWO SIMULTANEOUS DIFFERENTIAL EQNS:\n dm1/dt = Am1 + Bm2\n dm2/dt = Em1 + Fm2\n WITH INITIAL VALUES m1 AND m2 FOR m1 AND m2\n mn1 IS OUTPUT VALUE FOR m1 AFTER TIME T\n mn2 IS OUTPUT VALUE FOR m2 AFTER TIME T\n mavg1 IS AVERAGE VALUE OF m1 OVER TIME T\n \"\"\"\n t_end = 86400.0\n m1, m2 = daily_aq_peak\n a = -gamma1 - omega * theta\n b = omega * theta\n e = omega\n f = -gamma2 - omega\n af = a + f\n dif = 4 * (f * a - b * e)\n bbb = np.sqrt(af * af - dif)\n root1 = (af + bbb) / 2.0\n root2 = (af - bbb) / 2.0\n dd = (root1 - a) / b\n ee = (root2 - a) / b\n ff = ee - dd\n x1 = (ee * m1 - m2) / ff\n y1 = (m2 - dd * m1) / ff\n rt1 = root1 
* t_end\n rt2 = root2 * t_end\n exrt1 = np.exp(rt1)\n exrt2 = np.exp(rt2)\n ccc = x1 * exrt1\n ddd = y1 * exrt2\n mn = np.zeros(2)\n mn[0] = ccc + ddd\n mn[1] = dd * ccc + ee * ddd\n gx = x1 / root1\n hx = y1 / root2\n term1 = gx * exrt1\n term2 = hx * exrt2\n term3 = -gx\n term4 = -hx\n mavg_wc = (term1 + term2 + term3 + term4) / t_end\n mavg_ben = (term1 * dd + term2 * ee + term3 * dd + term4 * ee) / t_end\n return mn, mavg_wc, mavg_ben\n\n\ndef solute_holding_capacity(depth, surface_area, koc):\n \"\"\"Calculates Solute Holding capacities and mass transfer between water column and benthic regions\"\"\"\n from .parameters import benthic, water_column\n vol1 = depth * surface_area\n vol2a = benthic.depth * surface_area\n vol2 = vol2a * benthic.porosity\n kow = koc / 0.35\n kpdoc1 = kow * 0.074\n kpdoc2 = koc\n xkpb = 0.436 * kow ** 0.907\n vol1a = depth[0] * surface_area\n m_sed_1 = water_column.sused * vol1a * 0.001\n m_bio_1 = water_column.plmas * vol1a * 0.001\n m_doc_1 = water_column.doc * vol1a * 0.001\n kd_sed_1 = koc * water_column.froc * 0.001\n kd_sed_2 = koc * benthic.froc * 0.001\n kd_bio = xkpb / 1000.0\n kd_doc_1 = kpdoc1 / 1000.0\n kd_doc_2 = kpdoc2 / 1000.0\n m_sed_2 = benthic.bulk_density * vol2a * 1000.0\n m_bio_2 = benthic.bnmas * surface_area * 0.001\n m_doc_2 = benthic.doc * vol2 * 0.001\n capacity_1 = (kd_sed_1 * m_sed_1 + kd_bio * m_bio_1 + kd_doc_1 *\n m_doc_1 + vol1)\n capacity_2 = (kd_sed_2 * m_sed_2 + kd_bio * m_bio_2 + kd_doc_2 *\n m_doc_2 + vol2)\n fw1 = vol1 / capacity_1\n fw2 = vol2 / capacity_2\n theta = capacity_2 / capacity_1\n sed_conv_factor = vol2 / fw2 / m_sed_2\n omega = benthic.d_over_dx / benthic.depth\n return fw1, fw2, theta, sed_conv_factor, omega\n",
"step-5": "# Benthic Parameters - USEPA OPP defaults from EXAMS\nbenthic_params = {\n \"depth\": 0.05, # benthic depth (m)\n \"porosity\": 0.65, # benthic porosity\n \"bulk_density\": 1, # bulk density, dry solid mass/total vol (g/cm3)\n \"froc\": 0, # benthic organic carbon fraction\n \"doc\": 5, # benthic dissolved organic carbon content (mg/L)\n \"bnmas\": 0, # benthic biomass intensity (g/m2)\n \"d_over_dx\": 1 # mass transfer coefficient for exchange between benthic and water column (m/s)\n # (can be modified later if data exists)\n}\ndef partition_benthic(reach, runoff, runoff_mass, erosion_mass):\n from .parameters import soil, stream_channel, benthic\n\n try:\n reach = self.region.flow_file.fetch(reach)\n q, v, l = reach.q, reach.v, reach.l\n except AttributeError:\n return None, None, (None, None)\n\n mean_runoff = runoff.mean() # m3/d\n baseflow = np.subtract(q, mean_runoff, out=np.zeros(self.i.n_dates), where=(q > mean_runoff))\n total_flow = runoff + baseflow\n mixing_cell = 40. # meters\n cross_section = total_flow / v\n width = stream_channel.a * np.power(cross_section, stream_channel.b)\n depth = cross_section / width\n surface_area = width * l\n volume = np.array([(depth * surface_area), # Water column\n (benthic.depth * surface_area * benthic.porosity)]) # Benthic zone\n\n # Compute concentration in runoff of runoff mass and erosion mass\n runoff_conc = np.divide(runoff_mass, runoff, out=np.zeros(self.i.n_dates), where=(runoff != 0))\n daily_conc = np.divide(runoff_mass + erosion_mass, mixing_cell, out=np.zeros(self.i.n_dates),\n where=(runoff_mass + erosion_mass > 0.0) & (mixing_cell > 0.0))\n\n # Divide mass loading between water column and benthic zones\n mass_input = np.vstack([runoff_mass + ((1. - soil.prben) * erosion_mass), # Water Column\n soil.prben * erosion_mass]).T # Benthic\n # Partition concentration into benthic and water column concentrations\n # This needs to be cleaned up\n # Compute benthic solute holding capacity\n fw1, fw2, theta, sed_conv_factor, omega = solute_holding_capacity(depth, surface_area, self.i.koc)\n\n k_adj = np.array((total_flow / mixing_cell) + (self.i.deg_photolysis + self.i.deg_hydrolysis) * fw1 + \\\n (self.i.deg_wc * fw1) + self.i.deg_benthic * (1 - fw1))\n\n aqconc_avg_wb, daily_avg, daily_peak = \\\n concentration_loop(self.i.n_dates, daily_conc, k_adj, volume,\n mass_input, fw1, fw2, omega, theta, self.i.deg_aqueous)\n\n return map(lambda x: x * 1000000., (runoff_conc, aqconc_avg_wb, daily_avg, daily_peak))\n\n@njit\ndef concentration_loop(n_dates, daily_concentration, k_adj, daily_volume, mass_input, fw1, fw2, omega, theta, deg_aq):\n # Beginning day aquatic concentrations, considered Peak Aqueous Daily Conc in Water Column\n daily_peak = np.zeros((2, n_dates))\n daily_avg = np.zeros((2, n_dates))\n aqconc_avg_wb = np.zeros(n_dates)\n\n # Reset starting values\n exp_k = np.exp(-k_adj)\n aqconc_wb = 0\n antecedent_mass = np.zeros(2) # mn\n\n for day in range(daily_concentration.size):\n # Add mass input to antecedent mass\n daily_mass = antecedent_mass + mass_input[day]\n\n # Convert to aqueous concentrations (peak) at beginning of day\n # JCH - fw comes from solute_holding_capacity. Fraction going into each section. 
Should fw[0] + fw[1] = 1?\n daily_peak[0, day] = daily_mass[0] * fw1[day] / daily_volume[day, 0]\n daily_peak[1, day] = daily_mass[1] * fw2[day] / daily_volume[day, 1]\n\n # Compute daily average concentration in the water body - when no Benthic layer considered\n aqconc_wb += daily_concentration[day] # initial water body concentration for current time step\n\n # Daily avg aq conc in water body, area under curve/t = Ci/k*(1-e^-k), NO benthic\n aqconc_avg_wb[day] = aqconc_wb / k_adj[day] * (1 - exp_k[day])\n\n # initial water body concentration for next time step\n aqconc_wb *= exp_k[day]\n\n # For simul diffeq soln: mn1,mn2,mavg1,mavg2 = new_aqconc1, new_aqconc2, aqconc_avg1[d], aqconc_avg2[d]\n # Note: aqconc_avg1 and aqconc_avg2 are outputted - Daily avg aq conc in WC and Benthic regions\n new_aqconc, wc_avg, benthic_avg = simultaneous_diffeq(k_adj[day], deg_aq, omega, theta[day], daily_peak[:, day])\n daily_avg[0, day] = wc_avg\n daily_avg[1, day] = benthic_avg\n\n # Masses m1 and m2 after time step, t_end\n antecedent_mass[0] = new_aqconc[0] / fw1[day] * daily_volume[day, 0]\n antecedent_mass[1] = new_aqconc[1] / fw2[day] * daily_volume[day, 1]\n\n return aqconc_avg_wb, daily_avg, daily_peak\n@njit\ndef simultaneous_diffeq(gamma1, gamma2, omega, theta, daily_aq_peak):\n \"\"\"\n ANALYTICAL SOLUTION FOR THE TWO SIMULTANEOUS DIFFERENTIAL EQNS:\n dm1/dt = Am1 + Bm2\n dm2/dt = Em1 + Fm2\n WITH INITIAL VALUES m1 AND m2 FOR m1 AND m2\n mn1 IS OUTPUT VALUE FOR m1 AFTER TIME T\n mn2 IS OUTPUT VALUE FOR m2 AFTER TIME T\n mavg1 IS AVERAGE VALUE OF m1 OVER TIME T\n \"\"\"\n\n t_end = 86400. # seconds, time step of ONE DAY\n m1, m2 = daily_aq_peak\n\n # Calculate constants for simultaneous_diffeq: A,B,E,F\n # This reduces the model equivalent parameters to the coefficients needed for solving simultaneous_diffeq\n a = -gamma1 - omega * theta\n b = omega * theta\n e = omega\n f = -gamma2 - omega\n\n af = a + f\n dif = 4 * ((f * a) - (b * e))\n bbb = np.sqrt(af * af - dif)\n\n root1 = (af + bbb) / 2.\n root2 = (af - bbb) / 2.\n\n dd = (root1 - a) / b\n ee = (root2 - a) / b\n ff = ee - dd\n x1 = (ee * m1 - m2) / ff\n y1 = (m2 - dd * m1) / ff\n\n # Calculate new concentrations for next step\n rt1 = root1 * t_end\n rt2 = root2 * t_end\n exrt1 = np.exp(rt1)\n exrt2 = np.exp(rt2)\n ccc = x1 * exrt1\n ddd = y1 * exrt2\n\n # values for m1 and m2 after time step t_end\n mn = np.zeros(2)\n mn[0] = ccc + ddd # Water column\n mn[1] = dd * ccc + ee * ddd # Benthic\n\n # AVERAGE DAILY CONCENTRATION SOLUTION: set up for daily average, but can be changed by adjusting time step\n gx = x1 / root1\n hx = y1 / root2\n\n term1 = gx * exrt1 # term3 = -X1/root1*exp(root1*T1)\n term2 = hx * exrt2 # term4 = -Y1/root2*exp(root2*T1\n term3 = -gx\n term4 = -hx\n\n mavg_wc = (term1 + term2 + term3 + term4) / t_end # Water column\n mavg_ben = (term1 * dd + term2 * ee + term3 * dd + term4 * ee) / t_end # Benthic\n\n return mn, mavg_wc, mavg_ben\n\n\ndef solute_holding_capacity(depth, surface_area, koc):\n \"\"\"Calculates Solute Holding capacities and mass transfer between water column and benthic regions\"\"\"\n\n from .parameters import benthic, water_column\n\n # Aqueous volumes in each region\n vol1 = depth * surface_area # total volume in water column, approximately equal to water volume alone\n vol2a = benthic.depth * surface_area # total benthic volume\n vol2 = vol2a * benthic.porosity # total benthic pore water volume\n\n # Default EXAMS conditions for partitioning\n kow = koc / .35 # DEFAULT EXAMS CONDITION ON Kow p.35\n 
kpdoc1 = kow * .074 # DEFAULT RELATION IN EXAMS (LITTORAL)\n kpdoc2 = koc # DEFAULT RELATION IN EXAMS (BENTHIC) p.16 of EXAMS 2.98 (or is it Kow*.46 ?)\n xkpb = 0.436 * kow ** .907 # DEFAULT RELATION IN EXAMS\n\n # mass in littoral region\n vol1a = depth[0] * surface_area # initial volume corresponding with suspended matter reference\n m_sed_1 = water_column.sused * vol1a * .001 # SEDIMENT MASS LITTORAL\n m_bio_1 = water_column.plmas * vol1a * .001 # BIOLOGICAL MASS LITTORAL\n m_doc_1 = water_column.doc * vol1a * .001 # DOC MASS LITTORAL\n\n # partitioning coefficients of individual media\n kd_sed_1 = koc * water_column.froc * .001 # Kd of sediment in littoral [m3/kg]\n kd_sed_2 = koc * benthic.froc * .001 # Kd of sediment in benthic\n kd_bio = xkpb / 1000. # Kd of biological organisms\n kd_doc_1 = kpdoc1 / 1000. # Kd of DOC in littoral region\n kd_doc_2 = kpdoc2 / 1000. # Kd of DOC in benthic region\n\n # mass in benthic region\n m_sed_2 = benthic.bulk_density * vol2a * 1000. # as defined by EXAMS parameters m_sed_2 = BULKD/PCTWA*VOL2*100000.\n m_bio_2 = benthic.bnmas * surface_area * .001\n m_doc_2 = benthic.doc * vol2 * .001\n\n # solute holding capacity in regions 1 and 2\n capacity_1 = kd_sed_1 * m_sed_1 + kd_bio * m_bio_1 + kd_doc_1 * m_doc_1 + vol1\n capacity_2 = kd_sed_2 * m_sed_2 + kd_bio * m_bio_2 + kd_doc_2 * m_doc_2 + vol2\n\n # Fraction going to water column and benthic\n fw1 = vol1 / capacity_1 # fw1 is daily, vol1 is daily\n fw2 = vol2 / capacity_2\n\n theta = capacity_2 / capacity_1\n\n sed_conv_factor = vol2 / fw2 / m_sed_2 # converts pore water to [Total Conc normalized to sed mass]\n\n # Omega mass transfer - Calculates littoral to benthic mass transfer coefficient\n omega = benthic.d_over_dx / benthic.depth # (m3/hr)/(3600 s/hr)\n\n return fw1, fw2, theta, sed_conv_factor, omega\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def modify(nyt_url, jh_url):
try:
nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',
'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})
nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')
except:
alert = 'Error with NYT link'
notification.send_sns(alert)
print(alert)
try:
jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',
'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'
).dropna()
jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)
jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')
except:
alert = 'Error with JH link'
notification.send_sns(alert)
print(alert)
try:
jh_us_filter = jh_df[jh_df.Country == 'US']
covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')
).dropna()
covid_df.reset_index(inplace=True)
covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')
return covid_df
except:
alert = 'Error joining data'
notification.send_sns(alert)
print(alert)
<|reserved_special_token_1|>
import pandas as pd
import notification
def modify(nyt_url, jh_url):
try:
nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',
'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})
nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')
except:
alert = 'Error with NYT link'
notification.send_sns(alert)
print(alert)
try:
jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',
'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'
).dropna()
jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)
jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')
except:
alert = 'Error with JH link'
notification.send_sns(alert)
print(alert)
try:
jh_us_filter = jh_df[jh_df.Country == 'US']
covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')
).dropna()
covid_df.reset_index(inplace=True)
covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')
return covid_df
except:
alert = 'Error joining data'
notification.send_sns(alert)
print(alert)
<|reserved_special_token_1|>
import pandas as pd
import notification
def modify(nyt_url, jh_url):
# read data from both sources into a dataframe
# remove unwanted data, formats, and filters
# join dataframes on index
try:
nyt_df = pd.read_csv(nyt_url,
header=0,
names=['Date', 'Cases', 'Deaths'],
dtype={'Cases': 'Int64', 'Deaths': 'Int64'})
nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format="%Y-%m-%d")
except:
alert = "Error with NYT link"
notification.send_sns(alert)
print(alert)
try:
jh_df = pd.read_csv(jh_url,
usecols=['Date', 'Country/Region', 'Recovered'],
dtype={'Recovered': 'Int64'},
encoding='utf8').dropna()
jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)
jh_df['Date'] = pd.to_datetime(jh_df['Date'], format="%Y-%m-%d")
except:
alert = "Error with JH link"
notification.send_sns(alert)
print(alert)
try:
jh_us_filter = jh_df[jh_df.Country == 'US']
covid_df = nyt_df.set_index('Date').join(
jh_us_filter.set_index('Date')).dropna()
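        # .join() performs a left join on the Date index; the trailing
        # .dropna() then discards dates missing from the JH frame, which
        # makes this an inner join in effect.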
covid_df.reset_index(inplace=True)
covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')
return covid_df
except:
alert = "Error joining data"
notification.send_sns(alert)
print(alert)
|
flexible
|
{
"blob_id": "c60971b3b0649fce8c435813de4a738f4eacda27",
"index": 4377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef modify(nyt_url, jh_url):\n try:\n nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',\n 'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with NYT link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',\n 'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'\n ).dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with JH link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')\n ).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = 'Error joining data'\n notification.send_sns(alert)\n print(alert)\n",
"step-3": "import pandas as pd\nimport notification\n\n\ndef modify(nyt_url, jh_url):\n try:\n nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',\n 'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with NYT link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',\n 'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'\n ).dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with JH link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')\n ).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = 'Error joining data'\n notification.send_sns(alert)\n print(alert)\n",
"step-4": "import pandas as pd\nimport notification\n\n\ndef modify(nyt_url, jh_url):\n # read data from both sources into a dataframe\n # remove unwanted data, formats, and filters\n # join dataframes on index\n try:\n nyt_df = pd.read_csv(nyt_url,\n header=0,\n names=['Date', 'Cases', 'Deaths'],\n dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format=\"%Y-%m-%d\")\n except:\n alert = \"Error with NYT link\"\n notification.send_sns(alert)\n print(alert)\n\n try:\n jh_df = pd.read_csv(jh_url,\n usecols=['Date', 'Country/Region', 'Recovered'],\n dtype={'Recovered': 'Int64'},\n encoding='utf8').dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format=\"%Y-%m-%d\")\n except:\n alert = \"Error with JH link\"\n notification.send_sns(alert)\n print(alert)\n\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(\n jh_us_filter.set_index('Date')).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = \"Error joining data\"\n notification.send_sns(alert)\n print(alert)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Card_profile(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.username
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Card_profile(models.Model):
def path_and_rename(self, filename):
upload_to = 'uploads'
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid4().hex, ext)
return os.path.join(upload_to, filename)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.username
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Card_profile(models.Model):
def path_and_rename(self, filename):
upload_to = 'uploads'
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid4().hex, ext)
return os.path.join(upload_to, filename)
MALE = 'M'
FEMALE = 'F'
CHOICES_GENDER = (MALE, 'M'), (FEMALE, 'F')
username = models.CharField(max_length=255, unique=True)
repository_name = models.CharField(max_length=255, unique=True)
page_title = models.CharField(max_length=255)
description = models.CharField(max_length=255)
baseurl = models.CharField(max_length=255, default='/')
url = models.URLField(max_length=200, unique=True)
avatar = models.ImageField(upload_to=path_and_rename, height_field=None,
width_field=None, max_length=255, blank=True, null=True)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
gender = models.CharField(max_length=1, choices=CHOICES_GENDER)
title = models.CharField(max_length=255, blank=True, null=True)
company = models.CharField(max_length=255, blank=True, null=True)
email = models.EmailField(max_length=254, unique=True)
phone = models.CharField(max_length=255, blank=True, null=True)
website = models.URLField(max_length=200, blank=True, null=True)
facebook_url = models.URLField(max_length=200, blank=True, null=True)
linkedin_url = models.URLField(max_length=200, blank=True, null=True)
instagram_url = models.URLField(max_length=200, blank=True, null=True)
pinterest_url = models.URLField(max_length=200, blank=True, null=True)
twitter_url = models.URLField(max_length=200, blank=True, null=True)
youtube_url = models.URLField(max_length=200, blank=True, null=True)
snapchat_url = models.URLField(max_length=200, blank=True, null=True)
whatsapp_url = models.URLField(max_length=200, blank=True, null=True)
tiktok_url = models.URLField(max_length=200, blank=True, null=True)
telegram_url = models.URLField(max_length=200, blank=True, null=True)
skype_url = models.URLField(max_length=200, blank=True, null=True)
github_url = models.URLField(max_length=200, blank=True, null=True)
gitlab_url = models.URLField(max_length=200, blank=True, null=True)
markdown = models.CharField(max_length=255, default='kramdown')
def __str__(self):
return self.username
<|reserved_special_token_1|>
from django.db import models
import os
from uuid import uuid4
class Card_profile(models.Model):
def path_and_rename(self, filename):
upload_to = 'uploads'
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid4().hex, ext)
return os.path.join(upload_to, filename)
MALE = 'M'
FEMALE = 'F'
CHOICES_GENDER = (MALE, 'M'), (FEMALE, 'F')
username = models.CharField(max_length=255, unique=True)
repository_name = models.CharField(max_length=255, unique=True)
page_title = models.CharField(max_length=255)
description = models.CharField(max_length=255)
baseurl = models.CharField(max_length=255, default='/')
url = models.URLField(max_length=200, unique=True)
avatar = models.ImageField(upload_to=path_and_rename, height_field=None,
width_field=None, max_length=255, blank=True, null=True)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
gender = models.CharField(max_length=1, choices=CHOICES_GENDER)
title = models.CharField(max_length=255, blank=True, null=True)
company = models.CharField(max_length=255, blank=True, null=True)
email = models.EmailField(max_length=254, unique=True)
phone = models.CharField(max_length=255, blank=True, null=True)
website = models.URLField(max_length=200, blank=True, null=True)
facebook_url = models.URLField(max_length=200, blank=True, null=True)
linkedin_url = models.URLField(max_length=200, blank=True, null=True)
instagram_url = models.URLField(max_length=200, blank=True, null=True)
pinterest_url = models.URLField(max_length=200, blank=True, null=True)
twitter_url = models.URLField(max_length=200, blank=True, null=True)
youtube_url = models.URLField(max_length=200, blank=True, null=True)
snapchat_url = models.URLField(max_length=200, blank=True, null=True)
whatsapp_url = models.URLField(max_length=200, blank=True, null=True)
tiktok_url = models.URLField(max_length=200, blank=True, null=True)
telegram_url = models.URLField(max_length=200, blank=True, null=True)
skype_url = models.URLField(max_length=200, blank=True, null=True)
github_url = models.URLField(max_length=200, blank=True, null=True)
gitlab_url = models.URLField(max_length=200, blank=True, null=True)
markdown = models.CharField(max_length=255, default='kramdown')
def __str__(self):
return self.username
<|reserved_special_token_1|>
from django.db import models
import os
from uuid import uuid4
class Card_profile(models.Model):
def path_and_rename(self, filename):
upload_to = 'uploads'
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid4().hex, ext)
return os.path.join(upload_to, filename)
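    # Uploaded files are renamed to a random UUID hex, which avoids filename
    # collisions and hides the original client-side file name.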
MALE = 'M'
FEMALE = 'F'
CHOICES_GENDER = (
(MALE, 'M'),
(FEMALE, 'F'),
)
username = models.CharField(max_length=255, unique=True)
repository_name = models.CharField(max_length=255, unique=True)
page_title = models.CharField(max_length=255)
description = models.CharField(max_length=255)
baseurl = models.CharField(max_length=255, default="/")
url = models.URLField(max_length=200, unique=True)
avatar = models.ImageField(upload_to=path_and_rename, height_field=None, width_field=None,
max_length=255, blank=True, null=True)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
gender = models.CharField(max_length=1, choices=CHOICES_GENDER)
title = models.CharField(max_length=255, blank=True, null=True)
company = models.CharField(max_length=255, blank=True, null=True)
email = models.EmailField(max_length=254, unique=True)
phone = models.CharField(max_length=255, blank=True, null=True)
website = models.URLField(max_length=200, blank=True, null=True)
facebook_url = models.URLField(max_length=200, blank=True, null=True)
linkedin_url = models.URLField(max_length=200, blank=True, null=True)
instagram_url = models.URLField(max_length=200, blank=True, null=True)
pinterest_url = models.URLField(max_length=200, blank=True, null=True)
twitter_url = models.URLField(max_length=200, blank=True, null=True)
youtube_url = models.URLField(max_length=200, blank=True, null=True)
snapchat_url = models.URLField(max_length=200, blank=True, null=True)
whatsapp_url = models.URLField(max_length=200, blank=True, null=True)
tiktok_url = models.URLField(max_length=200, blank=True, null=True)
telegram_url = models.URLField(max_length=200, blank=True, null=True)
skype_url = models.URLField(max_length=200, blank=True, null=True)
github_url = models.URLField(max_length=200, blank=True, null=True)
gitlab_url = models.URLField(max_length=200, blank=True, null=True)
markdown = models.CharField(max_length=255, default="kramdown")
def __str__(self):
return self.username
|
flexible
|
{
"blob_id": "01153a695b4744465b706acb4c417217c5e3cefd",
"index": 3516,
"step-1": "<mask token>\n\n\nclass Card_profile(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.username\n",
"step-2": "<mask token>\n\n\nclass Card_profile(models.Model):\n\n def path_and_rename(self, filename):\n upload_to = 'uploads'\n ext = filename.split('.')[-1]\n filename = '{}.{}'.format(uuid4().hex, ext)\n return os.path.join(upload_to, filename)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.username\n",
"step-3": "<mask token>\n\n\nclass Card_profile(models.Model):\n\n def path_and_rename(self, filename):\n upload_to = 'uploads'\n ext = filename.split('.')[-1]\n filename = '{}.{}'.format(uuid4().hex, ext)\n return os.path.join(upload_to, filename)\n MALE = 'M'\n FEMALE = 'F'\n CHOICES_GENDER = (MALE, 'M'), (FEMALE, 'F')\n username = models.CharField(max_length=255, unique=True)\n repository_name = models.CharField(max_length=255, unique=True)\n page_title = models.CharField(max_length=255)\n description = models.CharField(max_length=255)\n baseurl = models.CharField(max_length=255, default='/')\n url = models.URLField(max_length=200, unique=True)\n avatar = models.ImageField(upload_to=path_and_rename, height_field=None,\n width_field=None, max_length=255, blank=True, null=True)\n first_name = models.CharField(max_length=255)\n last_name = models.CharField(max_length=255)\n gender = models.CharField(max_length=1, choices=CHOICES_GENDER)\n title = models.CharField(max_length=255, blank=True, null=True)\n company = models.CharField(max_length=255, blank=True, null=True)\n email = models.EmailField(max_length=254, unique=True)\n phone = models.CharField(max_length=255, blank=True, null=True)\n website = models.URLField(max_length=200, blank=True, null=True)\n facebook_url = models.URLField(max_length=200, blank=True, null=True)\n linkedin_url = models.URLField(max_length=200, blank=True, null=True)\n instagram_url = models.URLField(max_length=200, blank=True, null=True)\n pinterest_url = models.URLField(max_length=200, blank=True, null=True)\n twitter_url = models.URLField(max_length=200, blank=True, null=True)\n youtube_url = models.URLField(max_length=200, blank=True, null=True)\n snapchat_url = models.URLField(max_length=200, blank=True, null=True)\n whatsapp_url = models.URLField(max_length=200, blank=True, null=True)\n tiktok_url = models.URLField(max_length=200, blank=True, null=True)\n telegram_url = models.URLField(max_length=200, blank=True, null=True)\n skype_url = models.URLField(max_length=200, blank=True, null=True)\n github_url = models.URLField(max_length=200, blank=True, null=True)\n gitlab_url = models.URLField(max_length=200, blank=True, null=True)\n markdown = models.CharField(max_length=255, default='kramdown')\n\n def __str__(self):\n return self.username\n",
"step-4": "from django.db import models\nimport os\nfrom uuid import uuid4\n\n\nclass Card_profile(models.Model):\n\n def path_and_rename(self, filename):\n upload_to = 'uploads'\n ext = filename.split('.')[-1]\n filename = '{}.{}'.format(uuid4().hex, ext)\n return os.path.join(upload_to, filename)\n MALE = 'M'\n FEMALE = 'F'\n CHOICES_GENDER = (MALE, 'M'), (FEMALE, 'F')\n username = models.CharField(max_length=255, unique=True)\n repository_name = models.CharField(max_length=255, unique=True)\n page_title = models.CharField(max_length=255)\n description = models.CharField(max_length=255)\n baseurl = models.CharField(max_length=255, default='/')\n url = models.URLField(max_length=200, unique=True)\n avatar = models.ImageField(upload_to=path_and_rename, height_field=None,\n width_field=None, max_length=255, blank=True, null=True)\n first_name = models.CharField(max_length=255)\n last_name = models.CharField(max_length=255)\n gender = models.CharField(max_length=1, choices=CHOICES_GENDER)\n title = models.CharField(max_length=255, blank=True, null=True)\n company = models.CharField(max_length=255, blank=True, null=True)\n email = models.EmailField(max_length=254, unique=True)\n phone = models.CharField(max_length=255, blank=True, null=True)\n website = models.URLField(max_length=200, blank=True, null=True)\n facebook_url = models.URLField(max_length=200, blank=True, null=True)\n linkedin_url = models.URLField(max_length=200, blank=True, null=True)\n instagram_url = models.URLField(max_length=200, blank=True, null=True)\n pinterest_url = models.URLField(max_length=200, blank=True, null=True)\n twitter_url = models.URLField(max_length=200, blank=True, null=True)\n youtube_url = models.URLField(max_length=200, blank=True, null=True)\n snapchat_url = models.URLField(max_length=200, blank=True, null=True)\n whatsapp_url = models.URLField(max_length=200, blank=True, null=True)\n tiktok_url = models.URLField(max_length=200, blank=True, null=True)\n telegram_url = models.URLField(max_length=200, blank=True, null=True)\n skype_url = models.URLField(max_length=200, blank=True, null=True)\n github_url = models.URLField(max_length=200, blank=True, null=True)\n gitlab_url = models.URLField(max_length=200, blank=True, null=True)\n markdown = models.CharField(max_length=255, default='kramdown')\n\n def __str__(self):\n return self.username\n",
"step-5": "from django.db import models\nimport os\nfrom uuid import uuid4\n\n\nclass Card_profile(models.Model):\n\n def path_and_rename(self, filename):\n upload_to = 'uploads'\n ext = filename.split('.')[-1]\n filename = '{}.{}'.format(uuid4().hex, ext)\n\n return os.path.join(upload_to, filename)\n\n MALE = 'M'\n FEMALE = 'F'\n\n CHOICES_GENDER = (\n (MALE, 'M'),\n (FEMALE, 'F'),\n )\n\n username = models.CharField(max_length=255, unique=True)\n repository_name = models.CharField(max_length=255, unique=True)\n page_title = models.CharField(max_length=255)\n description = models.CharField(max_length=255)\n baseurl = models.CharField(max_length=255, default=\"/\")\n url = models.URLField(max_length=200, unique=True)\n avatar = models.ImageField(upload_to=path_and_rename, height_field=None, width_field=None,\n max_length=255, blank=True, null=True)\n first_name = models.CharField(max_length=255)\n last_name = models.CharField(max_length=255)\n gender = models.CharField(max_length=1, choices=CHOICES_GENDER)\n title = models.CharField(max_length=255, blank=True, null=True)\n company = models.CharField(max_length=255, blank=True, null=True)\n email = models.EmailField(max_length=254, unique=True)\n phone = models.CharField(max_length=255, blank=True, null=True)\n website = models.URLField(max_length=200, blank=True, null=True)\n facebook_url = models.URLField(max_length=200, blank=True, null=True)\n linkedin_url = models.URLField(max_length=200, blank=True, null=True)\n instagram_url = models.URLField(max_length=200, blank=True, null=True)\n pinterest_url = models.URLField(max_length=200, blank=True, null=True)\n twitter_url = models.URLField(max_length=200, blank=True, null=True)\n youtube_url = models.URLField(max_length=200, blank=True, null=True)\n snapchat_url = models.URLField(max_length=200, blank=True, null=True)\n whatsapp_url = models.URLField(max_length=200, blank=True, null=True)\n tiktok_url = models.URLField(max_length=200, blank=True, null=True)\n telegram_url = models.URLField(max_length=200, blank=True, null=True)\n skype_url = models.URLField(max_length=200, blank=True, null=True)\n github_url = models.URLField(max_length=200, blank=True, null=True)\n gitlab_url = models.URLField(max_length=200, blank=True, null=True)\n markdown = models.CharField(max_length=255, default=\"kramdown\")\n\n def __str__(self):\n return self.username\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/python3
# Distributed with a free-will license.
# Use it any way you want, profit or free, provided it fits in the licenses of its associated works.
# ADC121C_MQ131
# This code is designed to work with the ADC121C_I2CGAS_MQ131 I2C Mini Module available from ControlEverything.com.
# https://www.controleverything.com/content/Gas?sku=ADC121C_I2CGAS_MQ131#tabs-0-product_tabset-2
import smbus
import time, datetime
# Get I2C bus
bus = smbus.SMBus(1)
def getOzoneData():
data = bus.read_i2c_block_data(0x50, 0x00, 2)
# Convert the data to 12-bits
raw_adc = (data[0] & 0x0F) * 256 + data[1]
ppm = (1.99 * raw_adc) / 4096.0 + 0.01
return ppm
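# Reading note: raw_adc is 12-bit (0-4095), so the vendor formula above maps it
# linearly onto roughly 0.01-2.00 ppm of ozone; treat this as uncalibrated output.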
if __name__ == '__main__':
sampleTime = 1 # seconds
# ADC121C_MQ131 address, 0x50(80)
# Read data back from 0x00(00), 2 bytes
# raw_adc MSB, raw_adc LSB
while True:
data = bus.read_i2c_block_data(0x50, 0x00, 2)
# Convert the data to 12-bits
raw_adc = (data[0] & 0x0F) * 256 + data[1]
ppm = (1.99 * raw_adc) / 4096.0 + 0.01
		timestamp = str(datetime.datetime.utcnow()).split(' ')[1].split('.')[0]
		time.sleep(sampleTime)
		# Output data to screen
		print(timestamp, "UTC", "Ozone Concentration: %.2f ppm" % ppm)
|
normal
|
{
"blob_id": "678189ac5b0105c90178647843335f9d4402dc66",
"index": 1416,
"step-1": "<mask token>\n\n\ndef getOzoneData():\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n return ppm\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getOzoneData():\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n return ppm\n\n\nif __name__ == '__main__':\n sampleTime = 1\n while True:\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n timestmp = str(datetime.datetime.utcnow()).split(' ')[1].split('.')[0]\n time.sleep(sampleTime)\n print(timestmp, 'UTC', 'Ozone Concentration : %.2f ppm' % ppm)\n",
"step-3": "<mask token>\nbus = smbus.SMBus(1)\n\n\ndef getOzoneData():\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n return ppm\n\n\nif __name__ == '__main__':\n sampleTime = 1\n while True:\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n timestmp = str(datetime.datetime.utcnow()).split(' ')[1].split('.')[0]\n time.sleep(sampleTime)\n print(timestmp, 'UTC', 'Ozone Concentration : %.2f ppm' % ppm)\n",
"step-4": "import smbus\nimport time, datetime\nbus = smbus.SMBus(1)\n\n\ndef getOzoneData():\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n return ppm\n\n\nif __name__ == '__main__':\n sampleTime = 1\n while True:\n data = bus.read_i2c_block_data(80, 0, 2)\n raw_adc = (data[0] & 15) * 256 + data[1]\n ppm = 1.99 * raw_adc / 4096.0 + 0.01\n timestmp = str(datetime.datetime.utcnow()).split(' ')[1].split('.')[0]\n time.sleep(sampleTime)\n print(timestmp, 'UTC', 'Ozone Concentration : %.2f ppm' % ppm)\n",
"step-5": "#!/usr/bin/python3\n# Distributed with a free-will license.\n# Use it any way you want, profit or free, provided it fits in the licenses of its associated works.\n# ADC121C_MQ131\n# This code is designed to work with the ADC121C_I2CGAS_MQ131 I2C Mini Module available from ControlEverything.com.\n# https://www.controleverything.com/content/Gas?sku=ADC121C_I2CGAS_MQ131#tabs-0-product_tabset-2\n\nimport smbus\nimport time, datetime\n\n# Get I2C bus\nbus = smbus.SMBus(1)\n\ndef getOzoneData():\n\tdata = bus.read_i2c_block_data(0x50, 0x00, 2)\n\n\t# Convert the data to 12-bits\n\traw_adc = (data[0] & 0x0F) * 256 + data[1]\n\tppm = (1.99 * raw_adc) / 4096.0 + 0.01\n\treturn ppm\n\nif __name__ == '__main__':\n\n\tsampleTime = 1 # seconds\n\n\t# ADC121C_MQ131 address, 0x50(80)\n\t# Read data back from 0x00(00), 2 bytes\n\t# raw_adc MSB, raw_adc LSB\n\twhile True:\n\t\tdata = bus.read_i2c_block_data(0x50, 0x00, 2)\n\n\t\t# Convert the data to 12-bits\n\t\traw_adc = (data[0] & 0x0F) * 256 + data[1]\n\t\tppm = (1.99 * raw_adc) / 4096.0 + 0.01\n\n\t\ttimestmp = ((str(datetime.datetime.utcnow())).split(' ')[1]).split('.')[0]\n\t\ttime.sleep(sampleTime)\n\n\n\t\t# Output data to screen\n\t\tprint(timestmp, \"UTC\", \"Ozone Concentration : %.2f ppm\" %ppm)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#
# romaO
# www.fabiocrameri.ch/colourmaps
from matplotlib.colors import LinearSegmentedColormap
cm_data = [[0.45137, 0.22346, 0.34187],
[0.45418, 0.22244, 0.3361],
[0.45696, 0.22158, 0.33043],
[0.45975, 0.2209, 0.32483],
[0.46251, 0.22035, 0.31935],
[0.46527, 0.21994, 0.31394],
[0.46803, 0.21968, 0.30862],
[0.47078, 0.21958, 0.30337],
[0.47352, 0.21962, 0.29822],
[0.47628, 0.21982, 0.29316],
[0.47902, 0.22017, 0.28818],
[0.48178, 0.22067, 0.2833],
[0.48453, 0.2213, 0.2785],
[0.48731, 0.22208, 0.27379],
[0.49008, 0.22304, 0.26917],
[0.49286, 0.22411, 0.26461],
[0.49567, 0.22536, 0.26016],
[0.4985, 0.22677, 0.25579],
[0.50134, 0.22833, 0.25153],
[0.50419, 0.22999, 0.24733],
[0.50707, 0.23188, 0.24322],
[0.50997, 0.23387, 0.23923],
[0.5129, 0.23605, 0.23533],
[0.51584, 0.23835, 0.23151],
[0.51884, 0.24082, 0.22779],
[0.52184, 0.24345, 0.22414],
[0.52489, 0.24625, 0.22065],
[0.52797, 0.2492, 0.2172],
[0.53108, 0.25231, 0.21387],
[0.53423, 0.25556, 0.21064],
[0.53742, 0.25899, 0.20753],
[0.54063, 0.26255, 0.20452],
[0.54389, 0.26628, 0.20158],
[0.54718, 0.27017, 0.19879],
[0.55051, 0.27419, 0.19613],
[0.55389, 0.27839, 0.19356],
[0.55731, 0.28273, 0.19109],
[0.56075, 0.2872, 0.18877],
[0.56424, 0.29186, 0.18655],
[0.56777, 0.29665, 0.18446],
[0.57134, 0.30157, 0.18248],
[0.57495, 0.30666, 0.18065],
[0.5786, 0.31186, 0.17898],
[0.58228, 0.31724, 0.17743],
[0.58602, 0.32275, 0.17597],
[0.58977, 0.32838, 0.17473],
[0.59358, 0.33415, 0.17358],
[0.59742, 0.34005, 0.17261],
[0.60129, 0.34606, 0.17179],
[0.60519, 0.35223, 0.17114],
[0.60915, 0.35851, 0.17065],
[0.61311, 0.36491, 0.17034],
[0.61713, 0.37143, 0.1702],
[0.62118, 0.37808, 0.17023],
[0.62526, 0.38483, 0.17046],
[0.62937, 0.39171, 0.17087],
[0.63352, 0.39869, 0.17148],
[0.63769, 0.40579, 0.17229],
[0.6419, 0.41299, 0.17332],
[0.64613, 0.42029, 0.17458],
[0.65041, 0.42771, 0.176],
[0.6547, 0.43522, 0.17774],
[0.65904, 0.44283, 0.17962],
[0.66341, 0.45054, 0.18175],
[0.6678, 0.45834, 0.18416],
[0.67222, 0.46625, 0.1868],
[0.67667, 0.47425, 0.18968],
[0.68114, 0.48233, 0.19283],
[0.68566, 0.49051, 0.19624],
[0.69019, 0.49878, 0.19987],
[0.69474, 0.50712, 0.20384],
[0.69933, 0.51554, 0.20803],
[0.70394, 0.52406, 0.21251],
[0.70858, 0.53265, 0.21726],
[0.71322, 0.5413, 0.22229],
[0.7179, 0.55003, 0.22761],
[0.72257, 0.55881, 0.23318],
[0.72727, 0.56767, 0.23907],
[0.73197, 0.57658, 0.24521],
[0.73666, 0.58553, 0.25168],
[0.74136, 0.59451, 0.25837],
[0.74605, 0.60354, 0.26537],
[0.75073, 0.61259, 0.27263],
[0.75538, 0.62166, 0.28017],
[0.76001, 0.63075, 0.28796],
[0.7646, 0.63982, 0.29602],
[0.76914, 0.64889, 0.30433],
[0.77363, 0.65793, 0.31287],
[0.77806, 0.66694, 0.32165],
[0.78242, 0.6759, 0.33066],
[0.78669, 0.68481, 0.33988],
[0.79087, 0.69365, 0.34929],
[0.79494, 0.7024, 0.35888],
[0.7989, 0.71106, 0.36867],
[0.80273, 0.71961, 0.37859],
[0.80642, 0.72803, 0.38866],
[0.80996, 0.73631, 0.39885],
[0.81334, 0.74446, 0.40916],
[0.81655, 0.75244, 0.41957],
[0.81956, 0.76025, 0.43004],
[0.82239, 0.76787, 0.44057],
[0.82501, 0.7753, 0.45115],
[0.82742, 0.78252, 0.46174],
[0.8296, 0.78953, 0.47235],
[0.83155, 0.79631, 0.48293],
[0.83326, 0.80287, 0.49349],
[0.83472, 0.80919, 0.50402],
[0.83592, 0.81526, 0.51449],
[0.83686, 0.82109, 0.52487],
[0.83753, 0.82666, 0.53517],
[0.83793, 0.83198, 0.54537],
[0.83805, 0.83703, 0.55546],
[0.83788, 0.84182, 0.56542],
[0.83744, 0.84635, 0.57525],
[0.8367, 0.85061, 0.58493],
[0.83567, 0.85462, 0.59446],
[0.83435, 0.85835, 0.60382],
[0.83274, 0.86183, 0.61301],
[0.83084, 0.86504, 0.62202],
[0.82864, 0.868, 0.63085],
[0.82615, 0.87068, 0.63949],
[0.82337, 0.87312, 0.64792],
[0.8203, 0.87531, 0.65617],
[0.81695, 0.87724, 0.6642],
[0.81331, 0.87892, 0.67203],
[0.80939, 0.88036, 0.67964],
[0.80518, 0.88156, 0.68705],
[0.80071, 0.8825, 0.69424],
[0.79595, 0.88322, 0.70121],
[0.79094, 0.8837, 0.70797],
[0.78566, 0.88395, 0.7145],
[0.78012, 0.88396, 0.72082],
[0.77433, 0.88375, 0.72692],
[0.7683, 0.88331, 0.73279],
[0.76203, 0.88264, 0.73844],
[0.75553, 0.88177, 0.74387],
[0.74879, 0.88066, 0.74908],
[0.74184, 0.87934, 0.75407],
[0.73468, 0.87781, 0.75884],
[0.72731, 0.87607, 0.76339],
[0.71976, 0.87411, 0.76772],
[0.71201, 0.87195, 0.77184],
[0.70408, 0.86958, 0.77573],
[0.69599, 0.86701, 0.77941],
[0.68774, 0.86425, 0.78288],
[0.67934, 0.86127, 0.78614],
[0.67081, 0.85811, 0.78919],
[0.66215, 0.85476, 0.79202],
[0.65336, 0.8512, 0.79465],
[0.64448, 0.84747, 0.79707],
[0.6355, 0.84356, 0.7993],
[0.62645, 0.83947, 0.80131],
[0.61732, 0.83519, 0.80313],
[0.60814, 0.83075, 0.80476],
[0.59891, 0.82614, 0.80619],
[0.58965, 0.82137, 0.80743],
[0.58037, 0.81644, 0.80848],
[0.57108, 0.81135, 0.80935],
[0.56181, 0.80612, 0.81004],
[0.55255, 0.80074, 0.81055],
[0.54332, 0.79522, 0.81088],
[0.53412, 0.78958, 0.81105],
[0.525, 0.7838, 0.81105],
[0.51593, 0.77791, 0.81088],
[0.50695, 0.77189, 0.81055],
[0.49808, 0.76577, 0.81007],
[0.48928, 0.75954, 0.80944],
[0.48061, 0.75321, 0.80866],
[0.47207, 0.7468, 0.80773],
[0.46365, 0.74029, 0.80667],
[0.45539, 0.7337, 0.80546],
[0.44728, 0.72703, 0.80413],
[0.43934, 0.7203, 0.80266],
[0.43158, 0.7135, 0.80107],
[0.42398, 0.70664, 0.79936],
[0.41658, 0.69971, 0.79752],
[0.40938, 0.69275, 0.79557],
[0.40237, 0.68572, 0.79351],
[0.3956, 0.67865, 0.79133],
[0.38903, 0.67155, 0.78905],
[0.38267, 0.66441, 0.78666],
[0.37656, 0.65724, 0.78416],
[0.37066, 0.65003, 0.78155],
[0.36502, 0.64279, 0.77884],
[0.35961, 0.63552, 0.77604],
[0.35446, 0.62824, 0.77312],
[0.34955, 0.62094, 0.77011],
[0.3449, 0.6136, 0.767],
[0.34051, 0.60625, 0.76378],
[0.33637, 0.59889, 0.76047],
[0.33253, 0.59151, 0.75704],
[0.32893, 0.58412, 0.75351],
[0.32559, 0.57671, 0.74987],
[0.32256, 0.56928, 0.74613],
[0.31978, 0.56186, 0.74228],
[0.31727, 0.55441, 0.7383],
[0.31505, 0.54695, 0.73422],
[0.31311, 0.53948, 0.73002],
[0.31144, 0.53201, 0.72569],
[0.31007, 0.52453, 0.72124],
[0.30897, 0.51704, 0.71667],
[0.30811, 0.50955, 0.71197],
[0.30755, 0.50205, 0.70713],
[0.30726, 0.49456, 0.70216],
[0.30723, 0.48707, 0.69706],
[0.30746, 0.47958, 0.69182],
[0.30795, 0.4721, 0.68643],
[0.3087, 0.46463, 0.6809],
[0.30968, 0.45716, 0.67525],
[0.31088, 0.44973, 0.66944],
[0.31228, 0.44232, 0.6635],
[0.31393, 0.43493, 0.65741],
[0.31578, 0.42758, 0.65118],
[0.3178, 0.42025, 0.64482],
[0.32001, 0.41299, 0.63833],
[0.32238, 0.40577, 0.6317],
[0.32489, 0.39861, 0.62495],
[0.32755, 0.39152, 0.61809],
[0.33035, 0.38448, 0.61111],
[0.33327, 0.37755, 0.60402],
[0.33627, 0.37068, 0.59684],
[0.33939, 0.36392, 0.58955],
[0.34257, 0.35728, 0.58219],
[0.3458, 0.35073, 0.57476],
[0.34912, 0.34428, 0.56727],
[0.35247, 0.33797, 0.55971],
[0.35587, 0.33179, 0.55212],
[0.35927, 0.32574, 0.54448],
[0.36271, 0.31986, 0.53684],
[0.36617, 0.31411, 0.52917],
[0.36961, 0.30852, 0.52148],
[0.37306, 0.30306, 0.51382],
[0.37652, 0.2978, 0.50615],
[0.37994, 0.29269, 0.49854],
[0.38336, 0.28775, 0.49094],
[0.38674, 0.28301, 0.48337],
[0.39011, 0.27842, 0.47586],
[0.39346, 0.27401, 0.4684],
[0.39677, 0.26978, 0.461],
[0.40006, 0.26573, 0.45366],
[0.40333, 0.26185, 0.4464],
[0.40655, 0.25815, 0.43921],
[0.40974, 0.25466, 0.43212],
[0.4129, 0.25132, 0.42509],
[0.41602, 0.24817, 0.41813],
[0.41912, 0.24515, 0.41128],
[0.42218, 0.24235, 0.40451],
[0.42522, 0.23972, 0.39784],
[0.42823, 0.23728, 0.39126],
[0.43121, 0.23498, 0.38475],
[0.43415, 0.23282, 0.37836],
[0.43708, 0.23086, 0.37204],
[0.43998, 0.22907, 0.36583],
[0.44286, 0.22743, 0.3597],
[0.44571, 0.22596, 0.35366],
[0.44855, 0.2246, 0.34773]]
romaO_map = LinearSegmentedColormap.from_list('romaO', cm_data)
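# romaO is the cyclic member of Crameri's Scientific colour maps, so it suits
# periodic data such as phase or orientation; use it like any Matplotlib cmap,
# e.g. plt.pcolormesh(phase, cmap=romaO_map).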
# For use of "viscm view"
test_cm = romaO_map
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
try:
from viscm import viscm
viscm(romaO_map)
except ImportError:
print("viscm not found, falling back on simple display")
plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto',
cmap=romaO_map)
plt.show()
|
normal
|
{
"blob_id": "5082182af5a08970568dc1ab7a53ee5337260687",
"index": 45,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n import matplotlib.pyplot as plt\n import numpy as np\n try:\n from viscm import viscm\n viscm(romaO_map)\n except ImportError:\n print('viscm not found, falling back on simple display')\n plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', cmap=\n romaO_map)\n plt.show()\n",
"step-3": "<mask token>\ncm_data = [[0.45137, 0.22346, 0.34187], [0.45418, 0.22244, 0.3361], [\n 0.45696, 0.22158, 0.33043], [0.45975, 0.2209, 0.32483], [0.46251, \n 0.22035, 0.31935], [0.46527, 0.21994, 0.31394], [0.46803, 0.21968, \n 0.30862], [0.47078, 0.21958, 0.30337], [0.47352, 0.21962, 0.29822], [\n 0.47628, 0.21982, 0.29316], [0.47902, 0.22017, 0.28818], [0.48178, \n 0.22067, 0.2833], [0.48453, 0.2213, 0.2785], [0.48731, 0.22208, 0.27379\n ], [0.49008, 0.22304, 0.26917], [0.49286, 0.22411, 0.26461], [0.49567, \n 0.22536, 0.26016], [0.4985, 0.22677, 0.25579], [0.50134, 0.22833, \n 0.25153], [0.50419, 0.22999, 0.24733], [0.50707, 0.23188, 0.24322], [\n 0.50997, 0.23387, 0.23923], [0.5129, 0.23605, 0.23533], [0.51584, \n 0.23835, 0.23151], [0.51884, 0.24082, 0.22779], [0.52184, 0.24345, \n 0.22414], [0.52489, 0.24625, 0.22065], [0.52797, 0.2492, 0.2172], [\n 0.53108, 0.25231, 0.21387], [0.53423, 0.25556, 0.21064], [0.53742, \n 0.25899, 0.20753], [0.54063, 0.26255, 0.20452], [0.54389, 0.26628, \n 0.20158], [0.54718, 0.27017, 0.19879], [0.55051, 0.27419, 0.19613], [\n 0.55389, 0.27839, 0.19356], [0.55731, 0.28273, 0.19109], [0.56075, \n 0.2872, 0.18877], [0.56424, 0.29186, 0.18655], [0.56777, 0.29665, \n 0.18446], [0.57134, 0.30157, 0.18248], [0.57495, 0.30666, 0.18065], [\n 0.5786, 0.31186, 0.17898], [0.58228, 0.31724, 0.17743], [0.58602, \n 0.32275, 0.17597], [0.58977, 0.32838, 0.17473], [0.59358, 0.33415, \n 0.17358], [0.59742, 0.34005, 0.17261], [0.60129, 0.34606, 0.17179], [\n 0.60519, 0.35223, 0.17114], [0.60915, 0.35851, 0.17065], [0.61311, \n 0.36491, 0.17034], [0.61713, 0.37143, 0.1702], [0.62118, 0.37808, \n 0.17023], [0.62526, 0.38483, 0.17046], [0.62937, 0.39171, 0.17087], [\n 0.63352, 0.39869, 0.17148], [0.63769, 0.40579, 0.17229], [0.6419, \n 0.41299, 0.17332], [0.64613, 0.42029, 0.17458], [0.65041, 0.42771, \n 0.176], [0.6547, 0.43522, 0.17774], [0.65904, 0.44283, 0.17962], [\n 0.66341, 0.45054, 0.18175], [0.6678, 0.45834, 0.18416], [0.67222, \n 0.46625, 0.1868], [0.67667, 0.47425, 0.18968], [0.68114, 0.48233, \n 0.19283], [0.68566, 0.49051, 0.19624], [0.69019, 0.49878, 0.19987], [\n 0.69474, 0.50712, 0.20384], [0.69933, 0.51554, 0.20803], [0.70394, \n 0.52406, 0.21251], [0.70858, 0.53265, 0.21726], [0.71322, 0.5413, \n 0.22229], [0.7179, 0.55003, 0.22761], [0.72257, 0.55881, 0.23318], [\n 0.72727, 0.56767, 0.23907], [0.73197, 0.57658, 0.24521], [0.73666, \n 0.58553, 0.25168], [0.74136, 0.59451, 0.25837], [0.74605, 0.60354, \n 0.26537], [0.75073, 0.61259, 0.27263], [0.75538, 0.62166, 0.28017], [\n 0.76001, 0.63075, 0.28796], [0.7646, 0.63982, 0.29602], [0.76914, \n 0.64889, 0.30433], [0.77363, 0.65793, 0.31287], [0.77806, 0.66694, \n 0.32165], [0.78242, 0.6759, 0.33066], [0.78669, 0.68481, 0.33988], [\n 0.79087, 0.69365, 0.34929], [0.79494, 0.7024, 0.35888], [0.7989, \n 0.71106, 0.36867], [0.80273, 0.71961, 0.37859], [0.80642, 0.72803, \n 0.38866], [0.80996, 0.73631, 0.39885], [0.81334, 0.74446, 0.40916], [\n 0.81655, 0.75244, 0.41957], [0.81956, 0.76025, 0.43004], [0.82239, \n 0.76787, 0.44057], [0.82501, 0.7753, 0.45115], [0.82742, 0.78252, \n 0.46174], [0.8296, 0.78953, 0.47235], [0.83155, 0.79631, 0.48293], [\n 0.83326, 0.80287, 0.49349], [0.83472, 0.80919, 0.50402], [0.83592, \n 0.81526, 0.51449], [0.83686, 0.82109, 0.52487], [0.83753, 0.82666, \n 0.53517], [0.83793, 0.83198, 0.54537], [0.83805, 0.83703, 0.55546], [\n 0.83788, 0.84182, 0.56542], [0.83744, 0.84635, 0.57525], [0.8367, \n 0.85061, 0.58493], [0.83567, 0.85462, 0.59446], [0.83435, 0.85835, \n 0.60382], 
[0.83274, 0.86183, 0.61301], [0.83084, 0.86504, 0.62202], [\n 0.82864, 0.868, 0.63085], [0.82615, 0.87068, 0.63949], [0.82337, \n 0.87312, 0.64792], [0.8203, 0.87531, 0.65617], [0.81695, 0.87724, \n 0.6642], [0.81331, 0.87892, 0.67203], [0.80939, 0.88036, 0.67964], [\n 0.80518, 0.88156, 0.68705], [0.80071, 0.8825, 0.69424], [0.79595, \n 0.88322, 0.70121], [0.79094, 0.8837, 0.70797], [0.78566, 0.88395, \n 0.7145], [0.78012, 0.88396, 0.72082], [0.77433, 0.88375, 0.72692], [\n 0.7683, 0.88331, 0.73279], [0.76203, 0.88264, 0.73844], [0.75553, \n 0.88177, 0.74387], [0.74879, 0.88066, 0.74908], [0.74184, 0.87934, \n 0.75407], [0.73468, 0.87781, 0.75884], [0.72731, 0.87607, 0.76339], [\n 0.71976, 0.87411, 0.76772], [0.71201, 0.87195, 0.77184], [0.70408, \n 0.86958, 0.77573], [0.69599, 0.86701, 0.77941], [0.68774, 0.86425, \n 0.78288], [0.67934, 0.86127, 0.78614], [0.67081, 0.85811, 0.78919], [\n 0.66215, 0.85476, 0.79202], [0.65336, 0.8512, 0.79465], [0.64448, \n 0.84747, 0.79707], [0.6355, 0.84356, 0.7993], [0.62645, 0.83947, \n 0.80131], [0.61732, 0.83519, 0.80313], [0.60814, 0.83075, 0.80476], [\n 0.59891, 0.82614, 0.80619], [0.58965, 0.82137, 0.80743], [0.58037, \n 0.81644, 0.80848], [0.57108, 0.81135, 0.80935], [0.56181, 0.80612, \n 0.81004], [0.55255, 0.80074, 0.81055], [0.54332, 0.79522, 0.81088], [\n 0.53412, 0.78958, 0.81105], [0.525, 0.7838, 0.81105], [0.51593, 0.77791,\n 0.81088], [0.50695, 0.77189, 0.81055], [0.49808, 0.76577, 0.81007], [\n 0.48928, 0.75954, 0.80944], [0.48061, 0.75321, 0.80866], [0.47207, \n 0.7468, 0.80773], [0.46365, 0.74029, 0.80667], [0.45539, 0.7337, \n 0.80546], [0.44728, 0.72703, 0.80413], [0.43934, 0.7203, 0.80266], [\n 0.43158, 0.7135, 0.80107], [0.42398, 0.70664, 0.79936], [0.41658, \n 0.69971, 0.79752], [0.40938, 0.69275, 0.79557], [0.40237, 0.68572, \n 0.79351], [0.3956, 0.67865, 0.79133], [0.38903, 0.67155, 0.78905], [\n 0.38267, 0.66441, 0.78666], [0.37656, 0.65724, 0.78416], [0.37066, \n 0.65003, 0.78155], [0.36502, 0.64279, 0.77884], [0.35961, 0.63552, \n 0.77604], [0.35446, 0.62824, 0.77312], [0.34955, 0.62094, 0.77011], [\n 0.3449, 0.6136, 0.767], [0.34051, 0.60625, 0.76378], [0.33637, 0.59889,\n 0.76047], [0.33253, 0.59151, 0.75704], [0.32893, 0.58412, 0.75351], [\n 0.32559, 0.57671, 0.74987], [0.32256, 0.56928, 0.74613], [0.31978, \n 0.56186, 0.74228], [0.31727, 0.55441, 0.7383], [0.31505, 0.54695, \n 0.73422], [0.31311, 0.53948, 0.73002], [0.31144, 0.53201, 0.72569], [\n 0.31007, 0.52453, 0.72124], [0.30897, 0.51704, 0.71667], [0.30811, \n 0.50955, 0.71197], [0.30755, 0.50205, 0.70713], [0.30726, 0.49456, \n 0.70216], [0.30723, 0.48707, 0.69706], [0.30746, 0.47958, 0.69182], [\n 0.30795, 0.4721, 0.68643], [0.3087, 0.46463, 0.6809], [0.30968, 0.45716,\n 0.67525], [0.31088, 0.44973, 0.66944], [0.31228, 0.44232, 0.6635], [\n 0.31393, 0.43493, 0.65741], [0.31578, 0.42758, 0.65118], [0.3178, \n 0.42025, 0.64482], [0.32001, 0.41299, 0.63833], [0.32238, 0.40577, \n 0.6317], [0.32489, 0.39861, 0.62495], [0.32755, 0.39152, 0.61809], [\n 0.33035, 0.38448, 0.61111], [0.33327, 0.37755, 0.60402], [0.33627, \n 0.37068, 0.59684], [0.33939, 0.36392, 0.58955], [0.34257, 0.35728, \n 0.58219], [0.3458, 0.35073, 0.57476], [0.34912, 0.34428, 0.56727], [\n 0.35247, 0.33797, 0.55971], [0.35587, 0.33179, 0.55212], [0.35927, \n 0.32574, 0.54448], [0.36271, 0.31986, 0.53684], [0.36617, 0.31411, \n 0.52917], [0.36961, 0.30852, 0.52148], [0.37306, 0.30306, 0.51382], [\n 0.37652, 0.2978, 0.50615], [0.37994, 0.29269, 0.49854], [0.38336, \n 0.28775, 0.49094], [0.38674, 
0.28301, 0.48337], [0.39011, 0.27842, \n 0.47586], [0.39346, 0.27401, 0.4684], [0.39677, 0.26978, 0.461], [\n 0.40006, 0.26573, 0.45366], [0.40333, 0.26185, 0.4464], [0.40655, \n 0.25815, 0.43921], [0.40974, 0.25466, 0.43212], [0.4129, 0.25132, \n 0.42509], [0.41602, 0.24817, 0.41813], [0.41912, 0.24515, 0.41128], [\n 0.42218, 0.24235, 0.40451], [0.42522, 0.23972, 0.39784], [0.42823, \n 0.23728, 0.39126], [0.43121, 0.23498, 0.38475], [0.43415, 0.23282, \n 0.37836], [0.43708, 0.23086, 0.37204], [0.43998, 0.22907, 0.36583], [\n 0.44286, 0.22743, 0.3597], [0.44571, 0.22596, 0.35366], [0.44855, \n 0.2246, 0.34773]]\nromaO_map = LinearSegmentedColormap.from_list('romaO', cm_data)\ntest_cm = romaO_map\nif __name__ == '__main__':\n import matplotlib.pyplot as plt\n import numpy as np\n try:\n from viscm import viscm\n viscm(romaO_map)\n except ImportError:\n print('viscm not found, falling back on simple display')\n plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', cmap=\n romaO_map)\n plt.show()\n",
"step-4": "from matplotlib.colors import LinearSegmentedColormap\ncm_data = [[0.45137, 0.22346, 0.34187], [0.45418, 0.22244, 0.3361], [\n 0.45696, 0.22158, 0.33043], [0.45975, 0.2209, 0.32483], [0.46251, \n 0.22035, 0.31935], [0.46527, 0.21994, 0.31394], [0.46803, 0.21968, \n 0.30862], [0.47078, 0.21958, 0.30337], [0.47352, 0.21962, 0.29822], [\n 0.47628, 0.21982, 0.29316], [0.47902, 0.22017, 0.28818], [0.48178, \n 0.22067, 0.2833], [0.48453, 0.2213, 0.2785], [0.48731, 0.22208, 0.27379\n ], [0.49008, 0.22304, 0.26917], [0.49286, 0.22411, 0.26461], [0.49567, \n 0.22536, 0.26016], [0.4985, 0.22677, 0.25579], [0.50134, 0.22833, \n 0.25153], [0.50419, 0.22999, 0.24733], [0.50707, 0.23188, 0.24322], [\n 0.50997, 0.23387, 0.23923], [0.5129, 0.23605, 0.23533], [0.51584, \n 0.23835, 0.23151], [0.51884, 0.24082, 0.22779], [0.52184, 0.24345, \n 0.22414], [0.52489, 0.24625, 0.22065], [0.52797, 0.2492, 0.2172], [\n 0.53108, 0.25231, 0.21387], [0.53423, 0.25556, 0.21064], [0.53742, \n 0.25899, 0.20753], [0.54063, 0.26255, 0.20452], [0.54389, 0.26628, \n 0.20158], [0.54718, 0.27017, 0.19879], [0.55051, 0.27419, 0.19613], [\n 0.55389, 0.27839, 0.19356], [0.55731, 0.28273, 0.19109], [0.56075, \n 0.2872, 0.18877], [0.56424, 0.29186, 0.18655], [0.56777, 0.29665, \n 0.18446], [0.57134, 0.30157, 0.18248], [0.57495, 0.30666, 0.18065], [\n 0.5786, 0.31186, 0.17898], [0.58228, 0.31724, 0.17743], [0.58602, \n 0.32275, 0.17597], [0.58977, 0.32838, 0.17473], [0.59358, 0.33415, \n 0.17358], [0.59742, 0.34005, 0.17261], [0.60129, 0.34606, 0.17179], [\n 0.60519, 0.35223, 0.17114], [0.60915, 0.35851, 0.17065], [0.61311, \n 0.36491, 0.17034], [0.61713, 0.37143, 0.1702], [0.62118, 0.37808, \n 0.17023], [0.62526, 0.38483, 0.17046], [0.62937, 0.39171, 0.17087], [\n 0.63352, 0.39869, 0.17148], [0.63769, 0.40579, 0.17229], [0.6419, \n 0.41299, 0.17332], [0.64613, 0.42029, 0.17458], [0.65041, 0.42771, \n 0.176], [0.6547, 0.43522, 0.17774], [0.65904, 0.44283, 0.17962], [\n 0.66341, 0.45054, 0.18175], [0.6678, 0.45834, 0.18416], [0.67222, \n 0.46625, 0.1868], [0.67667, 0.47425, 0.18968], [0.68114, 0.48233, \n 0.19283], [0.68566, 0.49051, 0.19624], [0.69019, 0.49878, 0.19987], [\n 0.69474, 0.50712, 0.20384], [0.69933, 0.51554, 0.20803], [0.70394, \n 0.52406, 0.21251], [0.70858, 0.53265, 0.21726], [0.71322, 0.5413, \n 0.22229], [0.7179, 0.55003, 0.22761], [0.72257, 0.55881, 0.23318], [\n 0.72727, 0.56767, 0.23907], [0.73197, 0.57658, 0.24521], [0.73666, \n 0.58553, 0.25168], [0.74136, 0.59451, 0.25837], [0.74605, 0.60354, \n 0.26537], [0.75073, 0.61259, 0.27263], [0.75538, 0.62166, 0.28017], [\n 0.76001, 0.63075, 0.28796], [0.7646, 0.63982, 0.29602], [0.76914, \n 0.64889, 0.30433], [0.77363, 0.65793, 0.31287], [0.77806, 0.66694, \n 0.32165], [0.78242, 0.6759, 0.33066], [0.78669, 0.68481, 0.33988], [\n 0.79087, 0.69365, 0.34929], [0.79494, 0.7024, 0.35888], [0.7989, \n 0.71106, 0.36867], [0.80273, 0.71961, 0.37859], [0.80642, 0.72803, \n 0.38866], [0.80996, 0.73631, 0.39885], [0.81334, 0.74446, 0.40916], [\n 0.81655, 0.75244, 0.41957], [0.81956, 0.76025, 0.43004], [0.82239, \n 0.76787, 0.44057], [0.82501, 0.7753, 0.45115], [0.82742, 0.78252, \n 0.46174], [0.8296, 0.78953, 0.47235], [0.83155, 0.79631, 0.48293], [\n 0.83326, 0.80287, 0.49349], [0.83472, 0.80919, 0.50402], [0.83592, \n 0.81526, 0.51449], [0.83686, 0.82109, 0.52487], [0.83753, 0.82666, \n 0.53517], [0.83793, 0.83198, 0.54537], [0.83805, 0.83703, 0.55546], [\n 0.83788, 0.84182, 0.56542], [0.83744, 0.84635, 0.57525], [0.8367, \n 0.85061, 0.58493], [0.83567, 0.85462, 
0.59446], [0.83435, 0.85835, \n 0.60382], [0.83274, 0.86183, 0.61301], [0.83084, 0.86504, 0.62202], [\n 0.82864, 0.868, 0.63085], [0.82615, 0.87068, 0.63949], [0.82337, \n 0.87312, 0.64792], [0.8203, 0.87531, 0.65617], [0.81695, 0.87724, \n 0.6642], [0.81331, 0.87892, 0.67203], [0.80939, 0.88036, 0.67964], [\n 0.80518, 0.88156, 0.68705], [0.80071, 0.8825, 0.69424], [0.79595, \n 0.88322, 0.70121], [0.79094, 0.8837, 0.70797], [0.78566, 0.88395, \n 0.7145], [0.78012, 0.88396, 0.72082], [0.77433, 0.88375, 0.72692], [\n 0.7683, 0.88331, 0.73279], [0.76203, 0.88264, 0.73844], [0.75553, \n 0.88177, 0.74387], [0.74879, 0.88066, 0.74908], [0.74184, 0.87934, \n 0.75407], [0.73468, 0.87781, 0.75884], [0.72731, 0.87607, 0.76339], [\n 0.71976, 0.87411, 0.76772], [0.71201, 0.87195, 0.77184], [0.70408, \n 0.86958, 0.77573], [0.69599, 0.86701, 0.77941], [0.68774, 0.86425, \n 0.78288], [0.67934, 0.86127, 0.78614], [0.67081, 0.85811, 0.78919], [\n 0.66215, 0.85476, 0.79202], [0.65336, 0.8512, 0.79465], [0.64448, \n 0.84747, 0.79707], [0.6355, 0.84356, 0.7993], [0.62645, 0.83947, \n 0.80131], [0.61732, 0.83519, 0.80313], [0.60814, 0.83075, 0.80476], [\n 0.59891, 0.82614, 0.80619], [0.58965, 0.82137, 0.80743], [0.58037, \n 0.81644, 0.80848], [0.57108, 0.81135, 0.80935], [0.56181, 0.80612, \n 0.81004], [0.55255, 0.80074, 0.81055], [0.54332, 0.79522, 0.81088], [\n 0.53412, 0.78958, 0.81105], [0.525, 0.7838, 0.81105], [0.51593, 0.77791,\n 0.81088], [0.50695, 0.77189, 0.81055], [0.49808, 0.76577, 0.81007], [\n 0.48928, 0.75954, 0.80944], [0.48061, 0.75321, 0.80866], [0.47207, \n 0.7468, 0.80773], [0.46365, 0.74029, 0.80667], [0.45539, 0.7337, \n 0.80546], [0.44728, 0.72703, 0.80413], [0.43934, 0.7203, 0.80266], [\n 0.43158, 0.7135, 0.80107], [0.42398, 0.70664, 0.79936], [0.41658, \n 0.69971, 0.79752], [0.40938, 0.69275, 0.79557], [0.40237, 0.68572, \n 0.79351], [0.3956, 0.67865, 0.79133], [0.38903, 0.67155, 0.78905], [\n 0.38267, 0.66441, 0.78666], [0.37656, 0.65724, 0.78416], [0.37066, \n 0.65003, 0.78155], [0.36502, 0.64279, 0.77884], [0.35961, 0.63552, \n 0.77604], [0.35446, 0.62824, 0.77312], [0.34955, 0.62094, 0.77011], [\n 0.3449, 0.6136, 0.767], [0.34051, 0.60625, 0.76378], [0.33637, 0.59889,\n 0.76047], [0.33253, 0.59151, 0.75704], [0.32893, 0.58412, 0.75351], [\n 0.32559, 0.57671, 0.74987], [0.32256, 0.56928, 0.74613], [0.31978, \n 0.56186, 0.74228], [0.31727, 0.55441, 0.7383], [0.31505, 0.54695, \n 0.73422], [0.31311, 0.53948, 0.73002], [0.31144, 0.53201, 0.72569], [\n 0.31007, 0.52453, 0.72124], [0.30897, 0.51704, 0.71667], [0.30811, \n 0.50955, 0.71197], [0.30755, 0.50205, 0.70713], [0.30726, 0.49456, \n 0.70216], [0.30723, 0.48707, 0.69706], [0.30746, 0.47958, 0.69182], [\n 0.30795, 0.4721, 0.68643], [0.3087, 0.46463, 0.6809], [0.30968, 0.45716,\n 0.67525], [0.31088, 0.44973, 0.66944], [0.31228, 0.44232, 0.6635], [\n 0.31393, 0.43493, 0.65741], [0.31578, 0.42758, 0.65118], [0.3178, \n 0.42025, 0.64482], [0.32001, 0.41299, 0.63833], [0.32238, 0.40577, \n 0.6317], [0.32489, 0.39861, 0.62495], [0.32755, 0.39152, 0.61809], [\n 0.33035, 0.38448, 0.61111], [0.33327, 0.37755, 0.60402], [0.33627, \n 0.37068, 0.59684], [0.33939, 0.36392, 0.58955], [0.34257, 0.35728, \n 0.58219], [0.3458, 0.35073, 0.57476], [0.34912, 0.34428, 0.56727], [\n 0.35247, 0.33797, 0.55971], [0.35587, 0.33179, 0.55212], [0.35927, \n 0.32574, 0.54448], [0.36271, 0.31986, 0.53684], [0.36617, 0.31411, \n 0.52917], [0.36961, 0.30852, 0.52148], [0.37306, 0.30306, 0.51382], [\n 0.37652, 0.2978, 0.50615], [0.37994, 0.29269, 0.49854], 
[0.38336, \n 0.28775, 0.49094], [0.38674, 0.28301, 0.48337], [0.39011, 0.27842, \n 0.47586], [0.39346, 0.27401, 0.4684], [0.39677, 0.26978, 0.461], [\n 0.40006, 0.26573, 0.45366], [0.40333, 0.26185, 0.4464], [0.40655, \n 0.25815, 0.43921], [0.40974, 0.25466, 0.43212], [0.4129, 0.25132, \n 0.42509], [0.41602, 0.24817, 0.41813], [0.41912, 0.24515, 0.41128], [\n 0.42218, 0.24235, 0.40451], [0.42522, 0.23972, 0.39784], [0.42823, \n 0.23728, 0.39126], [0.43121, 0.23498, 0.38475], [0.43415, 0.23282, \n 0.37836], [0.43708, 0.23086, 0.37204], [0.43998, 0.22907, 0.36583], [\n 0.44286, 0.22743, 0.3597], [0.44571, 0.22596, 0.35366], [0.44855, \n 0.2246, 0.34773]]\nromaO_map = LinearSegmentedColormap.from_list('romaO', cm_data)\ntest_cm = romaO_map\nif __name__ == '__main__':\n import matplotlib.pyplot as plt\n import numpy as np\n try:\n from viscm import viscm\n viscm(romaO_map)\n except ImportError:\n print('viscm not found, falling back on simple display')\n plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', cmap=\n romaO_map)\n plt.show()\n",
"step-5": "# \n# romaO\n# www.fabiocrameri.ch/colourmaps\nfrom matplotlib.colors import LinearSegmentedColormap \n \ncm_data = [[0.45137, 0.22346, 0.34187], \n [0.45418, 0.22244, 0.3361], \n [0.45696, 0.22158, 0.33043], \n [0.45975, 0.2209, 0.32483], \n [0.46251, 0.22035, 0.31935], \n [0.46527, 0.21994, 0.31394], \n [0.46803, 0.21968, 0.30862], \n [0.47078, 0.21958, 0.30337], \n [0.47352, 0.21962, 0.29822], \n [0.47628, 0.21982, 0.29316], \n [0.47902, 0.22017, 0.28818], \n [0.48178, 0.22067, 0.2833], \n [0.48453, 0.2213, 0.2785], \n [0.48731, 0.22208, 0.27379], \n [0.49008, 0.22304, 0.26917], \n [0.49286, 0.22411, 0.26461], \n [0.49567, 0.22536, 0.26016], \n [0.4985, 0.22677, 0.25579], \n [0.50134, 0.22833, 0.25153], \n [0.50419, 0.22999, 0.24733], \n [0.50707, 0.23188, 0.24322], \n [0.50997, 0.23387, 0.23923], \n [0.5129, 0.23605, 0.23533], \n [0.51584, 0.23835, 0.23151], \n [0.51884, 0.24082, 0.22779], \n [0.52184, 0.24345, 0.22414], \n [0.52489, 0.24625, 0.22065], \n [0.52797, 0.2492, 0.2172], \n [0.53108, 0.25231, 0.21387], \n [0.53423, 0.25556, 0.21064], \n [0.53742, 0.25899, 0.20753], \n [0.54063, 0.26255, 0.20452], \n [0.54389, 0.26628, 0.20158], \n [0.54718, 0.27017, 0.19879], \n [0.55051, 0.27419, 0.19613], \n [0.55389, 0.27839, 0.19356], \n [0.55731, 0.28273, 0.19109], \n [0.56075, 0.2872, 0.18877], \n [0.56424, 0.29186, 0.18655], \n [0.56777, 0.29665, 0.18446], \n [0.57134, 0.30157, 0.18248], \n [0.57495, 0.30666, 0.18065], \n [0.5786, 0.31186, 0.17898], \n [0.58228, 0.31724, 0.17743], \n [0.58602, 0.32275, 0.17597], \n [0.58977, 0.32838, 0.17473], \n [0.59358, 0.33415, 0.17358], \n [0.59742, 0.34005, 0.17261], \n [0.60129, 0.34606, 0.17179], \n [0.60519, 0.35223, 0.17114], \n [0.60915, 0.35851, 0.17065], \n [0.61311, 0.36491, 0.17034], \n [0.61713, 0.37143, 0.1702], \n [0.62118, 0.37808, 0.17023], \n [0.62526, 0.38483, 0.17046], \n [0.62937, 0.39171, 0.17087], \n [0.63352, 0.39869, 0.17148], \n [0.63769, 0.40579, 0.17229], \n [0.6419, 0.41299, 0.17332], \n [0.64613, 0.42029, 0.17458], \n [0.65041, 0.42771, 0.176], \n [0.6547, 0.43522, 0.17774], \n [0.65904, 0.44283, 0.17962], \n [0.66341, 0.45054, 0.18175], \n [0.6678, 0.45834, 0.18416], \n [0.67222, 0.46625, 0.1868], \n [0.67667, 0.47425, 0.18968], \n [0.68114, 0.48233, 0.19283], \n [0.68566, 0.49051, 0.19624], \n [0.69019, 0.49878, 0.19987], \n [0.69474, 0.50712, 0.20384], \n [0.69933, 0.51554, 0.20803], \n [0.70394, 0.52406, 0.21251], \n [0.70858, 0.53265, 0.21726], \n [0.71322, 0.5413, 0.22229], \n [0.7179, 0.55003, 0.22761], \n [0.72257, 0.55881, 0.23318], \n [0.72727, 0.56767, 0.23907], \n [0.73197, 0.57658, 0.24521], \n [0.73666, 0.58553, 0.25168], \n [0.74136, 0.59451, 0.25837], \n [0.74605, 0.60354, 0.26537], \n [0.75073, 0.61259, 0.27263], \n [0.75538, 0.62166, 0.28017], \n [0.76001, 0.63075, 0.28796], \n [0.7646, 0.63982, 0.29602], \n [0.76914, 0.64889, 0.30433], \n [0.77363, 0.65793, 0.31287], \n [0.77806, 0.66694, 0.32165], \n [0.78242, 0.6759, 0.33066], \n [0.78669, 0.68481, 0.33988], \n [0.79087, 0.69365, 0.34929], \n [0.79494, 0.7024, 0.35888], \n [0.7989, 0.71106, 0.36867], \n [0.80273, 0.71961, 0.37859], \n [0.80642, 0.72803, 0.38866], \n [0.80996, 0.73631, 0.39885], \n [0.81334, 0.74446, 0.40916], \n [0.81655, 0.75244, 0.41957], \n [0.81956, 0.76025, 0.43004], \n [0.82239, 0.76787, 0.44057], \n [0.82501, 0.7753, 0.45115], \n [0.82742, 0.78252, 0.46174], \n [0.8296, 0.78953, 0.47235], \n [0.83155, 0.79631, 0.48293], \n [0.83326, 0.80287, 0.49349], \n [0.83472, 0.80919, 0.50402], \n [0.83592, 0.81526, 0.51449], 
\n [0.83686, 0.82109, 0.52487], \n [0.83753, 0.82666, 0.53517], \n [0.83793, 0.83198, 0.54537], \n [0.83805, 0.83703, 0.55546], \n [0.83788, 0.84182, 0.56542], \n [0.83744, 0.84635, 0.57525], \n [0.8367, 0.85061, 0.58493], \n [0.83567, 0.85462, 0.59446], \n [0.83435, 0.85835, 0.60382], \n [0.83274, 0.86183, 0.61301], \n [0.83084, 0.86504, 0.62202], \n [0.82864, 0.868, 0.63085], \n [0.82615, 0.87068, 0.63949], \n [0.82337, 0.87312, 0.64792], \n [0.8203, 0.87531, 0.65617], \n [0.81695, 0.87724, 0.6642], \n [0.81331, 0.87892, 0.67203], \n [0.80939, 0.88036, 0.67964], \n [0.80518, 0.88156, 0.68705], \n [0.80071, 0.8825, 0.69424], \n [0.79595, 0.88322, 0.70121], \n [0.79094, 0.8837, 0.70797], \n [0.78566, 0.88395, 0.7145], \n [0.78012, 0.88396, 0.72082], \n [0.77433, 0.88375, 0.72692], \n [0.7683, 0.88331, 0.73279], \n [0.76203, 0.88264, 0.73844], \n [0.75553, 0.88177, 0.74387], \n [0.74879, 0.88066, 0.74908], \n [0.74184, 0.87934, 0.75407], \n [0.73468, 0.87781, 0.75884], \n [0.72731, 0.87607, 0.76339], \n [0.71976, 0.87411, 0.76772], \n [0.71201, 0.87195, 0.77184], \n [0.70408, 0.86958, 0.77573], \n [0.69599, 0.86701, 0.77941], \n [0.68774, 0.86425, 0.78288], \n [0.67934, 0.86127, 0.78614], \n [0.67081, 0.85811, 0.78919], \n [0.66215, 0.85476, 0.79202], \n [0.65336, 0.8512, 0.79465], \n [0.64448, 0.84747, 0.79707], \n [0.6355, 0.84356, 0.7993], \n [0.62645, 0.83947, 0.80131], \n [0.61732, 0.83519, 0.80313], \n [0.60814, 0.83075, 0.80476], \n [0.59891, 0.82614, 0.80619], \n [0.58965, 0.82137, 0.80743], \n [0.58037, 0.81644, 0.80848], \n [0.57108, 0.81135, 0.80935], \n [0.56181, 0.80612, 0.81004], \n [0.55255, 0.80074, 0.81055], \n [0.54332, 0.79522, 0.81088], \n [0.53412, 0.78958, 0.81105], \n [0.525, 0.7838, 0.81105], \n [0.51593, 0.77791, 0.81088], \n [0.50695, 0.77189, 0.81055], \n [0.49808, 0.76577, 0.81007], \n [0.48928, 0.75954, 0.80944], \n [0.48061, 0.75321, 0.80866], \n [0.47207, 0.7468, 0.80773], \n [0.46365, 0.74029, 0.80667], \n [0.45539, 0.7337, 0.80546], \n [0.44728, 0.72703, 0.80413], \n [0.43934, 0.7203, 0.80266], \n [0.43158, 0.7135, 0.80107], \n [0.42398, 0.70664, 0.79936], \n [0.41658, 0.69971, 0.79752], \n [0.40938, 0.69275, 0.79557], \n [0.40237, 0.68572, 0.79351], \n [0.3956, 0.67865, 0.79133], \n [0.38903, 0.67155, 0.78905], \n [0.38267, 0.66441, 0.78666], \n [0.37656, 0.65724, 0.78416], \n [0.37066, 0.65003, 0.78155], \n [0.36502, 0.64279, 0.77884], \n [0.35961, 0.63552, 0.77604], \n [0.35446, 0.62824, 0.77312], \n [0.34955, 0.62094, 0.77011], \n [0.3449, 0.6136, 0.767], \n [0.34051, 0.60625, 0.76378], \n [0.33637, 0.59889, 0.76047], \n [0.33253, 0.59151, 0.75704], \n [0.32893, 0.58412, 0.75351], \n [0.32559, 0.57671, 0.74987], \n [0.32256, 0.56928, 0.74613], \n [0.31978, 0.56186, 0.74228], \n [0.31727, 0.55441, 0.7383], \n [0.31505, 0.54695, 0.73422], \n [0.31311, 0.53948, 0.73002], \n [0.31144, 0.53201, 0.72569], \n [0.31007, 0.52453, 0.72124], \n [0.30897, 0.51704, 0.71667], \n [0.30811, 0.50955, 0.71197], \n [0.30755, 0.50205, 0.70713], \n [0.30726, 0.49456, 0.70216], \n [0.30723, 0.48707, 0.69706], \n [0.30746, 0.47958, 0.69182], \n [0.30795, 0.4721, 0.68643], \n [0.3087, 0.46463, 0.6809], \n [0.30968, 0.45716, 0.67525], \n [0.31088, 0.44973, 0.66944], \n [0.31228, 0.44232, 0.6635], \n [0.31393, 0.43493, 0.65741], \n [0.31578, 0.42758, 0.65118], \n [0.3178, 0.42025, 0.64482], \n [0.32001, 0.41299, 0.63833], \n [0.32238, 0.40577, 0.6317], \n [0.32489, 0.39861, 0.62495], \n [0.32755, 0.39152, 0.61809], \n [0.33035, 0.38448, 0.61111], \n [0.33327, 0.37755, 0.60402], 
\n [0.33627, 0.37068, 0.59684], \n [0.33939, 0.36392, 0.58955], \n [0.34257, 0.35728, 0.58219], \n [0.3458, 0.35073, 0.57476], \n [0.34912, 0.34428, 0.56727], \n [0.35247, 0.33797, 0.55971], \n [0.35587, 0.33179, 0.55212], \n [0.35927, 0.32574, 0.54448], \n [0.36271, 0.31986, 0.53684], \n [0.36617, 0.31411, 0.52917], \n [0.36961, 0.30852, 0.52148], \n [0.37306, 0.30306, 0.51382], \n [0.37652, 0.2978, 0.50615], \n [0.37994, 0.29269, 0.49854], \n [0.38336, 0.28775, 0.49094], \n [0.38674, 0.28301, 0.48337], \n [0.39011, 0.27842, 0.47586], \n [0.39346, 0.27401, 0.4684], \n [0.39677, 0.26978, 0.461], \n [0.40006, 0.26573, 0.45366], \n [0.40333, 0.26185, 0.4464], \n [0.40655, 0.25815, 0.43921], \n [0.40974, 0.25466, 0.43212], \n [0.4129, 0.25132, 0.42509], \n [0.41602, 0.24817, 0.41813], \n [0.41912, 0.24515, 0.41128], \n [0.42218, 0.24235, 0.40451], \n [0.42522, 0.23972, 0.39784], \n [0.42823, 0.23728, 0.39126], \n [0.43121, 0.23498, 0.38475], \n [0.43415, 0.23282, 0.37836], \n [0.43708, 0.23086, 0.37204], \n [0.43998, 0.22907, 0.36583], \n [0.44286, 0.22743, 0.3597], \n [0.44571, 0.22596, 0.35366], \n [0.44855, 0.2246, 0.34773]] \n \nromaO_map = LinearSegmentedColormap.from_list('romaO', cm_data) \n# For use of \"viscm view\" \ntest_cm = romaO_map \n \nif __name__ == \"__main__\": \n import matplotlib.pyplot as plt \n import numpy as np \n \n try: \n from viscm import viscm \n viscm(romaO_map) \n except ImportError: \n print(\"viscm not found, falling back on simple display\") \n plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', \n cmap=romaO_map) \n plt.show() \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestUserRegister(BaseCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_signup_with_non_existing_field(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',
'email': 'foo@bar.de'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
<|reserved_special_token_0|>
def test_signup_without_password(self):
payload = json.dumps({'username': 'userjw'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'password': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestUserRegister(BaseCase):
<|reserved_special_token_0|>
def test_successful_register(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
def test_signup_with_non_existing_field(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',
'email': 'foo@bar.de'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
<|reserved_special_token_0|>
def test_signup_without_password(self):
payload = json.dumps({'username': 'userjw'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'password': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
def test_creating_already_existing_user(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': "A user '{}' already exists!".format(
json.loads(payload)['username'])}, response.json)
self.assertEqual(400, response.status_code)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestUserRegister(BaseCase):
<|reserved_special_token_0|>
def test_successful_register(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
def test_signup_with_non_existing_field(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',
'email': 'foo@bar.de'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
def test_signup_without_username(self):
payload = json.dumps({'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'username': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
def test_signup_without_password(self):
payload = json.dumps({'username': 'userjw'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'password': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
def test_creating_already_existing_user(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': "A user '{}' already exists!".format(
json.loads(payload)['username'])}, response.json)
self.assertEqual(400, response.status_code)
<|reserved_special_token_1|>
import json
from test.test_basic import BaseCase
class TestUserRegister(BaseCase):
"""
TestClass to test the register function.
"""
def test_successful_register(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
def test_signup_with_non_existing_field(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',
'email': 'foo@bar.de'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': 'User created successfully.'},
response.json)
self.assertEqual(201, response.status_code)
def test_signup_without_username(self):
payload = json.dumps({'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'username': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
def test_signup_without_password(self):
payload = json.dumps({'username': 'userjw'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'password': 'This field cannot be blank!'},
response.json['message'])
self.assertEqual(400, response.status_code)
def test_creating_already_existing_user(self):
payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
response = self.app.post('/register', headers={'Content-Type':
'application/json'}, data=payload)
self.assertEqual({'message': "A user '{}' already exists!".format(
json.loads(payload)['username'])}, response.json)
self.assertEqual(400, response.status_code)
<|reserved_special_token_1|>
import json
from test.test_basic import BaseCase
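# BaseCase (from test/test_basic.py, not shown here) is assumed to set up the
# Flask app and expose a test client as self.app, e.g. via
# self.app = app.test_client() in its setUp(); the requests below rely on that client.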
class TestUserRegister(BaseCase):
"""
TestClass to test the register function.
"""
def test_successful_register(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "User created successfully."}, response.json)
self.assertEqual(201, response.status_code)
def test_signup_with_non_existing_field(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r",
"email": "foo@bar.de"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "User created successfully."}, response.json)
self.assertEqual(201, response.status_code)
def test_signup_without_username(self):
# Given
payload = json.dumps({
"password": "1q2w3e4r"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"username": "This field cannot be blank!"}, response.json['message'])
self.assertEqual(400, response.status_code)
def test_signup_without_password(self):
# Given
payload = json.dumps({
"username": "userjw"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"password": "This field cannot be blank!"}, response.json['message'])
self.assertEqual(400, response.status_code)
def test_creating_already_existing_user(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r",
})
# Preconditions
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "A user '{}' already exists!".format(json.loads(payload)['username'])}, response.json)
self.assertEqual(400, response.status_code)
|
flexible
|
{
"blob_id": "486362463dc07bdafea85de39a4a6d58cb8c8f26",
"index": 9643,
"step-1": "<mask token>\n\n\nclass TestUserRegister(BaseCase):\n <mask token>\n <mask token>\n\n def test_signup_with_non_existing_field(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',\n 'email': 'foo@bar.de'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n <mask token>\n\n def test_signup_without_password(self):\n payload = json.dumps({'username': 'userjw'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'password': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestUserRegister(BaseCase):\n <mask token>\n\n def test_successful_register(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_with_non_existing_field(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',\n 'email': 'foo@bar.de'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n <mask token>\n\n def test_signup_without_password(self):\n payload = json.dumps({'username': 'userjw'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'password': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_creating_already_existing_user(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': \"A user '{}' already exists!\".format(\n json.loads(payload)['username'])}, response.json)\n self.assertEqual(400, response.status_code)\n",
"step-3": "<mask token>\n\n\nclass TestUserRegister(BaseCase):\n <mask token>\n\n def test_successful_register(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_with_non_existing_field(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',\n 'email': 'foo@bar.de'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_without_username(self):\n payload = json.dumps({'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'username': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_signup_without_password(self):\n payload = json.dumps({'username': 'userjw'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'password': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_creating_already_existing_user(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': \"A user '{}' already exists!\".format(\n json.loads(payload)['username'])}, response.json)\n self.assertEqual(400, response.status_code)\n",
"step-4": "import json\nfrom test.test_basic import BaseCase\n\n\nclass TestUserRegister(BaseCase):\n \"\"\"\n TestClass to test the register function.\n \"\"\"\n\n def test_successful_register(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_with_non_existing_field(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r',\n 'email': 'foo@bar.de'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': 'User created successfully.'},\n response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_without_username(self):\n payload = json.dumps({'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'username': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_signup_without_password(self):\n payload = json.dumps({'username': 'userjw'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'password': 'This field cannot be blank!'},\n response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_creating_already_existing_user(self):\n payload = json.dumps({'username': 'userjw', 'password': '1q2w3e4r'})\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n response = self.app.post('/register', headers={'Content-Type':\n 'application/json'}, data=payload)\n self.assertEqual({'message': \"A user '{}' already exists!\".format(\n json.loads(payload)['username'])}, response.json)\n self.assertEqual(400, response.status_code)\n",
"step-5": "import json\n\nfrom test.test_basic import BaseCase\n\n\nclass TestUserRegister(BaseCase):\n \"\"\"\n TestClass to test the register function.\n \"\"\"\n def test_successful_register(self):\n # Given\n payload = json.dumps({\n \"username\": \"userjw\",\n \"password\": \"1q2w3e4r\"\n })\n # When\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # Then\n self.assertEqual({\"message\": \"User created successfully.\"}, response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_with_non_existing_field(self):\n # Given\n payload = json.dumps({\n \"username\": \"userjw\",\n \"password\": \"1q2w3e4r\",\n \"email\": \"foo@bar.de\"\n })\n # When\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # Then\n self.assertEqual({\"message\": \"User created successfully.\"}, response.json)\n self.assertEqual(201, response.status_code)\n\n def test_signup_without_username(self):\n # Given\n payload = json.dumps({\n \"password\": \"1q2w3e4r\"\n })\n # When\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # Then\n self.assertEqual({\"username\": \"This field cannot be blank!\"}, response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_signup_without_password(self):\n # Given\n payload = json.dumps({\n \"username\": \"userjw\"\n })\n # When\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # Then\n self.assertEqual({\"password\": \"This field cannot be blank!\"}, response.json['message'])\n self.assertEqual(400, response.status_code)\n\n def test_creating_already_existing_user(self):\n # Given\n payload = json.dumps({\n \"username\": \"userjw\",\n \"password\": \"1q2w3e4r\",\n })\n # Preconditions\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # When\n response = self.app.post('/register', headers={\"Content-Type\": \"application/json\"}, data=payload)\n\n # Then\n self.assertEqual({\"message\": \"A user '{}' already exists!\".format(json.loads(payload)['username'])}, response.json)\n self.assertEqual(400, response.status_code)\n",
"step-ids": [
3,
5,
6,
8,
9
]
}
|
[
3,
5,
6,
8,
9
] |
from .login import LoginTask
from .tag_search import TagSearchTask
from .timeline import TimelineTask
from .get_follower import GetFollowerTask
from .followback import FollowBackTask
from .unfollow import UnFollowTask
|
normal
|
{
"blob_id": "e899b093152ee0923f1e5ad3b5719bbf9eb4339c",
"index": 7466,
"step-1": "<mask token>\n",
"step-2": "from .login import LoginTask\nfrom .tag_search import TagSearchTask\nfrom .timeline import TimelineTask\nfrom .get_follower import GetFollowerTask\nfrom .followback import FollowBackTask\nfrom .unfollow import UnFollowTask\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
class Banana(object):
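    # Stand-in element type for nbt.TAG_List: 10 is the NBT type id of
    # TAG_Compound, so TAG_List(Banana) produces a list of compound tags.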
id = 10
<|reserved_special_token_0|>
def template_village_file(tick):
"""
    Creates a template villages.dat file that I can modify later on
"""
cat = nbt.NBTFile()
cat2 = cat['data'] = nbt.TAG_Compound()
cat2['Villages'] = nbt.TAG_List(Banana)
cat2['Tick'] = nbt.TAG_Int(tick)
return cat
<|reserved_special_token_0|>
class Village(object):
"""
Some villages.dat related functions
village is a tag_compound
:type village: nbt.TAG_Compound
"""
def __init__(self, village):
self._village = village
def add_door(self, door):
"""
        Adds a door and updates the village's running position aggregate (ACX/ACY/ACZ) and center (CX/CY/CZ)
"""
doors_list = self._village['Doors']
doors_list.append(door)
x = door['X'].value
y = door['Y'].value
z = door['Z'].value
self._update_doormath(x, y, z)
def del_doorz(self, new_doors):
kapoow = self.get_vil()['Doors']
kapooww = list(kapoow)
for door in kapooww:
x, y, z = door['X'].value, door['Y'].value, door['Z'].value
if (x, y, z) in new_doors:
kapoow.remove(door)
self._update_doormath(-x, -y, -z)
def _update_doormath(self, x, y, z):
doors_list = self._village['Doors']
self._village['ACX'].value += x
self._village['ACY'].value += y
self._village['ACZ'].value += z
if len(doors_list) == 0:
self._village['CX'] = nbt.TAG_Int(0)
self._village['CY'] = nbt.TAG_Int(0)
self._village['CZ'] = nbt.TAG_Int(0)
else:
self._village['CX'].value = self._village['ACX'].value / len(
doors_list)
self._village['CY'].value = self._village['ACY'].value / len(
doors_list)
self._village['CZ'].value = self._village['ACZ'].value / len(
doors_list)
@property
def is_empty(self):
return len(self._village['Doors']) == 0
def get_vil(self):
return self._village
@staticmethod
def create_village(tick):
"""
Creates a template village
"""
village_template = nbt.TAG_Compound()
village_template['Doors'] = nbt.TAG_List(Banana)
village_template['Players'] = nbt.TAG_List(Banana)
village_template['ACX'] = nbt.TAG_Int(0)
village_template['ACY'] = nbt.TAG_Int(0)
village_template['ACZ'] = nbt.TAG_Int(0)
village_template['CX'] = nbt.TAG_Int(0)
village_template['CY'] = nbt.TAG_Int(0)
village_template['CZ'] = nbt.TAG_Int(0)
village_template['Golems'] = nbt.TAG_Int(0)
village_template['MTick'] = nbt.TAG_Int(0)
village_template['PopSize'] = nbt.TAG_Int(1)
village_template['Radius'] = nbt.TAG_Int(32)
village_template['Stable'] = nbt.TAG_Int(tick)
village_template['Tick'] = nbt.TAG_Int(tick)
return Village(village_template)
def create_door(tick, x, y, z):
"""
Generates a door using given coords and tick.
"""
door = nbt.TAG_Compound()
door['TS'] = nbt.TAG_Int(tick)
door['X'] = nbt.TAG_Int(x)
door['Y'] = nbt.TAG_Int(y)
door['Z'] = nbt.TAG_Int(z)
return door
def del_door(vil_list, doors_set):
"""
    :param vil_list: TAG_List of village compounds to prune.
    :param doors_set: set of (x, y, z) door coordinates to remove.
    :return: None; vil_list is modified in place.
"""
vil85 = list(vil_list)
for vil_TAGCompound in vil85:
villl = Village(vil_TAGCompound)
villl.del_doorz(doors_set)
if villl.is_empty:
vil_list.remove(vil_TAGCompound)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Banana(object):
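    # Stand-in element type for nbt.TAG_List: 10 is the NBT type id of
    # TAG_Compound, so TAG_List(Banana) produces a list of compound tags.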
id = 10
<|reserved_special_token_0|>
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,
emptySpaces, axis):
"""
x1 and z1 are the lowest value on the X / Z axis
'halfDoorsInVillage' is 1/2 of the total doors in a village
:param axis: The axis along which a single village is created;
    Making an MCEdit filter to do the same thing could be cool,
like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.
"""
k = []
assert axis in ('X', 'Z')
if axis == 'Z':
for x in xrange(x1, x1 + villages):
j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
elif axis == 'X':
for z in xrange(z1, z1 + villages):
j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
return k
<|reserved_special_token_0|>
def template_village_file(tick):
"""
    Creates a template villages.dat file that I can modify later on
"""
cat = nbt.NBTFile()
cat2 = cat['data'] = nbt.TAG_Compound()
cat2['Villages'] = nbt.TAG_List(Banana)
cat2['Tick'] = nbt.TAG_Int(tick)
return cat
<|reserved_special_token_0|>
class Village(object):
"""
Some villages.dat related functions
village is a tag_compound
:type village: nbt.TAG_Compound
"""
def __init__(self, village):
self._village = village
def add_door(self, door):
"""
        Adds a door and updates the village's running position aggregate (ACX/ACY/ACZ) and center (CX/CY/CZ)
"""
doors_list = self._village['Doors']
doors_list.append(door)
x = door['X'].value
y = door['Y'].value
z = door['Z'].value
self._update_doormath(x, y, z)
def del_doorz(self, new_doors):
kapoow = self.get_vil()['Doors']
kapooww = list(kapoow)
for door in kapooww:
x, y, z = door['X'].value, door['Y'].value, door['Z'].value
if (x, y, z) in new_doors:
kapoow.remove(door)
self._update_doormath(-x, -y, -z)
def _update_doormath(self, x, y, z):
doors_list = self._village['Doors']
self._village['ACX'].value += x
self._village['ACY'].value += y
self._village['ACZ'].value += z
if len(doors_list) == 0:
self._village['CX'] = nbt.TAG_Int(0)
self._village['CY'] = nbt.TAG_Int(0)
self._village['CZ'] = nbt.TAG_Int(0)
else:
self._village['CX'].value = self._village['ACX'].value / len(
doors_list)
self._village['CY'].value = self._village['ACY'].value / len(
doors_list)
self._village['CZ'].value = self._village['ACZ'].value / len(
doors_list)
@property
def is_empty(self):
return len(self._village['Doors']) == 0
def get_vil(self):
return self._village
@staticmethod
def create_village(tick):
"""
Creates a template village
"""
village_template = nbt.TAG_Compound()
village_template['Doors'] = nbt.TAG_List(Banana)
village_template['Players'] = nbt.TAG_List(Banana)
village_template['ACX'] = nbt.TAG_Int(0)
village_template['ACY'] = nbt.TAG_Int(0)
village_template['ACZ'] = nbt.TAG_Int(0)
village_template['CX'] = nbt.TAG_Int(0)
village_template['CY'] = nbt.TAG_Int(0)
village_template['CZ'] = nbt.TAG_Int(0)
village_template['Golems'] = nbt.TAG_Int(0)
village_template['MTick'] = nbt.TAG_Int(0)
village_template['PopSize'] = nbt.TAG_Int(1)
village_template['Radius'] = nbt.TAG_Int(32)
village_template['Stable'] = nbt.TAG_Int(tick)
village_template['Tick'] = nbt.TAG_Int(tick)
return Village(village_template)
def create_door(tick, x, y, z):
"""
Generates a door using given coords and tick.
"""
door = nbt.TAG_Compound()
door['TS'] = nbt.TAG_Int(tick)
door['X'] = nbt.TAG_Int(x)
door['Y'] = nbt.TAG_Int(y)
door['Z'] = nbt.TAG_Int(z)
return door
def del_door(vil_list, doors_set):
"""
    :param vil_list: TAG_List of village compounds to prune.
    :param doors_set: set of (x, y, z) door coordinates to remove.
    :return: None; vil_list is modified in place.
"""
vil85 = list(vil_list)
for vil_TAGCompound in vil85:
villl = Village(vil_TAGCompound)
villl.del_doorz(doors_set)
if villl.is_empty:
vil_list.remove(vil_TAGCompound)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Banana(object):
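    # Stand-in element type for nbt.TAG_List: 10 is the NBT type id of
    # TAG_Compound, so TAG_List(Banana) produces a list of compound tags.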
id = 10
def srange(x1, xDoors, spaces):
"""
    Yields two runs of xDoors consecutive values starting at x1, separated by a gap of `spaces` (the door positions for the two halves of a village row).
"""
for a in xrange(x1, x1 + xDoors):
yield a
for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):
yield a
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,
emptySpaces, axis):
"""
x1 and z1 are the lowest value on the X / Z axis
'halfDoorsInVillage' is 1/2 of the total doors in a village
:param axis: The axis along which a single village is created;
    Making an MCEdit filter to do the same thing could be cool,
like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.
"""
k = []
assert axis in ('X', 'Z')
if axis == 'Z':
for x in xrange(x1, x1 + villages):
j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
elif axis == 'X':
for z in xrange(z1, z1 + villages):
j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
return k
<|reserved_special_token_0|>
def template_village_file(tick):
"""
    Creates a template villages.dat file that I can modify later on
"""
cat = nbt.NBTFile()
cat2 = cat['data'] = nbt.TAG_Compound()
cat2['Villages'] = nbt.TAG_List(Banana)
cat2['Tick'] = nbt.TAG_Int(tick)
return cat
def existing_village_file(kovetz):
"""
    Load an existing villages.dat into an editable NBTFile, returning it together with its tick value
"""
try:
cat77 = nbt.NBTFile(kovetz)
except IOError:
raise Exception(
'Hmm. Unfortunately, the file requested does not exist :(')
tick4 = cat77['data']['Tick'].value
return cat77, tick4
class Village(object):
"""
Some villages.dat related functions
village is a tag_compound
:type village: nbt.TAG_Compound
"""
def __init__(self, village):
self._village = village
def add_door(self, door):
"""
        Adds a door and updates the village's running position aggregate (ACX/ACY/ACZ) and center (CX/CY/CZ)
"""
doors_list = self._village['Doors']
doors_list.append(door)
x = door['X'].value
y = door['Y'].value
z = door['Z'].value
self._update_doormath(x, y, z)
def del_doorz(self, new_doors):
kapoow = self.get_vil()['Doors']
kapooww = list(kapoow)
for door in kapooww:
x, y, z = door['X'].value, door['Y'].value, door['Z'].value
if (x, y, z) in new_doors:
kapoow.remove(door)
self._update_doormath(-x, -y, -z)
def _update_doormath(self, x, y, z):
doors_list = self._village['Doors']
self._village['ACX'].value += x
self._village['ACY'].value += y
self._village['ACZ'].value += z
if len(doors_list) == 0:
self._village['CX'] = nbt.TAG_Int(0)
self._village['CY'] = nbt.TAG_Int(0)
self._village['CZ'] = nbt.TAG_Int(0)
else:
self._village['CX'].value = self._village['ACX'].value / len(
doors_list)
self._village['CY'].value = self._village['ACY'].value / len(
doors_list)
self._village['CZ'].value = self._village['ACZ'].value / len(
doors_list)
@property
def is_empty(self):
return len(self._village['Doors']) == 0
def get_vil(self):
return self._village
@staticmethod
def create_village(tick):
"""
Creates a template village
"""
village_template = nbt.TAG_Compound()
village_template['Doors'] = nbt.TAG_List(Banana)
village_template['Players'] = nbt.TAG_List(Banana)
village_template['ACX'] = nbt.TAG_Int(0)
village_template['ACY'] = nbt.TAG_Int(0)
village_template['ACZ'] = nbt.TAG_Int(0)
village_template['CX'] = nbt.TAG_Int(0)
village_template['CY'] = nbt.TAG_Int(0)
village_template['CZ'] = nbt.TAG_Int(0)
village_template['Golems'] = nbt.TAG_Int(0)
village_template['MTick'] = nbt.TAG_Int(0)
village_template['PopSize'] = nbt.TAG_Int(1)
village_template['Radius'] = nbt.TAG_Int(32)
village_template['Stable'] = nbt.TAG_Int(tick)
village_template['Tick'] = nbt.TAG_Int(tick)
return Village(village_template)
def create_door(tick, x, y, z):
"""
Generates a door using given coords and tick.
"""
door = nbt.TAG_Compound()
door['TS'] = nbt.TAG_Int(tick)
door['X'] = nbt.TAG_Int(x)
door['Y'] = nbt.TAG_Int(y)
door['Z'] = nbt.TAG_Int(z)
return door
def del_door(vil_list, doors_set):
"""
    :param vil_list: TAG_List of village compounds to prune.
    :param doors_set: set of (x, y, z) door coordinates to remove.
    :return: None; vil_list is modified in place.
"""
vil85 = list(vil_list)
for vil_TAGCompound in vil85:
villl = Village(vil_TAGCompound)
villl.del_doorz(doors_set)
if villl.is_empty:
vil_list.remove(vil_TAGCompound)
def village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,
axis, tick, cat):
"""
    Generates villages with rows of doors.
    'x1' is the lowest block on the X axis
    'z1' is the lowest block on the Z axis
    'y_list' holds the Y levels of the lower blocks of the doors
    :param axis: The axis along which a single village is created;
    'axis' is the axis the villages lie on: walking down that axis, you would pass through a door of every village
    'villages' is the number of villages to generate on this layer
    'halfDoorsInVillage' is half of the doors in a village
    'emptySpaces' is the space between the two blocks of doors,
    i.e. the space between the first half of the doors and the second
    'tick' is the time in ticks; in a new file it can be basically anything but 0, and in an old file it has to be
    the same as the other villages and the main tick of the file.
    'cat' is the NBT file to modify
"""
cat2 = cat['data']
doors_coords_lists = []
doors_set = set()
for y in y_list:
doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,
halfDoorsInVillage, emptySpaces, axis)
for vill_coords_list in doors_coords_lists:
for single_door_coord in vill_coords_list:
doors_set.add(tuple(single_door_coord))
del_door(cat2['Villages'], doors_set)
for coordinates_list in doors_coords_lists:
vil = Village.create_village(tick)
for x, y, z in coordinates_list:
vil.add_door(create_door(tick, x, y, z))
cat2['Villages'].append(vil.get_vil())
def main():
cat1, tick = existing_village_file('./villagesCopy2.dat')
village_gen(-107, number_of_villages_to_generate, [132], 169,
number_of_doors_to_generate / 2, 19, 'X', tick, cat1)
cat1.write_file('./villagesCopy2.dat')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Banana(object):
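    # Stand-in element type for nbt.TAG_List: 10 is the NBT type id of
    # TAG_Compound, so TAG_List(Banana) produces a list of compound tags.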
id = 10
def srange(x1, xDoors, spaces):
"""
    Yields two runs of xDoors consecutive values starting at x1, separated by a gap of `spaces` (the door positions for the two halves of a village row).
"""
for a in xrange(x1, x1 + xDoors):
yield a
for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):
yield a
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,
emptySpaces, axis):
"""
x1 and z1 are the lowest value on the X / Z axis
'halfDoorsInVillage' is 1/2 of the total doors in a village
:param axis: The axis along which a single village is created;
    Making an MCEdit filter to do the same thing could be cool,
like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.
"""
k = []
assert axis in ('X', 'Z')
if axis == 'Z':
for x in xrange(x1, x1 + villages):
j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
elif axis == 'X':
for z in xrange(z1, z1 + villages):
j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)
]
k.append(j)
return k
number_of_villages_to_generate = 32
number_of_doors_to_generate = 22
tick = 77
def template_village_file(tick):
"""
    Creates a template villages.dat file that I can modify later on
"""
cat = nbt.NBTFile()
cat2 = cat['data'] = nbt.TAG_Compound()
cat2['Villages'] = nbt.TAG_List(Banana)
cat2['Tick'] = nbt.TAG_Int(tick)
return cat
def existing_village_file(kovetz):
"""
    Load an existing villages.dat into an editable NBTFile, returning it together with its tick value
"""
try:
cat77 = nbt.NBTFile(kovetz)
except IOError:
raise Exception(
'Hmm. Unfortunately, the file requested does not exist :(')
tick4 = cat77['data']['Tick'].value
return cat77, tick4
class Village(object):
"""
Some villages.dat related functions
village is a tag_compound
:type village: nbt.TAG_Compound
"""
def __init__(self, village):
self._village = village
def add_door(self, door):
"""
        Adds a door and updates the village's running position aggregate (ACX/ACY/ACZ) and center (CX/CY/CZ)
"""
doors_list = self._village['Doors']
doors_list.append(door)
x = door['X'].value
y = door['Y'].value
z = door['Z'].value
self._update_doormath(x, y, z)
def del_doorz(self, new_doors):
kapoow = self.get_vil()['Doors']
kapooww = list(kapoow)
for door in kapooww:
x, y, z = door['X'].value, door['Y'].value, door['Z'].value
if (x, y, z) in new_doors:
kapoow.remove(door)
self._update_doormath(-x, -y, -z)
def _update_doormath(self, x, y, z):
doors_list = self._village['Doors']
self._village['ACX'].value += x
self._village['ACY'].value += y
self._village['ACZ'].value += z
if len(doors_list) == 0:
self._village['CX'] = nbt.TAG_Int(0)
self._village['CY'] = nbt.TAG_Int(0)
self._village['CZ'] = nbt.TAG_Int(0)
else:
self._village['CX'].value = self._village['ACX'].value / len(
doors_list)
self._village['CY'].value = self._village['ACY'].value / len(
doors_list)
self._village['CZ'].value = self._village['ACZ'].value / len(
doors_list)
@property
def is_empty(self):
return len(self._village['Doors']) == 0
def get_vil(self):
return self._village
@staticmethod
def create_village(tick):
"""
Creates a template village
"""
village_template = nbt.TAG_Compound()
village_template['Doors'] = nbt.TAG_List(Banana)
village_template['Players'] = nbt.TAG_List(Banana)
village_template['ACX'] = nbt.TAG_Int(0)
village_template['ACY'] = nbt.TAG_Int(0)
village_template['ACZ'] = nbt.TAG_Int(0)
village_template['CX'] = nbt.TAG_Int(0)
village_template['CY'] = nbt.TAG_Int(0)
village_template['CZ'] = nbt.TAG_Int(0)
village_template['Golems'] = nbt.TAG_Int(0)
village_template['MTick'] = nbt.TAG_Int(0)
village_template['PopSize'] = nbt.TAG_Int(1)
village_template['Radius'] = nbt.TAG_Int(32)
village_template['Stable'] = nbt.TAG_Int(tick)
village_template['Tick'] = nbt.TAG_Int(tick)
return Village(village_template)
def create_door(tick, x, y, z):
"""
Generates a door using given coords and tick.
"""
door = nbt.TAG_Compound()
door['TS'] = nbt.TAG_Int(tick)
door['X'] = nbt.TAG_Int(x)
door['Y'] = nbt.TAG_Int(y)
door['Z'] = nbt.TAG_Int(z)
return door
def del_door(vil_list, doors_set):
"""
    :param vil_list: TAG_List of village compounds to prune.
    :param doors_set: set of (x, y, z) door coordinates to remove.
    :return: None; vil_list is modified in place.
"""
vil85 = list(vil_list)
for vil_TAGCompound in vil85:
villl = Village(vil_TAGCompound)
villl.del_doorz(doors_set)
if villl.is_empty:
vil_list.remove(vil_TAGCompound)
def village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,
axis, tick, cat):
"""
    Generates villages with rows of doors.
    'x1' is the lowest block on the X axis
    'z1' is the lowest block on the Z axis
    'y_list' holds the Y levels of the lower blocks of the doors
    :param axis: The axis along which a single village is created;
    'axis' is the axis the villages lie on: walking down that axis, you would pass through a door of every village
    'villages' is the number of villages to generate on this layer
    'halfDoorsInVillage' is half of the doors in a village
    'emptySpaces' is the space between the two blocks of doors,
    i.e. the space between the first half of the doors and the second
    'tick' is the time in ticks; in a new file it can be basically anything but 0, and in an old file it has to be
    the same as the other villages and the main tick of the file.
    'cat' is the NBT file to modify
"""
cat2 = cat['data']
doors_coords_lists = []
doors_set = set()
for y in y_list:
doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,
halfDoorsInVillage, emptySpaces, axis)
for vill_coords_list in doors_coords_lists:
for single_door_coord in vill_coords_list:
doors_set.add(tuple(single_door_coord))
del_door(cat2['Villages'], doors_set)
for coordinates_list in doors_coords_lists:
vil = Village.create_village(tick)
for x, y, z in coordinates_list:
vil.add_door(create_door(tick, x, y, z))
cat2['Villages'].append(vil.get_vil())
def main():
cat1, tick = existing_village_file('./villagesCopy2.dat')
village_gen(-107, number_of_villages_to_generate, [132], 169,
number_of_doors_to_generate / 2, 19, 'X', tick, cat1)
cat1.write_file('./villagesCopy2.dat')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from nbt import nbt
from matplotlib import pyplot
class Banana(object):
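    # Stand-in element type for nbt.TAG_List: 10 is the NBT type id of
    # TAG_Compound, so TAG_List(Banana) produces a list of compound tags.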
id = 10
def srange(x1, xDoors, spaces):
"""
    Yields two runs of xDoors consecutive values starting at x1, separated by a gap of `spaces` (the door positions for the two halves of a village row).
"""
for a in xrange(x1, x1 + xDoors):
yield a
for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):
yield a
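# For example, srange(0, 3, 2) yields 0, 1, 2, 5, 6, 7: two groups of three
# door positions separated by a two-block gap.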
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis):
"""
x1 and z1 are the lowest value on the X / Z axis
'halfDoorsInVillage' is 1/2 of the total doors in a village
:param axis: The axis along which a single village is created;
    Making an MCEdit filter to do the same thing could be cool,
like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.
"""
k = []
assert axis in ('X', 'Z')
if axis == "Z":
for x in xrange(x1, x1 + villages):
j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)]
k.append(j)
elif axis == "X":
for z in xrange(z1, z1 + villages):
j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)]
k.append(j)
return k
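# e.g. village_doors_coordinates(0, 2, 64, 0, 3, 2, 'Z') returns two rows of
# six [x, 64, z] door coordinates, one row per village at x=0 and x=1.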
number_of_villages_to_generate = 32
number_of_doors_to_generate = 22
tick = 77
def template_village_file(tick):
"""
    Creates a template villages.dat file that I can modify later on
"""
cat = nbt.NBTFile()
cat2 = cat['data'] = nbt.TAG_Compound()
cat2["Villages"] = nbt.TAG_List(Banana)
cat2['Tick'] = nbt.TAG_Int(tick)
return cat
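# A freshly created file can then be populated with Village objects and saved,
# e.g. template_village_file(77).write_file('./villages.dat').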
def existing_village_file(kovetz):
"""
    Load an existing villages.dat into an editable NBTFile, returning it together with its tick value
"""
try:
cat77 = nbt.NBTFile(kovetz)
except IOError:
raise Exception("Hmm. Unfortunately, the file requested does not exist :(")
tick4 = cat77['data']['Tick'].value
return cat77, tick4
class Village(object):
"""
Some villages.dat related functions
village is a tag_compound
:type village: nbt.TAG_Compound
"""
def __init__(self, village):
self._village = village
def add_door(self, door):
"""
        Adds a door and updates the village's running position aggregate (ACX/ACY/ACZ) and center (CX/CY/CZ)
"""
doors_list = self._village['Doors']
doors_list.append(door)
x = door['X'].value
y = door['Y'].value
z = door['Z'].value
self._update_doormath(x, y, z)
def del_doorz(self, new_doors):
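        # Iterate over a copy so doors can be removed from the live list safely.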
kapoow = self.get_vil()['Doors']
kapooww = list(kapoow)
for door in kapooww:
x, y, z = door['X'].value, door['Y'].value, door['Z'].value
if (x, y, z) in new_doors:
kapoow.remove(door)
self._update_doormath(-x, -y, -z)
def _update_doormath(self, x, y, z):
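        # ACX/ACY/ACZ hold the running sum of all door positions; the village
        # center CX/CY/CZ is that sum divided by the current door count.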
doors_list = self._village['Doors']
self._village['ACX'].value += x
self._village['ACY'].value += y
self._village['ACZ'].value += z
if len(doors_list) == 0:
self._village['CX'] = nbt.TAG_Int(0)
self._village['CY'] = nbt.TAG_Int(0)
self._village['CZ'] = nbt.TAG_Int(0)
else:
self._village['CX'].value = self._village['ACX'].value / len(doors_list)
self._village['CY'].value = self._village['ACY'].value / len(doors_list)
self._village['CZ'].value = self._village['ACZ'].value / len(doors_list)
@property
def is_empty(self):
return len(self._village["Doors"]) == 0
def get_vil(self):
return self._village
@staticmethod
def create_village(tick):
"""
Creates a template village
"""
village_template = nbt.TAG_Compound()
village_template['Doors'] = nbt.TAG_List(Banana)
village_template['Players'] = nbt.TAG_List(Banana)
village_template['ACX'] = nbt.TAG_Int(0)
village_template['ACY'] = nbt.TAG_Int(0)
village_template['ACZ'] = nbt.TAG_Int(0)
village_template['CX'] = nbt.TAG_Int(0)
village_template['CY'] = nbt.TAG_Int(0)
village_template['CZ'] = nbt.TAG_Int(0)
village_template['Golems'] = nbt.TAG_Int(0)
village_template['MTick'] = nbt.TAG_Int(0)
village_template['PopSize'] = nbt.TAG_Int(1)
village_template['Radius'] = nbt.TAG_Int(32)
village_template['Stable'] = nbt.TAG_Int(tick)
village_template['Tick'] = nbt.TAG_Int(tick)
return Village(village_template)
def create_door(tick, x, y, z):
"""
Generates a door using given coords and tick.
"""
door = nbt.TAG_Compound()
door['TS'] = nbt.TAG_Int(tick)
door['X'] = nbt.TAG_Int(x)
door['Y'] = nbt.TAG_Int(y)
door['Z'] = nbt.TAG_Int(z)
return door
def del_door(vil_list, doors_set):
"""
    :param vil_list: TAG_List of village compounds to prune.
    :param doors_set: set of (x, y, z) door coordinates to remove.
    :return: None; vil_list is modified in place.
"""
vil85 = list(vil_list)
for vil_TAGCompound in vil85:
villl = Village(vil_TAGCompound)
villl.del_doorz(doors_set)
if villl.is_empty:
vil_list.remove(vil_TAGCompound)
def village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces, axis, tick, cat):
"""
    Generates villages with rows of doors.
    'x1' is the lowest block on the X axis
    'z1' is the lowest block on the Z axis
    'y_list' holds the Y levels of the lower blocks of the doors
    :param axis: The axis along which a single village is created;
    'axis' is the axis the villages lie on: walking down that axis, you would pass through a door of every village
    'villages' is the number of villages to generate on this layer
    'halfDoorsInVillage' is half of the doors in a village
    'emptySpaces' is the space between the two blocks of doors,
    i.e. the space between the first half of the doors and the second
    'tick' is the time in ticks; in a new file it can be basically anything but 0, and in an old file it has to be
    the same as the other villages and the main tick of the file.
    'cat' is the NBT file to modify
"""
cat2 = cat["data"]
doors_coords_lists = []
doors_set = set()
for y in y_list:
doors_coords_lists += village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis)
for vill_coords_list in doors_coords_lists:
for single_door_coord in vill_coords_list:
doors_set.add(tuple(single_door_coord))
del_door(cat2['Villages'], doors_set)
for coordinates_list in doors_coords_lists:
vil = Village.create_village(tick)
for x, y, z in coordinates_list:
vil.add_door(create_door(tick, x, y, z))
cat2['Villages'].append(vil.get_vil())
def main():
cat1, tick = existing_village_file("./villagesCopy2.dat")
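    # Regenerates 32 villages of 22 doors each along the X axis at y=132,
    # starting at x=-107, z=169, with a 19-block gap between the door halves.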
village_gen(-107, number_of_villages_to_generate, [132], 169, number_of_doors_to_generate / 2, 19, 'X', tick, cat1)
cat1.write_file("./villagesCopy2.dat")
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "4e9674ea46bdf930d1e99bcda56eaa300c84deef",
"index": 7196,
"step-1": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\n<mask token>\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\n<mask token>\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\n<mask token>\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n 
\"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\n<mask token>\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\n 'Hmm. Unfortunately, the file requested does not exist :(')\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n 
village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,\n axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat['data']\n doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,\n halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file('./villagesCopy2.dat')\n village_gen(-107, number_of_villages_to_generate, [132], 169, \n number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file('./villagesCopy2.dat')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage,\n emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n k = []\n assert axis in ('X', 'Z')\n if axis == 'Z':\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n elif axis == 'X':\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)\n ]\n k.append(j)\n return k\n\n\nnumber_of_villages_to_generate = 32\nnumber_of_doors_to_generate = 22\ntick = 77\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2['Villages'] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\n 'Hmm. Unfortunately, the file requested does not exist :(')\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(\n doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(\n doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(\n doors_list)\n\n @property\n def is_empty(self):\n return len(self._village['Doors']) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = nbt.TAG_Int(0)\n 
village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces,\n axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat['data']\n doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1,\n halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file('./villagesCopy2.dat')\n village_gen(-107, number_of_villages_to_generate, [132], 169, \n number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file('./villagesCopy2.dat')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from nbt import nbt\nfrom matplotlib import pyplot\n\nclass Banana(object):\n id = 10\n\n\ndef srange(x1, xDoors, spaces):\n \"\"\"\n a counting thing that i dunno what does.\n \"\"\"\n for a in xrange(x1, x1 + xDoors):\n yield a\n for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):\n yield a\n\n\ndef village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis):\n \"\"\"\n x1 and z1 are the lowest value on the X / Z axis\n 'halfDoorsInVillage' is 1/2 of the total doors in a village\n :param axis: The axis along which a single village is created;\n\n make a MCEDIT filter to do the same thing could be cool,\n like a filter to create a village on every selected door, or on a row of doors on the X/Z axis.\n \"\"\"\n\n k = []\n assert axis in ('X', 'Z')\n\n if axis == \"Z\":\n for x in xrange(x1, x1 + villages):\n j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)]\n k.append(j)\n elif axis == \"X\":\n for z in xrange(z1, z1 + villages):\n j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)]\n k.append(j)\n return k\n\n\nnumber_of_villages_to_generate = 32\nnumber_of_doors_to_generate = 22\ntick = 77\n\n\ndef template_village_file(tick):\n \"\"\"\n Creates a template villages.dat file that i can modify later on\n \"\"\"\n cat = nbt.NBTFile()\n cat2 = cat['data'] = nbt.TAG_Compound()\n cat2[\"Villages\"] = nbt.TAG_List(Banana)\n cat2['Tick'] = nbt.TAG_Int(tick)\n return cat\n\n\ndef existing_village_file(kovetz):\n \"\"\"\n Create an editable villages.nbt file from an already existing one, using the same tick value\n \"\"\"\n try:\n cat77 = nbt.NBTFile(kovetz)\n except IOError:\n raise Exception(\"Hmm. Unfortunately, the file requested does not exist :(\")\n tick4 = cat77['data']['Tick'].value\n return cat77, tick4\n\n\nclass Village(object):\n \"\"\"\n Some villages.dat related functions\n village is a tag_compound\n :type village: nbt.TAG_Compound\n \"\"\"\n\n def __init__(self, village):\n self._village = village\n\n def add_door(self, door):\n \"\"\"\n Adds a door and updates the current village aggregate and center with some magic math stuff\n \"\"\"\n doors_list = self._village['Doors']\n doors_list.append(door)\n x = door['X'].value\n y = door['Y'].value\n z = door['Z'].value\n\n self._update_doormath(x, y, z)\n\n def del_doorz(self, new_doors):\n kapoow = self.get_vil()['Doors']\n kapooww = list(kapoow)\n for door in kapooww:\n x, y, z = door['X'].value, door['Y'].value, door['Z'].value\n if (x, y, z) in new_doors:\n kapoow.remove(door)\n self._update_doormath(-x, -y, -z)\n\n def _update_doormath(self, x, y, z):\n doors_list = self._village['Doors']\n self._village['ACX'].value += x\n self._village['ACY'].value += y\n self._village['ACZ'].value += z\n if len(doors_list) == 0:\n self._village['CX'] = nbt.TAG_Int(0)\n self._village['CY'] = nbt.TAG_Int(0)\n self._village['CZ'] = nbt.TAG_Int(0)\n else:\n self._village['CX'].value = self._village['ACX'].value / len(doors_list)\n self._village['CY'].value = self._village['ACY'].value / len(doors_list)\n self._village['CZ'].value = self._village['ACZ'].value / len(doors_list)\n\n @property\n def is_empty(self):\n return len(self._village[\"Doors\"]) == 0\n\n def get_vil(self):\n return self._village\n\n @staticmethod\n def create_village(tick):\n \"\"\"\n Creates a template village\n \"\"\"\n village_template = nbt.TAG_Compound()\n\n village_template['Doors'] = nbt.TAG_List(Banana)\n village_template['Players'] = nbt.TAG_List(Banana)\n village_template['ACX'] = 
nbt.TAG_Int(0)\n village_template['ACY'] = nbt.TAG_Int(0)\n village_template['ACZ'] = nbt.TAG_Int(0)\n\n village_template['CX'] = nbt.TAG_Int(0)\n village_template['CY'] = nbt.TAG_Int(0)\n village_template['CZ'] = nbt.TAG_Int(0)\n\n village_template['Golems'] = nbt.TAG_Int(0)\n village_template['MTick'] = nbt.TAG_Int(0)\n village_template['PopSize'] = nbt.TAG_Int(1)\n village_template['Radius'] = nbt.TAG_Int(32)\n village_template['Stable'] = nbt.TAG_Int(tick)\n village_template['Tick'] = nbt.TAG_Int(tick)\n return Village(village_template)\n\n\ndef create_door(tick, x, y, z):\n \"\"\"\n Generates a door using given coords and tick.\n \"\"\"\n door = nbt.TAG_Compound()\n door['TS'] = nbt.TAG_Int(tick)\n door['X'] = nbt.TAG_Int(x)\n door['Y'] = nbt.TAG_Int(y)\n door['Z'] = nbt.TAG_Int(z)\n return door\n\n\ndef del_door(vil_list, doors_set):\n \"\"\"\n :param vil_list:\n :param doors_set:\n :return:\n \"\"\"\n vil85 = list(vil_list)\n for vil_TAGCompound in vil85:\n villl = Village(vil_TAGCompound)\n villl.del_doorz(doors_set)\n if villl.is_empty:\n vil_list.remove(vil_TAGCompound)\n\ndef village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces, axis, tick, cat):\n \"\"\"\n generates villages with doors n stuff\n\n 'x1' is the lowest block on the X axis\n 'z1' is the lowest block on the Z axis\n 'y' is the Y level of the lower block of the doors\n :param axis: The axis along a single village is created;\n 'axis' is the axis on which the villages are, either the axis where the in the village doors are,\n or the axis where the villages are, as in if i was to walk down that axis i would go through a door of every village\n\n 'villages' is the numbers of villages i want on this layer\n 'halfDoorsInVillage' is half of the doors in a village\n 'emptySpaces' is the space between the 2 blocks of doors /\n the space between the first half of the doors and the second\n 'tick' the time in ticks, in a new file can be basicly anything but 0 and in an old file it has the be the same as\n the other villages and the main tick of the file.\n 'cat' magic NBT file\n\n \"\"\"\n cat2 = cat[\"data\"]\n doors_coords_lists = []\n doors_set = set()\n for y in y_list:\n doors_coords_lists += village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis)\n for vill_coords_list in doors_coords_lists:\n for single_door_coord in vill_coords_list:\n doors_set.add(tuple(single_door_coord))\n del_door(cat2['Villages'], doors_set)\n for coordinates_list in doors_coords_lists:\n vil = Village.create_village(tick)\n for x, y, z in coordinates_list:\n vil.add_door(create_door(tick, x, y, z))\n cat2['Villages'].append(vil.get_vil())\n\n\ndef main():\n cat1, tick = existing_village_file(\"./villagesCopy2.dat\")\n village_gen(-107, number_of_villages_to_generate, [132], 169, number_of_doors_to_generate / 2, 19, 'X', tick, cat1)\n cat1.write_file(\"./villagesCopy2.dat\")\n\nif __name__ == '__main__':\n main()",
"step-ids": [
14,
15,
19,
21,
23
]
}
|
[
14,
15,
19,
21,
23
] |
import copy
import math
import operator
import numpy as np
def turn_left(action):
switcher = {
(-1, 0): (0, -1),
(0, 1): (-1, 0),
(1, 0): (0, 1),
(0, -1): (1, 0)
}
return switcher.get(action)
def turn_right(action):
switcher = {
(-1, 0): (0, 1),
(0, 1): (1, 0),
(1, 0): (0, -1),
(0, -1): (-1, 0)
}
return switcher.get(action)
def addTwoTuples(a, b):
return tuple(map(operator.add, a, b))
def argmax(seq, fn):
best = seq[0]
best_score = fn(best)
for x in seq:
x_score = fn(x)
if x_score > best_score:
best, best_score = x, x_score
return best
def go(current_state, action, grid_size):
state1 = addTwoTuples(current_state, action)
x_coord = state1[0]
y_coord = state1[1]
if x_coord < 0 or x_coord >= grid_size or y_coord < 0 or y_coord >= grid_size:
return current_state
else:
return state1
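# Simulate ten seeded rollouts of the given policy under the same stochastic
# motion model used for planning (70% intended move, 10% each for veering
# left, veering right, or reversing) and return the floor of the mean reward.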
def play(env, policy):
utility_values = []
for j in range(10):
pos = env.start_loc
utility = 0
np.random.seed(j)
swerve = np.random.random_sample(1000000)
k = 0
while pos != env.terminal_loc:
move = policy[pos]
if swerve[k] > 0.7:
if swerve[k] > 0.8:
if swerve[k] > 0.9:
move = turn_right(turn_right(move))
else:
move = turn_right(move)
else:
move = turn_left(move)
k += 1
pos = go(pos, move, env.grid_size)
utility += env.get_reward(pos)
utility_values.append(utility)
    # print(utility_values)
cost = int(math.floor(sum(utility_values) / len(utility_values)))
return cost
class GridMDP:
def __init__(self, grid_size):
self.grid_size = grid_size
self.action_dim = (4,)
# North, south, East, West
self.action_coordinates = [(-1, 0), (1, 0), (0, 1), (0, -1)]
self.rewards = [[-1 for x in range(grid_size)] for y in range(grid_size)]
self.gamma = 0.9
self.epsilon = 0.1
self.states = [(x, y) for x in range(grid_size) for y in range(grid_size)]
self.utility = None
self.policy = None
self.T = None
def __deepcopy__(self, memodict={}):
copy_object = GridMDP(self.grid_size)
copy_object.rewards = copy.deepcopy(self.rewards)
copy_object.T = dict(self.T)
# copy_object.T = self.generate_trans_matrix()
copy_object.policy = copy.deepcopy(self.policy)
return copy_object
# For every obstacle add a -101 as reward
def add_obstacles(self, list_of_obstacles):
for obstacle in list_of_obstacles:
self.rewards[obstacle[0]][obstacle[1]] = -101
# Keep a track of every start location
def add_start_location(self, start_loc):
self.start_loc = start_loc
# Update the reward as 99 for every end location
def add_end_location(self, end_loc):
self.terminal_loc = end_loc
self.rewards[end_loc[0]][end_loc[1]] = 99
end_loc_no = end_loc[0] * self.grid_size + end_loc[1]
action_list = {}
# No of action co-ordinates
for i in range(4):
action_list[i] = self.turn(end_loc, None)
self.T[end_loc_no] = action_list
def get_reward(self, state):
return self.rewards[state[0]][state[1]]
def get_actions(self, state):
if state == self.terminal_loc:
return [None]
else:
return self.action_coordinates
def go(self, current_state, action):
return go(current_state, action, self.grid_size)
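    # Stochastic transition model: the intended action succeeds with
    # probability 0.7; the agent veers right, veers left, or turns around with
    # probability 0.1 each. None marks the (absorbing) terminal state.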
def turn(self, current_state, action):
if action is None:
return [(0, current_state)]
else:
return [(0.7, self.go(current_state, action)),
(0.1, self.go(current_state, turn_right(action))),
(0.1, self.go(current_state, turn_left(action))),
(0.1, self.go(current_state, turn_left(turn_left(action))))]
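    # Value iteration over the precomputed transition matrix T. Utilities are
    # updated in place (revised_utility1 aliases utility1, so this is a
    # Gauss-Seidel sweep) until the largest change falls below
    # epsilon * (1 - gamma) / gamma.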
def run_with_trans_matrix(self):
utility1 = dict([(s, 0) for s in self.states])
while True:
delta = 0
revised_utility1 = utility1
for s in self.states:
state_no = s[0] * self.grid_size + s[1]
u = utility1[s]
max_util = - float("inf")
for i in range(len(self.action_coordinates)):
# a = self.action_coordinates[i]
util = 0
for (p, s1) in self.T[state_no][i]:
util += (p * revised_utility1[s1])
if util > max_util:
max_util = util
utility1[s] = self.get_reward(s) + self.gamma * max_util
delta = max(delta, abs(u - utility1[s]))
if delta < self.epsilon * (1 - self.gamma) / self.gamma:
break
self.utility = utility1
pi = self.get_policy()
return pi
def get_policy(self):
policy = {}
for s in self.states:
policy[s] = argmax(self.get_actions(s), lambda a: self.expected_utility(a, s, self.utility))
self.policy = policy
return policy
def expected_utility(self, a, s, utility):
return sum([p * utility[s1] for (p, s1) in self.turn(s, a)])
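    # Precompute T[state_index][action_index] -> [(prob, next_state), ...] for
    # every cell, so repeated value-iteration sweeps avoid rebuilding the model.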
def generate_trans_matrix(self):
transmat = {}
action_list = {}
for s in range(self.grid_size * self.grid_size):
x_coord = s // self.grid_size
y_coord = s % self.grid_size
state = (x_coord, y_coord)
for i in range(len(self.action_coordinates)):
action_list[i] = self.turn(state, self.action_coordinates[i])
transmat[s] = action_list
action_list = {}
self.T = transmat
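# Parse the input file: grid size, car count, obstacle count, then obstacle,
# start, and end coordinates. Each coordinate line is split on "," and
# reversed by the [::-1] slice before being stored as a tuple.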
def read_file(input_file_name):
with open(input_file_name, "r") as file:
# Read 1st line for the grid size
line = file.readline().rstrip()
grid_size = int(line)
environment = GridMDP(grid_size)
# Read 2nd line for the no. of cars
line = file.readline().rstrip()
no_of_cars = int(line)
# Read 3rd line for the no. of obstacles
line = file.readline().rstrip()
no_of_obstacles = int(line)
location_of_obstacles = []
car_locations = {}
for i in range(0, no_of_cars):
car_locations[i] = {}
# Read all the obstacles co-ordinates
while len(location_of_obstacles) != no_of_obstacles:
loc = map(int, file.readline().rstrip().split(",")[::-1])
location_of_obstacles.append(tuple(loc))
car_cnt = 0
# Read all the car start location co-ordinates
while car_cnt < no_of_cars:
loc = map(int, file.readline().rstrip().split(",")[::-1])
car_locations[car_cnt]["Start"] = tuple(loc)
car_cnt += 1
car_cnt = 0
# Read all the car terminal location co-ordinates
while car_cnt < no_of_cars:
loc = map(int, file.readline().rstrip().split(",")[::-1])
car_locations[car_cnt]["End"] = tuple(loc)
car_cnt += 1
if len(location_of_obstacles) > 0:
environment.add_obstacles(location_of_obstacles)
environment.generate_trans_matrix()
return [environment, car_locations]
def execute(inputfile="grading_case/input30.txt"):
original_grid, car_locations = read_file(inputfile)
endloc_policy = {}
# f = open("output.txt", "w")
for car, locations in car_locations.items():
start_loc = locations["Start"]
end_loc = locations["End"]
if start_loc == end_loc:
cost = 100
else:
if end_loc in endloc_policy:
grid = copy.deepcopy(endloc_policy[end_loc])
grid.add_end_location(end_loc)
pi = grid.policy
else:
grid = copy.deepcopy(original_grid)
grid.add_end_location(end_loc)
pi = grid.run_with_trans_matrix()
endloc_policy[end_loc] = grid
grid.add_start_location(start_loc)
cost = play(grid, pi)
        print(cost)
# f.write(str(cost) + "\n")
# f.close()
if __name__ == "__main__":
execute()
|
normal
|
{
"blob_id": "e1c68c7eb899718dd1c28dc6e95d5538c2b8ad74",
"index": 4510,
"step-1": "import copy\nimport math\nimport operator\n\nimport numpy as np, pprint\n\n\ndef turn_left(action):\n switcher = {\n (-1, 0): (0, -1),\n (0, 1): (-1, 0),\n (1, 0): (0, 1),\n (0, -1): (1, 0)\n\n }\n return switcher.get(action)\n\n\ndef turn_right(action):\n switcher = {\n (-1, 0): (0, 1),\n (0, 1): (1, 0),\n (1, 0): (0, -1),\n (0, -1): (-1, 0)\n\n }\n return switcher.get(action)\n\n\ndef addTwoTuples(a, b):\n return tuple(map(operator.add, a, b))\n\n\ndef argmax(seq, fn):\n best = seq[0]\n best_score = fn(best)\n for x in seq:\n x_score = fn(x)\n if x_score > best_score:\n best, best_score = x, x_score\n return best\n\n\ndef go(current_state, action, grid_size):\n state1 = addTwoTuples(current_state, action)\n x_coord = state1[0]\n y_coord = state1[1]\n if x_coord < 0 or x_coord >= grid_size or y_coord < 0 or y_coord >= grid_size:\n return current_state\n else:\n return state1\n\n\ndef play(env, policy):\n utility_values = []\n for j in range(10):\n pos = env.start_loc\n utility = 0\n np.random.seed(j)\n swerve = np.random.random_sample(1000000)\n k = 0\n while pos != env.terminal_loc:\n move = policy[pos]\n if swerve[k] > 0.7:\n if swerve[k] > 0.8:\n if swerve[k] > 0.9:\n move = turn_right(turn_right(move))\n else:\n move = turn_right(move)\n else:\n move = turn_left(move)\n\n k += 1\n pos = go(pos, move, env.grid_size)\n utility += env.get_reward(pos)\n utility_values.append(utility)\n # print utility_values\n cost = int(math.floor(sum(utility_values) / len(utility_values)))\n return cost\n\n\n\nclass GridMDP:\n\n def __init__(self, grid_size):\n self.grid_size = grid_size\n self.action_dim = (4,)\n # North, south, East, West\n self.action_coordinates = [(-1, 0), (1, 0), (0, 1), (0, -1)]\n self.rewards = [[-1 for x in range(grid_size)] for y in range(grid_size)]\n self.gamma = 0.9\n self.epsilon = 0.1\n self.states = [(x, y) for x in range(grid_size) for y in range(grid_size)]\n self.utility = None\n self.policy = None\n\n self.T = None\n\n def __deepcopy__(self, memodict={}):\n copy_object = GridMDP(self.grid_size)\n copy_object.rewards = copy.deepcopy(self.rewards)\n copy_object.T = dict(self.T)\n # copy_object.T = self.generate_trans_matrix()\n copy_object.policy = copy.deepcopy(self.policy)\n return copy_object\n\n\n # For every obstacle add a -101 as reward\n def add_obstacles(self, list_of_obstacles):\n for obstacle in list_of_obstacles:\n self.rewards[obstacle[0]][obstacle[1]] = -101\n\n # Keep a track of every start location\n def add_start_location(self, start_loc):\n self.start_loc = start_loc\n\n # Update the reward as 99 for every end location\n def add_end_location(self, end_loc):\n self.terminal_loc = end_loc\n self.rewards[end_loc[0]][end_loc[1]] = 99\n\n end_loc_no = end_loc[0] * self.grid_size + end_loc[1]\n action_list = {}\n # No of action co-ordinates\n for i in range(4):\n action_list[i] = self.turn(end_loc, None)\n\n self.T[end_loc_no] = action_list\n\n def get_reward(self, state):\n return self.rewards[state[0]][state[1]]\n\n def get_actions(self, state):\n if state == self.terminal_loc:\n return [None]\n else:\n return self.action_coordinates\n\n def go(self, current_state, action):\n return go(current_state, action, self.grid_size)\n\n def turn(self, current_state, action):\n if action is None:\n return [(0, current_state)]\n else:\n return [(0.7, self.go(current_state, action)),\n (0.1, self.go(current_state, turn_right(action))),\n (0.1, self.go(current_state, turn_left(action))),\n (0.1, self.go(current_state, turn_left(turn_left(action))))]\n\n\n 
def run_with_trans_matrix(self):\n utility1 = dict([(s, 0) for s in self.states])\n while True:\n delta = 0\n revised_utility1 = utility1\n for s in self.states:\n state_no = s[0] * self.grid_size + s[1]\n u = utility1[s]\n max_util = - float(\"inf\")\n for i in range(len(self.action_coordinates)):\n # a = self.action_coordinates[i]\n util = 0\n for (p, s1) in self.T[state_no][i]:\n util += (p * revised_utility1[s1])\n if util > max_util:\n max_util = util\n utility1[s] = self.get_reward(s) + self.gamma * max_util\n\n delta = max(delta, abs(u - utility1[s]))\n\n if delta < self.epsilon * (1 - self.gamma) / self.gamma:\n break\n\n self.utility = utility1\n pi = self.get_policy()\n return pi\n\n def get_policy(self):\n policy = {}\n for s in self.states:\n policy[s] = argmax(self.get_actions(s), lambda a: self.expected_utility(a, s, self.utility))\n self.policy = policy\n return policy\n\n def expected_utility(self, a, s, utility):\n return sum([p * utility[s1] for (p, s1) in self.turn(s, a)])\n\n def generate_trans_matrix(self):\n transmat = {}\n action_list = {}\n for s in range(self.grid_size * self.grid_size):\n x_coord = s // self.grid_size\n y_coord = s % self.grid_size\n state = (x_coord, y_coord)\n for i in range(len(self.action_coordinates)):\n action_list[i] = self.turn(state, self.action_coordinates[i])\n\n transmat[s] = action_list\n action_list = {}\n\n self.T = transmat\n\n\ndef read_file(input_file_name):\n with open(input_file_name, \"r\") as file:\n # Read 1st line for the grid size\n line = file.readline().rstrip()\n grid_size = int(line)\n environment = GridMDP(grid_size)\n\n # Read 2nd line for the no. of cars\n line = file.readline().rstrip()\n no_of_cars = int(line)\n\n # Read 3rd line for the no. of obstacles\n line = file.readline().rstrip()\n no_of_obstacles = int(line)\n\n location_of_obstacles = []\n car_locations = {}\n for i in range(0, no_of_cars):\n car_locations[i] = {}\n\n # Read all the obstacles co-ordinates\n while len(location_of_obstacles) != no_of_obstacles:\n loc = map(int, file.readline().rstrip().split(\",\")[::-1])\n location_of_obstacles.append(tuple(loc))\n\n car_cnt = 0\n # Read all the car start location co-ordinates\n while car_cnt < no_of_cars:\n loc = map(int, file.readline().rstrip().split(\",\")[::-1])\n car_locations[car_cnt][\"Start\"] = tuple(loc)\n car_cnt += 1\n\n car_cnt = 0\n # Read all the car terminal location co-ordinates\n while car_cnt < no_of_cars:\n loc = map(int, file.readline().rstrip().split(\",\")[::-1])\n car_locations[car_cnt][\"End\"] = tuple(loc)\n car_cnt += 1\n\n if len(location_of_obstacles) > 0:\n environment.add_obstacles(location_of_obstacles)\n\n environment.generate_trans_matrix()\n\n return [environment, car_locations]\n\ndef execute(inputfile=\"grading_case/input30.txt\"):\n original_grid, car_locations = read_file(inputfile)\n endloc_policy = {}\n # f = open(\"output.txt\", \"w\")\n for car, locations in car_locations.items():\n start_loc = locations[\"Start\"]\n end_loc = locations[\"End\"]\n if start_loc == end_loc:\n cost = 100\n else:\n if end_loc in endloc_policy:\n grid = copy.deepcopy(endloc_policy[end_loc])\n grid.add_end_location(end_loc)\n pi = grid.policy\n else:\n grid = copy.deepcopy(original_grid)\n grid.add_end_location(end_loc)\n pi = grid.run_with_trans_matrix()\n endloc_policy[end_loc] = grid\n\n grid.add_start_location(start_loc)\n\n cost = play(grid, pi)\n print cost\n # f.write(str(cost) + \"\\n\")\n # f.close()\n\n\nif __name__ == \"__main__\":\n execute()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
@app.route('/', methods=['POST'])
def hello_world():
if request.method == 'POST':
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
orderNo = _data['orderNo']
name = _data['name']
idcard = _data['idcard']
mobile = _data['mobile']
json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),
encoding='utf-8')
parameter = 'param=%s' % param
parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)
XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,
TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':
idcard, 'mobile': mobile}
WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCWJNeedleUrl), 'appId': appId, 'param': param}
ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCApplyNeedleUrl), 'appId': appId, 'param': param}
r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
TCdata = r1.text
print(TCdata)
r2 = requests.post(TCWJNeedleUrl, WJTZparams)
print(r2.text)
rep = json.loads(r2.text)
if rep['status'] == 0:
data = rep['data']
TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata1 after decryption', TCdata1)
r3 = requests.post(TCApplyNeedleUrl, ANparams)
print(r3.text)
rep = json.loads(r3.text)
if rep['status'] == 0:
data = rep['data']
TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata2 after decryption', TCdata2)
return json.dumps(TCdata2)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/', methods=['POST'])
def hello_world():
if request.method == 'POST':
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
orderNo = _data['orderNo']
name = _data['name']
idcard = _data['idcard']
mobile = _data['mobile']
json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),
encoding='utf-8')
parameter = 'param=%s' % param
parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)
XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,
TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':
idcard, 'mobile': mobile}
WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCWJNeedleUrl), 'appId': appId, 'param': param}
ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCApplyNeedleUrl), 'appId': appId, 'param': param}
r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
TCdata = r1.text
print(TCdata)
r2 = requests.post(TCWJNeedleUrl, WJTZparams)
print(r2.text)
rep = json.loads(r2.text)
if rep['status'] == 0:
data = rep['data']
TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata1 after decryption', TCdata1)
r3 = requests.post(TCApplyNeedleUrl, ANparams)
print(r3.text)
rep = json.loads(r3.text)
if rep['status'] == 0:
data = rep['data']
TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata2 after decryption', TCdata2)
return json.dumps(TCdata2)
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
@app.route('/', methods=['POST'])
def hello_world():
if request.method == 'POST':
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
orderNo = _data['orderNo']
name = _data['name']
idcard = _data['idcard']
mobile = _data['mobile']
json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),
encoding='utf-8')
parameter = 'param=%s' % param
parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)
XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,
TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':
idcard, 'mobile': mobile}
WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCWJNeedleUrl), 'appId': appId, 'param': param}
ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCApplyNeedleUrl), 'appId': appId, 'param': param}
r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
TCdata = r1.text
print(TCdata)
r2 = requests.post(TCWJNeedleUrl, WJTZparams)
print(r2.text)
rep = json.loads(r2.text)
if rep['status'] == 0:
data = rep['data']
TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata1 after decryption', TCdata1)
r3 = requests.post(TCApplyNeedleUrl, ANparams)
print(r3.text)
rep = json.loads(r3.text)
if rep['status'] == 0:
data = rep['data']
TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata2 after decryption', TCdata2)
return json.dumps(TCdata2)
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
import json
import requests as requests
from flask import Flask
from flask import request
from tools import AESCipher, tokenId, TokenKey, appId
from tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl
app = Flask(__name__)
@app.route('/', methods=['POST'])
def hello_world():
if request.method == 'POST':
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
orderNo = _data['orderNo']
name = _data['name']
idcard = _data['idcard']
mobile = _data['mobile']
json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),
encoding='utf-8')
parameter = 'param=%s' % param
parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)
XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,
TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':
idcard, 'mobile': mobile}
WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCWJNeedleUrl), 'appId': appId, 'param': param}
ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,
TCApplyNeedleUrl), 'appId': appId, 'param': param}
r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
TCdata = r1.text
print(TCdata)
r2 = requests.post(TCWJNeedleUrl, WJTZparams)
print(r2.text)
rep = json.loads(r2.text)
if rep['status'] == 0:
data = rep['data']
TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata1 after decryption', TCdata1)
r3 = requests.post(TCApplyNeedleUrl, ANparams)
print(r3.text)
rep = json.loads(r3.text)
if rep['status'] == 0:
data = rep['data']
TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print('TCdata2 after decryption', TCdata2)
return json.dumps(TCdata2)
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
import json
import requests as requests
from flask import Flask
from flask import request
from tools import AESCipher, tokenId, TokenKey, appId
from tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl
app = Flask(__name__)
@app.route('/', methods=['POST'])
def hello_world():
if request.method == "POST":
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
orderNo = _data['orderNo']
name = _data['name']
idcard = _data['idcard']
mobile = _data['mobile']
json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')), encoding="utf-8")
parameter = ("param=%s" % (param))
parameterXY = ("name=%s,idCard=%s,mobile=%s" % (name, idcard, mobile))
XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY, TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard': idcard,
'mobile': mobile}
WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCWJNeedleUrl), 'appId': appId, 'param': param}
ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCApplyNeedleUrl), 'appId': appId, 'param': param}
r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
TCdata = r1.text
print(TCdata)
r2 = requests.post(TCWJNeedleUrl,WJTZparams)
print(r2.text)
rep = json.loads(r2.text)
if rep["status"] == 0:
data = rep["data"]
TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
print("TCdata1解密后", TCdata1)
r3 = requests.post(TCApplyNeedleUrl,ANparams)
print(r3.text)
rep = json.loads(r3.text)
if rep["status"] == 0:
data = rep["data"]
TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
print("TCdata2解密后", TCdata2)
return json.dumps(TCdata2)
if __name__ == '__main__':
app.run()
|
flexible
|
{
"blob_id": "4652cd5548b550cc21d126fc4fbe3e316ecb71b2",
"index": 143,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\n@app.route('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "import json\nimport requests as requests\nfrom flask import Flask\nfrom flask import request\nfrom tools import AESCipher, tokenId, TokenKey, appId\nfrom tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl\napp = Flask(__name__)\n\n\n@app.route('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "import json\r\n\r\nimport requests as requests\r\nfrom flask import Flask\r\nfrom flask import request\r\n\r\nfrom tools import AESCipher, tokenId, TokenKey, appId\r\nfrom tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl\r\n\r\napp = Flask(__name__)\r\n\r\n\r\n@app.route('/', methods=['POST'])\r\ndef hello_world():\r\n if request.method == \"POST\":\r\n json_data = request.get_data().decode('utf-8')\r\n _data = json.loads(json_data)\r\n orderNo = _data['orderNo']\r\n name = _data['name']\r\n idcard = _data['idcard']\r\n mobile = _data['mobile']\r\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\r\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')), encoding=\"utf-8\")\r\n parameter = (\"param=%s\" % (param))\r\n parameterXY = (\"name=%s,idCard=%s,mobile=%s\" % (name, idcard, mobile))\r\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY, TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard': idcard,\r\n 'mobile': mobile}\r\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCWJNeedleUrl), 'appId': appId, 'param': param}\r\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCApplyNeedleUrl), 'appId': appId, 'param': param}\r\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\r\n TCdata = r1.text\r\n print(TCdata)\r\n\r\n r2 = requests.post(TCWJNeedleUrl,WJTZparams)\r\n print(r2.text)\r\n rep = json.loads(r2.text)\r\n if rep[\"status\"] == 0:\r\n data = rep[\"data\"]\r\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\r\n print(\"TCdata1解密后\", TCdata1)\r\n\r\n r3 = requests.post(TCApplyNeedleUrl,ANparams)\r\n print(r3.text)\r\n rep = json.loads(r3.text)\r\n if rep[\"status\"] == 0:\r\n data = rep[\"data\"]\r\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\r\n print(\"TCdata2解密后\", TCdata2)\r\n\r\n\r\n return json.dumps(TCdata2)\r\n\r\n\r\nif __name__ == '__main__':\r\n app.run()\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
db.drop_all()
db.create_all()
User.query.delete()
Feedback.query.delete()
<|reserved_special_token_0|>
db.session.add(john)
db.session.commit()
<|reserved_special_token_0|>
db.session.add(feed)
db.session.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
db.drop_all()
db.create_all()
User.query.delete()
Feedback.query.delete()
john = User(username='John', password='123', email='24', first_name='12a',
last_name='123')
db.session.add(john)
db.session.commit()
feed = Feedback(title='test', content='alsdkjf', username='John')
db.session.add(feed)
db.session.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from models import db, User, Feedback
from app import app
db.drop_all()
db.create_all()
User.query.delete()
Feedback.query.delete()
john = User(username='John', password='123', email='24', first_name='12a',
last_name='123')
db.session.add(john)
db.session.commit()
feed = Feedback(title='test', content='alsdkjf', username='John')
db.session.add(feed)
db.session.commit()
<|reserved_special_token_1|>
"""Seed file to make sample data for pets db."""
from models import db, User, Feedback
from app import app
# Create all tables
db.drop_all()
db.create_all()
# If table isn't empty, empty it
User.query.delete()
Feedback.query.delete()
# Add users and posts
john = User(username="John",password="123",email="24",first_name="12a",last_name="123")
# Add new objects to session, so they'll persist
db.session.add(john)
#have to add users first to not violate foreign key constraints
db.session.commit()
feed = Feedback(title="test",content="alsdkjf",username="John")
db.session.add(feed)
# Commit--otherwise, this never gets saved!
db.session.commit()
|
flexible
|
{
"blob_id": "d520f9d681125937fbd9dff316bdc5f922f25ff3",
"index": 8050,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.drop_all()\ndb.create_all()\nUser.query.delete()\nFeedback.query.delete()\n<mask token>\ndb.session.add(john)\ndb.session.commit()\n<mask token>\ndb.session.add(feed)\ndb.session.commit()\n",
"step-3": "<mask token>\ndb.drop_all()\ndb.create_all()\nUser.query.delete()\nFeedback.query.delete()\njohn = User(username='John', password='123', email='24', first_name='12a',\n last_name='123')\ndb.session.add(john)\ndb.session.commit()\nfeed = Feedback(title='test', content='alsdkjf', username='John')\ndb.session.add(feed)\ndb.session.commit()\n",
"step-4": "<mask token>\nfrom models import db, User, Feedback\nfrom app import app\ndb.drop_all()\ndb.create_all()\nUser.query.delete()\nFeedback.query.delete()\njohn = User(username='John', password='123', email='24', first_name='12a',\n last_name='123')\ndb.session.add(john)\ndb.session.commit()\nfeed = Feedback(title='test', content='alsdkjf', username='John')\ndb.session.add(feed)\ndb.session.commit()\n",
"step-5": "\"\"\"Seed file to make sample data for pets db.\"\"\"\n\nfrom models import db, User, Feedback\nfrom app import app\n\n# Create all tables\ndb.drop_all()\ndb.create_all()\n\n# If table isn't empty, empty it\nUser.query.delete()\nFeedback.query.delete()\n\n\n# Add users and posts\njohn = User(username=\"John\",password=\"123\",email=\"24\",first_name=\"12a\",last_name=\"123\")\n\n# Add new objects to session, so they'll persist\ndb.session.add(john)\n\n\n#have to add users first to not violate foreign key constraints\ndb.session.commit()\n\nfeed = Feedback(title=\"test\",content=\"alsdkjf\",username=\"John\")\n\ndb.session.add(feed)\n\n\n# Commit--otherwise, this never gets saved!\ndb.session.commit()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
url = input('Enter - ')
html = urlopen(url).read()
soup = BeautifulSoup(html, "html.parser")
tags = soup.find_all('tr', {'id': re.compile(r'nonplayingnow.*')})
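# Each matched row is one upcoming match; grab the home and away team links.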
for i in tags:
casa = i.find("td", {'class': re.compile(r'team-home')}).find("a")
visitante = i.find("td", {'class': re.compile(r'team-away')}).find("a")
print ("Partido-> "+casa.get_text()+" vs "+visitante.get_text())
|
normal
|
{
"blob_id": "d07a26a69ccbbccf61402632dd6011315e0d61ed",
"index": 2710,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n",
"step-3": "<mask token>\nurl = input('Enter - ')\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, 'html.parser')\ntags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')})\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n",
"step-4": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport re\nurl = input('Enter - ')\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, 'html.parser')\ntags = soup.find_all('tr', {'id': re.compile('nonplayingnow.*')})\nfor i in tags:\n casa = i.find('td', {'class': re.compile('team-home')}).find('a')\n visitante = i.find('td', {'class': re.compile('team-away')}).find('a')\n print('Partido-> ' + casa.get_text() + ' vs ' + visitante.get_text())\n",
"step-5": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport re\n\nurl = input('Enter - ')\n\nhtml = urlopen(url).read()\nsoup = BeautifulSoup(html, \"html.parser\")\n\ntags = soup.find_all('tr', {'id': re.compile(r'nonplayingnow.*')})\n\nfor i in tags:\n casa = i.find(\"td\", {'class': re.compile(r'team-home')}).find(\"a\")\n visitante = i.find(\"td\", {'class': re.compile(r'team-away')}).find(\"a\")\n print (\"Partido-> \"+casa.get_text()+\" vs \"+visitante.get_text())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Main():
try:
radius = float(input('Please enter the radius: '))
area = math.pi * radius ** 2
print('Area =', area)
except:
print('You did not enter a number')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Main():
try:
radius = float(input('Please enter the radius: '))
area = math.pi * radius ** 2
print('Area =', area)
except:
print('You did not enter a number')
if __name__ == '__main__':
Main()
<|reserved_special_token_1|>
import math
def Main():
try:
radius = float(input('Please enter the radius: '))
area = math.pi * radius ** 2
print('Area =', area)
except:
print('You did not enter a number')
if __name__ == '__main__':
Main()
<|reserved_special_token_1|>
#!/usr/bin/python
import math
def Main():
try:
radius = float(input("Please enter the radius: "))
area = math.pi * radius**2
print("Area =", area)
except:
print("You did not enter a number")
if __name__ == "__main__":
Main()
|
flexible
|
{
"blob_id": "33c4e0504425c5d22cefb9b4c798c3fd56a63771",
"index": 3641,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Main():\n try:\n radius = float(input('Please enter the radius: '))\n area = math.pi * radius ** 2\n print('Area =', area)\n except:\n print('You did not enter a number')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef Main():\n try:\n radius = float(input('Please enter the radius: '))\n area = math.pi * radius ** 2\n print('Area =', area)\n except:\n print('You did not enter a number')\n\n\nif __name__ == '__main__':\n Main()\n",
"step-4": "import math\n\n\ndef Main():\n try:\n radius = float(input('Please enter the radius: '))\n area = math.pi * radius ** 2\n print('Area =', area)\n except:\n print('You did not enter a number')\n\n\nif __name__ == '__main__':\n Main()\n",
"step-5": "#!/usr/bin/python\nimport math\n\ndef Main():\n\ttry:\n\t\tradius = float(input(\"Please enter the radius: \"))\n\t\tarea = math.pi * radius**2\n\t\tprint(\"Area =\", area)\n\texcept:\n\t\tprint(\"You did not enter a number\")\n\nif __name__ == \"__main__\":\n\tMain()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
from datetime import timedelta
ROOT_PATH = os.path.split(os.path.abspath(__name__))[0]
DEBUG = True
JWT_SECRET_KEY = 'shop'
# SQLALCHEMY_DATABASE_URI = 'sqlite:///{}'.format(
# os.path.join(ROOT_PATH, 's_shop_flask.db'))
SQLALCHEMY_TRACK_MODIFICATIONS = False
user = 'shop'
passwd = 'shopadmin'
db = 'shopdb'
SQLALCHEMY_DATABASE_URI = f'mysql+pymysql://{user}:{passwd}@10.10.10.105:6606/{db}'
JWT_AUTH_USERNAME_KEY = 'username'
JWT_AUTH_PASSWORD_KEY = 'password'
JWT_AUTH_HEADER_PREFIX = 'JWT'
JWT_EXPIRATION_DELTA = timedelta(days=30)
JWT_ALGORITHM = 'HS256'
JWT_REQUIRED_CLAIMS = ['exp', 'iat', 'nbf']
# Image upload path
UPLOADED_PHOTOS_DEST = os.path.join(ROOT_PATH, 'uploads/')
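
# Usage sketch (assumption: the application module that imports this config is
# not shown here). A Flask app would typically load these settings with:
#   from flask import Flask
#   app = Flask(__name__)
#   app.config.from_object('config')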
|
normal
|
{
"blob_id": "3908d303d0e41677aae332fbdbe9b681bffe5391",
"index": 1044,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nROOT_PATH = os.path.split(os.path.abspath(__name__))[0]\nDEBUG = True\nJWT_SECRET_KEY = 'shop'\nSQLALCHEMY_TRACK_MODIFICATIONS = False\nuser = 'shop'\npasswd = 'shopadmin'\ndb = 'shopdb'\nSQLALCHEMY_DATABASE_URI = (\n f'mysql+pymysql://{user}:{passwd}@10.10.10.105:6606/{db}')\nJWT_AUTH_USERNAME_KEY = 'username'\nJWT_AUTH_PASSWORD_KEY = 'password'\nJWT_AUTH_HEADER_PREFIX = 'JWT'\nJWT_EXPIRATION_DELTA = timedelta(days=30)\nJWT_ALGORITHM = 'HS256'\nJWT_REQUIRED_CLAIMS = ['exp', 'iat', 'nbf']\nUPLOADED_PHOTOS_DEST = os.path.join(ROOT_PATH, 'uploads/')\n",
"step-3": "import os\nfrom datetime import timedelta\nROOT_PATH = os.path.split(os.path.abspath(__name__))[0]\nDEBUG = True\nJWT_SECRET_KEY = 'shop'\nSQLALCHEMY_TRACK_MODIFICATIONS = False\nuser = 'shop'\npasswd = 'shopadmin'\ndb = 'shopdb'\nSQLALCHEMY_DATABASE_URI = (\n f'mysql+pymysql://{user}:{passwd}@10.10.10.105:6606/{db}')\nJWT_AUTH_USERNAME_KEY = 'username'\nJWT_AUTH_PASSWORD_KEY = 'password'\nJWT_AUTH_HEADER_PREFIX = 'JWT'\nJWT_EXPIRATION_DELTA = timedelta(days=30)\nJWT_ALGORITHM = 'HS256'\nJWT_REQUIRED_CLAIMS = ['exp', 'iat', 'nbf']\nUPLOADED_PHOTOS_DEST = os.path.join(ROOT_PATH, 'uploads/')\n",
"step-4": "import os\nfrom datetime import timedelta\n\nROOT_PATH = os.path.split(os.path.abspath(__name__))[0]\n\nDEBUG = True\nJWT_SECRET_KEY = 'shop'\n# SQLALCHEMY_DATABASE_URI = 'sqlite:///{}'.format(\n# os.path.join(ROOT_PATH, 's_shop_flask.db'))\nSQLALCHEMY_TRACK_MODIFICATIONS = False\nuser = 'shop'\npasswd = 'shopadmin'\ndb = 'shopdb'\n\nSQLALCHEMY_DATABASE_URI = f'mysql+pymysql://{user}:{passwd}@10.10.10.105:6606/{db}'\n\nJWT_AUTH_USERNAME_KEY = 'username'\nJWT_AUTH_PASSWORD_KEY = 'password'\nJWT_AUTH_HEADER_PREFIX = 'JWT'\nJWT_EXPIRATION_DELTA = timedelta(days=30)\nJWT_ALGORITHM = 'HS256'\nJWT_REQUIRED_CLAIMS = ['exp', 'iat', 'nbf']\n\n# 图片上传路径\nUPLOADED_PHOTOS_DEST = os.path.join(ROOT_PATH, 'uploads/')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
cars=100
drivers=30
passengers=70
print "There are",cars,"cars available."
print "There are only",drivers,"drivers available."
print "Each driver needs to drive",passengers/drivers-1,"passengers."
|
normal
|
{
"blob_id": "b1a1287c2c3b624eb02f2955760f6e9eca8cdcf9",
"index": 1241,
"step-1": "cars=100\ndrivers=30\npassengers=70\nprint \"There are\",cars,\"cars available.\"\nprint \"There are only\",drivers,\"drivers available.\"\nprint \"Each driver needs to drive\",passengers/drivers-1,\"passengers.\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from scipy.io import wavfile
import numpy
from matplotlib import pyplot as plt
import librosa
import noisereduce
def loadWavFile(fileName, filePath, savePlot, maxAudioLength, reduceNoise = True):
# Read file
# rate, data = wavfile.read(filePath)
# print(filePath, rate, data.shape, "audio length", data.shape[0] / rate, data[0])
data, rate = librosa.load(filePath, sr=None)
# print(filePath, rate, data.shape, "librosa audio length", data.shape[0] / rate, data[0])
if reduceNoise:
noiseRemovedData = noisereduce.reduce_noise(audio_clip=data, noise_clip=data[0:10000], verbose=False)
noiseRemovedData = noisereduce.reduce_noise(audio_clip=noiseRemovedData, noise_clip=data[-10000:], verbose=False)
data = noiseRemovedData
maxDataLength = int(maxAudioLength * rate)
padding = []
if data.shape[0] > maxDataLength:
raise ValueError("Max audio length breached")
else:
paddingDataLength = maxDataLength - data.shape[0]
padding = [0 for i in range(paddingDataLength)]
# data is stereo sound. take left speaker only
leftSpeakerSound = data # data[:,0]
# print("leftSpeakerSound.shape", leftSpeakerSound.shape)
audioWithPadding = numpy.concatenate((leftSpeakerSound, padding))
# print("audioWithPadding.shape", audioWithPadding.shape)
if savePlot:
fig, ax = plt.subplots()
ax.plot(audioWithPadding)
fig.suptitle(fileName)
fig.savefig("./output_img/wav/" + fileName + "_wav.png")
plt.close(fig)
return audioWithPadding, rate
|
normal
|
{
"blob_id": "07ac061d7d1eaf23b6c95fbcbf6753f25e568188",
"index": 157,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef loadWavFile(fileName, filePath, savePlot, maxAudioLength, reduceNoise=True\n ):\n data, rate = librosa.load(filePath, sr=None)\n if reduceNoise:\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=data,\n noise_clip=data[0:10000], verbose=False)\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=\n noiseRemovedData, noise_clip=data[-10000:], verbose=False)\n data = noiseRemovedData\n maxDataLength = int(maxAudioLength * rate)\n padding = []\n if data.shape[0] > maxDataLength:\n raise ValueError('Max audio length breached')\n else:\n paddingDataLength = maxDataLength - data.shape[0]\n padding = [(0) for i in range(paddingDataLength)]\n leftSpeakerSound = data\n audioWithPadding = numpy.concatenate((leftSpeakerSound, padding))\n if savePlot:\n fig, ax = plt.subplots()\n ax.plot(audioWithPadding)\n fig.suptitle(fileName)\n fig.savefig('./output_img/wav/' + fileName + '_wav.png')\n plt.close(fig)\n return audioWithPadding, rate\n",
"step-3": "from scipy.io import wavfile\nimport numpy\nfrom matplotlib import pyplot as plt\nimport librosa\nimport noisereduce\n\n\ndef loadWavFile(fileName, filePath, savePlot, maxAudioLength, reduceNoise=True\n ):\n data, rate = librosa.load(filePath, sr=None)\n if reduceNoise:\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=data,\n noise_clip=data[0:10000], verbose=False)\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=\n noiseRemovedData, noise_clip=data[-10000:], verbose=False)\n data = noiseRemovedData\n maxDataLength = int(maxAudioLength * rate)\n padding = []\n if data.shape[0] > maxDataLength:\n raise ValueError('Max audio length breached')\n else:\n paddingDataLength = maxDataLength - data.shape[0]\n padding = [(0) for i in range(paddingDataLength)]\n leftSpeakerSound = data\n audioWithPadding = numpy.concatenate((leftSpeakerSound, padding))\n if savePlot:\n fig, ax = plt.subplots()\n ax.plot(audioWithPadding)\n fig.suptitle(fileName)\n fig.savefig('./output_img/wav/' + fileName + '_wav.png')\n plt.close(fig)\n return audioWithPadding, rate\n",
"step-4": "from scipy.io import wavfile\nimport numpy\nfrom matplotlib import pyplot as plt\nimport librosa\nimport noisereduce\n\ndef loadWavFile(fileName, filePath, savePlot, maxAudioLength, reduceNoise = True):\n # Read file\n # rate, data = wavfile.read(filePath)\n # print(filePath, rate, data.shape, \"audio length\", data.shape[0] / rate, data[0])\n\n data, rate = librosa.load(filePath, sr=None)\n # print(filePath, rate, data.shape, \"librosa audio length\", data.shape[0] / rate, data[0])\n if reduceNoise:\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=data, noise_clip=data[0:10000], verbose=False)\n noiseRemovedData = noisereduce.reduce_noise(audio_clip=noiseRemovedData, noise_clip=data[-10000:], verbose=False)\n data = noiseRemovedData\n\n\n maxDataLength = int(maxAudioLength * rate)\n padding = []\n if data.shape[0] > maxDataLength:\n raise ValueError(\"Max audio length breached\")\n else:\n paddingDataLength = maxDataLength - data.shape[0]\n padding = [0 for i in range(paddingDataLength)]\n\n # data is stereo sound. take left speaker only\n leftSpeakerSound = data # data[:,0]\n # print(\"leftSpeakerSound.shape\", leftSpeakerSound.shape)\n\n audioWithPadding = numpy.concatenate((leftSpeakerSound, padding))\n # print(\"audioWithPadding.shape\", audioWithPadding.shape)\n\n if savePlot:\n fig, ax = plt.subplots()\n ax.plot(audioWithPadding)\n fig.suptitle(fileName)\n fig.savefig(\"./output_img/wav/\" + fileName + \"_wav.png\")\n plt.close(fig)\n\n return audioWithPadding, rate",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pandas as pd
import numpy as np
import os
import matplotlib.pyplot as plt
from datetime import datetime
import statsmodels.api as sm
from quant.stock.stock import Stock
from quant.stock.date import Date
from quant.utility_fun.factor_preprocess import FactorPreProcess
from quant.utility_fun.write_excel import WriteExcel
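
# Compatibility note: this script relies on the legacy pandas .ix indexer and
# the np.float alias, so it assumes old pandas (< 1.0) and NumPy (< 1.24); on
# current versions .ix must become .loc/.iloc and np.float plain float.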
def factor_neutral(factor_series, neutral_frame):
"""
    Neutralization: regress the factor series on the neutral exposures and return the residual
"""
concat_data = pd.concat([factor_series, neutral_frame], axis=1)
concat_data = concat_data.dropna()
factor_val = concat_data.ix[:, 0]
neutral_val = concat_data.ix[:, 1:]
model = sm.OLS(factor_val.values, neutral_val.values)
regress = model.fit()
params = regress.params
params = pd.DataFrame(params, index=neutral_val.columns, columns=['param'])
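    # the OLS residual is the factor with the neutral exposures stripped out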
factor_res = factor_val - regress.predict(neutral_val)
return params, factor_res
def cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):
# param
###############################################################################################################
###############################################################################################################
group_number = 8
year_trade_days = 242
min_stock_number = 100
out_path = 'E:\\3_Data\\5_stock_data\\3_alpha_model\\'
    alpha_remove_extreme_value = True     # winsorize the alpha factor (MAD)
    alpha_standard = True                 # standardize the alpha factor
    alpha_industry_neutral = True         # neutralize alpha against industry
    alpha_barra_style_neutral = True      # neutralize alpha against Barra style factors
# read data
###############################################################################################################
###############################################################################################################
price = Stock().get_factor_h5("PriceCloseAdjust", None, "alpha_dfc")
alpha_val = Stock().get_factor_h5(factor_name, None, "alpha_dfc")
industry = Stock().get_factor_h5("industry_citic1", None, "primary_mfc")
industry = industry.applymap(lambda x: x.decode('utf-8'))
[alpha_val, industry] = FactorPreProcess().make_same_index_columns([alpha_val, industry])
if alpha_barra_style_neutral:
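        # Barra CNE5 style exposures; alpha is later regressed on these so the
        # residual carries no size/beta/momentum tilt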
size = Stock().get_factor_h5("NORMAL_CNE5_SIZE", None, 'barra_risk_dfc')
beta = Stock().get_factor_h5("NORMAL_CNE5_BETA", None, 'barra_risk_dfc')
nolin_size = Stock().get_factor_h5("NORMAL_CNE5_NON_LINEAR_SIZE", None, 'barra_risk_dfc')
momentum = Stock().get_factor_h5("NORMAL_CNE5_MOMENTUM", None, 'barra_risk_dfc')
[size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([size, beta, nolin_size])
beg_date = max(beg_date, price.columns[0], alpha_val.columns[0], beta.columns[0])
end_date = min(end_date, price.columns[-1], alpha_val.columns[-1], beta.columns[-1])
else:
beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])
end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])
date_series = Date().get_trade_date_series(beg_date, end_date, period=cal_period)
date_series = list(set(date_series) & set(alpha_val.columns))
date_series.sort()
# pre process data
###############################################################################################################
###############################################################################################################
if alpha_remove_extreme_value:
alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)
if alpha_standard:
alpha_val = FactorPreProcess().standardization(alpha_val)
# cal everyday
###############################################################################################################
###############################################################################################################
alpha_return = pd.DataFrame([], index=date_series)
alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)
for i_date in range(len(date_series) - 2):
cur_cal_date = date_series[i_date]
next_cal_date = date_series[i_date + 1]
buy_date = Date().get_trade_date_offset(cur_cal_date, 1)
sell_date = Date().get_trade_date_offset(next_cal_date, 1)
print(" Calculating Factor %s Alpha Return At %s" % (factor_name, cur_cal_date))
alpha_return.index.name = 'CalDate'
alpha_return.ix[cur_cal_date, "BuyDate"] = buy_date
alpha_return.ix[cur_cal_date, "SellDate"] = sell_date
alpha_date = alpha_val[cur_cal_date]
buy_price = price[buy_date]
sell_price = price[sell_date]
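        # simple holding-period return between adjacent rebalance dates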
pct_date = sell_price / buy_price - 1.0
if alpha_industry_neutral:
try:
industry_date = industry[cur_cal_date]
industry_dummy = pd.get_dummies(industry_date)
except:
continue
if len(pd.concat([alpha_date, industry_date], axis=1).dropna()) < min_stock_number:
continue
else:
params, factor_res = factor_neutral(factor_series=alpha_date, neutral_frame=industry_dummy)
alpha_date = factor_res
alpha_date = FactorPreProcess().remove_extreme_value_mad(alpha_date)
alpha_date = FactorPreProcess().standardization(alpha_date)
if alpha_barra_style_neutral:
try:
size_date = size[cur_cal_date]
beta_date = beta[cur_cal_date]
nolin_size_date = nolin_size[cur_cal_date]
momentum_date = momentum[cur_cal_date]
except:
continue
if len(pd.concat([alpha_date, size_date], axis=1).dropna()) < min_stock_number:
continue
else:
barra_risk_exposure = pd.concat([beta_date, size_date,
nolin_size_date, momentum_date], axis=1)
barra_risk_exposure.columns = ['beta', 'size', 'nolin_size', 'momentum']
params, factor_res = factor_neutral(factor_series=alpha_date, neutral_frame=barra_risk_exposure)
alpha_date = factor_res
alpha_date = FactorPreProcess().remove_extreme_value_mad(alpha_date)
alpha_date = FactorPreProcess().standardization(alpha_date)
alpha_exposure.ix[cur_cal_date, :] = alpha_date
res = pd.concat([alpha_date, pct_date], axis=1)
res.columns = ['alpha_val', 'period_pct']
res = res.dropna()
res = res.sort_values(by=['alpha_val'], ascending=False)
labels = ["group_" + str(i) for i in list(range(1, group_number + 1))]
res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=labels)
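        # exposure-weighted mean of forward returns; with standardized alpha
        # this is roughly the cross-sectional regression slope (factor return)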
period_return = (res['alpha_val'] * res['period_pct']).mean()
alpha_return.ix[cur_cal_date, "FactorReturn"] = period_return
information_correlation = res['alpha_val'].corr(res['period_pct'])
alpha_return.ix[cur_cal_date, "IC"] = information_correlation
group_pct = res.groupby(by=['group'])['period_pct'].mean()
for i_label in range(len(labels)):
alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[i_label]
alpha_return = alpha_return.dropna(subset=['FactorReturn'])
alpha_return["CumFactorReturn"] = alpha_return['FactorReturn'].cumsum()
cum_labels = ["Cum_" + str(x) for x in labels]
alpha_return[cum_labels] = alpha_return[labels].cumsum()
# plot
###############################################################################################################
###############################################################################################################
# plt_col = []
# plt_col.append("CumFactorReturn")
# plt_col.extend(cum_labels)
# alpha_return[plt_col].plot()
# plt.title(factor_name)
# plt.show()
# describe annual
###############################################################################################################
###############################################################################################################
back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)
back_test_end_date = Date().get_trade_date_offset(date_series[len(date_series) - 1], 1)
back_test_days = Date().get_trade_date_diff(back_test_beg_date, back_test_end_date)
backtest_year = back_test_days / year_trade_days
alpha_return['year'] = alpha_return.index.map(lambda x: datetime.strptime(x, "%Y%m%d").year)
year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum()
year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()
year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()
year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()
year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()
year_describe = pd.concat([year_factor_return, year_count, year_ic_mean, year_ic_std, year_gp_mean], axis=1)
col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']
col.extend(labels)
year_describe.columns = col
year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'] / year_describe['Count'] * year_count
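    # sqrt(50) annualizes the IC information ratio for a weekly series
    # (roughly 50 rebalances per year)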
year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'] * np.sqrt(50)
year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return["CumFactorReturn"].values[-1] / backtest_year
year_describe.ix['Sum', 'IC_IR'] = alpha_return["IC"].mean() / alpha_return["IC"].std() * np.sqrt(50)
year_describe.ix['Sum', 'IC_mean'] = alpha_return["IC"].mean()
year_describe.ix['Sum', 'IC_std'] = alpha_return["IC"].std()
year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()
year_describe.index = year_describe.index.map(str)
for i in range(len(year_describe)):
year = year_describe.index[i]
corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index=labels, columns=['group_return'])
corr_pd['group_number'] = (list(range(1, group_number+1)))
year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]
# save data
###############################################################################################################
###############################################################################################################
# alpha_exposure_neutral
###############################################################################################################
alpha_exposure = alpha_exposure.astype(np.float)
filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name + "_FactorExposureNeutral.csv")
alpha_exposure.T.to_csv(filename)
# exposure_corr
###############################################################################################################
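    # correlation of exposures between adjacent rebalance dates measures
    # factor stability: values near 1 imply low turnover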
exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=['Exposure_Corr'])
for i_date in range(1, len(alpha_exposure.index)):
last_exposure_date = alpha_exposure.index[i_date-1]
cur_exposure_date = alpha_exposure.index[i_date]
exposure_adjoin = alpha_exposure.ix[last_exposure_date:cur_exposure_date, :]
exposure_adjoin = exposure_adjoin.T.dropna()
exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'] = exposure_adjoin.corr().ix[0, 1]
exposure_corr = exposure_corr.dropna()
exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'].mean()
filename = os.path.join(out_path, 'alpha_exposure_stability', factor_name + "_FactorExposureCorr.csv")
exposure_corr.to_csv(filename)
# Factor Return
###############################################################################################################
filename = os.path.join(out_path, 'alpha_return', factor_name + "_FactorReturn.xlsx")
sheet_name = "FactorReturn"
we = WriteExcel(filename)
ws = we.add_worksheet(sheet_name)
num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=['format'])
num_format_pd.ix['format', :] = '0.00%'
num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'
we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number=1,
num_format_pd=num_format_pd, color="blue", fillna=True)
num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=['format'])
num_format_pd.ix['format', :] = '0.00%'
num_format_pd.ix['format', ['year']] = '0'
we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=2+len(year_describe.columns),
num_format_pd=num_format_pd, color="blue", fillna=True)
we.close()
###############################################################################################################
if __name__ == '__main__':
cal_period = "W"
beg_date = "20040101"
end_date = datetime.today().strftime("%Y%m%d")
path = "E:\\3_Data\\5_stock_data\\3_alpha_model\\"
file = "MyAlpha.xlsx"
data = pd.read_excel(os.path.join(path, file), encoding='gbk')
    data = data[data['计算因子收益率'] == "是"]  # keep factors flagged "是" ("yes") in the compute-factor-return column
data = data.reset_index(drop=True)
for i in range(0, len(data)):
        factor_name = data.ix[i, "因子名"]  # "因子名" is the factor-name column
        print("#################### Start computing factor return for %s ####################" % factor_name)
        cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period)
        print("#################### Finished computing factor return for %s ####################" % factor_name)
|
flexible
|
{
"blob_id": "1d0730e8fd120e1c4bc5b89cbd766234e1fa3bca",
"index": 2197,
"step-1": "<mask token>\n\n\ndef cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):\n group_number = 8\n year_trade_days = 242\n min_stock_number = 100\n out_path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n alpha_remove_extreme_value = True\n alpha_standard = True\n alpha_industry_neutral = True\n alpha_barra_style_neutral = True\n price = Stock().get_factor_h5('PriceCloseAdjust', None, 'alpha_dfc')\n alpha_val = Stock().get_factor_h5(factor_name, None, 'alpha_dfc')\n industry = Stock().get_factor_h5('industry_citic1', None, 'primary_mfc')\n industry = industry.applymap(lambda x: x.decode('utf-8'))\n [alpha_val, industry] = FactorPreProcess().make_same_index_columns([\n alpha_val, industry])\n if alpha_barra_style_neutral:\n size = Stock().get_factor_h5('NORMAL_CNE5_SIZE', None, 'barra_risk_dfc'\n )\n beta = Stock().get_factor_h5('NORMAL_CNE5_BETA', None, 'barra_risk_dfc'\n )\n nolin_size = Stock().get_factor_h5('NORMAL_CNE5_NON_LINEAR_SIZE',\n None, 'barra_risk_dfc')\n momentum = Stock().get_factor_h5('NORMAL_CNE5_MOMENTUM', None,\n 'barra_risk_dfc')\n [size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([\n size, beta, nolin_size])\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0],\n beta.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1],\n beta.columns[-1])\n else:\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])\n date_series = Date().get_trade_date_series(beg_date, end_date, period=\n cal_period)\n date_series = list(set(date_series) & set(alpha_val.columns))\n date_series.sort()\n if alpha_remove_extreme_value:\n alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)\n if alpha_standard:\n alpha_val = FactorPreProcess().standardization(alpha_val)\n alpha_return = pd.DataFrame([], index=date_series)\n alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)\n for i_date in range(len(date_series) - 2):\n cur_cal_date = date_series[i_date]\n next_cal_date = date_series[i_date + 1]\n buy_date = Date().get_trade_date_offset(cur_cal_date, 1)\n sell_date = Date().get_trade_date_offset(next_cal_date, 1)\n print(' Calculating Factor %s Alpha Return At %s' % (factor_name,\n cur_cal_date))\n alpha_return.index.name = 'CalDate'\n alpha_return.ix[cur_cal_date, 'BuyDate'] = buy_date\n alpha_return.ix[cur_cal_date, 'SellDate'] = sell_date\n alpha_date = alpha_val[cur_cal_date]\n buy_price = price[buy_date]\n sell_price = price[sell_date]\n pct_date = sell_price / buy_price - 1.0\n if alpha_industry_neutral:\n try:\n industry_date = industry[cur_cal_date]\n industry_dummy = pd.get_dummies(industry_date)\n except:\n continue\n if len(pd.concat([alpha_date, industry_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=industry_dummy)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n if alpha_barra_style_neutral:\n try:\n size_date = size[cur_cal_date]\n beta_date = beta[cur_cal_date]\n nolin_size_date = nolin_size[cur_cal_date]\n momentum_date = momentum[cur_cal_date]\n except:\n continue\n if len(pd.concat([alpha_date, size_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n barra_risk_exposure = pd.concat([beta_date, size_date,\n nolin_size_date, momentum_date], axis=1)\n 
barra_risk_exposure.columns = ['beta', 'size', 'nolin_size',\n 'momentum']\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=barra_risk_exposure)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n alpha_exposure.ix[cur_cal_date, :] = alpha_date\n res = pd.concat([alpha_date, pct_date], axis=1)\n res.columns = ['alpha_val', 'period_pct']\n res = res.dropna()\n res = res.sort_values(by=['alpha_val'], ascending=False)\n labels = [('group_' + str(i)) for i in list(range(1, group_number + 1))\n ]\n res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=\n labels)\n period_return = (res['alpha_val'] * res['period_pct']).mean()\n alpha_return.ix[cur_cal_date, 'FactorReturn'] = period_return\n information_correlation = res['alpha_val'].corr(res['period_pct'])\n alpha_return.ix[cur_cal_date, 'IC'] = information_correlation\n group_pct = res.groupby(by=['group'])['period_pct'].mean()\n for i_label in range(len(labels)):\n alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[\n i_label]\n alpha_return = alpha_return.dropna(subset=['FactorReturn'])\n alpha_return['CumFactorReturn'] = alpha_return['FactorReturn'].cumsum()\n cum_labels = [('Cum_' + str(x)) for x in labels]\n alpha_return[cum_labels] = alpha_return[labels].cumsum()\n back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)\n back_test_end_date = Date().get_trade_date_offset(date_series[len(\n date_series) - 1], 1)\n back_test_days = Date().get_trade_date_diff(back_test_beg_date,\n back_test_end_date)\n backtest_year = back_test_days / year_trade_days\n alpha_return['year'] = alpha_return.index.map(lambda x: datetime.\n strptime(x, '%Y%m%d').year)\n year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum(\n )\n year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()\n year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()\n year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()\n year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()\n year_describe = pd.concat([year_factor_return, year_count, year_ic_mean,\n year_ic_std, year_gp_mean], axis=1)\n col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']\n col.extend(labels)\n year_describe.columns = col\n year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'\n ] / year_describe['Count'] * year_count\n year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'\n ] * np.sqrt(50)\n year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return[\n 'CumFactorReturn'].values[-1] / backtest_year\n year_describe.ix['Sum', 'IC_IR'] = alpha_return['IC'].mean(\n ) / alpha_return['IC'].std() * np.sqrt(50)\n year_describe.ix['Sum', 'IC_mean'] = alpha_return['IC'].mean()\n year_describe.ix['Sum', 'IC_std'] = alpha_return['IC'].std()\n year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()\n year_describe.index = year_describe.index.map(str)\n for i in range(len(year_describe)):\n year = year_describe.index[i]\n corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index\n =labels, columns=['group_return'])\n corr_pd['group_number'] = list(range(1, group_number + 1))\n year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]\n alpha_exposure = alpha_exposure.astype(np.float)\n filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name +\n '_FactorExposureNeutral.csv')\n alpha_exposure.T.to_csv(filename)\n 
exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=[\n 'Exposure_Corr'])\n for i_date in range(1, len(alpha_exposure.index)):\n last_exposure_date = alpha_exposure.index[i_date - 1]\n cur_exposure_date = alpha_exposure.index[i_date]\n exposure_adjoin = alpha_exposure.ix[last_exposure_date:\n cur_exposure_date, :]\n exposure_adjoin = exposure_adjoin.T.dropna()\n exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'\n ] = exposure_adjoin.corr().ix[0, 1]\n exposure_corr = exposure_corr.dropna()\n exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'\n ].mean()\n filename = os.path.join(out_path, 'alpha_exposure_stability', \n factor_name + '_FactorExposureCorr.csv')\n exposure_corr.to_csv(filename)\n filename = os.path.join(out_path, 'alpha_return', factor_name +\n '_FactorReturn.xlsx')\n sheet_name = 'FactorReturn'\n we = WriteExcel(filename)\n ws = we.add_worksheet(sheet_name)\n num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'\n we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number\n =1, num_format_pd=num_format_pd, color='blue', fillna=True)\n num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['year']] = '0'\n we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=\n 2 + len(year_describe.columns), num_format_pd=num_format_pd, color=\n 'blue', fillna=True)\n we.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef factor_neutral(factor_series, neutral_frame):\n \"\"\"\n 中性化\n \"\"\"\n concat_data = pd.concat([factor_series, neutral_frame], axis=1)\n concat_data = concat_data.dropna()\n factor_val = concat_data.ix[:, 0]\n neutral_val = concat_data.ix[:, 1:]\n model = sm.OLS(factor_val.values, neutral_val.values)\n regress = model.fit()\n params = regress.params\n params = pd.DataFrame(params, index=neutral_val.columns, columns=['param'])\n factor_res = factor_val - regress.predict(neutral_val)\n return params, factor_res\n\n\ndef cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):\n group_number = 8\n year_trade_days = 242\n min_stock_number = 100\n out_path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n alpha_remove_extreme_value = True\n alpha_standard = True\n alpha_industry_neutral = True\n alpha_barra_style_neutral = True\n price = Stock().get_factor_h5('PriceCloseAdjust', None, 'alpha_dfc')\n alpha_val = Stock().get_factor_h5(factor_name, None, 'alpha_dfc')\n industry = Stock().get_factor_h5('industry_citic1', None, 'primary_mfc')\n industry = industry.applymap(lambda x: x.decode('utf-8'))\n [alpha_val, industry] = FactorPreProcess().make_same_index_columns([\n alpha_val, industry])\n if alpha_barra_style_neutral:\n size = Stock().get_factor_h5('NORMAL_CNE5_SIZE', None, 'barra_risk_dfc'\n )\n beta = Stock().get_factor_h5('NORMAL_CNE5_BETA', None, 'barra_risk_dfc'\n )\n nolin_size = Stock().get_factor_h5('NORMAL_CNE5_NON_LINEAR_SIZE',\n None, 'barra_risk_dfc')\n momentum = Stock().get_factor_h5('NORMAL_CNE5_MOMENTUM', None,\n 'barra_risk_dfc')\n [size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([\n size, beta, nolin_size])\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0],\n beta.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1],\n beta.columns[-1])\n else:\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])\n date_series = Date().get_trade_date_series(beg_date, end_date, period=\n cal_period)\n date_series = list(set(date_series) & set(alpha_val.columns))\n date_series.sort()\n if alpha_remove_extreme_value:\n alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)\n if alpha_standard:\n alpha_val = FactorPreProcess().standardization(alpha_val)\n alpha_return = pd.DataFrame([], index=date_series)\n alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)\n for i_date in range(len(date_series) - 2):\n cur_cal_date = date_series[i_date]\n next_cal_date = date_series[i_date + 1]\n buy_date = Date().get_trade_date_offset(cur_cal_date, 1)\n sell_date = Date().get_trade_date_offset(next_cal_date, 1)\n print(' Calculating Factor %s Alpha Return At %s' % (factor_name,\n cur_cal_date))\n alpha_return.index.name = 'CalDate'\n alpha_return.ix[cur_cal_date, 'BuyDate'] = buy_date\n alpha_return.ix[cur_cal_date, 'SellDate'] = sell_date\n alpha_date = alpha_val[cur_cal_date]\n buy_price = price[buy_date]\n sell_price = price[sell_date]\n pct_date = sell_price / buy_price - 1.0\n if alpha_industry_neutral:\n try:\n industry_date = industry[cur_cal_date]\n industry_dummy = pd.get_dummies(industry_date)\n except:\n continue\n if len(pd.concat([alpha_date, industry_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=industry_dummy)\n alpha_date = factor_res\n alpha_date = 
FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n if alpha_barra_style_neutral:\n try:\n size_date = size[cur_cal_date]\n beta_date = beta[cur_cal_date]\n nolin_size_date = nolin_size[cur_cal_date]\n momentum_date = momentum[cur_cal_date]\n except:\n continue\n if len(pd.concat([alpha_date, size_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n barra_risk_exposure = pd.concat([beta_date, size_date,\n nolin_size_date, momentum_date], axis=1)\n barra_risk_exposure.columns = ['beta', 'size', 'nolin_size',\n 'momentum']\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=barra_risk_exposure)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n alpha_exposure.ix[cur_cal_date, :] = alpha_date\n res = pd.concat([alpha_date, pct_date], axis=1)\n res.columns = ['alpha_val', 'period_pct']\n res = res.dropna()\n res = res.sort_values(by=['alpha_val'], ascending=False)\n labels = [('group_' + str(i)) for i in list(range(1, group_number + 1))\n ]\n res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=\n labels)\n period_return = (res['alpha_val'] * res['period_pct']).mean()\n alpha_return.ix[cur_cal_date, 'FactorReturn'] = period_return\n information_correlation = res['alpha_val'].corr(res['period_pct'])\n alpha_return.ix[cur_cal_date, 'IC'] = information_correlation\n group_pct = res.groupby(by=['group'])['period_pct'].mean()\n for i_label in range(len(labels)):\n alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[\n i_label]\n alpha_return = alpha_return.dropna(subset=['FactorReturn'])\n alpha_return['CumFactorReturn'] = alpha_return['FactorReturn'].cumsum()\n cum_labels = [('Cum_' + str(x)) for x in labels]\n alpha_return[cum_labels] = alpha_return[labels].cumsum()\n back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)\n back_test_end_date = Date().get_trade_date_offset(date_series[len(\n date_series) - 1], 1)\n back_test_days = Date().get_trade_date_diff(back_test_beg_date,\n back_test_end_date)\n backtest_year = back_test_days / year_trade_days\n alpha_return['year'] = alpha_return.index.map(lambda x: datetime.\n strptime(x, '%Y%m%d').year)\n year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum(\n )\n year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()\n year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()\n year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()\n year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()\n year_describe = pd.concat([year_factor_return, year_count, year_ic_mean,\n year_ic_std, year_gp_mean], axis=1)\n col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']\n col.extend(labels)\n year_describe.columns = col\n year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'\n ] / year_describe['Count'] * year_count\n year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'\n ] * np.sqrt(50)\n year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return[\n 'CumFactorReturn'].values[-1] / backtest_year\n year_describe.ix['Sum', 'IC_IR'] = alpha_return['IC'].mean(\n ) / alpha_return['IC'].std() * np.sqrt(50)\n year_describe.ix['Sum', 'IC_mean'] = alpha_return['IC'].mean()\n year_describe.ix['Sum', 'IC_std'] = alpha_return['IC'].std()\n year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()\n 
year_describe.index = year_describe.index.map(str)\n for i in range(len(year_describe)):\n year = year_describe.index[i]\n corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index\n =labels, columns=['group_return'])\n corr_pd['group_number'] = list(range(1, group_number + 1))\n year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]\n alpha_exposure = alpha_exposure.astype(np.float)\n filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name +\n '_FactorExposureNeutral.csv')\n alpha_exposure.T.to_csv(filename)\n exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=[\n 'Exposure_Corr'])\n for i_date in range(1, len(alpha_exposure.index)):\n last_exposure_date = alpha_exposure.index[i_date - 1]\n cur_exposure_date = alpha_exposure.index[i_date]\n exposure_adjoin = alpha_exposure.ix[last_exposure_date:\n cur_exposure_date, :]\n exposure_adjoin = exposure_adjoin.T.dropna()\n exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'\n ] = exposure_adjoin.corr().ix[0, 1]\n exposure_corr = exposure_corr.dropna()\n exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'\n ].mean()\n filename = os.path.join(out_path, 'alpha_exposure_stability', \n factor_name + '_FactorExposureCorr.csv')\n exposure_corr.to_csv(filename)\n filename = os.path.join(out_path, 'alpha_return', factor_name +\n '_FactorReturn.xlsx')\n sheet_name = 'FactorReturn'\n we = WriteExcel(filename)\n ws = we.add_worksheet(sheet_name)\n num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'\n we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number\n =1, num_format_pd=num_format_pd, color='blue', fillna=True)\n num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['year']] = '0'\n we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=\n 2 + len(year_describe.columns), num_format_pd=num_format_pd, color=\n 'blue', fillna=True)\n we.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef factor_neutral(factor_series, neutral_frame):\n \"\"\"\n 中性化\n \"\"\"\n concat_data = pd.concat([factor_series, neutral_frame], axis=1)\n concat_data = concat_data.dropna()\n factor_val = concat_data.ix[:, 0]\n neutral_val = concat_data.ix[:, 1:]\n model = sm.OLS(factor_val.values, neutral_val.values)\n regress = model.fit()\n params = regress.params\n params = pd.DataFrame(params, index=neutral_val.columns, columns=['param'])\n factor_res = factor_val - regress.predict(neutral_val)\n return params, factor_res\n\n\ndef cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):\n group_number = 8\n year_trade_days = 242\n min_stock_number = 100\n out_path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n alpha_remove_extreme_value = True\n alpha_standard = True\n alpha_industry_neutral = True\n alpha_barra_style_neutral = True\n price = Stock().get_factor_h5('PriceCloseAdjust', None, 'alpha_dfc')\n alpha_val = Stock().get_factor_h5(factor_name, None, 'alpha_dfc')\n industry = Stock().get_factor_h5('industry_citic1', None, 'primary_mfc')\n industry = industry.applymap(lambda x: x.decode('utf-8'))\n [alpha_val, industry] = FactorPreProcess().make_same_index_columns([\n alpha_val, industry])\n if alpha_barra_style_neutral:\n size = Stock().get_factor_h5('NORMAL_CNE5_SIZE', None, 'barra_risk_dfc'\n )\n beta = Stock().get_factor_h5('NORMAL_CNE5_BETA', None, 'barra_risk_dfc'\n )\n nolin_size = Stock().get_factor_h5('NORMAL_CNE5_NON_LINEAR_SIZE',\n None, 'barra_risk_dfc')\n momentum = Stock().get_factor_h5('NORMAL_CNE5_MOMENTUM', None,\n 'barra_risk_dfc')\n [size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([\n size, beta, nolin_size])\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0],\n beta.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1],\n beta.columns[-1])\n else:\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])\n date_series = Date().get_trade_date_series(beg_date, end_date, period=\n cal_period)\n date_series = list(set(date_series) & set(alpha_val.columns))\n date_series.sort()\n if alpha_remove_extreme_value:\n alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)\n if alpha_standard:\n alpha_val = FactorPreProcess().standardization(alpha_val)\n alpha_return = pd.DataFrame([], index=date_series)\n alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)\n for i_date in range(len(date_series) - 2):\n cur_cal_date = date_series[i_date]\n next_cal_date = date_series[i_date + 1]\n buy_date = Date().get_trade_date_offset(cur_cal_date, 1)\n sell_date = Date().get_trade_date_offset(next_cal_date, 1)\n print(' Calculating Factor %s Alpha Return At %s' % (factor_name,\n cur_cal_date))\n alpha_return.index.name = 'CalDate'\n alpha_return.ix[cur_cal_date, 'BuyDate'] = buy_date\n alpha_return.ix[cur_cal_date, 'SellDate'] = sell_date\n alpha_date = alpha_val[cur_cal_date]\n buy_price = price[buy_date]\n sell_price = price[sell_date]\n pct_date = sell_price / buy_price - 1.0\n if alpha_industry_neutral:\n try:\n industry_date = industry[cur_cal_date]\n industry_dummy = pd.get_dummies(industry_date)\n except:\n continue\n if len(pd.concat([alpha_date, industry_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=industry_dummy)\n alpha_date = factor_res\n alpha_date = 
FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n if alpha_barra_style_neutral:\n try:\n size_date = size[cur_cal_date]\n beta_date = beta[cur_cal_date]\n nolin_size_date = nolin_size[cur_cal_date]\n momentum_date = momentum[cur_cal_date]\n except:\n continue\n if len(pd.concat([alpha_date, size_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n barra_risk_exposure = pd.concat([beta_date, size_date,\n nolin_size_date, momentum_date], axis=1)\n barra_risk_exposure.columns = ['beta', 'size', 'nolin_size',\n 'momentum']\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=barra_risk_exposure)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n alpha_exposure.ix[cur_cal_date, :] = alpha_date\n res = pd.concat([alpha_date, pct_date], axis=1)\n res.columns = ['alpha_val', 'period_pct']\n res = res.dropna()\n res = res.sort_values(by=['alpha_val'], ascending=False)\n labels = [('group_' + str(i)) for i in list(range(1, group_number + 1))\n ]\n res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=\n labels)\n period_return = (res['alpha_val'] * res['period_pct']).mean()\n alpha_return.ix[cur_cal_date, 'FactorReturn'] = period_return\n information_correlation = res['alpha_val'].corr(res['period_pct'])\n alpha_return.ix[cur_cal_date, 'IC'] = information_correlation\n group_pct = res.groupby(by=['group'])['period_pct'].mean()\n for i_label in range(len(labels)):\n alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[\n i_label]\n alpha_return = alpha_return.dropna(subset=['FactorReturn'])\n alpha_return['CumFactorReturn'] = alpha_return['FactorReturn'].cumsum()\n cum_labels = [('Cum_' + str(x)) for x in labels]\n alpha_return[cum_labels] = alpha_return[labels].cumsum()\n back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)\n back_test_end_date = Date().get_trade_date_offset(date_series[len(\n date_series) - 1], 1)\n back_test_days = Date().get_trade_date_diff(back_test_beg_date,\n back_test_end_date)\n backtest_year = back_test_days / year_trade_days\n alpha_return['year'] = alpha_return.index.map(lambda x: datetime.\n strptime(x, '%Y%m%d').year)\n year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum(\n )\n year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()\n year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()\n year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()\n year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()\n year_describe = pd.concat([year_factor_return, year_count, year_ic_mean,\n year_ic_std, year_gp_mean], axis=1)\n col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']\n col.extend(labels)\n year_describe.columns = col\n year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'\n ] / year_describe['Count'] * year_count\n year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'\n ] * np.sqrt(50)\n year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return[\n 'CumFactorReturn'].values[-1] / backtest_year\n year_describe.ix['Sum', 'IC_IR'] = alpha_return['IC'].mean(\n ) / alpha_return['IC'].std() * np.sqrt(50)\n year_describe.ix['Sum', 'IC_mean'] = alpha_return['IC'].mean()\n year_describe.ix['Sum', 'IC_std'] = alpha_return['IC'].std()\n year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()\n 
year_describe.index = year_describe.index.map(str)\n for i in range(len(year_describe)):\n year = year_describe.index[i]\n corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index\n =labels, columns=['group_return'])\n corr_pd['group_number'] = list(range(1, group_number + 1))\n year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]\n alpha_exposure = alpha_exposure.astype(np.float)\n filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name +\n '_FactorExposureNeutral.csv')\n alpha_exposure.T.to_csv(filename)\n exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=[\n 'Exposure_Corr'])\n for i_date in range(1, len(alpha_exposure.index)):\n last_exposure_date = alpha_exposure.index[i_date - 1]\n cur_exposure_date = alpha_exposure.index[i_date]\n exposure_adjoin = alpha_exposure.ix[last_exposure_date:\n cur_exposure_date, :]\n exposure_adjoin = exposure_adjoin.T.dropna()\n exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'\n ] = exposure_adjoin.corr().ix[0, 1]\n exposure_corr = exposure_corr.dropna()\n exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'\n ].mean()\n filename = os.path.join(out_path, 'alpha_exposure_stability', \n factor_name + '_FactorExposureCorr.csv')\n exposure_corr.to_csv(filename)\n filename = os.path.join(out_path, 'alpha_return', factor_name +\n '_FactorReturn.xlsx')\n sheet_name = 'FactorReturn'\n we = WriteExcel(filename)\n ws = we.add_worksheet(sheet_name)\n num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'\n we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number\n =1, num_format_pd=num_format_pd, color='blue', fillna=True)\n num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['year']] = '0'\n we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=\n 2 + len(year_describe.columns), num_format_pd=num_format_pd, color=\n 'blue', fillna=True)\n we.close()\n\n\nif __name__ == '__main__':\n cal_period = 'W'\n beg_date = '20040101'\n end_date = datetime.today().strftime('%Y%m%d')\n path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n file = 'MyAlpha.xlsx'\n data = pd.read_excel(os.path.join(path, file), encoding='gbk')\n data = data[data['计算因子收益率'] == '是']\n data = data.reset_index(drop=True)\n for i in range(0, len(data)):\n factor_name = data.ix[i, '因子名']\n print('#################### 开始计算因子收益率 %s 数据 ####################' %\n factor_name)\n cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period)\n print('#################### 结束计算因子收益率 %s 数据 ####################' %\n factor_name)\n",
"step-4": "import pandas as pd\nimport numpy as np\nimport os\nimport matplotlib.pyplot as plt\nfrom datetime import datetime\nimport statsmodels.api as sm\nfrom quant.stock.stock import Stock\nfrom quant.stock.date import Date\nfrom quant.utility_fun.factor_preprocess import FactorPreProcess\nfrom quant.utility_fun.write_excel import WriteExcel\n\n\ndef factor_neutral(factor_series, neutral_frame):\n \"\"\"\n 中性化\n \"\"\"\n concat_data = pd.concat([factor_series, neutral_frame], axis=1)\n concat_data = concat_data.dropna()\n factor_val = concat_data.ix[:, 0]\n neutral_val = concat_data.ix[:, 1:]\n model = sm.OLS(factor_val.values, neutral_val.values)\n regress = model.fit()\n params = regress.params\n params = pd.DataFrame(params, index=neutral_val.columns, columns=['param'])\n factor_res = factor_val - regress.predict(neutral_val)\n return params, factor_res\n\n\ndef cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):\n group_number = 8\n year_trade_days = 242\n min_stock_number = 100\n out_path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n alpha_remove_extreme_value = True\n alpha_standard = True\n alpha_industry_neutral = True\n alpha_barra_style_neutral = True\n price = Stock().get_factor_h5('PriceCloseAdjust', None, 'alpha_dfc')\n alpha_val = Stock().get_factor_h5(factor_name, None, 'alpha_dfc')\n industry = Stock().get_factor_h5('industry_citic1', None, 'primary_mfc')\n industry = industry.applymap(lambda x: x.decode('utf-8'))\n [alpha_val, industry] = FactorPreProcess().make_same_index_columns([\n alpha_val, industry])\n if alpha_barra_style_neutral:\n size = Stock().get_factor_h5('NORMAL_CNE5_SIZE', None, 'barra_risk_dfc'\n )\n beta = Stock().get_factor_h5('NORMAL_CNE5_BETA', None, 'barra_risk_dfc'\n )\n nolin_size = Stock().get_factor_h5('NORMAL_CNE5_NON_LINEAR_SIZE',\n None, 'barra_risk_dfc')\n momentum = Stock().get_factor_h5('NORMAL_CNE5_MOMENTUM', None,\n 'barra_risk_dfc')\n [size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([\n size, beta, nolin_size])\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0],\n beta.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1],\n beta.columns[-1])\n else:\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])\n date_series = Date().get_trade_date_series(beg_date, end_date, period=\n cal_period)\n date_series = list(set(date_series) & set(alpha_val.columns))\n date_series.sort()\n if alpha_remove_extreme_value:\n alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)\n if alpha_standard:\n alpha_val = FactorPreProcess().standardization(alpha_val)\n alpha_return = pd.DataFrame([], index=date_series)\n alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)\n for i_date in range(len(date_series) - 2):\n cur_cal_date = date_series[i_date]\n next_cal_date = date_series[i_date + 1]\n buy_date = Date().get_trade_date_offset(cur_cal_date, 1)\n sell_date = Date().get_trade_date_offset(next_cal_date, 1)\n print(' Calculating Factor %s Alpha Return At %s' % (factor_name,\n cur_cal_date))\n alpha_return.index.name = 'CalDate'\n alpha_return.ix[cur_cal_date, 'BuyDate'] = buy_date\n alpha_return.ix[cur_cal_date, 'SellDate'] = sell_date\n alpha_date = alpha_val[cur_cal_date]\n buy_price = price[buy_date]\n sell_price = price[sell_date]\n pct_date = sell_price / buy_price - 1.0\n if alpha_industry_neutral:\n try:\n industry_date = industry[cur_cal_date]\n 
industry_dummy = pd.get_dummies(industry_date)\n except:\n continue\n if len(pd.concat([alpha_date, industry_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=industry_dummy)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n if alpha_barra_style_neutral:\n try:\n size_date = size[cur_cal_date]\n beta_date = beta[cur_cal_date]\n nolin_size_date = nolin_size[cur_cal_date]\n momentum_date = momentum[cur_cal_date]\n except:\n continue\n if len(pd.concat([alpha_date, size_date], axis=1).dropna()\n ) < min_stock_number:\n continue\n else:\n barra_risk_exposure = pd.concat([beta_date, size_date,\n nolin_size_date, momentum_date], axis=1)\n barra_risk_exposure.columns = ['beta', 'size', 'nolin_size',\n 'momentum']\n params, factor_res = factor_neutral(factor_series=\n alpha_date, neutral_frame=barra_risk_exposure)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(\n alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n alpha_exposure.ix[cur_cal_date, :] = alpha_date\n res = pd.concat([alpha_date, pct_date], axis=1)\n res.columns = ['alpha_val', 'period_pct']\n res = res.dropna()\n res = res.sort_values(by=['alpha_val'], ascending=False)\n labels = [('group_' + str(i)) for i in list(range(1, group_number + 1))\n ]\n res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=\n labels)\n period_return = (res['alpha_val'] * res['period_pct']).mean()\n alpha_return.ix[cur_cal_date, 'FactorReturn'] = period_return\n information_correlation = res['alpha_val'].corr(res['period_pct'])\n alpha_return.ix[cur_cal_date, 'IC'] = information_correlation\n group_pct = res.groupby(by=['group'])['period_pct'].mean()\n for i_label in range(len(labels)):\n alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[\n i_label]\n alpha_return = alpha_return.dropna(subset=['FactorReturn'])\n alpha_return['CumFactorReturn'] = alpha_return['FactorReturn'].cumsum()\n cum_labels = [('Cum_' + str(x)) for x in labels]\n alpha_return[cum_labels] = alpha_return[labels].cumsum()\n back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)\n back_test_end_date = Date().get_trade_date_offset(date_series[len(\n date_series) - 1], 1)\n back_test_days = Date().get_trade_date_diff(back_test_beg_date,\n back_test_end_date)\n backtest_year = back_test_days / year_trade_days\n alpha_return['year'] = alpha_return.index.map(lambda x: datetime.\n strptime(x, '%Y%m%d').year)\n year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum(\n )\n year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()\n year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()\n year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()\n year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()\n year_describe = pd.concat([year_factor_return, year_count, year_ic_mean,\n year_ic_std, year_gp_mean], axis=1)\n col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']\n col.extend(labels)\n year_describe.columns = col\n year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'\n ] / year_describe['Count'] * year_count\n year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'\n ] * np.sqrt(50)\n year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return[\n 'CumFactorReturn'].values[-1] / backtest_year\n 
year_describe.ix['Sum', 'IC_IR'] = alpha_return['IC'].mean(\n ) / alpha_return['IC'].std() * np.sqrt(50)\n year_describe.ix['Sum', 'IC_mean'] = alpha_return['IC'].mean()\n year_describe.ix['Sum', 'IC_std'] = alpha_return['IC'].std()\n year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()\n year_describe.index = year_describe.index.map(str)\n for i in range(len(year_describe)):\n year = year_describe.index[i]\n corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index\n =labels, columns=['group_return'])\n corr_pd['group_number'] = list(range(1, group_number + 1))\n year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]\n alpha_exposure = alpha_exposure.astype(np.float)\n filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name +\n '_FactorExposureNeutral.csv')\n alpha_exposure.T.to_csv(filename)\n exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=[\n 'Exposure_Corr'])\n for i_date in range(1, len(alpha_exposure.index)):\n last_exposure_date = alpha_exposure.index[i_date - 1]\n cur_exposure_date = alpha_exposure.index[i_date]\n exposure_adjoin = alpha_exposure.ix[last_exposure_date:\n cur_exposure_date, :]\n exposure_adjoin = exposure_adjoin.T.dropna()\n exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'\n ] = exposure_adjoin.corr().ix[0, 1]\n exposure_corr = exposure_corr.dropna()\n exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'\n ].mean()\n filename = os.path.join(out_path, 'alpha_exposure_stability', \n factor_name + '_FactorExposureCorr.csv')\n exposure_corr.to_csv(filename)\n filename = os.path.join(out_path, 'alpha_return', factor_name +\n '_FactorReturn.xlsx')\n sheet_name = 'FactorReturn'\n we = WriteExcel(filename)\n ws = we.add_worksheet(sheet_name)\n num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'\n we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number\n =1, num_format_pd=num_format_pd, color='blue', fillna=True)\n num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=[\n 'format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['year']] = '0'\n we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=\n 2 + len(year_describe.columns), num_format_pd=num_format_pd, color=\n 'blue', fillna=True)\n we.close()\n\n\nif __name__ == '__main__':\n cal_period = 'W'\n beg_date = '20040101'\n end_date = datetime.today().strftime('%Y%m%d')\n path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n file = 'MyAlpha.xlsx'\n data = pd.read_excel(os.path.join(path, file), encoding='gbk')\n data = data[data['计算因子收益率'] == '是']\n data = data.reset_index(drop=True)\n for i in range(0, len(data)):\n factor_name = data.ix[i, '因子名']\n print('#################### 开始计算因子收益率 %s 数据 ####################' %\n factor_name)\n cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period)\n print('#################### 结束计算因子收益率 %s 数据 ####################' %\n factor_name)\n",
"step-5": "import pandas as pd\nimport numpy as np\nimport os\nimport matplotlib.pyplot as plt\nfrom datetime import datetime\nimport statsmodels.api as sm\nfrom quant.stock.stock import Stock\nfrom quant.stock.date import Date\nfrom quant.utility_fun.factor_preprocess import FactorPreProcess\nfrom quant.utility_fun.write_excel import WriteExcel\n\n\ndef factor_neutral(factor_series, neutral_frame):\n\n \"\"\"\n 中性化\n \"\"\"\n\n concat_data = pd.concat([factor_series, neutral_frame], axis=1)\n concat_data = concat_data.dropna()\n\n factor_val = concat_data.ix[:, 0]\n neutral_val = concat_data.ix[:, 1:]\n\n model = sm.OLS(factor_val.values, neutral_val.values)\n regress = model.fit()\n\n params = regress.params\n params = pd.DataFrame(params, index=neutral_val.columns, columns=['param'])\n factor_res = factor_val - regress.predict(neutral_val)\n\n return params, factor_res\n\n\ndef cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period):\n\n # param\n ###############################################################################################################\n ###############################################################################################################\n group_number = 8\n year_trade_days = 242\n min_stock_number = 100\n out_path = 'E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\'\n\n alpha_remove_extreme_value = True # alpha 因子 取极值\n alpha_standard = True # alpha 因子 标准化\n alpha_industry_neutral = True # alpha 因子 行业中性\n alpha_barra_style_neutral = True # alpha 因子 风格中性\n\n # read data\n ###############################################################################################################\n ###############################################################################################################\n price = Stock().get_factor_h5(\"PriceCloseAdjust\", None, \"alpha_dfc\")\n alpha_val = Stock().get_factor_h5(factor_name, None, \"alpha_dfc\")\n industry = Stock().get_factor_h5(\"industry_citic1\", None, \"primary_mfc\")\n industry = industry.applymap(lambda x: x.decode('utf-8'))\n \n [alpha_val, industry] = FactorPreProcess().make_same_index_columns([alpha_val, industry])\n \n if alpha_barra_style_neutral:\n \n size = Stock().get_factor_h5(\"NORMAL_CNE5_SIZE\", None, 'barra_risk_dfc')\n beta = Stock().get_factor_h5(\"NORMAL_CNE5_BETA\", None, 'barra_risk_dfc')\n nolin_size = Stock().get_factor_h5(\"NORMAL_CNE5_NON_LINEAR_SIZE\", None, 'barra_risk_dfc')\n momentum = Stock().get_factor_h5(\"NORMAL_CNE5_MOMENTUM\", None, 'barra_risk_dfc')\n\n [size, beta, nolin_size] = FactorPreProcess().make_same_index_columns([size, beta, nolin_size])\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0], beta.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1], beta.columns[-1])\n\n else:\n beg_date = max(beg_date, price.columns[0], alpha_val.columns[0])\n end_date = min(end_date, price.columns[-1], alpha_val.columns[-1])\n\n date_series = Date().get_trade_date_series(beg_date, end_date, period=cal_period)\n date_series = list(set(date_series) & set(alpha_val.columns))\n date_series.sort()\n\n # pre process data\n ###############################################################################################################\n ###############################################################################################################\n if alpha_remove_extreme_value:\n alpha_val = FactorPreProcess().remove_extreme_value_mad(alpha_val)\n\n if alpha_standard:\n alpha_val = FactorPreProcess().standardization(alpha_val)\n\n # 
cal everyday\n ###############################################################################################################\n ###############################################################################################################\n alpha_return = pd.DataFrame([], index=date_series)\n alpha_exposure = pd.DataFrame([], index=date_series, columns=price.index)\n\n for i_date in range(len(date_series) - 2):\n\n cur_cal_date = date_series[i_date]\n next_cal_date = date_series[i_date + 1]\n buy_date = Date().get_trade_date_offset(cur_cal_date, 1)\n sell_date = Date().get_trade_date_offset(next_cal_date, 1)\n print(\" Calculating Factor %s Alpha Return At %s\" % (factor_name, cur_cal_date))\n\n alpha_return.index.name = 'CalDate'\n alpha_return.ix[cur_cal_date, \"BuyDate\"] = buy_date\n alpha_return.ix[cur_cal_date, \"SellDate\"] = sell_date\n\n alpha_date = alpha_val[cur_cal_date]\n buy_price = price[buy_date]\n sell_price = price[sell_date]\n pct_date = sell_price / buy_price - 1.0\n\n if alpha_industry_neutral:\n\n try:\n industry_date = industry[cur_cal_date]\n industry_dummy = pd.get_dummies(industry_date)\n except:\n continue\n\n if len(pd.concat([alpha_date, industry_date], axis=1).dropna()) < min_stock_number:\n continue\n else:\n params, factor_res = factor_neutral(factor_series=alpha_date, neutral_frame=industry_dummy)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n\n if alpha_barra_style_neutral:\n\n try:\n size_date = size[cur_cal_date]\n beta_date = beta[cur_cal_date]\n nolin_size_date = nolin_size[cur_cal_date]\n momentum_date = momentum[cur_cal_date]\n except:\n continue\n\n if len(pd.concat([alpha_date, size_date], axis=1).dropna()) < min_stock_number:\n continue\n else:\n barra_risk_exposure = pd.concat([beta_date, size_date,\n nolin_size_date, momentum_date], axis=1)\n barra_risk_exposure.columns = ['beta', 'size', 'nolin_size', 'momentum']\n params, factor_res = factor_neutral(factor_series=alpha_date, neutral_frame=barra_risk_exposure)\n alpha_date = factor_res\n alpha_date = FactorPreProcess().remove_extreme_value_mad(alpha_date)\n alpha_date = FactorPreProcess().standardization(alpha_date)\n\n alpha_exposure.ix[cur_cal_date, :] = alpha_date\n res = pd.concat([alpha_date, pct_date], axis=1)\n res.columns = ['alpha_val', 'period_pct']\n res = res.dropna()\n res = res.sort_values(by=['alpha_val'], ascending=False)\n\n labels = [\"group_\" + str(i) for i in list(range(1, group_number + 1))]\n res['group'] = pd.cut(res['alpha_val'], bins=group_number, labels=labels)\n\n period_return = (res['alpha_val'] * res['period_pct']).mean()\n alpha_return.ix[cur_cal_date, \"FactorReturn\"] = period_return\n\n information_correlation = res['alpha_val'].corr(res['period_pct'])\n alpha_return.ix[cur_cal_date, \"IC\"] = information_correlation\n\n group_pct = res.groupby(by=['group'])['period_pct'].mean()\n for i_label in range(len(labels)):\n alpha_return.ix[cur_cal_date, labels[i_label]] = group_pct.values[i_label]\n\n alpha_return = alpha_return.dropna(subset=['FactorReturn'])\n alpha_return[\"CumFactorReturn\"] = alpha_return['FactorReturn'].cumsum()\n cum_labels = [\"Cum_\" + str(x) for x in labels]\n alpha_return[cum_labels] = alpha_return[labels].cumsum()\n\n # plot\n ###############################################################################################################\n 
###############################################################################################################\n # plt_col = []\n # plt_col.append(\"CumFactorReturn\")\n # plt_col.extend(cum_labels)\n # alpha_return[plt_col].plot()\n # plt.title(factor_name)\n # plt.show()\n\n # describe annual\n ###############################################################################################################\n ###############################################################################################################\n\n back_test_beg_date = Date().get_trade_date_offset(date_series[0], 1)\n back_test_end_date = Date().get_trade_date_offset(date_series[len(date_series) - 1], 1)\n back_test_days = Date().get_trade_date_diff(back_test_beg_date, back_test_end_date)\n\n backtest_year = back_test_days / year_trade_days\n\n alpha_return['year'] = alpha_return.index.map(lambda x: datetime.strptime(x, \"%Y%m%d\").year)\n\n year_factor_return = alpha_return.groupby(by=['year'])['FactorReturn'].sum()\n year_count = alpha_return.groupby(by=['year'])['FactorReturn'].count()\n year_ic_mean = alpha_return.groupby(by=['year'])['IC'].mean()\n year_ic_std = alpha_return.groupby(by=['year'])['IC'].std()\n year_gp_mean = alpha_return.groupby(by=['year'])[labels].mean()\n\n year_describe = pd.concat([year_factor_return, year_count, year_ic_mean, year_ic_std, year_gp_mean], axis=1)\n col = ['YearFactorReturn', 'Count', 'IC_mean', 'IC_std']\n col.extend(labels)\n year_describe.columns = col\n\n year_describe['YearFactorReturn'] = year_describe['YearFactorReturn'] / year_describe['Count'] * year_count\n year_describe['IC_IR'] = year_describe['IC_mean'] / year_describe['IC_std'] * np.sqrt(50)\n\n year_describe.ix['Sum', 'YearFactorReturn'] = alpha_return[\"CumFactorReturn\"].values[-1] / backtest_year\n year_describe.ix['Sum', 'IC_IR'] = alpha_return[\"IC\"].mean() / alpha_return[\"IC\"].std() * np.sqrt(50)\n year_describe.ix['Sum', 'IC_mean'] = alpha_return[\"IC\"].mean()\n year_describe.ix['Sum', 'IC_std'] = alpha_return[\"IC\"].std()\n year_describe.ix['Sum', labels] = year_describe.ix[0:-1, labels].sum()\n year_describe.index = year_describe.index.map(str)\n\n for i in range(len(year_describe)):\n year = year_describe.index[i]\n corr_pd = pd.DataFrame(year_describe.ix[year, labels].values, index=labels, columns=['group_return'])\n corr_pd['group_number'] = (list(range(1, group_number+1)))\n year_describe.ix[year, 'Group_Corr'] = corr_pd.corr().ix[0, 1]\n\n # save data\n ###############################################################################################################\n ###############################################################################################################\n\n # alpha_exposure_neutral\n ###############################################################################################################\n alpha_exposure = alpha_exposure.astype(np.float)\n filename = os.path.join(out_path, 'alpha_exposure_neutral', factor_name + \"_FactorExposureNeutral.csv\")\n alpha_exposure.T.to_csv(filename)\n\n # exposure_corr\n ###############################################################################################################\n exposure_corr = pd.DataFrame([], index=alpha_exposure.index, columns=['Exposure_Corr'])\n\n for i_date in range(1, len(alpha_exposure.index)):\n last_exposure_date = alpha_exposure.index[i_date-1]\n cur_exposure_date = alpha_exposure.index[i_date]\n exposure_adjoin = alpha_exposure.ix[last_exposure_date:cur_exposure_date, :]\n exposure_adjoin = 
exposure_adjoin.T.dropna()\n exposure_corr.ix[cur_exposure_date, 'Exposure_Corr'] = exposure_adjoin.corr().ix[0, 1]\n\n exposure_corr = exposure_corr.dropna()\n exposure_corr.ix['Mean', 'Exposure_Corr'] = exposure_corr['Exposure_Corr'].mean()\n filename = os.path.join(out_path, 'alpha_exposure_stability', factor_name + \"_FactorExposureCorr.csv\")\n exposure_corr.to_csv(filename)\n\n # Factor Return\n ###############################################################################################################\n filename = os.path.join(out_path, 'alpha_return', factor_name + \"_FactorReturn.xlsx\")\n sheet_name = \"FactorReturn\"\n\n we = WriteExcel(filename)\n ws = we.add_worksheet(sheet_name)\n\n num_format_pd = pd.DataFrame([], columns=year_describe.columns, index=['format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['Count', 'IC_IR']] = '0.00'\n we.write_pandas(year_describe, ws, begin_row_number=0, begin_col_number=1,\n num_format_pd=num_format_pd, color=\"blue\", fillna=True)\n\n num_format_pd = pd.DataFrame([], columns=alpha_return.columns, index=['format'])\n num_format_pd.ix['format', :] = '0.00%'\n num_format_pd.ix['format', ['year']] = '0'\n we.write_pandas(alpha_return, ws, begin_row_number=0, begin_col_number=2+len(year_describe.columns),\n num_format_pd=num_format_pd, color=\"blue\", fillna=True)\n we.close()\n ###############################################################################################################\n\n\nif __name__ == '__main__':\n\n cal_period = \"W\"\n beg_date = \"20040101\"\n end_date = datetime.today().strftime(\"%Y%m%d\")\n\n path = \"E:\\\\3_Data\\\\5_stock_data\\\\3_alpha_model\\\\\"\n file = \"MyAlpha.xlsx\"\n\n data = pd.read_excel(os.path.join(path, file), encoding='gbk')\n data = data[data['计算因子收益率'] == \"是\"]\n data = data.reset_index(drop=True)\n\n for i in range(0, len(data)):\n\n factor_name = data.ix[i, \"因子名\"]\n print(\"#################### 开始计算因子收益率 %s 数据 ####################\" % factor_name)\n cal_factor_alpha_return(factor_name, beg_date, end_date, cal_period)\n print(\"#################### 结束计算因子收益率 %s 数据 ####################\" % factor_name)\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import datetime
import hashlib
import json
from flask import Flask, jsonify, request
import requests
from uuid import uuid4
from urllib.parse import urlparse
from Crypto.PublicKey import RSA
# Part 1 - Building a Blockchain
class Blockchain:
    # __init__ sets up an empty chain list, an empty farmer_details list, an empty set
    # of nodes, and calls create_block to mint the genesis block.
def __init__(self):
self.chain = []
self.farmer_details = []
self.create_block(proof = 1, previous_hash = '0')
self.nodes = set()
    # create_block builds a dictionary containing the index (chain length + 1), a timestamp
    # (via the datetime module), the proof and previous_hash passed in as parameters, and the
    # pending farmer_details; it then appends the block to the chain and returns it.
def create_block(self, proof, previous_hash):
block = {'index': len(self.chain) + 1,
'timestamp': str(datetime.datetime.now()),
'proof': proof,
'previous_hash': previous_hash,
'farmer_details': self.farmer_details}
self.farmer_details = []
self.chain.append(block)
return block
#It returns the last block of the chain.
def get_previous_block(self):
return self.chain[-1]
    # proof_of_work loops until the sha256 hash of (new_proof**2 - previous_proof**2)
    # has four leading zeroes; it then returns that new proof, otherwise it increments
    # new_proof by 1 and tries again.
def proof_of_work(self, previous_proof):
new_proof = 1
check_proof = False
while check_proof is False:
hash_operation = hashlib.sha256(str(new_proof**2 - previous_proof**2).encode()).hexdigest()
if hash_operation[:4] == '0000':
check_proof = True
else:
new_proof += 1
return new_proof
    # hash returns the sha256 digest of the JSON-encoded block.
def hash(self, block):
encoded_block = json.dumps(block, sort_keys = True).encode()
return hashlib.sha256(encoded_block).hexdigest()
    # is_chain_valid walks the chain, checking that each block's previous_hash matches the
    # hash of the preceding block and that the sha256 hash of (proof**2 - previous_proof**2)
    # has four leading zeroes; if either check fails, the chain is not valid.
def is_chain_valid(self, chain):
previous_block = chain[0]
block_index = 1
while block_index < len(chain):
block = chain[block_index]
if block['previous_hash'] != self.hash(previous_block):
return False
previous_proof = previous_block['proof']
proof = block['proof']
hash_operation = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()
if hash_operation[:4] != '0000':
return False
previous_block = block
block_index += 1
return True
    # add_farmerdetails generates a 1024-bit RSA key pair, computes the transaction hash
    # (the sha256 of the concatenated sha256 hashes of name, crop_name, quantity and rate),
    # interprets that hash as an integer, and signs it with textbook RSA
    # (signature = data ** privatekey.d mod privatekey.n). It then appends a dictionary with
    # the hashed fields, the transaction hash and the signature to farmer_details, and
    # returns the index of the block these details will go into.
def add_farmerdetails(self, name, crop_name, quantity,rate):
privatekey = RSA.generate(1024)
publickey = privatekey.publickey()
hash_of_transaction=hashlib.sha256((hashlib.sha256(name.encode()).hexdigest()+hashlib.sha256(crop_name.encode()).hexdigest()+hashlib.sha256(str(quantity).encode()).hexdigest()+hashlib.sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()
data=int(hash_of_transaction,16)
signature=pow(data,privatekey.d,privatekey.n)
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.encode()).hexdigest(),
'crop_name': hashlib.sha256(crop_name.encode()).hexdigest(),
'quantity_inkg': hashlib.sha256(str(quantity).encode()).hexdigest(),
'rate_perkg': hashlib.sha256(str(rate).encode()).hexdigest(),
'hash_of_transaction': hash_of_transaction,
'signature': signature
})
previous_block = self.get_previous_block()
return previous_block['index'] + 1
    # add_node parses the address with urlparse and adds its network location to the set of nodes.
def add_node(self, address):
parsed_url = urlparse(address)
self.nodes.add(parsed_url.netloc)
    # replace_chain queries every node in the network via its /get_chain endpoint (defined
    # below) and replaces the local chain with the longest valid chain found, if any.
def replace_chain(self):
network = self.nodes
longest_chain = None
max_length = len(self.chain)
for node in network:
response = requests.get(f'http://{node}/get_chain')
if response.status_code == 200:
length = response.json()['length']
chain = response.json()['chain']
if length > max_length and self.is_chain_valid(chain):
max_length = length
longest_chain = chain
if longest_chain:
self.chain = longest_chain
return True
return False
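
# Illustrative sketch (not part of the original app; the helper name is hypothetical
# and it is never called): a quick offline check of the proof-of-work rule above.
def _demo_proof_of_work_check(previous_proof=1):
    bc = Blockchain()
    proof = bc.proof_of_work(previous_proof)
    digest = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()
    assert digest.startswith('0000')  # the condition proof_of_work searches for
    return proof, digest
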
# Part 2 - Mining our Blockchain
# Creating a Web App
app = Flask(__name__)
# Creating an address for the node on Port 5001
node_address = str(uuid4()).replace('-', '')
# Creating a Blockchain
blockchain = Blockchain()
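
# Illustrative sketch (hypothetical helper, never called by the app): verifying the
# textbook-RSA signing used in Blockchain.add_farmerdetails above, where
# signature = pow(data, d, n); raising the signature to the public exponent e recovers data.
def _demo_verify_signature(message=b'example transaction'):
    key = RSA.generate(1024)
    data = int(hashlib.sha256(message).hexdigest(), 16)
    signature = pow(data, key.d, key.n)
    return pow(signature, key.e, key.n) == data  # True for a valid signature
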
# Mining a new block
# mine_block fetches the previous block and its proof, computes a new proof with
# proof_of_work, hashes the previous block, creates the new block with
# create_block(proof, previous_hash) and hashes the freshly mined block. It then
# builds a response with all the details of the new block, jsonifies it and returns it.
@app.route('/mine_block', methods = ['GET'])
def mine_block():
previous_block = blockchain.get_previous_block()
previous_proof = previous_block['proof']
proof = blockchain.proof_of_work(previous_proof)
previous_hash = blockchain.hash(previous_block)
block = blockchain.create_block(proof, previous_hash)
current_block=blockchain.get_previous_block()
current_hash=blockchain.hash(current_block)
response = {'message': 'Congratulations, you just mined a block!',
'index': block['index'],
'timestamp': block['timestamp'],
'proof': block['proof'],
'previous_hash': block['previous_hash'],
'farmer': block['farmer_details'],
'current_hash': current_hash}
return jsonify(response), 200
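
# Usage sketch (hypothetical; assumes the app is already running on port 5001):
# calling this from another process returns the response dictionary built above.
def _demo_mine_block_request():
    return requests.get('http://127.0.0.1:5001/mine_block').json()
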
# Getting the full Blockchain
# print_chain builds a list chain_till_now by iterating over every block and computing
# its hash. If a block has no farmer_details, it appends a dictionary with the block's
# index, timestamp, proof, previous_hash, farmer details and current hash. Otherwise it
# first concatenates the hash_of_transaction of every entry in farmer_details (in block
# order) and hashes the result (the merged hash), then appends a dictionary that also
# carries this merged hash. Finally it returns a response containing chain_till_now and
# the length of the blockchain, jsonified.
@app.route('/print_chain',methods=['GET'])
def print_chain():
chain_till_now =[]
for xblock in blockchain.chain:
xcurrent_hash=blockchain.hash(xblock)
if len(xblock['farmer_details'])==0:
chain_till_now.append({'index': xblock['index'],
'timestamp': xblock['timestamp'],
'proof': xblock['proof'],
'previous_hash': xblock['previous_hash'],
'farmer': xblock['farmer_details'],
'current_hash': xcurrent_hash})
        else:
            # Merged hash: concatenate the per-transaction hashes in block order, then sha256.
            merged = "".join(detail['hash_of_transaction'] for detail in xblock['farmer_details'])
            chain_till_now.append({'Merged_hash': hashlib.sha256(merged.encode()).hexdigest(),
'index': xblock['index'],
'timestamp': xblock['timestamp'],
'proof': xblock['proof'],
'previous_hash': xblock['previous_hash'],
'farmer': xblock['farmer_details'],
'current_hash': xcurrent_hash})
response = {'chain': chain_till_now,
'length': len(blockchain.chain)}
return jsonify(response), 200
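
# Sketch of the merged-hash rule used in print_chain above (helper name is hypothetical):
# concatenate the per-transaction hashes in block order, then sha256 the result.
def _demo_merged_hash(transaction_hashes):
    return hashlib.sha256(''.join(transaction_hashes).encode()).hexdigest()
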
# get_chain returns a response containing blockchain.chain and its length, jsonified.
@app.route('/get_chain', methods = ['GET'])
def get_chain():
response = {'chain': blockchain.chain,
'length': len(blockchain.chain)}
return jsonify(response), 200
# Checking if the Blockchain is valid
# is_valid calls is_chain_valid and returns a message saying whether the chain is valid.
@app.route('/is_valid', methods = ['GET'])
def is_valid():
is_valid = blockchain.is_chain_valid(blockchain.chain)
if is_valid:
response = {'message': 'All good. The Blockchain is valid.'}
else:
response = {'message': 'Houston, we have a problem. The Blockchain is not valid.'}
return jsonify(response), 200
# Adding a new transaction to the Blockchain
# add_farmer_details reads the JSON request body and checks that all the farmer keys
# (name_of_farmer, crop_name, quantity_inkg, rate_perkg) are present. If any are
# missing it returns an error; otherwise it calls blockchain.add_farmerdetails with
# the posted values and returns the index of the block these details will be added to.
@app.route('/add_farmerdetails', methods = ['POST'])
def add_farmer_details():
    json_data = request.get_json()  # renamed to avoid shadowing the json module
    farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg']
    if not all(key in json_data for key in farmer_keys):
        return 'Some elements of the farmer_details are missing', 400
    index = blockchain.add_farmerdetails(json_data['name_of_farmer'], json_data['crop_name'], json_data['quantity_inkg'], json_data['rate_perkg'])
response = {'message': f'These details will be added to Block {index}'}
return jsonify(response), 201
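# Example call (sketch with hypothetical values; requests is already imported above):
# requests.post('http://127.0.0.1:5001/add_farmerdetails',
#               json={'name_of_farmer': 'Ravi', 'crop_name': 'wheat',
#                     'quantity_inkg': 100, 'rate_perkg': 25})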
# Part 3 - Decentralizing our Blockchain
# Connecting new nodes
#It takes a JSON payload as request and first checks whether it contains any nodes or not.
# If it contains nodes, it calls the function blockchain.add_node for each of them.
#Then it returns the list blockchain.nodes in the response.
@app.route('/connect_node', methods = ['POST'])
def connect_node():
json = request.get_json()
nodes = json.get('nodes')
if nodes is None:
return "No node", 400
for node in nodes:
blockchain.add_node(node)
response = {'message': 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:',
'total_nodes': list(blockchain.nodes)}
return jsonify(response), 201
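# Example call (sketch; the peer address is hypothetical):
# requests.post('http://127.0.0.1:5001/connect_node',
#               json={'nodes': ['http://127.0.0.1:5002']})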
# Replacing the chain by the longest chain if needed
#- It calls the function blockchain.replace_chain. If the chain is replaced,
#it returns a response with a message that the nodes had different chains so the chain was replaced by the longest one, along with blockchain.chain.
# Otherwise it returns a response with a message that all is good and the chain is the longest one, with blockchain.chain.
#Then it jsonifies the response and returns it.
@app.route('/replace_chain', methods = ['GET'])
def replace_chain():
is_chain_replaced = blockchain.replace_chain()
if is_chain_replaced:
response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',
'new_chain': blockchain.chain}
else:
response = {'message': 'All good. The chain is the largest one.',
'actual_chain': blockchain.chain}
return jsonify(response), 200
# Running the app
app.run(host = '0.0.0.0', port = 5001)
|
normal
|
{
"blob_id": "f8c222b1a84a092a3388cb801a88495bc227b1d5",
"index": 9748,
"step-1": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\n@app.route('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': 
block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n<mask token>\n\n\n@app.route('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\n@app.route('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\n@app.route('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': 
block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n@app.route('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n<mask token>\n\n\n@app.route('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\n@app.route('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n<mask token>\n\n\n@app.route('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\n@app.route('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': 
block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n@app.route('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n<mask token>\n\n\n@app.route('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\n@app.route('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n@app.route('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\n@app.route('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<mask token>\n\n\n@app.route('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': 
block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n@app.route('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n@app.route('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n@app.route('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\n@app.route('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n@app.route('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\n@app.route('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"step-5": "\r\nimport datetime\r\nimport hashlib\r\nimport json\r\nfrom flask import Flask, jsonify, request\r\nimport requests\r\nfrom uuid import uuid4\r\nfrom urllib.parse import urlparse\r\nfrom Crypto.PublicKey import RSA\r\n\r\n# Part 1 - Building a Blockchain\r\n\r\nclass Blockchain:\r\n#chain(emptylist) , farmer_details(emptylist), nodes(set), create_block(function to create the genesis block)\r\n def __init__(self):\r\n self.chain = []\r\n self.farmer_details = []\r\n self.create_block(proof = 1, previous_hash = '0')\r\n self.nodes = set()\r\n#It creates a dictionary block which contains index(length of chain+1),timestamp( by using the module datetime),\r\n#Proof( passes as parameter),previous_hash(passed as parameter),\r\n#Farmer_details(from self) and append this to the chain.\r\n \r\n def create_block(self, proof, previous_hash):\r\n block = {'index': len(self.chain) + 1,\r\n 'timestamp': str(datetime.datetime.now()),\r\n 'proof': proof,\r\n 'previous_hash': previous_hash,\r\n 'farmer_details': self.farmer_details}\r\n self.farmer_details = []\r\n self.chain.append(block)\r\n return block\r\n#It returns the last block of the chain.\r\n def get_previous_block(self):\r\n return self.chain[-1]\r\n#It runs a lop and check if hash of new proof^2- previous proof^2 contains 4 leading zeroes. \r\n#if yes,then it returns the new proof otherwise increment the new proof by 1 and iterates again.\r\n def proof_of_work(self, previous_proof):\r\n new_proof = 1\r\n check_proof = False\r\n while check_proof is False:\r\n hash_operation = hashlib.sha256(str(new_proof**2 - previous_proof**2).encode()).hexdigest()\r\n if hash_operation[:4] == '0000':\r\n check_proof = True\r\n else:\r\n new_proof += 1\r\n return new_proof\r\n#- It returns the hash of the block using sha256 \r\n def hash(self, block):\r\n encoded_block = json.dumps(block, sort_keys = True).encode()\r\n return hashlib.sha256(encoded_block).hexdigest()\r\n#It iterates a loop from 0 to chain length and check if hash of the block is same as returned by the hash function, \r\n#then it checks if hash of the proof of current block^2-proof of previous block^2 contains 4 leading zeroes or not.\r\n# if no, then chain is not valid. \r\n def is_chain_valid(self, chain):\r\n previous_block = chain[0]\r\n block_index = 1\r\n while block_index < len(chain):\r\n block = chain[block_index]\r\n if block['previous_hash'] != self.hash(previous_block):\r\n return False\r\n previous_proof = previous_block['proof']\r\n proof = block['proof']\r\n hash_operation = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()\r\n if hash_operation[:4] != '0000':\r\n return False\r\n previous_block = block\r\n block_index += 1\r\n return True\r\n#- It creates the private key using the RSA.generate(1024),then creates the public key,\r\n# hash of transaction(it is the hash of the sum of hashes of the name,crop_name,quantity,rate),\r\n#data( it is the hash of the transaction in the int form),\r\n#signature( it is created by raising the data to the power of privatekey.d%privatekey.n).\r\n# Then it append a dictionary containing all these information in the hash format to the chain farmer_details \r\n#and returns the index of the new block. 
\r\n def add_farmerdetails(self, name, crop_name, quantity,rate):\r\n privatekey = RSA.generate(1024) \r\n publickey = privatekey.publickey() \r\n hash_of_transaction=hashlib.sha256((hashlib.sha256(name.encode()).hexdigest()+hashlib.sha256(crop_name.encode()).hexdigest()+hashlib.sha256(str(quantity).encode()).hexdigest()+hashlib.sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\r\n data=int(hash_of_transaction,16)\r\n signature=pow(data,privatekey.d,privatekey.n)\r\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.encode()).hexdigest(),\r\n 'crop_name': hashlib.sha256(crop_name.encode()).hexdigest(),\r\n 'quantity_inkg': hashlib.sha256(str(quantity).encode()).hexdigest(),\r\n 'rate_perkg': hashlib.sha256(str(rate).encode()).hexdigest(),\r\n 'hash_of_transaction': hash_of_transaction,\r\n 'signature': signature\r\n })\r\n previous_block = self.get_previous_block()\r\n return previous_block['index'] + 1\r\n#It takes the url using urlparse of the address and then adds this to the set nodes in the self.\r\n def add_node(self, address):\r\n parsed_url = urlparse(address)\r\n self.nodes.add(parsed_url.netloc)\r\n#It access all the nodes in the set nodes and then iterates a loop to get their chain length using get_chain (to be described)\r\n# and replaces the current chain with the longest chain of all the nodes. \r\n def replace_chain(self):\r\n network = self.nodes\r\n longest_chain = None\r\n max_length = len(self.chain)\r\n for node in network:\r\n response = requests.get(f'http://{node}/get_chain')\r\n if response.status_code == 200:\r\n length = response.json()['length']\r\n chain = response.json()['chain']\r\n if length > max_length and self.is_chain_valid(chain):\r\n max_length = length\r\n longest_chain = chain\r\n if longest_chain:\r\n self.chain = longest_chain\r\n return True\r\n return False\r\n\r\n# Part 2 - Mining our Blockchain\r\n\r\n# Creating a Web App\r\napp = Flask(__name__)\r\n\r\n# Creating an address for the node on Port 5001\r\nnode_address = str(uuid4()).replace('-', '')\r\n\r\n# Creating a Blockchain\r\nblockchain = Blockchain()\r\n\r\n# Mining a new block\r\n#- It access the previous block by calling the function get_previous_block(), \r\n#then access the previous proof by previous_block[‘proof’],\r\n#then it creates a new proof by using the function proof_of_work(‘previous_proof’), \r\n#then it finds the hash of the previous block by using the function blockchain.hash(previous_block),\r\n# then calls the function create_block( proof,previous_hash),then finds the hash of this block.\r\n# It creates a response containing all the details of the new block,jsonify it and returns it.\r\n@app.route('/mine_block', methods = ['GET'])\r\ndef mine_block():\r\n previous_block = blockchain.get_previous_block()\r\n previous_proof = previous_block['proof']\r\n proof = blockchain.proof_of_work(previous_proof)\r\n previous_hash = blockchain.hash(previous_block)\r\n #blockchain.add_transaction(sender = node_address, receiver = 'Hadelin', amount = 1)\r\n block = blockchain.create_block(proof, previous_hash)\r\n current_block=blockchain.get_previous_block()\r\n current_hash=blockchain.hash(current_block)\r\n response = {'message': 'Congratulations, you just mined a block!',\r\n 'index': block['index'],\r\n 'timestamp': block['timestamp'],\r\n 'proof': block['proof'],\r\n 'previous_hash': block['previous_hash'],\r\n 'farmer': block['farmer_details'],\r\n 'current_hash': current_hash}\r\n return jsonify(response), 200\r\n\r\n# Getting the full Blockchain\r\n#- 
It creates an empty list chain_till_now, then iterates over all the blocks in the blockchain and find it’s hash \r\n#then check if the list farmer_details is empty or not, \r\n#if it is empty then it appends a dictionary containing the current block’s index,timestamp,proof,previous_hash, current_hash, farmer_details.\r\n# If the farmer_details list is not empty then it first finds the length of the list farmer_details \r\n#then it iterates over the length of the list farmer_details and appends the hash of transaction \r\n# contained within the dictionary of the list farmer_details. Then it creates the hash of this appended hash. This is the merged hash.\r\n# Then it creates a dictionary containing merged hash,index,timestamp,proof,previous_hash,farmer_details and current hash.\r\n# Then, it appends this dictionary to the list chain till now.\r\n# It then creates the response containing the chain till now and length of the blockchain,jasonifies it and returns it. \r\n\r\n@app.route('/print_chain',methods=['GET'])\r\ndef print_chain():\r\n chain_till_now =[]\r\n for xblock in blockchain.chain:\r\n xcurrent_hash=blockchain.hash(xblock) \r\n if len(xblock['farmer_details'])==0:\r\n chain_till_now.append({'index': xblock['index'],\r\n 'timestamp': xblock['timestamp'],\r\n 'proof': xblock['proof'],\r\n 'previous_hash': xblock['previous_hash'],\r\n 'farmer': xblock['farmer_details'],\r\n 'current_hash': xcurrent_hash})\r\n else:\r\n l=len(xblock['farmer_details'])\r\n sum=\"\"\r\n l-=1\r\n while(l>=0):\r\n sum=xblock['farmer_details'][l]['hash_of_transaction']+sum\r\n l-=1\r\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode()).hexdigest(),\r\n 'index': xblock['index'],\r\n 'timestamp': xblock['timestamp'],\r\n 'proof': xblock['proof'],\r\n 'previous_hash': xblock['previous_hash'],\r\n 'farmer': xblock['farmer_details'],\r\n 'current_hash': xcurrent_hash}) \r\n response = {'chain': chain_till_now,\r\n 'length': len(blockchain.chain)}\r\n return jsonify(response), 200\r\n\r\n#- It creats the response containing the blockchain.chain and its length,jasonifies it and returns it. \r\n@app.route('/get_chain', methods = ['GET'])\r\ndef get_chain():\r\n response = {'chain': blockchain.chain,\r\n 'length': len(blockchain.chain)}\r\n return jsonify(response), 200\r\n\r\n# Checking if the Blockchain is valid\r\n#- It calls the function is_chain_valid and returns a string as response based on whether the chain is valid or not.\r\n@app.route('/is_valid', methods = ['GET'])\r\ndef is_valid():\r\n is_valid = blockchain.is_chain_valid(blockchain.chain)\r\n if is_valid:\r\n response = {'message': 'All good. The Blockchain is valid.'}\r\n else:\r\n response = {'message': 'Houston, we have a problem. The Blockchain is not valid.'}\r\n return jsonify(response), 200\r\n\r\n# Adding a new transaction to the Blockchain\r\n#It takes the input in Jason format and checks if all the keys in the farmer keys(name_of_farmer,crop_name,quantity_inkg, rate_perkg) are available in the json file. 
\r\n#If no, It returns that some elements are missing\r\n# otherwise it calls the function add_farmer_details by passing the farmer details in the json file as parameter and \r\n#returns the index of the block in which these details will be added.\r\n@app.route('/add_farmerdetails', methods = ['POST'])\r\ndef add_farmer_details():\r\n json = request.get_json()\r\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg','rate_perkg']\r\n if not all(key in json for key in farmer_keys):\r\n return 'Some elements of the farmer_details are missing', 400\r\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json['crop_name'], json['quantity_inkg'], json['rate_perkg'])\r\n response = {'message': f'These details will be added to Block {index}'}\r\n return jsonify(response), 201\r\n\r\n# Part 3 - Decentralizing our Blockchain\r\n\r\n# Connecting new nodes\r\n#It takes a Jason file as request and first check if it contains any node or not.\r\n# If it contains the nodes then it calls the function blockchain.add_node .\r\n#Then it returns the list of blockchain.nodes as response.\r\n@app.route('/connect_node', methods = ['POST'])\r\ndef connect_node():\r\n json = request.get_json()\r\n nodes = json.get('nodes')\r\n if nodes is None:\r\n return \"No node\", 400\r\n for node in nodes:\r\n blockchain.add_node(node)\r\n response = {'message': 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:',\r\n 'total_nodes': list(blockchain.nodes)}\r\n return jsonify(response), 201\r\n\r\n# Replacing the chain by the longest chain if needed\r\n#- It calls the function blockcain.replace_chain. If the chain is replaced \r\n#it returns the response with a message that the nodes has the different chains so the chain has been replaced by the longest chain alongwith the blockchain.chain.\r\n# Otherwise it returns the response with a message all good the chain is the longest one with the blockchain.chain .\r\n#then it jsonify the response and returns it.\r\n@app.route('/replace_chain', methods = ['GET'])\r\ndef replace_chain():\r\n is_chain_replaced = blockchain.replace_chain()\r\n if is_chain_replaced:\r\n response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',\r\n 'new_chain': blockchain.chain}\r\n else:\r\n response = {'message': 'All good. The chain is the largest one.',\r\n 'actual_chain': blockchain.chain}\r\n return jsonify(response), 200\r\n\r\n# Running the app\r\napp.run(host = '0.0.0.0', port = 5001)\r\n",
"step-ids": [
13,
15,
16,
18,
21
]
}
|
[
13,
15,
16,
18,
21
] |
<|reserved_special_token_0|>
class TrafficScriptArg:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TrafficScriptArg:
<|reserved_special_token_0|>
def __init__(self, more_args=None, opt_args=None):
parser = argparse.ArgumentParser()
parser.add_argument(u'--tx_if', help=u'interface that sends traffic')
parser.add_argument(u'--rx_if', help=u'interface that receives traffic'
)
if more_args is not None:
for arg in more_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name)
if opt_args is not None:
for arg in opt_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name, nargs=u'?', default=u'')
self._parser = parser
self._args = vars(parser.parse_args())
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TrafficScriptArg:
"""Traffic scripts argument parser.
Parse arguments for traffic script. Default has two arguments '--tx_if'
and '--rx_if'. You can provide more arguments. All arguments have string
representation of the value. You can add also optional arguments. Default
value for optional arguments is empty string.
:param more_args: List of additional arguments (optional).
:param opt_args: List of optional arguments (optional).
:type more_args: list
:type opt_args: list
:Example:
>>> from TrafficScriptArg import TrafficScriptArg
>>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])
"""
def __init__(self, more_args=None, opt_args=None):
parser = argparse.ArgumentParser()
parser.add_argument(u'--tx_if', help=u'interface that sends traffic')
parser.add_argument(u'--rx_if', help=u'interface that receives traffic'
)
if more_args is not None:
for arg in more_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name)
if opt_args is not None:
for arg in opt_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name, nargs=u'?', default=u'')
self._parser = parser
self._args = vars(parser.parse_args())
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import argparse
class TrafficScriptArg:
"""Traffic scripts argument parser.
Parse arguments for traffic script. Default has two arguments '--tx_if'
and '--rx_if'. You can provide more arguments. All arguments have string
representation of the value. You can add also optional arguments. Default
value for optional arguments is empty string.
:param more_args: List of additional arguments (optional).
:param opt_args: List of optional arguments (optional).
:type more_args: list
:type opt_args: list
:Example:
>>> from TrafficScriptArg import TrafficScriptArg
>>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])
"""
def __init__(self, more_args=None, opt_args=None):
parser = argparse.ArgumentParser()
parser.add_argument(u'--tx_if', help=u'interface that sends traffic')
parser.add_argument(u'--rx_if', help=u'interface that receives traffic'
)
if more_args is not None:
for arg in more_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name)
if opt_args is not None:
for arg in opt_args:
arg_name = f'--{arg}'
parser.add_argument(arg_name, nargs=u'?', default=u'')
self._parser = parser
self._args = vars(parser.parse_args())
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
<|reserved_special_token_1|>
# Copyright (c) 2021 Cisco and/or its affiliates.
#
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
#
# Licensed under the Apache License 2.0 or
# GNU General Public License v2.0 or later; you may not use this file
# except in compliance with one of these Licenses. You
# may obtain a copy of the Licenses at:
#
# http://www.apache.org/licenses/LICENSE-2.0
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
#
# Note: If this file is linked with Scapy, which is GPLv2+, your use of it
# must be under GPLv2+. If at any point in the future it is no longer linked
# with Scapy (or other GPLv2+ licensed software), you are free to choose
# Apache 2.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Traffic scripts argument parser library."""
import argparse
class TrafficScriptArg:
"""Traffic scripts argument parser.
Parse arguments for traffic script. Default has two arguments '--tx_if'
and '--rx_if'. You can provide more arguments. All arguments have string
representation of the value. You can add also optional arguments. Default
value for optional arguments is empty string.
:param more_args: List of additional arguments (optional).
:param opt_args: List of optional arguments (optional).
:type more_args: list
:type opt_args: list
:Example:
>>> from TrafficScriptArg import TrafficScriptArg
>>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])
"""
def __init__(self, more_args=None, opt_args=None):
parser = argparse.ArgumentParser()
parser.add_argument(u"--tx_if", help=u"interface that sends traffic")
parser.add_argument(u"--rx_if", help=u"interface that receives traffic")
if more_args is not None:
for arg in more_args:
arg_name = f"--{arg}"
parser.add_argument(arg_name)
if opt_args is not None:
for arg in opt_args:
arg_name = f"--{arg}"
parser.add_argument(arg_name, nargs=u"?", default=u"")
self._parser = parser
self._args = vars(parser.parse_args())
def get_arg(self, arg_name):
"""Get argument value.
:param arg_name: Argument name.
:type arg_name: str
:returns: Argument value.
:rtype: str
"""
arg_val = self._args.get(arg_name)
if arg_val is None:
raise Exception(f"Argument '{arg_name}' not found")
return arg_val
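
# Usage sketch (hypothetical script and interface names, not part of the original file):
# invoked as: python3 some_traffic_script.py --tx_if eth1 --rx_if eth2 --src_mac 02:00:00:00:00:01
# args = TrafficScriptArg([u"src_mac"])
# tx_if = args.get_arg(u"tx_if")      # -> "eth1"
# src_mac = args.get_arg(u"src_mac")  # -> "02:00:00:00:00:01"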
|
flexible
|
{
"blob_id": "ea6d726e8163ed0f93b8078323fa5f4e9115ad73",
"index": 1639,
"step-1": "<mask token>\n\n\nclass TrafficScriptArg:\n <mask token>\n <mask token>\n\n def get_arg(self, arg_name):\n \"\"\"Get argument value.\n\n :param arg_name: Argument name.\n :type arg_name: str\n :returns: Argument value.\n :rtype: str\n \"\"\"\n arg_val = self._args.get(arg_name)\n if arg_val is None:\n raise Exception(f\"Argument '{arg_name}' not found\")\n return arg_val\n",
"step-2": "<mask token>\n\n\nclass TrafficScriptArg:\n <mask token>\n\n def __init__(self, more_args=None, opt_args=None):\n parser = argparse.ArgumentParser()\n parser.add_argument(u'--tx_if', help=u'interface that sends traffic')\n parser.add_argument(u'--rx_if', help=u'interface that receives traffic'\n )\n if more_args is not None:\n for arg in more_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name)\n if opt_args is not None:\n for arg in opt_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name, nargs=u'?', default=u'')\n self._parser = parser\n self._args = vars(parser.parse_args())\n\n def get_arg(self, arg_name):\n \"\"\"Get argument value.\n\n :param arg_name: Argument name.\n :type arg_name: str\n :returns: Argument value.\n :rtype: str\n \"\"\"\n arg_val = self._args.get(arg_name)\n if arg_val is None:\n raise Exception(f\"Argument '{arg_name}' not found\")\n return arg_val\n",
"step-3": "<mask token>\n\n\nclass TrafficScriptArg:\n \"\"\"Traffic scripts argument parser.\n\n Parse arguments for traffic script. Default has two arguments '--tx_if'\n and '--rx_if'. You can provide more arguments. All arguments have string\n representation of the value. You can add also optional arguments. Default\n value for optional arguments is empty string.\n\n :param more_args: List of additional arguments (optional).\n :param opt_args: List of optional arguments (optional).\n :type more_args: list\n :type opt_args: list\n\n :Example:\n\n >>> from TrafficScriptArg import TrafficScriptArg\n >>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])\n \"\"\"\n\n def __init__(self, more_args=None, opt_args=None):\n parser = argparse.ArgumentParser()\n parser.add_argument(u'--tx_if', help=u'interface that sends traffic')\n parser.add_argument(u'--rx_if', help=u'interface that receives traffic'\n )\n if more_args is not None:\n for arg in more_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name)\n if opt_args is not None:\n for arg in opt_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name, nargs=u'?', default=u'')\n self._parser = parser\n self._args = vars(parser.parse_args())\n\n def get_arg(self, arg_name):\n \"\"\"Get argument value.\n\n :param arg_name: Argument name.\n :type arg_name: str\n :returns: Argument value.\n :rtype: str\n \"\"\"\n arg_val = self._args.get(arg_name)\n if arg_val is None:\n raise Exception(f\"Argument '{arg_name}' not found\")\n return arg_val\n",
"step-4": "<mask token>\nimport argparse\n\n\nclass TrafficScriptArg:\n \"\"\"Traffic scripts argument parser.\n\n Parse arguments for traffic script. Default has two arguments '--tx_if'\n and '--rx_if'. You can provide more arguments. All arguments have string\n representation of the value. You can add also optional arguments. Default\n value for optional arguments is empty string.\n\n :param more_args: List of additional arguments (optional).\n :param opt_args: List of optional arguments (optional).\n :type more_args: list\n :type opt_args: list\n\n :Example:\n\n >>> from TrafficScriptArg import TrafficScriptArg\n >>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])\n \"\"\"\n\n def __init__(self, more_args=None, opt_args=None):\n parser = argparse.ArgumentParser()\n parser.add_argument(u'--tx_if', help=u'interface that sends traffic')\n parser.add_argument(u'--rx_if', help=u'interface that receives traffic'\n )\n if more_args is not None:\n for arg in more_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name)\n if opt_args is not None:\n for arg in opt_args:\n arg_name = f'--{arg}'\n parser.add_argument(arg_name, nargs=u'?', default=u'')\n self._parser = parser\n self._args = vars(parser.parse_args())\n\n def get_arg(self, arg_name):\n \"\"\"Get argument value.\n\n :param arg_name: Argument name.\n :type arg_name: str\n :returns: Argument value.\n :rtype: str\n \"\"\"\n arg_val = self._args.get(arg_name)\n if arg_val is None:\n raise Exception(f\"Argument '{arg_name}' not found\")\n return arg_val\n",
"step-5": "# Copyright (c) 2021 Cisco and/or its affiliates.\n#\n# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later\n#\n# Licensed under the Apache License 2.0 or\n# GNU General Public License v2.0 or later; you may not use this file\n# except in compliance with one of these Licenses. You\n# may obtain a copy of the Licenses at:\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html\n#\n# Note: If this file is linked with Scapy, which is GPLv2+, your use of it\n# must be under GPLv2+. If at any point in the future it is no longer linked\n# with Scapy (or other GPLv2+ licensed software), you are free to choose\n# Apache 2.\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Traffic scripts argument parser library.\"\"\"\n\nimport argparse\n\n\nclass TrafficScriptArg:\n \"\"\"Traffic scripts argument parser.\n\n Parse arguments for traffic script. Default has two arguments '--tx_if'\n and '--rx_if'. You can provide more arguments. All arguments have string\n representation of the value. You can add also optional arguments. Default\n value for optional arguments is empty string.\n\n :param more_args: List of additional arguments (optional).\n :param opt_args: List of optional arguments (optional).\n :type more_args: list\n :type opt_args: list\n\n :Example:\n\n >>> from TrafficScriptArg import TrafficScriptArg\n >>> args = TrafficScriptArg(['src_mac', 'dst_mac', 'src_ip', 'dst_ip'])\n \"\"\"\n\n def __init__(self, more_args=None, opt_args=None):\n parser = argparse.ArgumentParser()\n parser.add_argument(u\"--tx_if\", help=u\"interface that sends traffic\")\n parser.add_argument(u\"--rx_if\", help=u\"interface that receives traffic\")\n\n if more_args is not None:\n for arg in more_args:\n arg_name = f\"--{arg}\"\n parser.add_argument(arg_name)\n\n if opt_args is not None:\n for arg in opt_args:\n arg_name = f\"--{arg}\"\n parser.add_argument(arg_name, nargs=u\"?\", default=u\"\")\n\n self._parser = parser\n self._args = vars(parser.parse_args())\n\n def get_arg(self, arg_name):\n \"\"\"Get argument value.\n\n :param arg_name: Argument name.\n :type arg_name: str\n :returns: Argument value.\n :rtype: str\n \"\"\"\n arg_val = self._args.get(arg_name)\n if arg_val is None:\n raise Exception(f\"Argument '{arg_name}' not found\")\n\n return arg_val\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |